diff --git a/node_modules/.bin/acorn b/node_modules/.bin/acorn
new file mode 100644
index 0000000..46a3e61
--- /dev/null
+++ b/node_modules/.bin/acorn
@@ -0,0 +1,12 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  exec "$basedir/node" "$basedir/../acorn/bin/acorn" "$@"
+else
+  exec node "$basedir/../acorn/bin/acorn" "$@"
+fi
diff --git a/node_modules/.bin/acorn.cmd b/node_modules/.bin/acorn.cmd
new file mode 100644
index 0000000..a9324df
--- /dev/null
+++ b/node_modules/.bin/acorn.cmd
@@ -0,0 +1,17 @@
+@ECHO off
+GOTO start
+:find_dp0
+SET dp0=%~dp0
+EXIT /b
+:start
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\acorn\bin\acorn" %*
diff --git a/node_modules/.bin/acorn.ps1 b/node_modules/.bin/acorn.ps1
new file mode 100644
index 0000000..6f6dcdd
--- /dev/null
+++ b/node_modules/.bin/acorn.ps1
@@ -0,0 +1,28 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if (Test-Path "$basedir/node$exe") {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "$basedir/node$exe" "$basedir/../acorn/bin/acorn" $args
+  } else {
+    & "$basedir/node$exe" "$basedir/../acorn/bin/acorn" $args
+  }
+  $ret=$LASTEXITCODE
+} else {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "node$exe" "$basedir/../acorn/bin/acorn" $args
+  } else {
+    & "node$exe" "$basedir/../acorn/bin/acorn" $args
+  }
+  $ret=$LASTEXITCODE
+}
+exit $ret
diff --git a/node_modules/.bin/browserslist b/node_modules/.bin/browserslist
new file mode 100644
index 0000000..68dd69d
--- /dev/null
+++ b/node_modules/.bin/browserslist
@@ -0,0 +1,12 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  exec "$basedir/node" "$basedir/../browserslist/cli.js" "$@"
+else
+  exec node "$basedir/../browserslist/cli.js" "$@"
+fi
diff --git a/node_modules/.bin/browserslist.cmd b/node_modules/.bin/browserslist.cmd
new file mode 100644
index 0000000..f93c251
--- /dev/null
+++ b/node_modules/.bin/browserslist.cmd
@@ -0,0 +1,17 @@
+@ECHO off
+GOTO start
+:find_dp0
+SET dp0=%~dp0
+EXIT /b
+:start
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\browserslist\cli.js" %*
diff --git a/node_modules/.bin/browserslist.ps1 b/node_modules/.bin/browserslist.ps1
new file mode 100644
index 0000000..01e10a0
--- /dev/null
+++ b/node_modules/.bin/browserslist.ps1
@@ -0,0 +1,28 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if (Test-Path "$basedir/node$exe") {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "$basedir/node$exe" "$basedir/../browserslist/cli.js" $args
+  } else {
+    & "$basedir/node$exe" "$basedir/../browserslist/cli.js" $args
+  }
+  $ret=$LASTEXITCODE
+} else {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "node$exe" "$basedir/../browserslist/cli.js" $args
+  } else {
+    & "node$exe" "$basedir/../browserslist/cli.js" $args
+  }
+  $ret=$LASTEXITCODE
+}
+exit $ret
diff --git a/node_modules/.bin/color-support b/node_modules/.bin/color-support
new file mode 100644
index 0000000..2d16312
--- /dev/null
+++ b/node_modules/.bin/color-support
@@ -0,0 +1,15 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  "$basedir/node" "$basedir/../color-support/bin.js" "$@"
+  ret=$?
+else
+  node "$basedir/../color-support/bin.js" "$@"
+  ret=$?
+fi
+exit $ret
diff --git a/node_modules/.bin/color-support.cmd b/node_modules/.bin/color-support.cmd
new file mode 100644
index 0000000..3d87a04
--- /dev/null
+++ b/node_modules/.bin/color-support.cmd
@@ -0,0 +1,7 @@
+@IF EXIST "%~dp0\node.exe" (
+  "%~dp0\node.exe" "%~dp0\..\color-support\bin.js" %*
+) ELSE (
+  @SETLOCAL
+  @SET PATHEXT=%PATHEXT:;.JS;=;%
+  node "%~dp0\..\color-support\bin.js" %*
+)
\ No newline at end of file
diff --git a/node_modules/.bin/mkdirp b/node_modules/.bin/mkdirp
new file mode 100644
index 0000000..4b00467
--- /dev/null
+++ b/node_modules/.bin/mkdirp
@@ -0,0 +1,15 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  "$basedir/node" "$basedir/../mkdirp/bin/cmd.js" "$@"
+  ret=$?
+else
+  node "$basedir/../mkdirp/bin/cmd.js" "$@"
+  ret=$?
+fi
+exit $ret
diff --git a/node_modules/.bin/mkdirp.cmd b/node_modules/.bin/mkdirp.cmd
new file mode 100644
index 0000000..0d2cdd7
--- /dev/null
+++ b/node_modules/.bin/mkdirp.cmd
@@ -0,0 +1,7 @@
+@IF EXIST "%~dp0\node.exe" (
+  "%~dp0\node.exe" "%~dp0\..\mkdirp\bin\cmd.js" %*
+) ELSE (
+  @SETLOCAL
+  @SET PATHEXT=%PATHEXT:;.JS;=;%
+  node "%~dp0\..\mkdirp\bin\cmd.js" %*
+)
\ No newline at end of file
diff --git a/node_modules/.bin/node-gyp b/node_modules/.bin/node-gyp
new file mode 100644
index 0000000..813d1f1
--- /dev/null
+++ b/node_modules/.bin/node-gyp
@@ -0,0 +1,15 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  "$basedir/node" "$basedir/../node-gyp/bin/node-gyp.js" "$@"
+  ret=$?
+else
+  node "$basedir/../node-gyp/bin/node-gyp.js" "$@"
+  ret=$?
+fi
+exit $ret
diff --git a/node_modules/.bin/node-gyp.cmd b/node_modules/.bin/node-gyp.cmd
new file mode 100644
index 0000000..d97b8bf
--- /dev/null
+++ b/node_modules/.bin/node-gyp.cmd
@@ -0,0 +1,7 @@
+@IF EXIST "%~dp0\node.exe" (
+  "%~dp0\node.exe" "%~dp0\..\node-gyp\bin\node-gyp.js" %*
+) ELSE (
+  @SETLOCAL
+  @SET PATHEXT=%PATHEXT:;.JS;=;%
+  node "%~dp0\..\node-gyp\bin\node-gyp.js" %*
+)
\ No newline at end of file
diff --git a/node_modules/.bin/node-sass b/node_modules/.bin/node-sass
new file mode 100644
index 0000000..f5fb211
--- /dev/null
+++ b/node_modules/.bin/node-sass
@@ -0,0 +1,15 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  "$basedir/node" "$basedir/../node-sass/bin/node-sass" "$@"
+  ret=$?
+else
+  node "$basedir/../node-sass/bin/node-sass" "$@"
+  ret=$?
+fi
+exit $ret
diff --git a/node_modules/.bin/node-sass.cmd b/node_modules/.bin/node-sass.cmd
new file mode 100644
index 0000000..1e46660
--- /dev/null
+++ b/node_modules/.bin/node-sass.cmd
@@ -0,0 +1,7 @@
+@IF EXIST "%~dp0\node.exe" (
+  "%~dp0\node.exe" "%~dp0\..\node-sass\bin\node-sass" %*
+) ELSE (
+  @SETLOCAL
+  @SET PATHEXT=%PATHEXT:;.JS;=;%
+  node "%~dp0\..\node-sass\bin\node-sass" %*
+)
\ No newline at end of file
diff --git a/node_modules/.bin/node-which b/node_modules/.bin/node-which
new file mode 100644
index 0000000..645ab6c
--- /dev/null
+++ b/node_modules/.bin/node-which
@@ -0,0 +1,15 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  "$basedir/node" "$basedir/../which/bin/node-which" "$@"
+  ret=$?
+else
+  node "$basedir/../which/bin/node-which" "$@"
+  ret=$?
+fi
+exit $ret
diff --git a/node_modules/.bin/node-which.cmd b/node_modules/.bin/node-which.cmd
new file mode 100644
index 0000000..9985519
--- /dev/null
+++ b/node_modules/.bin/node-which.cmd
@@ -0,0 +1,7 @@
+@IF EXIST "%~dp0\node.exe" (
+  "%~dp0\node.exe" "%~dp0\..\which\bin\node-which" %*
+) ELSE (
+  @SETLOCAL
+  @SET PATHEXT=%PATHEXT:;.JS;=;%
+  node "%~dp0\..\which\bin\node-which" %*
+)
\ No newline at end of file
diff --git a/node_modules/.bin/nopt b/node_modules/.bin/nopt
new file mode 100644
index 0000000..714334e
--- /dev/null
+++ b/node_modules/.bin/nopt
@@ -0,0 +1,15 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  "$basedir/node" "$basedir/../nopt/bin/nopt.js" "$@"
+  ret=$?
+else
+  node "$basedir/../nopt/bin/nopt.js" "$@"
+  ret=$?
+fi
+exit $ret
diff --git a/node_modules/.bin/nopt.cmd b/node_modules/.bin/nopt.cmd
new file mode 100644
index 0000000..1626454
--- /dev/null
+++ b/node_modules/.bin/nopt.cmd
@@ -0,0 +1,7 @@
+@IF EXIST "%~dp0\node.exe" (
+  "%~dp0\node.exe" "%~dp0\..\nopt\bin\nopt.js" %*
+) ELSE (
+  @SETLOCAL
+  @SET PATHEXT=%PATHEXT:;.JS;=;%
+  node "%~dp0\..\nopt\bin\nopt.js" %*
+)
\ No newline at end of file
diff --git a/node_modules/.bin/resolve b/node_modules/.bin/resolve
new file mode 100644
index 0000000..37df56d
--- /dev/null
+++ b/node_modules/.bin/resolve
@@ -0,0 +1,15 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  "$basedir/node" "$basedir/../resolve/bin/resolve" "$@"
+  ret=$?
+else
+  node "$basedir/../resolve/bin/resolve" "$@"
+  ret=$?
+fi
+exit $ret
diff --git a/node_modules/.bin/resolve.cmd b/node_modules/.bin/resolve.cmd
new file mode 100644
index 0000000..462802a
--- /dev/null
+++ b/node_modules/.bin/resolve.cmd
@@ -0,0 +1,7 @@
+@IF EXIST "%~dp0\node.exe" (
+  "%~dp0\node.exe" "%~dp0\..\resolve\bin\resolve" %*
+) ELSE (
+  @SETLOCAL
+  @SET PATHEXT=%PATHEXT:;.JS;=;%
+  node "%~dp0\..\resolve\bin\resolve" %*
+)
\ No newline at end of file
diff --git a/node_modules/.bin/rimraf b/node_modules/.bin/rimraf
new file mode 100644
index 0000000..3cebd6e
--- /dev/null
+++ b/node_modules/.bin/rimraf
@@ -0,0 +1,15 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  "$basedir/node" "$basedir/../rimraf/bin.js" "$@"
+  ret=$?
+else
+  node "$basedir/../rimraf/bin.js" "$@"
+  ret=$?
+fi
+exit $ret
diff --git a/node_modules/.bin/rimraf.cmd b/node_modules/.bin/rimraf.cmd
new file mode 100644
index 0000000..9333ec6
--- /dev/null
+++ b/node_modules/.bin/rimraf.cmd
@@ -0,0 +1,7 @@
+@IF EXIST "%~dp0\node.exe" (
+  "%~dp0\node.exe" "%~dp0\..\rimraf\bin.js" %*
+) ELSE (
+  @SETLOCAL
+  @SET PATHEXT=%PATHEXT:;.JS;=;%
+  node "%~dp0\..\rimraf\bin.js" %*
+)
\ No newline at end of file
diff --git a/node_modules/.bin/sassgraph b/node_modules/.bin/sassgraph
new file mode 100644
index 0000000..f85f25b
--- /dev/null
+++ b/node_modules/.bin/sassgraph
@@ -0,0 +1,15 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  "$basedir/node" "$basedir/../sass-graph/bin/sassgraph" "$@"
+  ret=$?
+else
+  node "$basedir/../sass-graph/bin/sassgraph" "$@"
+  ret=$?
+fi
+exit $ret
diff --git a/node_modules/.bin/sassgraph.cmd b/node_modules/.bin/sassgraph.cmd
new file mode 100644
index 0000000..f672a90
--- /dev/null
+++ b/node_modules/.bin/sassgraph.cmd
@@ -0,0 +1,7 @@
+@IF EXIST "%~dp0\node.exe" (
+  "%~dp0\node.exe" "%~dp0\..\sass-graph\bin\sassgraph" %*
+) ELSE (
+  @SETLOCAL
+  @SET PATHEXT=%PATHEXT:;.JS;=;%
+  node "%~dp0\..\sass-graph\bin\sassgraph" %*
+)
\ No newline at end of file
diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver
new file mode 100644
index 0000000..e7b15c9
--- /dev/null
+++ b/node_modules/.bin/semver
@@ -0,0 +1,15 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  "$basedir/node" "$basedir/../read-pkg/node_modules/semver/bin/semver" "$@"
+  ret=$?
+else
+  node "$basedir/../read-pkg/node_modules/semver/bin/semver" "$@"
+  ret=$?
+fi
+exit $ret
diff --git a/node_modules/.bin/semver.cmd b/node_modules/.bin/semver.cmd
new file mode 100644
index 0000000..21a1517
--- /dev/null
+++ b/node_modules/.bin/semver.cmd
@@ -0,0 +1,7 @@
+@IF EXIST "%~dp0\node.exe" (
+  "%~dp0\node.exe" "%~dp0\..\read-pkg\node_modules\semver\bin\semver" %*
+) ELSE (
+  @SETLOCAL
+  @SET PATHEXT=%PATHEXT:;.JS;=;%
+  node "%~dp0\..\read-pkg\node_modules\semver\bin\semver" %*
+)
\ No newline at end of file
diff --git a/node_modules/.bin/terser b/node_modules/.bin/terser
new file mode 100644
index 0000000..2d3fa89
--- /dev/null
+++ b/node_modules/.bin/terser
@@ -0,0 +1,12 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  exec "$basedir/node" "$basedir/../terser/bin/terser" "$@"
+else
+  exec node "$basedir/../terser/bin/terser" "$@"
+fi
diff --git a/node_modules/.bin/terser.cmd b/node_modules/.bin/terser.cmd
new file mode 100644
index 0000000..abf66a8
--- /dev/null
+++ b/node_modules/.bin/terser.cmd
@@ -0,0 +1,17 @@
+@ECHO off
+GOTO start
+:find_dp0
+SET dp0=%~dp0
+EXIT /b
+:start
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\terser\bin\terser" %*
diff --git a/node_modules/.bin/terser.ps1 b/node_modules/.bin/terser.ps1
new file mode 100644
index 0000000..0bbfff6
--- /dev/null
+++ b/node_modules/.bin/terser.ps1
@@ -0,0 +1,28 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if (Test-Path "$basedir/node$exe") {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "$basedir/node$exe" "$basedir/../terser/bin/terser" $args
+  } else {
+    & "$basedir/node$exe" "$basedir/../terser/bin/terser" $args
+  }
+  $ret=$LASTEXITCODE
+} else {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "node$exe" "$basedir/../terser/bin/terser" $args
+  } else {
+    & "node$exe" "$basedir/../terser/bin/terser" $args
+  }
+  $ret=$LASTEXITCODE
+}
+exit $ret
diff --git a/node_modules/.bin/update-browserslist-db b/node_modules/.bin/update-browserslist-db
new file mode 100644
index 0000000..8cde7e3
--- /dev/null
+++ b/node_modules/.bin/update-browserslist-db
@@ -0,0 +1,12 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  exec "$basedir/node" "$basedir/../update-browserslist-db/cli.js" "$@"
+else
+  exec node "$basedir/../update-browserslist-db/cli.js" "$@"
+fi
diff --git a/node_modules/.bin/update-browserslist-db.cmd b/node_modules/.bin/update-browserslist-db.cmd
new file mode 100644
index 0000000..2e14905
--- /dev/null
+++ b/node_modules/.bin/update-browserslist-db.cmd
@@ -0,0 +1,17 @@
+@ECHO off
+GOTO start
+:find_dp0
+SET dp0=%~dp0
+EXIT /b
+:start
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\update-browserslist-db\cli.js" %*
diff --git a/node_modules/.bin/update-browserslist-db.ps1 b/node_modules/.bin/update-browserslist-db.ps1
new file mode 100644
index 0000000..7abdf26
--- /dev/null
+++ b/node_modules/.bin/update-browserslist-db.ps1
@@ -0,0 +1,28 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if (Test-Path "$basedir/node$exe") {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "$basedir/node$exe" "$basedir/../update-browserslist-db/cli.js" $args
+  } else {
+    & "$basedir/node$exe" "$basedir/../update-browserslist-db/cli.js" $args
+  }
+  $ret=$LASTEXITCODE
+} else {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "node$exe" "$basedir/../update-browserslist-db/cli.js" $args
+  } else {
+    & "node$exe" "$basedir/../update-browserslist-db/cli.js" $args
+  }
+  $ret=$LASTEXITCODE
+}
+exit $ret
diff --git a/node_modules/.bin/webpack b/node_modules/.bin/webpack
new file mode 100644
index 0000000..e674801
--- /dev/null
+++ b/node_modules/.bin/webpack
@@ -0,0 +1,12 @@
+#!/bin/sh
+basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
+
+case `uname` in
+    *CYGWIN*|*MINGW*|*MSYS*) basedir=`cygpath -w "$basedir"`;;
+esac
+
+if [ -x "$basedir/node" ]; then
+  exec "$basedir/node" "$basedir/../webpack/bin/webpack.js" "$@"
+else
+  exec node "$basedir/../webpack/bin/webpack.js" "$@"
+fi
diff --git a/node_modules/.bin/webpack.cmd b/node_modules/.bin/webpack.cmd
new file mode 100644
index 0000000..5b1e07b
--- /dev/null
+++ b/node_modules/.bin/webpack.cmd
@@ -0,0 +1,17 @@
+@ECHO off
+GOTO start
+:find_dp0
+SET dp0=%~dp0
+EXIT /b
+:start
+SETLOCAL
+CALL :find_dp0
+
+IF EXIST "%dp0%\node.exe" (
+  SET "_prog=%dp0%\node.exe"
+) ELSE (
+  SET "_prog=node"
+  SET PATHEXT=%PATHEXT:;.JS;=;%
+)
+
+endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\webpack\bin\webpack.js" %*
diff --git a/node_modules/.bin/webpack.ps1 b/node_modules/.bin/webpack.ps1
new file mode 100644
index 0000000..57bb525
--- /dev/null
+++ b/node_modules/.bin/webpack.ps1
@@ -0,0 +1,28 @@
+#!/usr/bin/env pwsh
+$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent
+
+$exe=""
+if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) {
+  # Fix case when both the Windows and Linux builds of Node
+  # are installed in the same directory
+  $exe=".exe"
+}
+$ret=0
+if (Test-Path "$basedir/node$exe") {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "$basedir/node$exe" "$basedir/../webpack/bin/webpack.js" $args
+  } else {
+    & "$basedir/node$exe" "$basedir/../webpack/bin/webpack.js" $args
+  }
+  $ret=$LASTEXITCODE
+} else {
+  # Support pipeline input
+  if ($MyInvocation.ExpectingInput) {
+    $input | & "node$exe" "$basedir/../webpack/bin/webpack.js" $args
+  } else {
+    & "node$exe" "$basedir/../webpack/bin/webpack.js" $args
+  }
+  $ret=$LASTEXITCODE
+}
+exit $ret
diff --git a/node_modules/@babel/code-frame/LICENSE b/node_modules/@babel/code-frame/LICENSE
new file mode 100644
index 0000000..f31575e
--- /dev/null
+++ b/node_modules/@babel/code-frame/LICENSE
@@ -0,0 +1,22 @@
+MIT License
+
+Copyright (c) 2014-present Sebastian McKenzie and other contributors
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/@babel/code-frame/README.md b/node_modules/@babel/code-frame/README.md
new file mode 100644
index 0000000..7160755
--- /dev/null
+++ b/node_modules/@babel/code-frame/README.md
@@ -0,0 +1,19 @@
+# @babel/code-frame
+
+> Generate errors that contain a code frame that point to source locations.
+
+See our website [@babel/code-frame](https://babeljs.io/docs/babel-code-frame) for more information.
+
+## Install
+
+Using npm:
+
+```sh
+npm install --save-dev @babel/code-frame
+```
+
+or using yarn:
+
+```sh
+yarn add @babel/code-frame --dev
+```
diff --git a/node_modules/@babel/code-frame/lib/index.js b/node_modules/@babel/code-frame/lib/index.js
new file mode 100644
index 0000000..8d8281e
--- /dev/null
+++ b/node_modules/@babel/code-frame/lib/index.js
@@ -0,0 +1,156 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.codeFrameColumns = codeFrameColumns;
+exports.default = _default;
+var _highlight = require("@babel/highlight");
+var _chalk2 = require("chalk");
+const chalk = _chalk2;
+let chalkWithForcedColor = undefined;
+function getChalk(forceColor) {
+  if (forceColor) {
+    var _chalkWithForcedColor;
+    (_chalkWithForcedColor = chalkWithForcedColor) != null ? _chalkWithForcedColor : chalkWithForcedColor = new chalk.constructor({
+      enabled: true,
+      level: 1
+    });
+    return chalkWithForcedColor;
+  }
+  return chalk;
+}
+let deprecationWarningShown = false;
+function getDefs(chalk) {
+  return {
+    gutter: chalk.grey,
+    marker: chalk.red.bold,
+    message: chalk.red.bold
+  };
+}
+const NEWLINE = /\r\n|[\n\r\u2028\u2029]/;
+function getMarkerLines(loc, source, opts) {
+  const startLoc = Object.assign({
+    column: 0,
+    line: -1
+  }, loc.start);
+  const endLoc = Object.assign({}, startLoc, loc.end);
+  const {
+    linesAbove = 2,
+    linesBelow = 3
+  } = opts || {};
+  const startLine = startLoc.line;
+  const startColumn = startLoc.column;
+  const endLine = endLoc.line;
+  const endColumn = endLoc.column;
+  let start = Math.max(startLine - (linesAbove + 1), 0);
+  let end = Math.min(source.length, endLine + linesBelow);
+  if (startLine === -1) {
+    start = 0;
+  }
+  if (endLine === -1) {
+    end = source.length;
+  }
+  const lineDiff = endLine - startLine;
+  const markerLines = {};
+  if (lineDiff) {
+    for (let i = 0; i <= lineDiff; i++) {
+      const lineNumber = i + startLine;
+      if (!startColumn) {
+        markerLines[lineNumber] = true;
+      } else if (i === 0) {
+        const sourceLength = source[lineNumber - 1].length;
+        markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1];
+      } else if (i === lineDiff) {
+        markerLines[lineNumber] = [0, endColumn];
+      } else {
+        const sourceLength = source[lineNumber - i].length;
+        markerLines[lineNumber] = [0, sourceLength];
+      }
+    }
+  } else {
+    if (startColumn === endColumn) {
+      if (startColumn) {
+        markerLines[startLine] = [startColumn, 0];
+      } else {
+        markerLines[startLine] = true;
+      }
+    } else {
+      markerLines[startLine] = [startColumn, endColumn - startColumn];
+    }
+  }
+  return {
+    start,
+    end,
+    markerLines
+  };
+}
+function codeFrameColumns(rawLines, loc, opts = {}) {
+  const highlighted = (opts.highlightCode || opts.forceColor) && (0, _highlight.shouldHighlight)(opts);
+  const chalk = getChalk(opts.forceColor);
+  const defs = getDefs(chalk);
+  const maybeHighlight = (chalkFn, string) => {
+    return highlighted ? chalkFn(string) : string;
+  };
+  const lines = rawLines.split(NEWLINE);
+  const {
+    start,
+    end,
+    markerLines
+  } = getMarkerLines(loc, lines, opts);
+  const hasColumns = loc.start && typeof loc.start.column === "number";
+  const numberMaxWidth = String(end).length;
+  const highlightedLines = highlighted ? (0, _highlight.default)(rawLines, opts) : rawLines;
+  let frame = highlightedLines.split(NEWLINE, end).slice(start, end).map((line, index) => {
+    const number = start + 1 + index;
+    const paddedNumber = ` ${number}`.slice(-numberMaxWidth);
+    const gutter = ` ${paddedNumber} |`;
+    const hasMarker = markerLines[number];
+    const lastMarkerLine = !markerLines[number + 1];
+    if (hasMarker) {
+      let markerLine = "";
+      if (Array.isArray(hasMarker)) {
+        const markerSpacing = line.slice(0, Math.max(hasMarker[0] - 1, 0)).replace(/[^\t]/g, " ");
+        const numberOfMarkers = hasMarker[1] || 1;
+        markerLine = ["\n ", maybeHighlight(defs.gutter, gutter.replace(/\d/g, " ")), " ", markerSpacing, maybeHighlight(defs.marker, "^").repeat(numberOfMarkers)].join("");
+        if (lastMarkerLine && opts.message) {
+          markerLine += " " + maybeHighlight(defs.message, opts.message);
+        }
+      }
+      return [maybeHighlight(defs.marker, ">"), maybeHighlight(defs.gutter, gutter), line.length > 0 ? ` ${line}` : "", markerLine].join("");
+    } else {
+      return ` ${maybeHighlight(defs.gutter, gutter)}${line.length > 0 ?
` ${line}` : ""}`; + } + }).join("\n"); + if (opts.message && !hasColumns) { + frame = `${" ".repeat(numberMaxWidth + 1)}${opts.message}\n${frame}`; + } + if (highlighted) { + return chalk.reset(frame); + } else { + return frame; + } +} +function _default(rawLines, lineNumber, colNumber, opts = {}) { + if (!deprecationWarningShown) { + deprecationWarningShown = true; + const message = "Passing lineNumber and colNumber is deprecated to @babel/code-frame. Please use `codeFrameColumns`."; + if (process.emitWarning) { + process.emitWarning(message, "DeprecationWarning"); + } else { + const deprecationError = new Error(message); + deprecationError.name = "DeprecationWarning"; + console.warn(new Error(message)); + } + } + colNumber = Math.max(colNumber, 0); + const location = { + start: { + column: colNumber, + line: lineNumber + } + }; + return codeFrameColumns(rawLines, location, opts); +} + +//# sourceMappingURL=index.js.map diff --git a/node_modules/@babel/code-frame/lib/index.js.map b/node_modules/@babel/code-frame/lib/index.js.map new file mode 100644 index 0000000..739029c --- /dev/null +++ b/node_modules/@babel/code-frame/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"names":["_highlight","require","_chalk2","chalk","_chalk","chalkWithForcedColor","undefined","getChalk","forceColor","_chalkWithForcedColor","constructor","enabled","level","deprecationWarningShown","getDefs","gutter","grey","marker","red","bold","message","NEWLINE","getMarkerLines","loc","source","opts","startLoc","Object","assign","column","line","start","endLoc","end","linesAbove","linesBelow","startLine","startColumn","endLine","endColumn","Math","max","min","length","lineDiff","markerLines","i","lineNumber","sourceLength","codeFrameColumns","rawLines","highlighted","highlightCode","shouldHighlight","defs","maybeHighlight","chalkFn","string","lines","split","hasColumns","numberMaxWidth","String","highlightedLines","highlight","frame","slice","map","index","number","paddedNumber","hasMarker","lastMarkerLine","markerLine","Array","isArray","markerSpacing","replace","numberOfMarkers","repeat","join","reset","_default","colNumber","process","emitWarning","deprecationError","Error","name","console","warn","location"],"sources":["../src/index.ts"],"sourcesContent":["import highlight, { shouldHighlight } from \"@babel/highlight\";\n\nimport _chalk from \"chalk\";\nconst chalk = _chalk as unknown as typeof import(\"chalk-BABEL_8_BREAKING-true\");\ntype Chalk =\n typeof import(\"chalk-BABEL_8_BREAKING-true\").Instance extends new () => infer R\n ? R\n : never;\n\nlet chalkWithForcedColor: Chalk = undefined;\nfunction getChalk(forceColor: boolean) {\n if (forceColor) {\n chalkWithForcedColor ??= process.env.BABEL_8_BREAKING\n ? new chalk.Instance({ level: 1 })\n : // @ts-expect-error .Instance was .constructor in chalk 2\n new chalk.constructor({ enabled: true, level: 1 });\n return chalkWithForcedColor;\n }\n return chalk;\n}\n\nlet deprecationWarningShown = false;\n\ntype Location = {\n column: number;\n line: number;\n};\n\ntype NodeLocation = {\n end?: Location;\n start: Location;\n};\n\nexport interface Options {\n /** Syntax highlight the code as JavaScript for terminals. default: false */\n highlightCode?: boolean;\n /** The number of lines to show above the error. default: 2 */\n linesAbove?: number;\n /** The number of lines to show below the error. 
default: 3 */\n linesBelow?: number;\n /**\n * Forcibly syntax highlight the code as JavaScript (for non-terminals);\n * overrides highlightCode.\n * default: false\n */\n forceColor?: boolean;\n /**\n * Pass in a string to be displayed inline (if possible) next to the\n * highlighted location in the code. If it can't be positioned inline,\n * it will be placed above the code frame.\n * default: nothing\n */\n message?: string;\n}\n\n/**\n * Chalk styles for code frame token types.\n */\nfunction getDefs(chalk: Chalk) {\n return {\n gutter: chalk.grey,\n marker: chalk.red.bold,\n message: chalk.red.bold,\n };\n}\n\n/**\n * RegExp to test for newlines in terminal.\n */\n\nconst NEWLINE = /\\r\\n|[\\n\\r\\u2028\\u2029]/;\n\n/**\n * Extract what lines should be marked and highlighted.\n */\n\ntype MarkerLines = Record;\n\nfunction getMarkerLines(\n loc: NodeLocation,\n source: Array,\n opts: Options,\n): {\n start: number;\n end: number;\n markerLines: MarkerLines;\n} {\n const startLoc: Location = {\n column: 0,\n line: -1,\n ...loc.start,\n };\n const endLoc: Location = {\n ...startLoc,\n ...loc.end,\n };\n const { linesAbove = 2, linesBelow = 3 } = opts || {};\n const startLine = startLoc.line;\n const startColumn = startLoc.column;\n const endLine = endLoc.line;\n const endColumn = endLoc.column;\n\n let start = Math.max(startLine - (linesAbove + 1), 0);\n let end = Math.min(source.length, endLine + linesBelow);\n\n if (startLine === -1) {\n start = 0;\n }\n\n if (endLine === -1) {\n end = source.length;\n }\n\n const lineDiff = endLine - startLine;\n const markerLines: MarkerLines = {};\n\n if (lineDiff) {\n for (let i = 0; i <= lineDiff; i++) {\n const lineNumber = i + startLine;\n\n if (!startColumn) {\n markerLines[lineNumber] = true;\n } else if (i === 0) {\n const sourceLength = source[lineNumber - 1].length;\n\n markerLines[lineNumber] = [startColumn, sourceLength - startColumn + 1];\n } else if (i === lineDiff) {\n markerLines[lineNumber] = [0, endColumn];\n } else {\n const sourceLength = source[lineNumber - i].length;\n\n markerLines[lineNumber] = [0, sourceLength];\n }\n }\n } else {\n if (startColumn === endColumn) {\n if (startColumn) {\n markerLines[startLine] = [startColumn, 0];\n } else {\n markerLines[startLine] = true;\n }\n } else {\n markerLines[startLine] = [startColumn, endColumn - startColumn];\n }\n }\n\n return { start, end, markerLines };\n}\n\nexport function codeFrameColumns(\n rawLines: string,\n loc: NodeLocation,\n opts: Options = {},\n): string {\n const highlighted =\n (opts.highlightCode || opts.forceColor) && shouldHighlight(opts);\n const chalk = getChalk(opts.forceColor);\n const defs = getDefs(chalk);\n const maybeHighlight = (chalkFn: Chalk, string: string) => {\n return highlighted ? chalkFn(string) : string;\n };\n const lines = rawLines.split(NEWLINE);\n const { start, end, markerLines } = getMarkerLines(loc, lines, opts);\n const hasColumns = loc.start && typeof loc.start.column === \"number\";\n\n const numberMaxWidth = String(end).length;\n\n const highlightedLines = highlighted ? 
highlight(rawLines, opts) : rawLines;\n\n let frame = highlightedLines\n .split(NEWLINE, end)\n .slice(start, end)\n .map((line, index) => {\n const number = start + 1 + index;\n const paddedNumber = ` ${number}`.slice(-numberMaxWidth);\n const gutter = ` ${paddedNumber} |`;\n const hasMarker = markerLines[number];\n const lastMarkerLine = !markerLines[number + 1];\n if (hasMarker) {\n let markerLine = \"\";\n if (Array.isArray(hasMarker)) {\n const markerSpacing = line\n .slice(0, Math.max(hasMarker[0] - 1, 0))\n .replace(/[^\\t]/g, \" \");\n const numberOfMarkers = hasMarker[1] || 1;\n\n markerLine = [\n \"\\n \",\n maybeHighlight(defs.gutter, gutter.replace(/\\d/g, \" \")),\n \" \",\n markerSpacing,\n maybeHighlight(defs.marker, \"^\").repeat(numberOfMarkers),\n ].join(\"\");\n\n if (lastMarkerLine && opts.message) {\n markerLine += \" \" + maybeHighlight(defs.message, opts.message);\n }\n }\n return [\n maybeHighlight(defs.marker, \">\"),\n maybeHighlight(defs.gutter, gutter),\n line.length > 0 ? ` ${line}` : \"\",\n markerLine,\n ].join(\"\");\n } else {\n return ` ${maybeHighlight(defs.gutter, gutter)}${\n line.length > 0 ? ` ${line}` : \"\"\n }`;\n }\n })\n .join(\"\\n\");\n\n if (opts.message && !hasColumns) {\n frame = `${\" \".repeat(numberMaxWidth + 1)}${opts.message}\\n${frame}`;\n }\n\n if (highlighted) {\n return chalk.reset(frame);\n } else {\n return frame;\n }\n}\n\n/**\n * Create a code frame, adding line numbers, code highlighting, and pointing to a given position.\n */\n\nexport default function (\n rawLines: string,\n lineNumber: number,\n colNumber?: number | null,\n opts: Options = {},\n): string {\n if (!deprecationWarningShown) {\n deprecationWarningShown = true;\n\n const message =\n \"Passing lineNumber and colNumber is deprecated to @babel/code-frame. 
Please use `codeFrameColumns`.\";\n\n if (process.emitWarning) {\n // A string is directly supplied to emitWarning, because when supplying an\n // Error object node throws in the tests because of different contexts\n process.emitWarning(message, \"DeprecationWarning\");\n } else {\n const deprecationError = new Error(message);\n deprecationError.name = \"DeprecationWarning\";\n console.warn(new Error(message));\n }\n }\n\n colNumber = Math.max(colNumber, 0);\n\n const location: NodeLocation = {\n start: { column: colNumber, line: lineNumber },\n };\n\n return codeFrameColumns(rawLines, location, opts);\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,UAAA,GAAAC,OAAA;AAEA,IAAAC,OAAA,GAAAD,OAAA;AACA,MAAME,KAAK,GAAGC,OAAiE;AAM/E,IAAIC,oBAA2B,GAAGC,SAAS;AAC3C,SAASC,QAAQA,CAACC,UAAmB,EAAE;EACrC,IAAIA,UAAU,EAAE;IAAA,IAAAC,qBAAA;IACd,CAAAA,qBAAA,GAAAJ,oBAAoB,YAAAI,qBAAA,GAApBJ,oBAAoB,GAGhB,IAAIF,KAAK,CAACO,WAAW,CAAC;MAAEC,OAAO,EAAE,IAAI;MAAEC,KAAK,EAAE;IAAE,CAAC,CAAC;IACtD,OAAOP,oBAAoB;EAC7B;EACA,OAAOF,KAAK;AACd;AAEA,IAAIU,uBAAuB,GAAG,KAAK;AAqCnC,SAASC,OAAOA,CAACX,KAAY,EAAE;EAC7B,OAAO;IACLY,MAAM,EAAEZ,KAAK,CAACa,IAAI;IAClBC,MAAM,EAAEd,KAAK,CAACe,GAAG,CAACC,IAAI;IACtBC,OAAO,EAAEjB,KAAK,CAACe,GAAG,CAACC;EACrB,CAAC;AACH;AAMA,MAAME,OAAO,GAAG,yBAAyB;AAQzC,SAASC,cAAcA,CACrBC,GAAiB,EACjBC,MAAqB,EACrBC,IAAa,EAKb;EACA,MAAMC,QAAkB,GAAAC,MAAA,CAAAC,MAAA;IACtBC,MAAM,EAAE,CAAC;IACTC,IAAI,EAAE,CAAC;EAAC,GACLP,GAAG,CAACQ,KAAK,CACb;EACD,MAAMC,MAAgB,GAAAL,MAAA,CAAAC,MAAA,KACjBF,QAAQ,EACRH,GAAG,CAACU,GAAG,CACX;EACD,MAAM;IAAEC,UAAU,GAAG,CAAC;IAAEC,UAAU,GAAG;EAAE,CAAC,GAAGV,IAAI,IAAI,CAAC,CAAC;EACrD,MAAMW,SAAS,GAAGV,QAAQ,CAACI,IAAI;EAC/B,MAAMO,WAAW,GAAGX,QAAQ,CAACG,MAAM;EACnC,MAAMS,OAAO,GAAGN,MAAM,CAACF,IAAI;EAC3B,MAAMS,SAAS,GAAGP,MAAM,CAACH,MAAM;EAE/B,IAAIE,KAAK,GAAGS,IAAI,CAACC,GAAG,CAACL,SAAS,IAAIF,UAAU,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC;EACrD,IAAID,GAAG,GAAGO,IAAI,CAACE,GAAG,CAAClB,MAAM,CAACmB,MAAM,EAAEL,OAAO,GAAGH,UAAU,CAAC;EAEvD,IAAIC,SAAS,KAAK,CAAC,CAAC,EAAE;IACpBL,KAAK,GAAG,CAAC;EACX;EAEA,IAAIO,OAAO,KAAK,CAAC,CAAC,EAAE;IAClBL,GAAG,GAAGT,MAAM,CAACmB,MAAM;EACrB;EAEA,MAAMC,QAAQ,GAAGN,OAAO,GAAGF,SAAS;EACpC,MAAMS,WAAwB,GAAG,CAAC,CAAC;EAEnC,IAAID,QAAQ,EAAE;IACZ,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,IAAIF,QAAQ,EAAEE,CAAC,EAAE,EAAE;MAClC,MAAMC,UAAU,GAAGD,CAAC,GAAGV,SAAS;MAEhC,IAAI,CAACC,WAAW,EAAE;QAChBQ,WAAW,CAACE,UAAU,CAAC,GAAG,IAAI;MAChC,CAAC,MAAM,IAAID,CAAC,KAAK,CAAC,EAAE;QAClB,MAAME,YAAY,GAAGxB,MAAM,CAACuB,UAAU,GAAG,CAAC,CAAC,CAACJ,MAAM;QAElDE,WAAW,CAACE,UAAU,CAAC,GAAG,CAACV,WAAW,EAAEW,YAAY,GAAGX,WAAW,GAAG,CAAC,CAAC;MACzE,CAAC,MAAM,IAAIS,CAAC,KAAKF,QAAQ,EAAE;QACzBC,WAAW,CAACE,UAAU,CAAC,GAAG,CAAC,CAAC,EAAER,SAAS,CAAC;MAC1C,CAAC,MAAM;QACL,MAAMS,YAAY,GAAGxB,MAAM,CAACuB,UAAU,GAAGD,CAAC,CAAC,CAACH,MAAM;QAElDE,WAAW,CAACE,UAAU,CAAC,GAAG,CAAC,CAAC,EAAEC,YAAY,CAAC;MAC7C;IACF;EACF,CAAC,MAAM;IACL,IAAIX,WAAW,KAAKE,SAAS,EAAE;MAC7B,IAAIF,WAAW,EAAE;QACfQ,WAAW,CAACT,SAAS,CAAC,GAAG,CAACC,WAAW,EAAE,CAAC,CAAC;MAC3C,CAAC,MAAM;QACLQ,WAAW,CAACT,SAAS,CAAC,GAAG,IAAI;MAC/B;IACF,CAAC,MAAM;MACLS,WAAW,CAACT,SAAS,CAAC,GAAG,CAACC,WAAW,EAAEE,SAAS,GAAGF,WAAW,CAAC;IACjE;EACF;EAEA,OAAO;IAAEN,KAAK;IAAEE,GAAG;IAAEY;EAAY,CAAC;AACpC;AAEO,SAASI,gBAAgBA,CAC9BC,QAAgB,EAChB3B,GAAiB,EACjBE,IAAa,GAAG,CAAC,CAAC,EACV;EACR,MAAM0B,WAAW,GACf,CAAC1B,IAAI,CAAC2B,aAAa,IAAI3B,IAAI,CAACjB,UAAU,KAAK,IAAA6C,0BAAe,EAAC5B,IAAI,CAAC;EAClE,MAAMtB,KAAK,GAAGI,QAAQ,CAACkB,IAAI,CAACjB,UAAU,CAAC;EACvC,MAAM8C,IAAI,GAAGxC,OAAO,CAACX,KAAK,CAAC;EAC3B,MAAMoD,cAAc,GAAGA,CAACC,OAAc,EAAEC,MAAc,KAAK;IACzD,OAAON,WAAW,GAAGK,OAAO,CAACC,MAAM,CAAC,GAAGA,MAAM;EAC/C,CAAC;EACD,MAAMC,KAAK,GAAGR,QAAQ,CAACS,KAAK,C
AACtC,OAAO,CAAC;EACrC,MAAM;IAAEU,KAAK;IAAEE,GAAG;IAAEY;EAAY,CAAC,GAAGvB,cAAc,CAACC,GAAG,EAAEmC,KAAK,EAAEjC,IAAI,CAAC;EACpE,MAAMmC,UAAU,GAAGrC,GAAG,CAACQ,KAAK,IAAI,OAAOR,GAAG,CAACQ,KAAK,CAACF,MAAM,KAAK,QAAQ;EAEpE,MAAMgC,cAAc,GAAGC,MAAM,CAAC7B,GAAG,CAAC,CAACU,MAAM;EAEzC,MAAMoB,gBAAgB,GAAGZ,WAAW,GAAG,IAAAa,kBAAS,EAACd,QAAQ,EAAEzB,IAAI,CAAC,GAAGyB,QAAQ;EAE3E,IAAIe,KAAK,GAAGF,gBAAgB,CACzBJ,KAAK,CAACtC,OAAO,EAAEY,GAAG,CAAC,CACnBiC,KAAK,CAACnC,KAAK,EAAEE,GAAG,CAAC,CACjBkC,GAAG,CAAC,CAACrC,IAAI,EAAEsC,KAAK,KAAK;IACpB,MAAMC,MAAM,GAAGtC,KAAK,GAAG,CAAC,GAAGqC,KAAK;IAChC,MAAME,YAAY,GAAI,IAAGD,MAAO,EAAC,CAACH,KAAK,CAAC,CAACL,cAAc,CAAC;IACxD,MAAM9C,MAAM,GAAI,IAAGuD,YAAa,IAAG;IACnC,MAAMC,SAAS,GAAG1B,WAAW,CAACwB,MAAM,CAAC;IACrC,MAAMG,cAAc,GAAG,CAAC3B,WAAW,CAACwB,MAAM,GAAG,CAAC,CAAC;IAC/C,IAAIE,SAAS,EAAE;MACb,IAAIE,UAAU,GAAG,EAAE;MACnB,IAAIC,KAAK,CAACC,OAAO,CAACJ,SAAS,CAAC,EAAE;QAC5B,MAAMK,aAAa,GAAG9C,IAAI,CACvBoC,KAAK,CAAC,CAAC,EAAE1B,IAAI,CAACC,GAAG,CAAC8B,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CACvCM,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC;QACzB,MAAMC,eAAe,GAAGP,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;QAEzCE,UAAU,GAAG,CACX,KAAK,EACLlB,cAAc,CAACD,IAAI,CAACvC,MAAM,EAAEA,MAAM,CAAC8D,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,EACvD,GAAG,EACHD,aAAa,EACbrB,cAAc,CAACD,IAAI,CAACrC,MAAM,EAAE,GAAG,CAAC,CAAC8D,MAAM,CAACD,eAAe,CAAC,CACzD,CAACE,IAAI,CAAC,EAAE,CAAC;QAEV,IAAIR,cAAc,IAAI/C,IAAI,CAACL,OAAO,EAAE;UAClCqD,UAAU,IAAI,GAAG,GAAGlB,cAAc,CAACD,IAAI,CAAClC,OAAO,EAAEK,IAAI,CAACL,OAAO,CAAC;QAChE;MACF;MACA,OAAO,CACLmC,cAAc,CAACD,IAAI,CAACrC,MAAM,EAAE,GAAG,CAAC,EAChCsC,cAAc,CAACD,IAAI,CAACvC,MAAM,EAAEA,MAAM,CAAC,EACnCe,IAAI,CAACa,MAAM,GAAG,CAAC,GAAI,IAAGb,IAAK,EAAC,GAAG,EAAE,EACjC2C,UAAU,CACX,CAACO,IAAI,CAAC,EAAE,CAAC;IACZ,CAAC,MAAM;MACL,OAAQ,IAAGzB,cAAc,CAACD,IAAI,CAACvC,MAAM,EAAEA,MAAM,CAAE,GAC7Ce,IAAI,CAACa,MAAM,GAAG,CAAC,GAAI,IAAGb,IAAK,EAAC,GAAG,EAChC,EAAC;IACJ;EACF,CAAC,CAAC,CACDkD,IAAI,CAAC,IAAI,CAAC;EAEb,IAAIvD,IAAI,CAACL,OAAO,IAAI,CAACwC,UAAU,EAAE;IAC/BK,KAAK,GAAI,GAAE,GAAG,CAACc,MAAM,CAAClB,cAAc,GAAG,CAAC,CAAE,GAAEpC,IAAI,CAACL,OAAQ,KAAI6C,KAAM,EAAC;EACtE;EAEA,IAAId,WAAW,EAAE;IACf,OAAOhD,KAAK,CAAC8E,KAAK,CAAChB,KAAK,CAAC;EAC3B,CAAC,MAAM;IACL,OAAOA,KAAK;EACd;AACF;AAMe,SAAAiB,SACbhC,QAAgB,EAChBH,UAAkB,EAClBoC,SAAyB,EACzB1D,IAAa,GAAG,CAAC,CAAC,EACV;EACR,IAAI,CAACZ,uBAAuB,EAAE;IAC5BA,uBAAuB,GAAG,IAAI;IAE9B,MAAMO,OAAO,GACX,qGAAqG;IAEvG,IAAIgE,OAAO,CAACC,WAAW,EAAE;MAGvBD,OAAO,CAACC,WAAW,CAACjE,OAAO,EAAE,oBAAoB,CAAC;IACpD,CAAC,MAAM;MACL,MAAMkE,gBAAgB,GAAG,IAAIC,KAAK,CAACnE,OAAO,CAAC;MAC3CkE,gBAAgB,CAACE,IAAI,GAAG,oBAAoB;MAC5CC,OAAO,CAACC,IAAI,CAAC,IAAIH,KAAK,CAACnE,OAAO,CAAC,CAAC;IAClC;EACF;EAEA+D,SAAS,GAAG3C,IAAI,CAACC,GAAG,CAAC0C,SAAS,EAAE,CAAC,CAAC;EAElC,MAAMQ,QAAsB,GAAG;IAC7B5D,KAAK,EAAE;MAAEF,MAAM,EAAEsD,SAAS;MAAErD,IAAI,EAAEiB;IAAW;EAC/C,CAAC;EAED,OAAOE,gBAAgB,CAACC,QAAQ,EAAEyC,QAAQ,EAAElE,IAAI,CAAC;AACnD"} \ No newline at end of file diff --git a/node_modules/@babel/code-frame/package.json b/node_modules/@babel/code-frame/package.json new file mode 100644 index 0000000..7069f1f --- /dev/null +++ b/node_modules/@babel/code-frame/package.json @@ -0,0 +1,29 @@ +{ + "name": "@babel/code-frame", + "version": "7.22.10", + "description": "Generate errors that contain a code frame that point to source locations.", + "author": "The Babel Team (https://babel.dev/team)", + "homepage": "https://babel.dev/docs/en/next/babel-code-frame", + "bugs": "https://github.com/babel/babel/issues?utf8=%E2%9C%93&q=is%3Aissue+is%3Aopen", + "license": "MIT", + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": 
"https://github.com/babel/babel.git", + "directory": "packages/babel-code-frame" + }, + "main": "./lib/index.js", + "dependencies": { + "@babel/highlight": "^7.22.10", + "chalk": "^2.4.2" + }, + "devDependencies": { + "strip-ansi": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "type": "commonjs" +} \ No newline at end of file diff --git a/node_modules/@babel/helper-validator-identifier/LICENSE b/node_modules/@babel/helper-validator-identifier/LICENSE new file mode 100644 index 0000000..f31575e --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2014-present Sebastian McKenzie and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@babel/helper-validator-identifier/README.md b/node_modules/@babel/helper-validator-identifier/README.md new file mode 100644 index 0000000..4f704c4 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/README.md @@ -0,0 +1,19 @@ +# @babel/helper-validator-identifier + +> Validate identifier/keywords name + +See our website [@babel/helper-validator-identifier](https://babeljs.io/docs/en/babel-helper-validator-identifier) for more information. 
+ +## Install + +Using npm: + +```sh +npm install --save @babel/helper-validator-identifier +``` + +or using yarn: + +```sh +yarn add @babel/helper-validator-identifier +``` diff --git a/node_modules/@babel/helper-validator-identifier/lib/identifier.js b/node_modules/@babel/helper-validator-identifier/lib/identifier.js new file mode 100644 index 0000000..cd1f450 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/lib/identifier.js @@ -0,0 +1,70 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.isIdentifierChar = isIdentifierChar; +exports.isIdentifierName = isIdentifierName; +exports.isIdentifierStart = isIdentifierStart; +let nonASCIIidentifierStartChars = "\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u037f\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u052f\u0531-\u0556\u0559\u0560-\u0588\u05d0-\u05ea\u05ef-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u0860-\u086a\u0870-\u0887\u0889-\u088e\u08a0-\u08c9\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0980\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u09fc\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0af9\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c39\u0c3d\u0c58-\u0c5a\u0c5d\u0c60\u0c61\u0c80\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cdd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d04-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d54-\u0d56\u0d5f-\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e86-\u0e8a\u0e8c-\u0ea3\u0ea5\u0ea7-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f5\u13f8-\u13fd\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f8\u1700-\u1711\u171f-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1878\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191e\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19b0-\u19c9\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4c\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1c80-\u1c88\u1c90-\u1cba\u1cbd-\u1cbf\u1ce9-\u1cec\u1cee-\u1cf3\u1cf5\u1cf6\u1cfa\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u211
5\u2118-\u211d\u2124\u2126\u2128\u212a-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309b-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312f\u3131-\u318e\u31a0-\u31bf\u31f0-\u31ff\u3400-\u4dbf\u4e00-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua69d\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua7ca\ua7d0\ua7d1\ua7d3\ua7d5-\ua7d9\ua7f2-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua8fd\ua8fe\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\ua9e0-\ua9e4\ua9e6-\ua9ef\ua9fa-\ua9fe\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa7e-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uab30-\uab5a\uab5c-\uab69\uab70-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc"; +let nonASCIIidentifierChars = "\u200c\u200d\xb7\u0300-\u036f\u0387\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u07fd\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u0898-\u089f\u08ca-\u08e1\u08e3-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u09fe\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0afa-\u0aff\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b55-\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c00-\u0c04\u0c3c\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c81-\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0cf3\u0d00-\u0d03\u0d3b\u0d3c\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d81-\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0de6-\u0def\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0ebc\u0ec8-\u0ece\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1369-\u1371\u1712-\u1715\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u180f-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19d0-\u19da\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1ab0-\u1abd\u1abf-\u1ace\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf4\u1cf7-\u1cf9\u1dc0-\u1dff\u203f\u2040\u2054\u20d0-\u20dc\u20e1\u20e5-\u
20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua620-\ua629\ua66f\ua674-\ua67d\ua69e\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua82c\ua880\ua881\ua8b4-\ua8c5\ua8d0-\ua8d9\ua8e0-\ua8f1\ua8ff-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\ua9e5\ua9f0-\ua9f9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b-\uaa7d\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe2f\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f"; +const nonASCIIidentifierStart = new RegExp("[" + nonASCIIidentifierStartChars + "]"); +const nonASCIIidentifier = new RegExp("[" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + "]"); +nonASCIIidentifierStartChars = nonASCIIidentifierChars = null; +const astralIdentifierStartCodes = [0, 11, 2, 25, 2, 18, 2, 1, 2, 14, 3, 13, 35, 122, 70, 52, 268, 28, 4, 48, 48, 31, 14, 29, 6, 37, 11, 29, 3, 35, 5, 7, 2, 4, 43, 157, 19, 35, 5, 35, 5, 39, 9, 51, 13, 10, 2, 14, 2, 6, 2, 1, 2, 10, 2, 14, 2, 6, 2, 1, 68, 310, 10, 21, 11, 7, 25, 5, 2, 41, 2, 8, 70, 5, 3, 0, 2, 43, 2, 1, 4, 0, 3, 22, 11, 22, 10, 30, 66, 18, 2, 1, 11, 21, 11, 25, 71, 55, 7, 1, 65, 0, 16, 3, 2, 2, 2, 28, 43, 28, 4, 28, 36, 7, 2, 27, 28, 53, 11, 21, 11, 18, 14, 17, 111, 72, 56, 50, 14, 50, 14, 35, 349, 41, 7, 1, 79, 28, 11, 0, 9, 21, 43, 17, 47, 20, 28, 22, 13, 52, 58, 1, 3, 0, 14, 44, 33, 24, 27, 35, 30, 0, 3, 0, 9, 34, 4, 0, 13, 47, 15, 3, 22, 0, 2, 0, 36, 17, 2, 24, 20, 1, 64, 6, 2, 0, 2, 3, 2, 14, 2, 9, 8, 46, 39, 7, 3, 1, 3, 21, 2, 6, 2, 1, 2, 4, 4, 0, 19, 0, 13, 4, 159, 52, 19, 3, 21, 2, 31, 47, 21, 1, 2, 0, 185, 46, 42, 3, 37, 47, 21, 0, 60, 42, 14, 0, 72, 26, 38, 6, 186, 43, 117, 63, 32, 7, 3, 0, 3, 7, 2, 1, 2, 23, 16, 0, 2, 0, 95, 7, 3, 38, 17, 0, 2, 0, 29, 0, 11, 39, 8, 0, 22, 0, 12, 45, 20, 0, 19, 72, 264, 8, 2, 36, 18, 0, 50, 29, 113, 6, 2, 1, 2, 37, 22, 0, 26, 5, 2, 1, 2, 31, 15, 0, 328, 18, 16, 0, 2, 12, 2, 33, 125, 0, 80, 921, 103, 110, 18, 195, 2637, 96, 16, 1071, 18, 5, 4026, 582, 8634, 568, 8, 30, 18, 78, 18, 29, 19, 47, 17, 3, 32, 20, 6, 18, 689, 63, 129, 74, 6, 0, 67, 12, 65, 1, 2, 0, 29, 6135, 9, 1237, 43, 8, 8936, 3, 2, 6, 2, 1, 2, 290, 16, 0, 30, 2, 3, 0, 15, 3, 9, 395, 2309, 106, 6, 12, 4, 8, 8, 9, 5991, 84, 2, 70, 2, 1, 3, 0, 3, 1, 3, 3, 2, 11, 2, 0, 2, 6, 2, 64, 2, 3, 3, 7, 2, 6, 2, 27, 2, 3, 2, 4, 2, 0, 4, 6, 2, 339, 3, 24, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 30, 2, 24, 2, 7, 1845, 30, 7, 5, 262, 61, 147, 44, 11, 6, 17, 0, 322, 29, 19, 43, 485, 27, 757, 6, 2, 3, 2, 1, 2, 14, 2, 196, 60, 67, 8, 0, 1205, 3, 2, 26, 2, 1, 2, 0, 3, 0, 2, 9, 2, 3, 2, 0, 2, 0, 7, 0, 5, 0, 2, 0, 2, 0, 2, 2, 2, 1, 2, 0, 3, 0, 2, 0, 2, 0, 2, 0, 2, 0, 2, 1, 2, 0, 3, 3, 2, 6, 2, 3, 2, 3, 2, 0, 2, 9, 2, 16, 6, 2, 2, 4, 2, 16, 4421, 42719, 33, 4153, 7, 221, 3, 5761, 15, 7472, 3104, 541, 1507, 4938, 6, 4191]; +const astralIdentifierCodes = [509, 0, 227, 0, 150, 4, 294, 9, 1368, 2, 2, 1, 6, 3, 41, 2, 5, 0, 166, 1, 574, 3, 9, 9, 370, 1, 81, 2, 71, 10, 50, 3, 123, 2, 54, 14, 32, 10, 3, 1, 11, 3, 46, 10, 8, 0, 46, 9, 7, 2, 37, 13, 2, 9, 6, 1, 45, 0, 13, 2, 49, 13, 9, 3, 2, 11, 83, 11, 7, 0, 3, 0, 158, 11, 6, 9, 7, 3, 56, 1, 2, 6, 3, 1, 3, 2, 10, 0, 11, 1, 3, 6, 4, 4, 193, 17, 10, 9, 5, 0, 82, 19, 13, 9, 214, 6, 3, 8, 28, 1, 83, 16, 16, 9, 82, 12, 9, 9, 84, 14, 5, 9, 243, 14, 166, 9, 71, 5, 2, 1, 3, 3, 2, 0, 2, 1, 13, 9, 120, 6, 3, 6, 4, 0, 29, 9, 41, 6, 2, 3, 9, 0, 10, 10, 47, 15, 406, 7, 2, 7, 17, 9, 57, 21, 2, 13, 123, 5, 4, 0, 2, 1, 2, 6, 2, 0, 9, 9, 49, 4, 2, 1, 2, 4, 9, 
9, 330, 3, 10, 1, 2, 0, 49, 6, 4, 4, 14, 9, 5351, 0, 7, 14, 13835, 9, 87, 9, 39, 4, 60, 6, 26, 9, 1014, 0, 2, 54, 8, 3, 82, 0, 12, 1, 19628, 1, 4706, 45, 3, 22, 543, 4, 4, 5, 9, 7, 3, 6, 31, 3, 149, 2, 1418, 49, 513, 54, 5, 49, 9, 0, 15, 0, 23, 4, 2, 14, 1361, 6, 2, 16, 3, 6, 2, 1, 2, 4, 101, 0, 161, 6, 10, 9, 357, 0, 62, 13, 499, 13, 983, 6, 110, 6, 6, 9, 4759, 9, 787719, 239];
+function isInAstralSet(code, set) {
+  let pos = 0x10000;
+  for (let i = 0, length = set.length; i < length; i += 2) {
+    pos += set[i];
+    if (pos > code) return false;
+    pos += set[i + 1];
+    if (pos >= code) return true;
+  }
+  return false;
+}
+function isIdentifierStart(code) {
+  if (code < 65) return code === 36;
+  if (code <= 90) return true;
+  if (code < 97) return code === 95;
+  if (code <= 122) return true;
+  if (code <= 0xffff) {
+    return code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code));
+  }
+  return isInAstralSet(code, astralIdentifierStartCodes);
+}
+function isIdentifierChar(code) {
+  if (code < 48) return code === 36;
+  if (code < 58) return true;
+  if (code < 65) return false;
+  if (code <= 90) return true;
+  if (code < 97) return code === 95;
+  if (code <= 122) return true;
+  if (code <= 0xffff) {
+    return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code));
+  }
+  return isInAstralSet(code, astralIdentifierStartCodes) || isInAstralSet(code, astralIdentifierCodes);
+}
+function isIdentifierName(name) {
+  let isFirst = true;
+  for (let i = 0; i < name.length; i++) {
+    let cp = name.charCodeAt(i);
+    if ((cp & 0xfc00) === 0xd800 && i + 1 < name.length) {
+      const trail = name.charCodeAt(++i);
+      if ((trail & 0xfc00) === 0xdc00) {
+        cp = 0x10000 + ((cp & 0x3ff) << 10) + (trail & 0x3ff);
+      }
+    }
+    if (isFirst) {
+      isFirst = false;
+      if (!isIdentifierStart(cp)) {
+        return false;
+      }
+    } else if (!isIdentifierChar(cp)) {
+      return false;
+    }
+  }
+  return !isFirst;
+}
+
+//# sourceMappingURL=identifier.js.map
diff --git a/node_modules/@babel/helper-validator-identifier/lib/identifier.js.map b/node_modules/@babel/helper-validator-identifier/lib/identifier.js.map
new file mode 100644
index 0000000..dd0449d
--- /dev/null
+++ b/node_modules/@babel/helper-validator-identifier/lib/identifier.js.map
@@ -0,0 +1 @@
+{"version":3,"names":["nonASCIIidentifierStartChars","nonASCIIidentifierChars","nonASCIIidentifierStart","RegExp","nonASCIIidentifier","astralIdentifierStartCodes","astralIdentifierCodes","isInAstralSet","code","set","pos","i","length","isIdentifierStart","test","String","fromCharCode","isIdentifierChar","isIdentifierName","name","isFirst","cp","charCodeAt","trail"],"sources":["../src/identifier.ts"],"sourcesContent":["import * as charCodes from \"charcodes\";\n\n// ## Character categories\n\n// Big ugly regular expressions that match characters in the\n// whitespace, identifier, and identifier-start categories.
These\n// are only applied when a character is found to actually have a\n// code point between 0x80 and 0xffff.\n// Generated by `scripts/generate-identifier-regex.js`.\n\n/* prettier-ignore */\nlet nonASCIIidentifierStartChars = \"\\xaa\\xb5\\xba\\xc0-\\xd6\\xd8-\\xf6\\xf8-\\u02c1\\u02c6-\\u02d1\\u02e0-\\u02e4\\u02ec\\u02ee\\u0370-\\u0374\\u0376\\u0377\\u037a-\\u037d\\u037f\\u0386\\u0388-\\u038a\\u038c\\u038e-\\u03a1\\u03a3-\\u03f5\\u03f7-\\u0481\\u048a-\\u052f\\u0531-\\u0556\\u0559\\u0560-\\u0588\\u05d0-\\u05ea\\u05ef-\\u05f2\\u0620-\\u064a\\u066e\\u066f\\u0671-\\u06d3\\u06d5\\u06e5\\u06e6\\u06ee\\u06ef\\u06fa-\\u06fc\\u06ff\\u0710\\u0712-\\u072f\\u074d-\\u07a5\\u07b1\\u07ca-\\u07ea\\u07f4\\u07f5\\u07fa\\u0800-\\u0815\\u081a\\u0824\\u0828\\u0840-\\u0858\\u0860-\\u086a\\u0870-\\u0887\\u0889-\\u088e\\u08a0-\\u08c9\\u0904-\\u0939\\u093d\\u0950\\u0958-\\u0961\\u0971-\\u0980\\u0985-\\u098c\\u098f\\u0990\\u0993-\\u09a8\\u09aa-\\u09b0\\u09b2\\u09b6-\\u09b9\\u09bd\\u09ce\\u09dc\\u09dd\\u09df-\\u09e1\\u09f0\\u09f1\\u09fc\\u0a05-\\u0a0a\\u0a0f\\u0a10\\u0a13-\\u0a28\\u0a2a-\\u0a30\\u0a32\\u0a33\\u0a35\\u0a36\\u0a38\\u0a39\\u0a59-\\u0a5c\\u0a5e\\u0a72-\\u0a74\\u0a85-\\u0a8d\\u0a8f-\\u0a91\\u0a93-\\u0aa8\\u0aaa-\\u0ab0\\u0ab2\\u0ab3\\u0ab5-\\u0ab9\\u0abd\\u0ad0\\u0ae0\\u0ae1\\u0af9\\u0b05-\\u0b0c\\u0b0f\\u0b10\\u0b13-\\u0b28\\u0b2a-\\u0b30\\u0b32\\u0b33\\u0b35-\\u0b39\\u0b3d\\u0b5c\\u0b5d\\u0b5f-\\u0b61\\u0b71\\u0b83\\u0b85-\\u0b8a\\u0b8e-\\u0b90\\u0b92-\\u0b95\\u0b99\\u0b9a\\u0b9c\\u0b9e\\u0b9f\\u0ba3\\u0ba4\\u0ba8-\\u0baa\\u0bae-\\u0bb9\\u0bd0\\u0c05-\\u0c0c\\u0c0e-\\u0c10\\u0c12-\\u0c28\\u0c2a-\\u0c39\\u0c3d\\u0c58-\\u0c5a\\u0c5d\\u0c60\\u0c61\\u0c80\\u0c85-\\u0c8c\\u0c8e-\\u0c90\\u0c92-\\u0ca8\\u0caa-\\u0cb3\\u0cb5-\\u0cb9\\u0cbd\\u0cdd\\u0cde\\u0ce0\\u0ce1\\u0cf1\\u0cf2\\u0d04-\\u0d0c\\u0d0e-\\u0d10\\u0d12-\\u0d3a\\u0d3d\\u0d4e\\u0d54-\\u0d56\\u0d5f-\\u0d61\\u0d7a-\\u0d7f\\u0d85-\\u0d96\\u0d9a-\\u0db1\\u0db3-\\u0dbb\\u0dbd\\u0dc0-\\u0dc6\\u0e01-\\u0e30\\u0e32\\u0e33\\u0e40-\\u0e46\\u0e81\\u0e82\\u0e84\\u0e86-\\u0e8a\\u0e8c-\\u0ea3\\u0ea5\\u0ea7-\\u0eb0\\u0eb2\\u0eb3\\u0ebd\\u0ec0-\\u0ec4\\u0ec6\\u0edc-\\u0edf\\u0f00\\u0f40-\\u0f47\\u0f49-\\u0f6c\\u0f88-\\u0f8c\\u1000-\\u102a\\u103f\\u1050-\\u1055\\u105a-\\u105d\\u1061\\u1065\\u1066\\u106e-\\u1070\\u1075-\\u1081\\u108e\\u10a0-\\u10c5\\u10c7\\u10cd\\u10d0-\\u10fa\\u10fc-\\u1248\\u124a-\\u124d\\u1250-\\u1256\\u1258\\u125a-\\u125d\\u1260-\\u1288\\u128a-\\u128d\\u1290-\\u12b0\\u12b2-\\u12b5\\u12b8-\\u12be\\u12c0\\u12c2-\\u12c5\\u12c8-\\u12d6\\u12d8-\\u1310\\u1312-\\u1315\\u1318-\\u135a\\u1380-\\u138f\\u13a0-\\u13f5\\u13f8-\\u13fd\\u1401-\\u166c\\u166f-\\u167f\\u1681-\\u169a\\u16a0-\\u16ea\\u16ee-\\u16f8\\u1700-\\u1711\\u171f-\\u1731\\u1740-\\u1751\\u1760-\\u176c\\u176e-\\u1770\\u1780-\\u17b3\\u17d7\\u17dc\\u1820-\\u1878\\u1880-\\u18a8\\u18aa\\u18b0-\\u18f5\\u1900-\\u191e\\u1950-\\u196d\\u1970-\\u1974\\u1980-\\u19ab\\u19b0-\\u19c9\\u1a00-\\u1a16\\u1a20-\\u1a54\\u1aa7\\u1b05-\\u1b33\\u1b45-\\u1b4c\\u1b83-\\u1ba0\\u1bae\\u1baf\\u1bba-\\u1be5\\u1c00-\\u1c23\\u1c4d-\\u1c4f\\u1c5a-\\u1c7d\\u1c80-\\u1c88\\u1c90-\\u1cba\\u1cbd-\\u1cbf\\u1ce9-\\u1cec\\u1cee-\\u1cf3\\u1cf5\\u1cf6\\u1cfa\\u1d00-\\u1dbf\\u1e00-\\u1f15\\u1f18-\\u1f1d\\u1f20-\\u1f45\\u1f48-\\u1f4d\\u1f50-\\u1f57\\u1f59\\u1f5b\\u1f5d\\u1f5f-\\u1f7d\\u1f80-\\u1fb4\\u1fb6-\\u1fbc\\u1fbe\\u1fc2-\\u1fc4\\u1fc6-\\u1fcc\\u1fd0-\\u1fd3\\u1fd6-\\u1fdb\\u1fe0-\\u1fec\\u1ff2-\\u1ff4\\u1ff6-\\u1ffc\\u2071\\u207f\\u2090-\\u209c\\u2102\\u2107\\u210a-\\u2113\\u2115\\u2118-\\u211d\\u2124\\u2126\\u2128\\u212a-\\u21
39\\u213c-\\u213f\\u2145-\\u2149\\u214e\\u2160-\\u2188\\u2c00-\\u2ce4\\u2ceb-\\u2cee\\u2cf2\\u2cf3\\u2d00-\\u2d25\\u2d27\\u2d2d\\u2d30-\\u2d67\\u2d6f\\u2d80-\\u2d96\\u2da0-\\u2da6\\u2da8-\\u2dae\\u2db0-\\u2db6\\u2db8-\\u2dbe\\u2dc0-\\u2dc6\\u2dc8-\\u2dce\\u2dd0-\\u2dd6\\u2dd8-\\u2dde\\u3005-\\u3007\\u3021-\\u3029\\u3031-\\u3035\\u3038-\\u303c\\u3041-\\u3096\\u309b-\\u309f\\u30a1-\\u30fa\\u30fc-\\u30ff\\u3105-\\u312f\\u3131-\\u318e\\u31a0-\\u31bf\\u31f0-\\u31ff\\u3400-\\u4dbf\\u4e00-\\ua48c\\ua4d0-\\ua4fd\\ua500-\\ua60c\\ua610-\\ua61f\\ua62a\\ua62b\\ua640-\\ua66e\\ua67f-\\ua69d\\ua6a0-\\ua6ef\\ua717-\\ua71f\\ua722-\\ua788\\ua78b-\\ua7ca\\ua7d0\\ua7d1\\ua7d3\\ua7d5-\\ua7d9\\ua7f2-\\ua801\\ua803-\\ua805\\ua807-\\ua80a\\ua80c-\\ua822\\ua840-\\ua873\\ua882-\\ua8b3\\ua8f2-\\ua8f7\\ua8fb\\ua8fd\\ua8fe\\ua90a-\\ua925\\ua930-\\ua946\\ua960-\\ua97c\\ua984-\\ua9b2\\ua9cf\\ua9e0-\\ua9e4\\ua9e6-\\ua9ef\\ua9fa-\\ua9fe\\uaa00-\\uaa28\\uaa40-\\uaa42\\uaa44-\\uaa4b\\uaa60-\\uaa76\\uaa7a\\uaa7e-\\uaaaf\\uaab1\\uaab5\\uaab6\\uaab9-\\uaabd\\uaac0\\uaac2\\uaadb-\\uaadd\\uaae0-\\uaaea\\uaaf2-\\uaaf4\\uab01-\\uab06\\uab09-\\uab0e\\uab11-\\uab16\\uab20-\\uab26\\uab28-\\uab2e\\uab30-\\uab5a\\uab5c-\\uab69\\uab70-\\uabe2\\uac00-\\ud7a3\\ud7b0-\\ud7c6\\ud7cb-\\ud7fb\\uf900-\\ufa6d\\ufa70-\\ufad9\\ufb00-\\ufb06\\ufb13-\\ufb17\\ufb1d\\ufb1f-\\ufb28\\ufb2a-\\ufb36\\ufb38-\\ufb3c\\ufb3e\\ufb40\\ufb41\\ufb43\\ufb44\\ufb46-\\ufbb1\\ufbd3-\\ufd3d\\ufd50-\\ufd8f\\ufd92-\\ufdc7\\ufdf0-\\ufdfb\\ufe70-\\ufe74\\ufe76-\\ufefc\\uff21-\\uff3a\\uff41-\\uff5a\\uff66-\\uffbe\\uffc2-\\uffc7\\uffca-\\uffcf\\uffd2-\\uffd7\\uffda-\\uffdc\";\n/* prettier-ignore */\nlet nonASCIIidentifierChars = \"\\u200c\\u200d\\xb7\\u0300-\\u036f\\u0387\\u0483-\\u0487\\u0591-\\u05bd\\u05bf\\u05c1\\u05c2\\u05c4\\u05c5\\u05c7\\u0610-\\u061a\\u064b-\\u0669\\u0670\\u06d6-\\u06dc\\u06df-\\u06e4\\u06e7\\u06e8\\u06ea-\\u06ed\\u06f0-\\u06f9\\u0711\\u0730-\\u074a\\u07a6-\\u07b0\\u07c0-\\u07c9\\u07eb-\\u07f3\\u07fd\\u0816-\\u0819\\u081b-\\u0823\\u0825-\\u0827\\u0829-\\u082d\\u0859-\\u085b\\u0898-\\u089f\\u08ca-\\u08e1\\u08e3-\\u0903\\u093a-\\u093c\\u093e-\\u094f\\u0951-\\u0957\\u0962\\u0963\\u0966-\\u096f\\u0981-\\u0983\\u09bc\\u09be-\\u09c4\\u09c7\\u09c8\\u09cb-\\u09cd\\u09d7\\u09e2\\u09e3\\u09e6-\\u09ef\\u09fe\\u0a01-\\u0a03\\u0a3c\\u0a3e-\\u0a42\\u0a47\\u0a48\\u0a4b-\\u0a4d\\u0a51\\u0a66-\\u0a71\\u0a75\\u0a81-\\u0a83\\u0abc\\u0abe-\\u0ac5\\u0ac7-\\u0ac9\\u0acb-\\u0acd\\u0ae2\\u0ae3\\u0ae6-\\u0aef\\u0afa-\\u0aff\\u0b01-\\u0b03\\u0b3c\\u0b3e-\\u0b44\\u0b47\\u0b48\\u0b4b-\\u0b4d\\u0b55-\\u0b57\\u0b62\\u0b63\\u0b66-\\u0b6f\\u0b82\\u0bbe-\\u0bc2\\u0bc6-\\u0bc8\\u0bca-\\u0bcd\\u0bd7\\u0be6-\\u0bef\\u0c00-\\u0c04\\u0c3c\\u0c3e-\\u0c44\\u0c46-\\u0c48\\u0c4a-\\u0c4d\\u0c55\\u0c56\\u0c62\\u0c63\\u0c66-\\u0c6f\\u0c81-\\u0c83\\u0cbc\\u0cbe-\\u0cc4\\u0cc6-\\u0cc8\\u0cca-\\u0ccd\\u0cd5\\u0cd6\\u0ce2\\u0ce3\\u0ce6-\\u0cef\\u0cf3\\u0d00-\\u0d03\\u0d3b\\u0d3c\\u0d3e-\\u0d44\\u0d46-\\u0d48\\u0d4a-\\u0d4d\\u0d57\\u0d62\\u0d63\\u0d66-\\u0d6f\\u0d81-\\u0d83\\u0dca\\u0dcf-\\u0dd4\\u0dd6\\u0dd8-\\u0ddf\\u0de6-\\u0def\\u0df2\\u0df3\\u0e31\\u0e34-\\u0e3a\\u0e47-\\u0e4e\\u0e50-\\u0e59\\u0eb1\\u0eb4-\\u0ebc\\u0ec8-\\u0ece\\u0ed0-\\u0ed9\\u0f18\\u0f19\\u0f20-\\u0f29\\u0f35\\u0f37\\u0f39\\u0f3e\\u0f3f\\u0f71-\\u0f84\\u0f86\\u0f87\\u0f8d-\\u0f97\\u0f99-\\u0fbc\\u0fc6\\u102b-\\u103e\\u1040-\\u1049\\u1056-\\u1059\\u105e-\\u1060\\u1062-\\u1064\\u1067-\\u106d\\u1071-\\u1074\\u1082-\\u108d\\u108f-\\u109d\\u135d-\\u135f\\u1369-\\u1371\\u1712-\\u1715\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17b
4-\\u17d3\\u17dd\\u17e0-\\u17e9\\u180b-\\u180d\\u180f-\\u1819\\u18a9\\u1920-\\u192b\\u1930-\\u193b\\u1946-\\u194f\\u19d0-\\u19da\\u1a17-\\u1a1b\\u1a55-\\u1a5e\\u1a60-\\u1a7c\\u1a7f-\\u1a89\\u1a90-\\u1a99\\u1ab0-\\u1abd\\u1abf-\\u1ace\\u1b00-\\u1b04\\u1b34-\\u1b44\\u1b50-\\u1b59\\u1b6b-\\u1b73\\u1b80-\\u1b82\\u1ba1-\\u1bad\\u1bb0-\\u1bb9\\u1be6-\\u1bf3\\u1c24-\\u1c37\\u1c40-\\u1c49\\u1c50-\\u1c59\\u1cd0-\\u1cd2\\u1cd4-\\u1ce8\\u1ced\\u1cf4\\u1cf7-\\u1cf9\\u1dc0-\\u1dff\\u203f\\u2040\\u2054\\u20d0-\\u20dc\\u20e1\\u20e5-\\u20f0\\u2cef-\\u2cf1\\u2d7f\\u2de0-\\u2dff\\u302a-\\u302f\\u3099\\u309a\\ua620-\\ua629\\ua66f\\ua674-\\ua67d\\ua69e\\ua69f\\ua6f0\\ua6f1\\ua802\\ua806\\ua80b\\ua823-\\ua827\\ua82c\\ua880\\ua881\\ua8b4-\\ua8c5\\ua8d0-\\ua8d9\\ua8e0-\\ua8f1\\ua8ff-\\ua909\\ua926-\\ua92d\\ua947-\\ua953\\ua980-\\ua983\\ua9b3-\\ua9c0\\ua9d0-\\ua9d9\\ua9e5\\ua9f0-\\ua9f9\\uaa29-\\uaa36\\uaa43\\uaa4c\\uaa4d\\uaa50-\\uaa59\\uaa7b-\\uaa7d\\uaab0\\uaab2-\\uaab4\\uaab7\\uaab8\\uaabe\\uaabf\\uaac1\\uaaeb-\\uaaef\\uaaf5\\uaaf6\\uabe3-\\uabea\\uabec\\uabed\\uabf0-\\uabf9\\ufb1e\\ufe00-\\ufe0f\\ufe20-\\ufe2f\\ufe33\\ufe34\\ufe4d-\\ufe4f\\uff10-\\uff19\\uff3f\";\n\nconst nonASCIIidentifierStart = new RegExp(\n \"[\" + nonASCIIidentifierStartChars + \"]\",\n);\nconst nonASCIIidentifier = new RegExp(\n \"[\" + nonASCIIidentifierStartChars + nonASCIIidentifierChars + \"]\",\n);\n\nnonASCIIidentifierStartChars = nonASCIIidentifierChars = null;\n\n// These are a run-length and offset-encoded representation of the\n// >0xffff code points that are a valid part of identifiers. The\n// offset starts at 0x10000, and each pair of numbers represents an\n// offset to the next range, and then a size of the range. They were\n// generated by `scripts/generate-identifier-regex.js`.\n/* prettier-ignore */\nconst astralIdentifierStartCodes = [0,11,2,25,2,18,2,1,2,14,3,13,35,122,70,52,268,28,4,48,48,31,14,29,6,37,11,29,3,35,5,7,2,4,43,157,19,35,5,35,5,39,9,51,13,10,2,14,2,6,2,1,2,10,2,14,2,6,2,1,68,310,10,21,11,7,25,5,2,41,2,8,70,5,3,0,2,43,2,1,4,0,3,22,11,22,10,30,66,18,2,1,11,21,11,25,71,55,7,1,65,0,16,3,2,2,2,28,43,28,4,28,36,7,2,27,28,53,11,21,11,18,14,17,111,72,56,50,14,50,14,35,349,41,7,1,79,28,11,0,9,21,43,17,47,20,28,22,13,52,58,1,3,0,14,44,33,24,27,35,30,0,3,0,9,34,4,0,13,47,15,3,22,0,2,0,36,17,2,24,20,1,64,6,2,0,2,3,2,14,2,9,8,46,39,7,3,1,3,21,2,6,2,1,2,4,4,0,19,0,13,4,159,52,19,3,21,2,31,47,21,1,2,0,185,46,42,3,37,47,21,0,60,42,14,0,72,26,38,6,186,43,117,63,32,7,3,0,3,7,2,1,2,23,16,0,2,0,95,7,3,38,17,0,2,0,29,0,11,39,8,0,22,0,12,45,20,0,19,72,264,8,2,36,18,0,50,29,113,6,2,1,2,37,22,0,26,5,2,1,2,31,15,0,328,18,16,0,2,12,2,33,125,0,80,921,103,110,18,195,2637,96,16,1071,18,5,4026,582,8634,568,8,30,18,78,18,29,19,47,17,3,32,20,6,18,689,63,129,74,6,0,67,12,65,1,2,0,29,6135,9,1237,43,8,8936,3,2,6,2,1,2,290,16,0,30,2,3,0,15,3,9,395,2309,106,6,12,4,8,8,9,5991,84,2,70,2,1,3,0,3,1,3,3,2,11,2,0,2,6,2,64,2,3,3,7,2,6,2,27,2,3,2,4,2,0,4,6,2,339,3,24,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,30,2,24,2,7,1845,30,7,5,262,61,147,44,11,6,17,0,322,29,19,43,485,27,757,6,2,3,2,1,2,14,2,196,60,67,8,0,1205,3,2,26,2,1,2,0,3,0,2,9,2,3,2,0,2,0,7,0,5,0,2,0,2,0,2,2,2,1,2,0,3,0,2,0,2,0,2,0,2,0,2,1,2,0,3,3,2,6,2,3,2,3,2,0,2,9,2,16,6,2,2,4,2,16,4421,42719,33,4153,7,221,3,5761,15,7472,3104,541,1507,4938,6,4191];\n/* prettier-ignore */\nconst astralIdentifierCodes = 
[509,0,227,0,150,4,294,9,1368,2,2,1,6,3,41,2,5,0,166,1,574,3,9,9,370,1,81,2,71,10,50,3,123,2,54,14,32,10,3,1,11,3,46,10,8,0,46,9,7,2,37,13,2,9,6,1,45,0,13,2,49,13,9,3,2,11,83,11,7,0,3,0,158,11,6,9,7,3,56,1,2,6,3,1,3,2,10,0,11,1,3,6,4,4,193,17,10,9,5,0,82,19,13,9,214,6,3,8,28,1,83,16,16,9,82,12,9,9,84,14,5,9,243,14,166,9,71,5,2,1,3,3,2,0,2,1,13,9,120,6,3,6,4,0,29,9,41,6,2,3,9,0,10,10,47,15,406,7,2,7,17,9,57,21,2,13,123,5,4,0,2,1,2,6,2,0,9,9,49,4,2,1,2,4,9,9,330,3,10,1,2,0,49,6,4,4,14,9,5351,0,7,14,13835,9,87,9,39,4,60,6,26,9,1014,0,2,54,8,3,82,0,12,1,19628,1,4706,45,3,22,543,4,4,5,9,7,3,6,31,3,149,2,1418,49,513,54,5,49,9,0,15,0,23,4,2,14,1361,6,2,16,3,6,2,1,2,4,101,0,161,6,10,9,357,0,62,13,499,13,983,6,110,6,6,9,4759,9,787719,239];\n\n// This has a complexity linear to the value of the code. The\n// assumption is that looking up astral identifier characters is\n// rare.\nfunction isInAstralSet(code: number, set: readonly number[]): boolean {\n let pos = 0x10000;\n for (let i = 0, length = set.length; i < length; i += 2) {\n pos += set[i];\n if (pos > code) return false;\n\n pos += set[i + 1];\n if (pos >= code) return true;\n }\n return false;\n}\n\n// Test whether a given character code starts an identifier.\n\nexport function isIdentifierStart(code: number): boolean {\n if (code < charCodes.uppercaseA) return code === charCodes.dollarSign;\n if (code <= charCodes.uppercaseZ) return true;\n if (code < charCodes.lowercaseA) return code === charCodes.underscore;\n if (code <= charCodes.lowercaseZ) return true;\n if (code <= 0xffff) {\n return (\n code >= 0xaa && nonASCIIidentifierStart.test(String.fromCharCode(code))\n );\n }\n return isInAstralSet(code, astralIdentifierStartCodes);\n}\n\n// Test whether a given character is part of an identifier.\n\nexport function isIdentifierChar(code: number): boolean {\n if (code < charCodes.digit0) return code === charCodes.dollarSign;\n if (code < charCodes.colon) return true;\n if (code < charCodes.uppercaseA) return false;\n if (code <= charCodes.uppercaseZ) return true;\n if (code < charCodes.lowercaseA) return code === charCodes.underscore;\n if (code <= charCodes.lowercaseZ) return true;\n if (code <= 0xffff) {\n return code >= 0xaa && nonASCIIidentifier.test(String.fromCharCode(code));\n }\n return (\n isInAstralSet(code, astralIdentifierStartCodes) ||\n isInAstralSet(code, astralIdentifierCodes)\n );\n}\n\n// Test whether a given string is a valid identifier name\n\nexport function isIdentifierName(name: string): boolean {\n let isFirst = true;\n for (let i = 0; i < name.length; i++) {\n // The implementation is based on\n // https://source.chromium.org/chromium/chromium/src/+/master:v8/src/builtins/builtins-string-gen.cc;l=1455;drc=221e331b49dfefadbc6fa40b0c68e6f97606d0b3;bpv=0;bpt=1\n // We reimplement `codePointAt` because `codePointAt` is a V8 builtin which is not inlined by TurboFan (as of M91)\n // since `name` is mostly ASCII, an inlined `charCodeAt` wins here\n let cp = name.charCodeAt(i);\n if ((cp & 0xfc00) === 0xd800 && i + 1 < name.length) {\n const trail = name.charCodeAt(++i);\n if ((trail & 0xfc00) === 0xdc00) {\n cp = 0x10000 + ((cp & 0x3ff) << 10) + (trail & 0x3ff);\n }\n }\n if (isFirst) {\n isFirst = false;\n if (!isIdentifierStart(cp)) {\n return false;\n }\n } else if (!isIdentifierChar(cp)) {\n return false;\n }\n }\n return 
!isFirst;\n}\n"],"mappings":";;;;;;;;AAWA,IAAIA,4BAA4B,GAAG,8qIAA8qI;AAEjtI,IAAIC,uBAAuB,GAAG,mkFAAmkF;AAEjmF,MAAMC,uBAAuB,GAAG,IAAIC,MAAM,CACxC,GAAG,GAAGH,4BAA4B,GAAG,GACvC,CAAC;AACD,MAAMI,kBAAkB,GAAG,IAAID,MAAM,CACnC,GAAG,GAAGH,4BAA4B,GAAGC,uBAAuB,GAAG,GACjE,CAAC;AAEDD,4BAA4B,GAAGC,uBAAuB,GAAG,IAAI;AAQ7D,MAAMI,0BAA0B,GAAG,CAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,GAAG,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,IAAI,EAAC,EAAE,EAAC,EAAE
,EAAC,IAAI,EAAC,EAAE,EAAC,CAAC,EAAC,IAAI,EAAC,GAAG,EAAC,IAAI,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,IAAI,EAAC,CAAC,EAAC,IAAI,EAAC,EAAE,EAAC,CAAC,EAAC,IAAI,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,IAAI,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,IAAI,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,IAAI,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,IAAI,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,IAAI,EAAC,KAAK,EAAC,EAAE,EAAC,IAAI,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,IAAI,EAAC,EAAE,EAAC,IAAI,EAAC,IAAI,EAAC,GAAG,EAAC,IAAI,EAAC,IAAI,EAAC,CAAC,EAAC,IAAI,CAAC;AAEj+C,MAAMC,qBAAqB,GAAG,CAAC,GAAG,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,IAAI,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC
,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,IAAI,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,KAAK,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,IAAI,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,KAAK,EAAC,CAAC,EAAC,IAAI,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,IAAI,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,IAAI,EAAC,CAAC,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,EAAE,EAAC,EAAE,EAAC,GAAG,EAAC,EAAE,EAAC,GAAG,EAAC,CAAC,EAAC,GAAG,EAAC,CAAC,EAAC,CAAC,EAAC,CAAC,EAAC,IAAI,EAAC,CAAC,EAAC,MAAM,EAAC,GAAG,CAAC;AAKjwB,SAASC,aAAaA,CAACC,IAAY,EAAEC,GAAsB,EAAW;EACpE,IAAIC,GAAG,GAAG,OAAO;EACjB,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEC,MAAM,GAAGH,GAAG,CAACG,MAAM,EAAED,CAAC,GAAGC,MAAM,EAAED,CAAC,IAAI,CAAC,EAAE;IACvDD,GAAG,IAAID,GAAG,CAACE,CAAC,CAAC;IACb,IAAID,GAAG,GAAGF,IAAI,EAAE,OAAO,KAAK;IAE5BE,GAAG,IAAID,GAAG,CAACE,CAAC,GAAG,CAAC,CAAC;IACjB,IAAID,GAAG,IAAIF,IAAI,EAAE,OAAO,IAAI;EAC9B;EACA,OAAO,KAAK;AACd;AAIO,SAASK,iBAAiBA,CAACL,IAAY,EAAW;EACvD,IAAIA,IAAI,KAAuB,EAAE,OAAOA,IAAI,OAAyB;EACrE,IAAIA,IAAI,MAAwB,EAAE,OAAO,IAAI;EAC7C,IAAIA,IAAI,KAAuB,EAAE,OAAOA,IAAI,OAAyB;EACrE,IAAIA,IAAI,OAAwB,EAAE,OAAO,IAAI;EAC7C,IAAIA,IAAI,IAAI,MAAM,EAAE;IAClB,OACEA,IAAI,IAAI,IAAI,IAAIN,uBAAuB,CAACY,IAAI,CAACC,MAAM,CAACC,YAAY,CAACR,IAAI,CAAC,CAAC;EAE3E;EACA,OAAOD,aAAa,CAACC,IAAI,EAAEH,0BAA0B,CAAC;AACxD;AAIO,SAASY,gBAAgBA,CAACT,IAAY,EAAW;EACtD,IAAIA,IAAI,KAAmB,EAAE,OAAOA,IAAI,OAAyB;EACjE,IAAIA,IAAI,KAAkB,EAAE,OAAO,IAAI;EACvC,IAAIA,IAAI,KAAuB,EAAE,OAAO,KAAK;EAC7C,IAAIA,IAAI,MAAwB,EAAE,OAAO,IAAI;EAC7C,IAAIA,IAAI,KAAuB,EAAE,OAAOA,IAAI,OAAyB;EACrE,IAAIA,IAAI,OAAwB,EAAE,OAAO,IAAI;EAC7C,IAAIA,IAAI,IAAI,MAAM,EAAE;IAClB,OAAOA,IAAI,IAAI,IAAI,IAAIJ,kBAAkB,CAACU,IAAI,CAACC,MAAM,CAACC,YAAY,CAACR,IAAI,CAAC,CAAC;EAC3E;EACA,OACED,aAAa,CAACC,IAAI,EAAEH,0BAA0B,CAAC,IAC/CE,aAAa,CAACC,IAAI,EAAEF,qBAAqB,CAAC;AAE9C;AAIO,SAASY,gBAAgBA,CAACC,IAAY,EAAW;EACtD,IAAIC,OAAO,GAAG,IAAI;EAClB,KAAK,IAAIT,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGQ,IAAI,CAACP,MAAM,EAAED,CAAC,EAAE,EAAE;IAKpC,IAAIU,EAAE,GAAGF,IAAI,CAACG,UAAU,CAACX,CAAC,CAAC;IAC3B,IAAI,CAACU,EAAE,GAAG,MAAM,MAAM,MAAM,IAAIV,CAAC,GAAG,CAAC,GAAGQ,IAAI,CAACP,MAAM,EAAE;MACnD,MAAMW,KAAK,GAAGJ,IAAI,CAACG,UAAU,CAAC,EAAEX,CAAC,CAAC;MAClC,IAAI,CAACY,KAAK,GAAG,MAAM,MAAM,MAAM,EAAE;QAC/BF,EAAE,GAAG,OAAO,IAAI,CAACA,EAAE,GAAG,KAAK,KAAK,EAAE,CAAC,IAAIE,KAAK,GAAG,KAAK,CAAC;MACvD;IACF;IACA,IAAIH,OAAO,EAAE;MACXA,OAAO,GAAG,KAAK;MACf,IAAI,C
AACP,iBAAiB,CAACQ,EAAE,CAAC,EAAE;QAC1B,OAAO,KAAK;MACd;IACF,CAAC,MAAM,IAAI,CAACJ,gBAAgB,CAACI,EAAE,CAAC,EAAE;MAChC,OAAO,KAAK;IACd;EACF;EACA,OAAO,CAACD,OAAO;AACjB"} \ No newline at end of file diff --git a/node_modules/@babel/helper-validator-identifier/lib/index.js b/node_modules/@babel/helper-validator-identifier/lib/index.js new file mode 100644 index 0000000..c677cea --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/lib/index.js @@ -0,0 +1,57 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +Object.defineProperty(exports, "isIdentifierChar", { + enumerable: true, + get: function () { + return _identifier.isIdentifierChar; + } +}); +Object.defineProperty(exports, "isIdentifierName", { + enumerable: true, + get: function () { + return _identifier.isIdentifierName; + } +}); +Object.defineProperty(exports, "isIdentifierStart", { + enumerable: true, + get: function () { + return _identifier.isIdentifierStart; + } +}); +Object.defineProperty(exports, "isKeyword", { + enumerable: true, + get: function () { + return _keyword.isKeyword; + } +}); +Object.defineProperty(exports, "isReservedWord", { + enumerable: true, + get: function () { + return _keyword.isReservedWord; + } +}); +Object.defineProperty(exports, "isStrictBindOnlyReservedWord", { + enumerable: true, + get: function () { + return _keyword.isStrictBindOnlyReservedWord; + } +}); +Object.defineProperty(exports, "isStrictBindReservedWord", { + enumerable: true, + get: function () { + return _keyword.isStrictBindReservedWord; + } +}); +Object.defineProperty(exports, "isStrictReservedWord", { + enumerable: true, + get: function () { + return _keyword.isStrictReservedWord; + } +}); +var _identifier = require("./identifier"); +var _keyword = require("./keyword"); + +//# sourceMappingURL=index.js.map diff --git a/node_modules/@babel/helper-validator-identifier/lib/index.js.map b/node_modules/@babel/helper-validator-identifier/lib/index.js.map new file mode 100644 index 0000000..cc9ad3d --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"names":["_identifier","require","_keyword"],"sources":["../src/index.ts"],"sourcesContent":["export {\n isIdentifierName,\n isIdentifierChar,\n isIdentifierStart,\n} from \"./identifier\";\nexport {\n isReservedWord,\n isStrictBindOnlyReservedWord,\n isStrictBindReservedWord,\n isStrictReservedWord,\n isKeyword,\n} from \"./keyword\";\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,IAAAA,WAAA,GAAAC,OAAA;AAKA,IAAAC,QAAA,GAAAD,OAAA"} \ No newline at end of file diff --git a/node_modules/@babel/helper-validator-identifier/lib/keyword.js b/node_modules/@babel/helper-validator-identifier/lib/keyword.js new file mode 100644 index 0000000..054cf84 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/lib/keyword.js @@ -0,0 +1,35 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.isKeyword = isKeyword; +exports.isReservedWord = isReservedWord; +exports.isStrictBindOnlyReservedWord = isStrictBindOnlyReservedWord; +exports.isStrictBindReservedWord = isStrictBindReservedWord; +exports.isStrictReservedWord = isStrictReservedWord; +const reservedWords = { + keyword: ["break", "case", "catch", "continue", "debugger", "default", "do", "else", "finally", "for", "function", "if", "return", "switch", "throw", "try", "var", "const", "while", "with", "new", "this", "super", "class", "extends", "export", "import", "null", "true", 
"false", "in", "instanceof", "typeof", "void", "delete"], + strict: ["implements", "interface", "let", "package", "private", "protected", "public", "static", "yield"], + strictBind: ["eval", "arguments"] +}; +const keywords = new Set(reservedWords.keyword); +const reservedWordsStrictSet = new Set(reservedWords.strict); +const reservedWordsStrictBindSet = new Set(reservedWords.strictBind); +function isReservedWord(word, inModule) { + return inModule && word === "await" || word === "enum"; +} +function isStrictReservedWord(word, inModule) { + return isReservedWord(word, inModule) || reservedWordsStrictSet.has(word); +} +function isStrictBindOnlyReservedWord(word) { + return reservedWordsStrictBindSet.has(word); +} +function isStrictBindReservedWord(word, inModule) { + return isStrictReservedWord(word, inModule) || isStrictBindOnlyReservedWord(word); +} +function isKeyword(word) { + return keywords.has(word); +} + +//# sourceMappingURL=keyword.js.map diff --git a/node_modules/@babel/helper-validator-identifier/lib/keyword.js.map b/node_modules/@babel/helper-validator-identifier/lib/keyword.js.map new file mode 100644 index 0000000..52a9e99 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/lib/keyword.js.map @@ -0,0 +1 @@ +{"version":3,"names":["reservedWords","keyword","strict","strictBind","keywords","Set","reservedWordsStrictSet","reservedWordsStrictBindSet","isReservedWord","word","inModule","isStrictReservedWord","has","isStrictBindOnlyReservedWord","isStrictBindReservedWord","isKeyword"],"sources":["../src/keyword.ts"],"sourcesContent":["const reservedWords = {\n keyword: [\n \"break\",\n \"case\",\n \"catch\",\n \"continue\",\n \"debugger\",\n \"default\",\n \"do\",\n \"else\",\n \"finally\",\n \"for\",\n \"function\",\n \"if\",\n \"return\",\n \"switch\",\n \"throw\",\n \"try\",\n \"var\",\n \"const\",\n \"while\",\n \"with\",\n \"new\",\n \"this\",\n \"super\",\n \"class\",\n \"extends\",\n \"export\",\n \"import\",\n \"null\",\n \"true\",\n \"false\",\n \"in\",\n \"instanceof\",\n \"typeof\",\n \"void\",\n \"delete\",\n ],\n strict: [\n \"implements\",\n \"interface\",\n \"let\",\n \"package\",\n \"private\",\n \"protected\",\n \"public\",\n \"static\",\n \"yield\",\n ],\n strictBind: [\"eval\", \"arguments\"],\n};\nconst keywords = new Set(reservedWords.keyword);\nconst reservedWordsStrictSet = new Set(reservedWords.strict);\nconst reservedWordsStrictBindSet = new Set(reservedWords.strictBind);\n\n/**\n * Checks if word is a reserved word in non-strict mode\n */\nexport function isReservedWord(word: string, inModule: boolean): boolean {\n return (inModule && word === \"await\") || word === \"enum\";\n}\n\n/**\n * Checks if word is a reserved word in non-binding strict mode\n *\n * Includes non-strict reserved words\n */\nexport function isStrictReservedWord(word: string, inModule: boolean): boolean {\n return isReservedWord(word, inModule) || reservedWordsStrictSet.has(word);\n}\n\n/**\n * Checks if word is a reserved word in binding strict mode, but it is allowed as\n * a normal identifier.\n */\nexport function isStrictBindOnlyReservedWord(word: string): boolean {\n return reservedWordsStrictBindSet.has(word);\n}\n\n/**\n * Checks if word is a reserved word in binding strict mode\n *\n * Includes non-strict reserved words and non-binding strict reserved words\n */\nexport function isStrictBindReservedWord(\n word: string,\n inModule: boolean,\n): boolean {\n return (\n isStrictReservedWord(word, inModule) || isStrictBindOnlyReservedWord(word)\n );\n}\n\nexport 
function isKeyword(word: string): boolean {\n return keywords.has(word);\n}\n"],"mappings":";;;;;;;;;;AAAA,MAAMA,aAAa,GAAG;EACpBC,OAAO,EAAE,CACP,OAAO,EACP,MAAM,EACN,OAAO,EACP,UAAU,EACV,UAAU,EACV,SAAS,EACT,IAAI,EACJ,MAAM,EACN,SAAS,EACT,KAAK,EACL,UAAU,EACV,IAAI,EACJ,QAAQ,EACR,QAAQ,EACR,OAAO,EACP,KAAK,EACL,KAAK,EACL,OAAO,EACP,OAAO,EACP,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,EACP,OAAO,EACP,SAAS,EACT,QAAQ,EACR,QAAQ,EACR,MAAM,EACN,MAAM,EACN,OAAO,EACP,IAAI,EACJ,YAAY,EACZ,QAAQ,EACR,MAAM,EACN,QAAQ,CACT;EACDC,MAAM,EAAE,CACN,YAAY,EACZ,WAAW,EACX,KAAK,EACL,SAAS,EACT,SAAS,EACT,WAAW,EACX,QAAQ,EACR,QAAQ,EACR,OAAO,CACR;EACDC,UAAU,EAAE,CAAC,MAAM,EAAE,WAAW;AAClC,CAAC;AACD,MAAMC,QAAQ,GAAG,IAAIC,GAAG,CAACL,aAAa,CAACC,OAAO,CAAC;AAC/C,MAAMK,sBAAsB,GAAG,IAAID,GAAG,CAACL,aAAa,CAACE,MAAM,CAAC;AAC5D,MAAMK,0BAA0B,GAAG,IAAIF,GAAG,CAACL,aAAa,CAACG,UAAU,CAAC;AAK7D,SAASK,cAAcA,CAACC,IAAY,EAAEC,QAAiB,EAAW;EACvE,OAAQA,QAAQ,IAAID,IAAI,KAAK,OAAO,IAAKA,IAAI,KAAK,MAAM;AAC1D;AAOO,SAASE,oBAAoBA,CAACF,IAAY,EAAEC,QAAiB,EAAW;EAC7E,OAAOF,cAAc,CAACC,IAAI,EAAEC,QAAQ,CAAC,IAAIJ,sBAAsB,CAACM,GAAG,CAACH,IAAI,CAAC;AAC3E;AAMO,SAASI,4BAA4BA,CAACJ,IAAY,EAAW;EAClE,OAAOF,0BAA0B,CAACK,GAAG,CAACH,IAAI,CAAC;AAC7C;AAOO,SAASK,wBAAwBA,CACtCL,IAAY,EACZC,QAAiB,EACR;EACT,OACEC,oBAAoB,CAACF,IAAI,EAAEC,QAAQ,CAAC,IAAIG,4BAA4B,CAACJ,IAAI,CAAC;AAE9E;AAEO,SAASM,SAASA,CAACN,IAAY,EAAW;EAC/C,OAAOL,QAAQ,CAACQ,GAAG,CAACH,IAAI,CAAC;AAC3B"} \ No newline at end of file diff --git a/node_modules/@babel/helper-validator-identifier/package.json b/node_modules/@babel/helper-validator-identifier/package.json new file mode 100644 index 0000000..7579010 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/package.json @@ -0,0 +1,28 @@ +{ + "name": "@babel/helper-validator-identifier", + "version": "7.22.5", + "description": "Validate identifier/keywords name", + "repository": { + "type": "git", + "url": "https://github.com/babel/babel.git", + "directory": "packages/babel-helper-validator-identifier" + }, + "license": "MIT", + "publishConfig": { + "access": "public" + }, + "main": "./lib/index.js", + "exports": { + ".": "./lib/index.js", + "./package.json": "./package.json" + }, + "devDependencies": { + "@unicode/unicode-15.0.0": "^1.3.1", + "charcodes": "^0.2.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "author": "The Babel Team (https://babel.dev/team)", + "type": "commonjs" +} \ No newline at end of file diff --git a/node_modules/@babel/helper-validator-identifier/scripts/generate-identifier-regex.js b/node_modules/@babel/helper-validator-identifier/scripts/generate-identifier-regex.js new file mode 100644 index 0000000..aca8710 --- /dev/null +++ b/node_modules/@babel/helper-validator-identifier/scripts/generate-identifier-regex.js @@ -0,0 +1,75 @@ +"use strict"; + +// Always use the latest available version of Unicode! 
+// https://tc39.github.io/ecma262/#sec-conformance +const version = "15.0.0"; + +const start = require("@unicode/unicode-" + + version + + "/Binary_Property/ID_Start/code-points.js").filter(function (ch) { + return ch > 0x7f; +}); +let last = -1; +const cont = [0x200c, 0x200d].concat( + require("@unicode/unicode-" + + version + + "/Binary_Property/ID_Continue/code-points.js").filter(function (ch) { + return ch > 0x7f && search(start, ch, last + 1) == -1; + }) +); + +function search(arr, ch, starting) { + for (let i = starting; arr[i] <= ch && i < arr.length; last = i++) { + if (arr[i] === ch) return i; + } + return -1; +} + +function pad(str, width) { + while (str.length < width) str = "0" + str; + return str; +} + +function esc(code) { + const hex = code.toString(16); + if (hex.length <= 2) return "\\x" + pad(hex, 2); + else return "\\u" + pad(hex, 4); +} + +function generate(chars) { + const astral = []; + let re = ""; + for (let i = 0, at = 0x10000; i < chars.length; i++) { + const from = chars[i]; + let to = from; + while (i < chars.length - 1 && chars[i + 1] == to + 1) { + i++; + to++; + } + if (to <= 0xffff) { + if (from == to) re += esc(from); + else if (from + 1 == to) re += esc(from) + esc(to); + else re += esc(from) + "-" + esc(to); + } else { + astral.push(from - at, to - from); + at = to; + } + } + return { nonASCII: re, astral: astral }; +} + +const startData = generate(start); +const contData = generate(cont); + +console.log("/* prettier-ignore */"); +console.log('let nonASCIIidentifierStartChars = "' + startData.nonASCII + '";'); +console.log("/* prettier-ignore */"); +console.log('let nonASCIIidentifierChars = "' + contData.nonASCII + '";'); +console.log("/* prettier-ignore */"); +console.log( + "const astralIdentifierStartCodes = " + JSON.stringify(startData.astral) + ";" +); +console.log("/* prettier-ignore */"); +console.log( + "const astralIdentifierCodes = " + JSON.stringify(contData.astral) + ";" +); diff --git a/node_modules/@babel/highlight/LICENSE b/node_modules/@babel/highlight/LICENSE new file mode 100644 index 0000000..f31575e --- /dev/null +++ b/node_modules/@babel/highlight/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2014-present Sebastian McKenzie and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/@babel/highlight/README.md b/node_modules/@babel/highlight/README.md new file mode 100644 index 0000000..4c2ec87 --- /dev/null +++ b/node_modules/@babel/highlight/README.md @@ -0,0 +1,19 @@ +# @babel/highlight + +> Syntax highlight JavaScript strings for output in terminals. + +See our website [@babel/highlight](https://babeljs.io/docs/babel-highlight) for more information. + +## Install + +Using npm: + +```sh +npm install --save-dev @babel/highlight +``` + +or using yarn: + +```sh +yarn add @babel/highlight --dev +``` diff --git a/node_modules/@babel/highlight/lib/index.js b/node_modules/@babel/highlight/lib/index.js new file mode 100644 index 0000000..9f14f10 --- /dev/null +++ b/node_modules/@babel/highlight/lib/index.js @@ -0,0 +1,106 @@ +"use strict"; + +Object.defineProperty(exports, "__esModule", { + value: true +}); +exports.default = highlight; +exports.shouldHighlight = shouldHighlight; +var _jsTokens = require("js-tokens"); +var _helperValidatorIdentifier = require("@babel/helper-validator-identifier"); +var _chalk2 = require("chalk"); +const chalk = _chalk2; +const sometimesKeywords = new Set(["as", "async", "from", "get", "of", "set"]); +function getDefs(chalk) { + return { + keyword: chalk.cyan, + capitalized: chalk.yellow, + jsxIdentifier: chalk.yellow, + punctuator: chalk.yellow, + number: chalk.magenta, + string: chalk.green, + regex: chalk.magenta, + comment: chalk.grey, + invalid: chalk.white.bgRed.bold + }; +} +const NEWLINE = /\r\n|[\n\r\u2028\u2029]/; +const BRACKET = /^[()[\]{}]$/; +let tokenize; +{ + const JSX_TAG = /^[a-z][\w-]*$/i; + const getTokenType = function (token, offset, text) { + if (token.type === "name") { + if ((0, _helperValidatorIdentifier.isKeyword)(token.value) || (0, _helperValidatorIdentifier.isStrictReservedWord)(token.value, true) || sometimesKeywords.has(token.value)) { + return "keyword"; + } + if (JSX_TAG.test(token.value) && (text[offset - 1] === "<" || text.slice(offset - 2, offset) == "</")) { + return "jsxIdentifier"; + } + if (token.value[0] !== token.value[0].toLowerCase()) { + return "capitalized"; + } + } + if (token.type === "punctuator" && BRACKET.test(token.value)) { + return "bracket"; + } + if (token.type === "invalid" && (token.value === "@" || token.value === "#")) { + return "punctuator"; + } + return token.type; + }; + tokenize = function* (text) { + let match; + while (match = _jsTokens.default.exec(text)) { + const token = _jsTokens.matchToToken(match); + yield { + type: getTokenType(token, match.index, text), + value: token.value + }; + } + }; +} +function highlightTokens(defs, text) { + let highlighted = ""; + for (const { + type, + value + } of tokenize(text)) { + const colorize = defs[type]; + if (colorize) { + highlighted += value.split(NEWLINE).map(str => colorize(str)).join("\n"); + } else { + highlighted += value; + } + } + return highlighted; +} +function shouldHighlight(options) { + return !!chalk.supportsColor || options.forceColor; +} +let chalkWithForcedColor = undefined; +function getChalk(forceColor) { + if (forceColor) { + var _chalkWithForcedColor; + (_chalkWithForcedColor = chalkWithForcedColor) != null ?
_chalkWithForcedColor : chalkWithForcedColor = new chalk.constructor({ + enabled: true, + level: 1 + }); + return chalkWithForcedColor; + } + return chalk; +} +{ + { + exports.getChalk = options => getChalk(options.forceColor); + } +} +function highlight(code, options = {}) { + if (code !== "" && shouldHighlight(options)) { + const defs = getDefs(getChalk(options.forceColor)); + return highlightTokens(defs, code); + } else { + return code; + } +} + +//# sourceMappingURL=index.js.map diff --git a/node_modules/@babel/highlight/lib/index.js.map b/node_modules/@babel/highlight/lib/index.js.map new file mode 100644 index 0000000..b7b1666 --- /dev/null +++ b/node_modules/@babel/highlight/lib/index.js.map @@ -0,0 +1 @@ +{"version":3,"names":["_jsTokens","require","_helperValidatorIdentifier","_chalk2","chalk","_chalk","sometimesKeywords","Set","getDefs","keyword","cyan","capitalized","yellow","jsxIdentifier","punctuator","number","magenta","string","green","regex","comment","grey","invalid","white","bgRed","bold","NEWLINE","BRACKET","tokenize","JSX_TAG","getTokenType","token","offset","text","type","isKeyword","value","isStrictReservedWord","has","test","slice","toLowerCase","match","jsTokens","default","exec","matchToToken","index","highlightTokens","defs","highlighted","colorize","split","map","str","join","shouldHighlight","options","supportsColor","forceColor","chalkWithForcedColor","undefined","getChalk","_chalkWithForcedColor","constructor","enabled","level","exports","highlight","code"],"sources":["../src/index.ts"],"sourcesContent":["/// \n\nimport type { Token as JSToken, JSXToken } from \"js-tokens\";\nimport jsTokens from \"js-tokens\";\n\nimport {\n isStrictReservedWord,\n isKeyword,\n} from \"@babel/helper-validator-identifier\";\n\nimport _chalk from \"chalk\";\nconst chalk = _chalk as unknown as typeof import(\"chalk-BABEL_8_BREAKING-true\");\ntype Chalk =\n typeof import(\"chalk-BABEL_8_BREAKING-true\").Instance extends new () => infer R\n ? 
R\n : never;\n\n/**\n * Names that are always allowed as identifiers, but also appear as keywords\n * within certain syntactic productions.\n *\n * https://tc39.es/ecma262/#sec-keywords-and-reserved-words\n *\n * `target` has been omitted since it is very likely going to be a false\n * positive.\n */\nconst sometimesKeywords = new Set([\"as\", \"async\", \"from\", \"get\", \"of\", \"set\"]);\n\ntype InternalTokenType =\n | \"keyword\"\n | \"capitalized\"\n | \"jsxIdentifier\"\n | \"punctuator\"\n | \"number\"\n | \"string\"\n | \"regex\"\n | \"comment\"\n | \"invalid\";\n\ntype Token = {\n type: InternalTokenType | \"uncolored\";\n value: string;\n};\n/**\n * Chalk styles for token types.\n */\nfunction getDefs(chalk: Chalk): Record {\n return {\n keyword: chalk.cyan,\n capitalized: chalk.yellow,\n jsxIdentifier: chalk.yellow,\n punctuator: chalk.yellow,\n number: chalk.magenta,\n string: chalk.green,\n regex: chalk.magenta,\n comment: chalk.grey,\n invalid: chalk.white.bgRed.bold,\n };\n}\n\n/**\n * RegExp to test for newlines in terminal.\n */\nconst NEWLINE = /\\r\\n|[\\n\\r\\u2028\\u2029]/;\n\n/**\n * RegExp to test for the three types of brackets.\n */\nconst BRACKET = /^[()[\\]{}]$/;\n\nlet tokenize: (\n text: string,\n) => Generator<{ type: InternalTokenType | \"uncolored\"; value: string }>;\n\nif (process.env.BABEL_8_BREAKING) {\n /**\n * Get the type of token, specifying punctuator type.\n */\n const getTokenType = function (\n token: JSToken | JSXToken,\n ): InternalTokenType | \"uncolored\" {\n if (token.type === \"IdentifierName\") {\n if (\n isKeyword(token.value) ||\n isStrictReservedWord(token.value, true) ||\n sometimesKeywords.has(token.value)\n ) {\n return \"keyword\";\n }\n\n if (token.value[0] !== token.value[0].toLowerCase()) {\n return \"capitalized\";\n }\n }\n\n if (token.type === \"Punctuator\" && BRACKET.test(token.value)) {\n return \"uncolored\";\n }\n\n if (token.type === \"Invalid\" && token.value === \"@\") {\n return \"punctuator\";\n }\n\n switch (token.type) {\n case \"NumericLiteral\":\n return \"number\";\n\n case \"StringLiteral\":\n case \"JSXString\":\n case \"NoSubstitutionTemplate\":\n return \"string\";\n\n case \"RegularExpressionLiteral\":\n return \"regex\";\n\n case \"Punctuator\":\n case \"JSXPunctuator\":\n return \"punctuator\";\n\n case \"MultiLineComment\":\n case \"SingleLineComment\":\n return \"comment\";\n\n case \"Invalid\":\n case \"JSXInvalid\":\n return \"invalid\";\n\n case \"JSXIdentifier\":\n return \"jsxIdentifier\";\n\n default:\n return \"uncolored\";\n }\n };\n\n /**\n * Turn a string of JS into an array of objects.\n */\n tokenize = function* (text: string): Generator {\n for (const token of jsTokens(text, { jsx: true })) {\n switch (token.type) {\n case \"TemplateHead\":\n yield { type: \"string\", value: token.value.slice(0, -2) };\n yield { type: \"punctuator\", value: \"${\" };\n break;\n\n case \"TemplateMiddle\":\n yield { type: \"punctuator\", value: \"}\" };\n yield { type: \"string\", value: token.value.slice(1, -2) };\n yield { type: \"punctuator\", value: \"${\" };\n break;\n\n case \"TemplateTail\":\n yield { type: \"punctuator\", value: \"}\" };\n yield { type: \"string\", value: token.value.slice(1) };\n break;\n\n default:\n yield {\n type: getTokenType(token),\n value: token.value,\n };\n }\n }\n };\n} else {\n /**\n * RegExp to test for what seems to be a JSX tag name.\n */\n const JSX_TAG = /^[a-z][\\w-]*$/i;\n\n // The token here is defined in js-tokens@4. 
However we don't bother\n // typing it since the whole block will be removed in Babel 8\n const getTokenType = function (token: any, offset: number, text: string) {\n if (token.type === \"name\") {\n if (\n isKeyword(token.value) ||\n isStrictReservedWord(token.value, true) ||\n sometimesKeywords.has(token.value)\n ) {\n return \"keyword\";\n }\n\n if (\n JSX_TAG.test(token.value) &&\n (text[offset - 1] === \"<\" || text.slice(offset - 2, offset) == \", text: string) {\n let highlighted = \"\";\n\n for (const { type, value } of tokenize(text)) {\n const colorize = defs[type];\n if (colorize) {\n highlighted += value\n .split(NEWLINE)\n .map(str => colorize(str))\n .join(\"\\n\");\n } else {\n highlighted += value;\n }\n }\n\n return highlighted;\n}\n\n/**\n * Highlight `text` using the token definitions in `defs`.\n */\n\ntype Options = {\n forceColor?: boolean;\n};\n\n/**\n * Whether the code should be highlighted given the passed options.\n */\nexport function shouldHighlight(options: Options): boolean {\n return !!chalk.supportsColor || options.forceColor;\n}\n\nlet chalkWithForcedColor: Chalk = undefined;\nfunction getChalk(forceColor: boolean) {\n if (forceColor) {\n chalkWithForcedColor ??= process.env.BABEL_8_BREAKING\n ? new chalk.Instance({ level: 1 })\n : // @ts-expect-error .Instance was .constructor in chalk 2\n new chalk.constructor({ enabled: true, level: 1 });\n return chalkWithForcedColor;\n }\n return chalk;\n}\nif (!process.env.BABEL_8_BREAKING) {\n if (!USE_ESM) {\n // eslint-disable-next-line no-restricted-globals\n exports.getChalk = (options: Options) => getChalk(options.forceColor);\n }\n}\n\n/**\n * Highlight `code`.\n */\nexport default function highlight(code: string, options: Options = {}): string {\n if (code !== \"\" && shouldHighlight(options)) {\n const defs = getDefs(getChalk(options.forceColor));\n return highlightTokens(defs, code);\n } else {\n return code;\n 
}\n}\n"],"mappings":";;;;;;;AAGA,IAAAA,SAAA,GAAAC,OAAA;AAEA,IAAAC,0BAAA,GAAAD,OAAA;AAKA,IAAAE,OAAA,GAAAF,OAAA;AACA,MAAMG,KAAK,GAAGC,OAAiE;AAe/E,MAAMC,iBAAiB,GAAG,IAAIC,GAAG,CAAC,CAAC,IAAI,EAAE,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,CAAC,CAAC;AAoB9E,SAASC,OAAOA,CAACJ,KAAY,EAAoC;EAC/D,OAAO;IACLK,OAAO,EAAEL,KAAK,CAACM,IAAI;IACnBC,WAAW,EAAEP,KAAK,CAACQ,MAAM;IACzBC,aAAa,EAAET,KAAK,CAACQ,MAAM;IAC3BE,UAAU,EAAEV,KAAK,CAACQ,MAAM;IACxBG,MAAM,EAAEX,KAAK,CAACY,OAAO;IACrBC,MAAM,EAAEb,KAAK,CAACc,KAAK;IACnBC,KAAK,EAAEf,KAAK,CAACY,OAAO;IACpBI,OAAO,EAAEhB,KAAK,CAACiB,IAAI;IACnBC,OAAO,EAAElB,KAAK,CAACmB,KAAK,CAACC,KAAK,CAACC;EAC7B,CAAC;AACH;AAKA,MAAMC,OAAO,GAAG,yBAAyB;AAKzC,MAAMC,OAAO,GAAG,aAAa;AAE7B,IAAIC,QAEoE;AA6FjE;EAIL,MAAMC,OAAO,GAAG,gBAAgB;EAIhC,MAAMC,YAAY,GAAG,SAAAA,CAAUC,KAAU,EAAEC,MAAc,EAAEC,IAAY,EAAE;IACvE,IAAIF,KAAK,CAACG,IAAI,KAAK,MAAM,EAAE;MACzB,IACE,IAAAC,oCAAS,EAACJ,KAAK,CAACK,KAAK,CAAC,IACtB,IAAAC,+CAAoB,EAACN,KAAK,CAACK,KAAK,EAAE,IAAI,CAAC,IACvC9B,iBAAiB,CAACgC,GAAG,CAACP,KAAK,CAACK,KAAK,CAAC,EAClC;QACA,OAAO,SAAS;MAClB;MAEA,IACEP,OAAO,CAACU,IAAI,CAACR,KAAK,CAACK,KAAK,CAAC,KACxBH,IAAI,CAACD,MAAM,GAAG,CAAC,CAAC,KAAK,GAAG,IAAIC,IAAI,CAACO,KAAK,CAACR,MAAM,GAAG,CAAC,EAAEA,MAAM,CAAC,IAAI,IAAI,CAAC,EACpE;QACA,OAAO,eAAe;MACxB;MAEA,IAAID,KAAK,CAACK,KAAK,CAAC,CAAC,CAAC,KAAKL,KAAK,CAACK,KAAK,CAAC,CAAC,CAAC,CAACK,WAAW,CAAC,CAAC,EAAE;QACnD,OAAO,aAAa;MACtB;IACF;IAEA,IAAIV,KAAK,CAACG,IAAI,KAAK,YAAY,IAAIP,OAAO,CAACY,IAAI,CAACR,KAAK,CAACK,KAAK,CAAC,EAAE;MAC5D,OAAO,SAAS;IAClB;IAEA,IACEL,KAAK,CAACG,IAAI,KAAK,SAAS,KACvBH,KAAK,CAACK,KAAK,KAAK,GAAG,IAAIL,KAAK,CAACK,KAAK,KAAK,GAAG,CAAC,EAC5C;MACA,OAAO,YAAY;IACrB;IAEA,OAAOL,KAAK,CAACG,IAAI;EACnB,CAAC;EAEDN,QAAQ,GAAG,UAAAA,CAAWK,IAAY,EAAE;IAClC,IAAIS,KAAK;IACT,OAAQA,KAAK,GAAIC,SAAQ,CAASC,OAAO,CAACC,IAAI,CAACZ,IAAI,CAAC,EAAG;MACrD,MAAMF,KAAK,GAAIY,SAAQ,CAASG,YAAY,CAACJ,KAAK,CAAC;MAEnD,MAAM;QACJR,IAAI,EAAEJ,YAAY,CAACC,KAAK,EAAEW,KAAK,CAACK,KAAK,EAAEd,IAAI,CAAC;QAC5CG,KAAK,EAAEL,KAAK,CAACK;MACf,CAAC;IACH;EACF,CAAC;AACH;AAKA,SAASY,eAAeA,CAACC,IAA2B,EAAEhB,IAAY,EAAE;EAClE,IAAIiB,WAAW,GAAG,EAAE;EAEpB,KAAK,MAAM;IAAEhB,IAAI;IAAEE;EAAM,CAAC,IAAIR,QAAQ,CAACK,IAAI,CAAC,EAAE;IAC5C,MAAMkB,QAAQ,GAAGF,IAAI,CAACf,IAAI,CAAC;IAC3B,IAAIiB,QAAQ,EAAE;MACZD,WAAW,IAAId,KAAK,CACjBgB,KAAK,CAAC1B,OAAO,CAAC,CACd2B,GAAG,CAACC,GAAG,IAAIH,QAAQ,CAACG,GAAG,CAAC,CAAC,CACzBC,IAAI,CAAC,IAAI,CAAC;IACf,CAAC,MAAM;MACLL,WAAW,IAAId,KAAK;IACtB;EACF;EAEA,OAAOc,WAAW;AACpB;AAaO,SAASM,eAAeA,CAACC,OAAgB,EAAW;EACzD,OAAO,CAAC,CAACrD,KAAK,CAACsD,aAAa,IAAID,OAAO,CAACE,UAAU;AACpD;AAEA,IAAIC,oBAA2B,GAAGC,SAAS;AAC3C,SAASC,QAAQA,CAACH,UAAmB,EAAE;EACrC,IAAIA,UAAU,EAAE;IAAA,IAAAI,qBAAA;IACd,CAAAA,qBAAA,GAAAH,oBAAoB,YAAAG,qBAAA,GAApBH,oBAAoB,GAGhB,IAAIxD,KAAK,CAAC4D,WAAW,CAAC;MAAEC,OAAO,EAAE,IAAI;MAAEC,KAAK,EAAE;IAAE,CAAC,CAAC;IACtD,OAAON,oBAAoB;EAC7B;EACA,OAAOxD,KAAK;AACd;AACmC;EACnB;IAEZ+D,OAAO,CAACL,QAAQ,GAAIL,OAAgB,IAAKK,QAAQ,CAACL,OAAO,CAACE,UAAU,CAAC;EACvE;AACF;AAKe,SAASS,SAASA,CAACC,IAAY,EAAEZ,OAAgB,GAAG,CAAC,CAAC,EAAU;EAC7E,IAAIY,IAAI,KAAK,EAAE,IAAIb,eAAe,CAACC,OAAO,CAAC,EAAE;IAC3C,MAAMR,IAAI,GAAGzC,OAAO,CAACsD,QAAQ,CAACL,OAAO,CAACE,UAAU,CAAC,CAAC;IAClD,OAAOX,eAAe,CAACC,IAAI,EAAEoB,IAAI,CAAC;EACpC,CAAC,MAAM;IACL,OAAOA,IAAI;EACb;AACF"} \ No newline at end of file diff --git a/node_modules/@babel/highlight/package.json b/node_modules/@babel/highlight/package.json new file mode 100644 index 0000000..33f69fd --- /dev/null +++ b/node_modules/@babel/highlight/package.json @@ -0,0 +1,29 @@ +{ + "name": "@babel/highlight", + "version": "7.22.10", + "description": "Syntax highlight JavaScript strings for output in 
terminals.", + "author": "The Babel Team (https://babel.dev/team)", + "homepage": "https://babel.dev/docs/en/next/babel-highlight", + "license": "MIT", + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "https://github.com/babel/babel.git", + "directory": "packages/babel-highlight" + }, + "main": "./lib/index.js", + "dependencies": { + "@babel/helper-validator-identifier": "^7.22.5", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0" + }, + "devDependencies": { + "strip-ansi": "^4.0.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "type": "commonjs" +} \ No newline at end of file diff --git a/node_modules/@gar/promisify/LICENSE.md b/node_modules/@gar/promisify/LICENSE.md new file mode 100644 index 0000000..64f7732 --- /dev/null +++ b/node_modules/@gar/promisify/LICENSE.md @@ -0,0 +1,10 @@ +The MIT License (MIT) + +Copyright © 2020-2022 Michael Garvin + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/@gar/promisify/README.md b/node_modules/@gar/promisify/README.md new file mode 100644 index 0000000..465c546 --- /dev/null +++ b/node_modules/@gar/promisify/README.md @@ -0,0 +1,65 @@ +# @gar/promisify + +### Promisify an entire object or class instance + +This module leverages es6 Proxy and Reflect to promisify every function in an +object or class instance. + +It assumes the callback that the function is expecting is the last +parameter, and that it is an error-first callback with only one value, +i.e. `(err, value) => ...`. This mirrors node's `util.promisify` method. + +In order that you can use it as a one-stop-shop for all your promisify +needs, you can also pass it a function. That function will be +promisified as normal using node's built-in `util.promisify` method. + +[node's custom promisified +functions](https://nodejs.org/api/util.html#util_custom_promisified_functions) +will also be mirrored, further allowing this to be a drop-in replacement +for the built-in `util.promisify`. 
+ +### Examples + +Promisify an entire object + +```javascript + +const promisify = require('@gar/promisify') + +class Foo { + constructor (attr) { + this.attr = attr + } + + double (input, cb) { + cb(null, input * 2) + } +} +const foo = new Foo('baz') +const promisified = promisify(foo) + +console.log(promisified.attr) +console.log(await promisified.double(1024)) +``` + +Promisify a function + +```javascript + +const promisify = require('@gar/promisify') + +function foo (a, cb) { + if (a !== 'bad') { + return cb(null, 'ok') + } + return cb('not ok') +} + +const promisified = promisify(foo) + +// This will resolve to 'ok' +promisified('good') + +// this will reject +promisified('bad') +``` diff --git a/node_modules/@gar/promisify/index.js b/node_modules/@gar/promisify/index.js new file mode 100644 index 0000000..d0be95f --- /dev/null +++ b/node_modules/@gar/promisify/index.js @@ -0,0 +1,36 @@ +'use strict' + +const { promisify } = require('util') + +const handler = { + get: function (target, prop, receiver) { + if (typeof target[prop] !== 'function') { + return target[prop] + } + if (target[prop][promisify.custom]) { + return function () { + return Reflect.get(target, prop, receiver)[promisify.custom].apply(target, arguments) + } + } + return function () { + return new Promise((resolve, reject) => { + Reflect.get(target, prop, receiver).apply(target, [...arguments, function (err, result) { + if (err) { + return reject(err) + } + resolve(result) + }]) + }) + } + } +} + +module.exports = function (thingToPromisify) { + if (typeof thingToPromisify === 'function') { + return promisify(thingToPromisify) + } + if (typeof thingToPromisify === 'object') { + return new Proxy(thingToPromisify, handler) + } + throw new TypeError('Can only promisify functions or objects') +} diff --git a/node_modules/@gar/promisify/package.json b/node_modules/@gar/promisify/package.json new file mode 100644 index 0000000..d0ce69b --- /dev/null +++ b/node_modules/@gar/promisify/package.json @@ -0,0 +1,32 @@ +{ + "name": "@gar/promisify", + "version": "1.1.3", + "description": "Promisify an entire class or object", + "main": "index.js", + "repository": { + "type": "git", + "url": "https://github.com/wraithgar/gar-promisify.git" + }, + "scripts": { + "lint": "standard", + "lint:fix": "standard --fix", + "test": "lab -a @hapi/code -t 100", + "posttest": "npm run lint" + }, + "files": [ + "index.js" + ], + "keywords": [ + "promisify", + "all", + "class", + "object" + ], + "author": "Gar ", + "license": "MIT", + "devDependencies": { + "@hapi/code": "^8.0.1", + "@hapi/lab": "^24.1.0", + "standard": "^16.0.3" + } +} diff --git a/node_modules/@npmcli/fs/LICENSE.md b/node_modules/@npmcli/fs/LICENSE.md new file mode 100644 index 0000000..5fc208f --- /dev/null +++ b/node_modules/@npmcli/fs/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/fs/README.md b/node_modules/@npmcli/fs/README.md new file mode 100644 index 0000000..0642968 --- /dev/null +++ b/node_modules/@npmcli/fs/README.md @@ -0,0 +1,62 @@ +# @npmcli/fs + +polyfills, and extensions, of the core `fs` module. + +## Features + +- all exposed functions return promises +- `fs.rm` polyfill for node versions < 14.14.0 +- `fs.mkdir` polyfill adding support for the `recursive` and `force` options in node versions < 10.12.0 +- `fs.copyFile` extended to accept an `owner` option +- `fs.mkdir` extended to accept an `owner` option +- `fs.mkdtemp` extended to accept an `owner` option +- `fs.writeFile` extended to accept an `owner` option +- `fs.withTempDir` added +- `fs.withOwner` added +- `fs.withOwnerSync` added +- `fs.cp` polyfill for node < 16.7.0 + +## The `owner` option + +The `copyFile`, `mkdir`, `mkdtemp`, `writeFile`, and `withTempDir` functions +all accept a new `owner` property in their options. It can be used in two ways: + +- `{ owner: { uid: 100, gid: 100 } }` - set the `uid` and `gid` explicitly +- `{ owner: 100 }` - use one value, will set both `uid` and `gid` the same + +The special string `'inherit'` may be passed instead of a number, which will +cause this module to automatically determine the correct `uid` and/or `gid` +from the nearest existing parent directory of the target. + +## `fs.withTempDir(root, fn, options) -> Promise` + +### Parameters + +- `root`: the directory in which to create the temporary directory +- `fn`: a function that will be called with the path to the temporary directory +- `options` + - `tmpPrefix`: a prefix to be used in the generated directory name + +### Usage + +The `withTempDir` function creates a temporary directory, runs the provided +function (`fn`), then removes the temporary directory and resolves or rejects +based on the result of `fn`. + +```js +const fs = require('@npmcli/fs') +const os = require('os') + +// this function will be called with the full path to the temporary directory +// it is called with `await` behind the scenes, so can be async if desired. +const myFunction = async (tempPath) => { + return 'done!' +} + +const main = async () => { + const result = await fs.withTempDir(os.tmpdir(), myFunction) + // result === 'done!' +} + +main() +``` diff --git a/node_modules/@npmcli/fs/lib/common/get-options.js b/node_modules/@npmcli/fs/lib/common/get-options.js new file mode 100644 index 0000000..cb5982f --- /dev/null +++ b/node_modules/@npmcli/fs/lib/common/get-options.js @@ -0,0 +1,20 @@ +// given an input that may or may not be an object, return an object that has +// a copy of every defined property listed in 'copy'. 
if the input is not an +// object, assign it to the property named by 'wrap' +const getOptions = (input, { copy, wrap }) => { + const result = {} + + if (input && typeof input === 'object') { + for (const prop of copy) { + if (input[prop] !== undefined) { + result[prop] = input[prop] + } + } + } else { + result[wrap] = input + } + + return result +} + +module.exports = getOptions diff --git a/node_modules/@npmcli/fs/lib/common/node.js b/node_modules/@npmcli/fs/lib/common/node.js new file mode 100644 index 0000000..4d13bc0 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/common/node.js @@ -0,0 +1,9 @@ +const semver = require('semver') + +const satisfies = (range) => { + return semver.satisfies(process.version, range, { includePrerelease: true }) +} + +module.exports = { + satisfies, +} diff --git a/node_modules/@npmcli/fs/lib/common/owner-sync.js b/node_modules/@npmcli/fs/lib/common/owner-sync.js new file mode 100644 index 0000000..3704aa6 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/common/owner-sync.js @@ -0,0 +1,96 @@ +const { dirname, resolve } = require('path') +const url = require('url') + +const fs = require('../fs.js') + +// given a path, find the owner of the nearest parent +const find = (path) => { + // if we have no getuid, permissions are irrelevant on this platform + if (!process.getuid) { + return {} + } + + // fs methods accept URL objects with a scheme of file: so we need to unwrap + // those into an actual path string before we can resolve it + const resolved = path != null && path.href && path.origin + ? resolve(url.fileURLToPath(path)) + : resolve(path) + + let stat + + try { + stat = fs.lstatSync(resolved) + } finally { + // if we got a stat, return its contents + if (stat) { + return { uid: stat.uid, gid: stat.gid } + } + + // try the parent directory + if (resolved !== dirname(resolved)) { + return find(dirname(resolved)) + } + + // no more parents, never got a stat, just return an empty object + return {} + } +} + +// given a path, uid, and gid update the ownership of the path if necessary +const update = (path, uid, gid) => { + // nothing to update, just exit + if (uid === undefined && gid === undefined) { + return + } + + try { + // see if the permissions are already the same, if they are we don't + // need to do anything, so return early + const stat = fs.statSync(path) + if (uid === stat.uid && gid === stat.gid) { + return + } + } catch { + // ignore errors + } + + try { + fs.chownSync(path, uid, gid) + } catch { + // ignore errors + } +} + +// accepts a `path` and the `owner` property of an options object and normalizes +// it into an object with numerical `uid` and `gid` +const validate = (path, input) => { + let uid + let gid + + if (typeof input === 'string' || typeof input === 'number') { + uid = input + gid = input + } else if (input && typeof input === 'object') { + uid = input.uid + gid = input.gid + } + + if (uid === 'inherit' || gid === 'inherit') { + const owner = find(path) + if (uid === 'inherit') { + uid = owner.uid + } + + if (gid === 'inherit') { + gid = owner.gid + } + } + + return { uid, gid } +} + +module.exports = { + find, + update, + validate, +} diff --git a/node_modules/@npmcli/fs/lib/common/owner.js b/node_modules/@npmcli/fs/lib/common/owner.js new file mode 100644 index 0000000..9f02d41 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/common/owner.js @@ -0,0 +1,96 @@ +const { dirname, resolve } = require('path') +const url = require('url') + +const fs = require('../fs.js') + +// given a path, find the owner of the nearest parent +const 
find = async (path) => { + // if we have no getuid, permissions are irrelevant on this platform + if (!process.getuid) { + return {} + } + + // fs methods accept URL objects with a scheme of file: so we need to unwrap + // those into an actual path string before we can resolve it + const resolved = path != null && path.href && path.origin + ? resolve(url.fileURLToPath(path)) + : resolve(path) + + let stat + + try { + stat = await fs.lstat(resolved) + } finally { + // if we got a stat, return its contents + if (stat) { + return { uid: stat.uid, gid: stat.gid } + } + + // try the parent directory + if (resolved !== dirname(resolved)) { + return find(dirname(resolved)) + } + + // no more parents, never got a stat, just return an empty object + return {} + } +} + +// given a path, uid, and gid update the ownership of the path if necessary +const update = async (path, uid, gid) => { + // nothing to update, just exit + if (uid === undefined && gid === undefined) { + return + } + + try { + // see if the permissions are already the same, if they are we don't + // need to do anything, so return early + const stat = await fs.stat(path) + if (uid === stat.uid && gid === stat.gid) { + return + } + } catch { + // ignore errors + } + + try { + await fs.chown(path, uid, gid) + } catch { + // ignore errors + } +} + +// accepts a `path` and the `owner` property of an options object and normalizes +// it into an object with numerical `uid` and `gid` +const validate = async (path, input) => { + let uid + let gid + + if (typeof input === 'string' || typeof input === 'number') { + uid = input + gid = input + } else if (input && typeof input === 'object') { + uid = input.uid + gid = input.gid + } + + if (uid === 'inherit' || gid === 'inherit') { + const owner = await find(path) + if (uid === 'inherit') { + uid = owner.uid + } + + if (gid === 'inherit') { + gid = owner.gid + } + } + + return { uid, gid } +} + +module.exports = { + find, + update, + validate, +} diff --git a/node_modules/@npmcli/fs/lib/copy-file.js b/node_modules/@npmcli/fs/lib/copy-file.js new file mode 100644 index 0000000..8888266 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/copy-file.js @@ -0,0 +1,16 @@ +const fs = require('./fs.js') +const getOptions = require('./common/get-options.js') +const withOwner = require('./with-owner.js') + +const copyFile = async (src, dest, opts) => { + const options = getOptions(opts, { + copy: ['mode'], + wrap: 'mode', + }) + + // the node core method as of 16.5.0 does not support the mode being in an + // object, so we have to pass the mode value directly + return withOwner(dest, () => fs.copyFile(src, dest, options.mode), opts) +} + +module.exports = copyFile diff --git a/node_modules/@npmcli/fs/lib/cp/LICENSE b/node_modules/@npmcli/fs/lib/cp/LICENSE new file mode 100644 index 0000000..93546df --- /dev/null +++ b/node_modules/@npmcli/fs/lib/cp/LICENSE @@ -0,0 +1,15 @@ +(The MIT License) + +Copyright (c) 2011-2017 JP Richardson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files +(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, + merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/@npmcli/fs/lib/cp/index.js b/node_modules/@npmcli/fs/lib/cp/index.js new file mode 100644 index 0000000..5da4739 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/cp/index.js @@ -0,0 +1,22 @@ +const fs = require('../fs.js') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const polyfill = require('./polyfill.js') + +// node 16.7.0 added fs.cp +const useNative = node.satisfies('>=16.7.0') + +const cp = async (src, dest, opts) => { + const options = getOptions(opts, { + copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], + }) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? fs.cp(src, dest, options) + : polyfill(src, dest, options) +} + +module.exports = cp diff --git a/node_modules/@npmcli/fs/lib/cp/polyfill.js b/node_modules/@npmcli/fs/lib/cp/polyfill.js new file mode 100644 index 0000000..f83ccbf --- /dev/null +++ b/node_modules/@npmcli/fs/lib/cp/polyfill.js @@ -0,0 +1,428 @@ +// this file is a modified version of the code in node 17.2.0 +// which is, in turn, a modified version of the fs-extra module on npm +// node core changes: +// - Use of the assert module has been replaced with core's error system. +// - All code related to the glob dependency has been removed. +// - Bring your own custom fs module is not currently supported. +// - Some basic code cleanup. +// changes here: +// - remove all callback related code +// - drop sync support +// - change assertions back to non-internal methods (see options.js) +// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows +'use strict' + +const { + ERR_FS_CP_DIR_TO_NON_DIR, + ERR_FS_CP_EEXIST, + ERR_FS_CP_EINVAL, + ERR_FS_CP_FIFO_PIPE, + ERR_FS_CP_NON_DIR_TO_DIR, + ERR_FS_CP_SOCKET, + ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, + ERR_FS_CP_UNKNOWN, + ERR_FS_EISDIR, + ERR_INVALID_ARG_TYPE, +} = require('../errors.js') +const { + constants: { + errno: { + EEXIST, + EISDIR, + EINVAL, + ENOTDIR, + }, + }, +} = require('os') +const { + chmod, + copyFile, + lstat, + mkdir, + readdir, + readlink, + stat, + symlink, + unlink, + utimes, +} = require('../fs.js') +const { + dirname, + isAbsolute, + join, + parse, + resolve, + sep, + toNamespacedPath, +} = require('path') +const { fileURLToPath } = require('url') + +const defaultOptions = { + dereference: false, + errorOnExist: false, + filter: undefined, + force: true, + preserveTimestamps: false, + recursive: false, +} + +async function cp (src, dest, opts) { + if (opts != null && typeof opts !== 'object') { + throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) + } + return cpFn( + toNamespacedPath(getValidatedPath(src)), + toNamespacedPath(getValidatedPath(dest)), + { ...defaultOptions, ...opts }) +} + +function getValidatedPath (fileURLOrPath) { + const path = fileURLOrPath != null && fileURLOrPath.href + && fileURLOrPath.origin + ? 
fileURLToPath(fileURLOrPath) + : fileURLOrPath + return path +} + +async function cpFn (src, dest, opts) { + // Warn about using preserveTimestamps on 32-bit node + // istanbul ignore next + if (opts.preserveTimestamps && process.arch === 'ia32') { + const warning = 'Using the preserveTimestamps option in 32-bit ' + + 'node is not recommended' + process.emitWarning(warning, 'TimestampPrecisionWarning') + } + const stats = await checkPaths(src, dest, opts) + const { srcStat, destStat } = stats + await checkParentPaths(src, srcStat, dest) + if (opts.filter) { + return handleFilter(checkParentDir, destStat, src, dest, opts) + } + return checkParentDir(destStat, src, dest, opts) +} + +async function checkPaths (src, dest, opts) { + const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) + if (destStat) { + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: 'src and dest cannot be the same', + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + if (srcStat.isDirectory() && !destStat.isDirectory()) { + throw new ERR_FS_CP_DIR_TO_NON_DIR({ + message: `cannot overwrite directory ${src} ` + + `with non-directory ${dest}`, + path: dest, + syscall: 'cp', + errno: EISDIR, + }) + } + if (!srcStat.isDirectory() && destStat.isDirectory()) { + throw new ERR_FS_CP_NON_DIR_TO_DIR({ + message: `cannot overwrite non-directory ${src} ` + + `with directory ${dest}`, + path: dest, + syscall: 'cp', + errno: ENOTDIR, + }) + } + } + + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return { srcStat, destStat } +} + +function areIdentical (srcStat, destStat) { + return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && + destStat.dev === srcStat.dev +} + +function getStats (src, dest, opts) { + const statFunc = opts.dereference ? + (file) => stat(file, { bigint: true }) : + (file) => lstat(file, { bigint: true }) + return Promise.all([ + statFunc(src), + statFunc(dest).catch((err) => { + // istanbul ignore next: unsure how to cover. + if (err.code === 'ENOENT') { + return null + } + // istanbul ignore next: unsure how to cover. + throw err + }), + ]) +} + +async function checkParentDir (destStat, src, dest, opts) { + const destParent = dirname(dest) + const dirExists = await pathExists(destParent) + if (dirExists) { + return getStatsForCopy(destStat, src, dest, opts) + } + await mkdir(destParent, { recursive: true }) + return getStatsForCopy(destStat, src, dest, opts) +} + +function pathExists (dest) { + return stat(dest).then( + () => true, + // istanbul ignore next: not sure when this would occur + (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) +} + +// Recursively check if dest parent is a subdirectory of src. +// It works for all file types including symlinks since it +// checks the src and dest inodes. It starts from the deepest +// parent and stops once it reaches the src parent or the root path. 
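+// e.g. (hypothetical paths, for illustration only): for cp('/a', '/a/b/c')
+// this stats '/a/b' first, finds it differs from srcStat, recurses to '/a',
+// sees an identical ino/dev pair, and throws EINVAL
+// ('cannot copy ... to a subdirectory of self')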
+async function checkParentPaths (src, srcStat, dest) { + const srcParent = resolve(dirname(src)) + const destParent = resolve(dirname(dest)) + if (destParent === srcParent || destParent === parse(destParent).root) { + return + } + let destStat + try { + destStat = await stat(destParent, { bigint: true }) + } catch (err) { + // istanbul ignore else: not sure when this would occur + if (err.code === 'ENOENT') { + return + } + // istanbul ignore next: not sure when this would occur + throw err + } + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return checkParentPaths(src, srcStat, destParent) +} + +const normalizePathToArray = (path) => + resolve(path).split(sep).filter(Boolean) + +// Return true if dest is a subdir of src, otherwise false. +// It only checks the path strings. +function isSrcSubdir (src, dest) { + const srcArr = normalizePathToArray(src) + const destArr = normalizePathToArray(dest) + return srcArr.every((cur, i) => destArr[i] === cur) +} + +async function handleFilter (onInclude, destStat, src, dest, opts, cb) { + const include = await opts.filter(src, dest) + if (include) { + return onInclude(destStat, src, dest, opts, cb) + } +} + +function startCopy (destStat, src, dest, opts) { + if (opts.filter) { + return handleFilter(getStatsForCopy, destStat, src, dest, opts) + } + return getStatsForCopy(destStat, src, dest, opts) +} + +async function getStatsForCopy (destStat, src, dest, opts) { + const statFn = opts.dereference ? stat : lstat + const srcStat = await statFn(src) + // istanbul ignore else: can't portably test FIFO + if (srcStat.isDirectory() && opts.recursive) { + return onDir(srcStat, destStat, src, dest, opts) + } else if (srcStat.isDirectory()) { + throw new ERR_FS_EISDIR({ + message: `${src} is a directory (not copied)`, + path: src, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) { + return onFile(srcStat, destStat, src, dest, opts) + } else if (srcStat.isSymbolicLink()) { + return onLink(destStat, src, dest) + } else if (srcStat.isSocket()) { + throw new ERR_FS_CP_SOCKET({ + message: `cannot copy a socket file: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFIFO()) { + throw new ERR_FS_CP_FIFO_PIPE({ + message: `cannot copy a FIFO pipe: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // istanbul ignore next: should be unreachable + throw new ERR_FS_CP_UNKNOWN({ + message: `cannot copy an unknown file type: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) +} + +function onFile (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return _copyFile(srcStat, src, dest, opts) + } + return mayCopyFile(srcStat, src, dest, opts) +} + +async function mayCopyFile (srcStat, src, dest, opts) { + if (opts.force) { + await unlink(dest) + return _copyFile(srcStat, src, dest, opts) + } else if (opts.errorOnExist) { + throw new ERR_FS_CP_EEXIST({ + message: `${dest} already exists`, + path: dest, + syscall: 'cp', + errno: EEXIST, + }) + } +} + +async function _copyFile (srcStat, src, dest, opts) { + await copyFile(src, dest) + if (opts.preserveTimestamps) { + return handleTimestampsAndMode(srcStat.mode, src, dest) + } + return setDestMode(dest, srcStat.mode) +} + +async function handleTimestampsAndMode (srcMode, src, dest) { + // Make sure the file is writable before 
setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) { + await makeFileWritable(dest, srcMode) + return setDestTimestampsAndMode(srcMode, src, dest) + } + return setDestTimestampsAndMode(srcMode, src, dest) +} + +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 +} + +function makeFileWritable (dest, srcMode) { + return setDestMode(dest, srcMode | 0o200) +} + +async function setDestTimestampsAndMode (srcMode, src, dest) { + await setDestTimestamps(src, dest) + return setDestMode(dest, srcMode) +} + +function setDestMode (dest, srcMode) { + return chmod(dest, srcMode) +} + +async function setDestTimestamps (src, dest) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See https://nodejs.org/api/fs.html#fs_stat_time_values) + const updatedSrcStat = await stat(src) + return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) +} + +function onDir (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return mkDirAndCopy(srcStat.mode, src, dest, opts) + } + return copyDir(src, dest, opts) +} + +async function mkDirAndCopy (srcMode, src, dest, opts) { + await mkdir(dest) + await copyDir(src, dest, opts) + return setDestMode(dest, srcMode) +} + +async function copyDir (src, dest, opts) { + const dir = await readdir(src) + for (let i = 0; i < dir.length; i++) { + const item = dir[i] + const srcItem = join(src, item) + const destItem = join(dest, item) + const { destStat } = await checkPaths(srcItem, destItem, opts) + await startCopy(destStat, srcItem, destItem, opts) + } +} + +async function onLink (destStat, src, dest) { + let resolvedSrc = await readlink(src) + if (!isAbsolute(resolvedSrc)) { + resolvedSrc = resolve(dirname(src), resolvedSrc) + } + if (!destStat) { + return symlink(resolvedSrc, dest) + } + let resolvedDest + try { + resolvedDest = await readlink(dest) + } catch (err) { + // Dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + // istanbul ignore next: can only test on windows + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { + return symlink(resolvedSrc, dest) + } + // istanbul ignore next: should not be possible + throw err + } + if (!isAbsolute(resolvedDest)) { + resolvedDest = resolve(dirname(dest), resolvedDest) + } + if (isSrcSubdir(resolvedSrc, resolvedDest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + + `${resolvedDest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // Do not copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. 
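+  // e.g. (hypothetical targets, for illustration only): a symlink resolving
+  // to directory /tmp/a must not overwrite a symlink resolving to /tmp; the
+  // check below rejects that case with ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY
+  // ('cannot overwrite /tmp with /tmp/a')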
+ const srcStat = await stat(src) + if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { + throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ + message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return copyLink(resolvedSrc, dest) +} + +async function copyLink (resolvedSrc, dest) { + await unlink(dest) + return symlink(resolvedSrc, dest) +} + +module.exports = cp diff --git a/node_modules/@npmcli/fs/lib/errors.js b/node_modules/@npmcli/fs/lib/errors.js new file mode 100644 index 0000000..1cd1e05 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/errors.js @@ -0,0 +1,129 @@ +'use strict' +const { inspect } = require('util') + +// adapted from node's internal/errors +// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js + +// close copy of node's internal SystemError class. +class SystemError { + constructor (code, prefix, context) { + // XXX context.code is undefined in all constructors used in cp/polyfill + // that may be a bug copied from node, maybe the constructor should use + // `code` not `errno`? nodejs/node#41104 + let message = `${prefix}: ${context.syscall} returned ` + + `${context.code} (${context.message})` + + if (context.path !== undefined) { + message += ` ${context.path}` + } + if (context.dest !== undefined) { + message += ` => ${context.dest}` + } + + this.code = code + Object.defineProperties(this, { + name: { + value: 'SystemError', + enumerable: false, + writable: true, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + info: { + value: context, + enumerable: true, + configurable: true, + writable: false, + }, + errno: { + get () { + return context.errno + }, + set (value) { + context.errno = value + }, + enumerable: true, + configurable: true, + }, + syscall: { + get () { + return context.syscall + }, + set (value) { + context.syscall = value + }, + enumerable: true, + configurable: true, + }, + }) + + if (context.path !== undefined) { + Object.defineProperty(this, 'path', { + get () { + return context.path + }, + set (value) { + context.path = value + }, + enumerable: true, + configurable: true, + }) + } + + if (context.dest !== undefined) { + Object.defineProperty(this, 'dest', { + get () { + return context.dest + }, + set (value) { + context.dest = value + }, + enumerable: true, + configurable: true, + }) + } + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } + + [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { + return inspect(this, { + ...ctx, + getters: true, + customInspect: false, + }) + } +} + +function E (code, message) { + module.exports[code] = class NodeError extends SystemError { + constructor (ctx) { + super(code, message, ctx) + } + } +} + +E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') +E('ERR_FS_CP_EEXIST', 'Target already exists') +E('ERR_FS_CP_EINVAL', 'Invalid src or dest') +E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') +E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') +E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') +E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') +E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') +E('ERR_FS_EISDIR', 'Path is a directory') + +module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { + constructor (name, expected, actual) { + super() + this.code = 
'ERR_INVALID_ARG_TYPE' + this.message = `The ${name} argument must be ${expected}. Received ${typeof actual}` + } +} diff --git a/node_modules/@npmcli/fs/lib/fs.js b/node_modules/@npmcli/fs/lib/fs.js new file mode 100644 index 0000000..457da10 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/fs.js @@ -0,0 +1,14 @@ +const fs = require('fs') +const promisify = require('@gar/promisify') + +const isLower = (s) => s === s.toLowerCase() && s !== s.toUpperCase() + +const fsSync = Object.fromEntries(Object.entries(fs).filter(([k, v]) => + typeof v === 'function' && (k.endsWith('Sync') || !isLower(k[0])) +)) + +// this module returns the core fs async fns wrapped in a proxy that promisifies +// method calls within the getter. we keep it in a separate module so that the +// overridden methods have a consistent way to get to promisified fs methods +// without creating a circular dependency. the ctors and sync methods are kept untouched +module.exports = { ...promisify(fs), ...fsSync } diff --git a/node_modules/@npmcli/fs/lib/index.js b/node_modules/@npmcli/fs/lib/index.js new file mode 100644 index 0000000..3a98648 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/index.js @@ -0,0 +1,12 @@ +module.exports = { + ...require('./fs.js'), + copyFile: require('./copy-file.js'), + cp: require('./cp/index.js'), + mkdir: require('./mkdir.js'), + mkdtemp: require('./mkdtemp.js'), + rm: require('./rm/index.js'), + withTempDir: require('./with-temp-dir.js'), + withOwner: require('./with-owner.js'), + withOwnerSync: require('./with-owner-sync.js'), + writeFile: require('./write-file.js'), +} diff --git a/node_modules/@npmcli/fs/lib/mkdir.js b/node_modules/@npmcli/fs/lib/mkdir.js new file mode 100644 index 0000000..098d8d0 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/mkdir.js @@ -0,0 +1,19 @@ +const fs = require('./fs.js') +const getOptions = require('./common/get-options.js') +const withOwner = require('./with-owner.js') + +// extends mkdir with the ability to specify an owner of the new dir +const mkdir = async (path, opts) => { + const options = getOptions(opts, { + copy: ['mode', 'recursive'], + wrap: 'mode', + }) + + return withOwner( + path, + () => fs.mkdir(path, options), + opts + ) +} + +module.exports = mkdir diff --git a/node_modules/@npmcli/fs/lib/mkdtemp.js b/node_modules/@npmcli/fs/lib/mkdtemp.js new file mode 100644 index 0000000..60b12a7 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/mkdtemp.js @@ -0,0 +1,23 @@ +const { dirname, sep } = require('path') + +const fs = require('./fs.js') +const getOptions = require('./common/get-options.js') +const withOwner = require('./with-owner.js') + +const mkdtemp = async (prefix, opts) => { + const options = getOptions(opts, { + copy: ['encoding'], + wrap: 'encoding', + }) + + // mkdtemp relies on the trailing path separator to indicate if it should + // create a directory inside of the prefix. if that's the case then the root + // we infer ownership from is the prefix itself, otherwise it's the dirname + // /tmp -> /tmpABCDEF, infers from / + // /tmp/ -> /tmp/ABCDEF, infers from /tmp + const root = prefix.endsWith(sep) ? 
prefix : dirname(prefix) + + return withOwner(root, () => fs.mkdtemp(prefix, options), opts) +} + +module.exports = mkdtemp diff --git a/node_modules/@npmcli/fs/lib/rm/index.js b/node_modules/@npmcli/fs/lib/rm/index.js new file mode 100644 index 0000000..cb81fbd --- /dev/null +++ b/node_modules/@npmcli/fs/lib/rm/index.js @@ -0,0 +1,22 @@ +const fs = require('../fs.js') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const polyfill = require('./polyfill.js') + +// node 14.14.0 added fs.rm, which allows both the force and recursive options +const useNative = node.satisfies('>=14.14.0') + +const rm = async (path, opts) => { + const options = getOptions(opts, { + copy: ['retryDelay', 'maxRetries', 'recursive', 'force'], + }) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? fs.rm(path, options) + : polyfill(path, options) +} + +module.exports = rm diff --git a/node_modules/@npmcli/fs/lib/rm/polyfill.js b/node_modules/@npmcli/fs/lib/rm/polyfill.js new file mode 100644 index 0000000..a25c174 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/rm/polyfill.js @@ -0,0 +1,239 @@ +// this file is a modified version of the code in node core >=14.14.0 +// which is, in turn, a modified version of the rimraf module on npm +// node core changes: +// - Use of the assert module has been replaced with core's error system. +// - All code related to the glob dependency has been removed. +// - Bring your own custom fs module is not currently supported. +// - Some basic code cleanup. +// changes here: +// - remove all callback related code +// - drop sync support +// - change assertions back to non-internal methods (see options.js) +// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows +const errnos = require('os').constants.errno +const { join } = require('path') +const fs = require('../fs.js') + +// error codes that mean we need to remove contents +const notEmptyCodes = new Set([ + 'ENOTEMPTY', + 'EEXIST', + 'EPERM', +]) + +// error codes we can retry later +const retryCodes = new Set([ + 'EBUSY', + 'EMFILE', + 'ENFILE', + 'ENOTEMPTY', + 'EPERM', +]) + +const isWindows = process.platform === 'win32' + +const defaultOptions = { + retryDelay: 100, + maxRetries: 0, + recursive: false, + force: false, +} + +// this is drastically simplified, but should be roughly equivalent to what +// node core throws +class ERR_FS_EISDIR extends Error { + constructor (path) { + super() + this.info = { + code: 'EISDIR', + message: 'is a directory', + path, + syscall: 'rm', + errno: errnos.EISDIR, + } + this.name = 'SystemError' + this.code = 'ERR_FS_EISDIR' + this.errno = errnos.EISDIR + this.syscall = 'rm' + this.path = path + this.message = `Path is a directory: ${this.syscall} returned ` + + `${this.info.code} (is a directory) ${path}` + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } +} + +class ENOTDIR extends Error { + constructor (path) { + super() + this.name = 'Error' + this.code = 'ENOTDIR' + this.errno = errnos.ENOTDIR + this.syscall = 'rmdir' + this.path = path + this.message = `not a directory, ${this.syscall} '${this.path}'` + } + + toString () { + return `${this.name}: ${this.code}: ${this.message}` + } +} + +// force is passed separately here because we respect it for the first entry +// into rimraf only, any further calls that are spawned as a result (i.e. 
to +// delete content within the target) will ignore ENOENT errors +const rimraf = async (path, options, isTop = false) => { + const force = isTop ? options.force : true + const stat = await fs.lstat(path) + .catch((err) => { + // we only ignore ENOENT if we're forcing this call + if (err.code === 'ENOENT' && force) { + return + } + + if (isWindows && err.code === 'EPERM') { + return fixEPERM(path, options, err, isTop) + } + + throw err + }) + + // no stat object here means either lstat threw an ENOENT, or lstat threw + // an EPERM and the fixPERM function took care of things. either way, we're + // already done, so return early + if (!stat) { + return + } + + if (stat.isDirectory()) { + return rmdir(path, options, null, isTop) + } + + return fs.unlink(path) + .catch((err) => { + if (err.code === 'ENOENT' && force) { + return + } + + if (err.code === 'EISDIR') { + return rmdir(path, options, err, isTop) + } + + if (err.code === 'EPERM') { + // in windows, we handle this through fixEPERM which will also try to + // delete things again. everywhere else since deleting the target as a + // file didn't work we go ahead and try to delete it as a directory + return isWindows + ? fixEPERM(path, options, err, isTop) + : rmdir(path, options, err, isTop) + } + + throw err + }) +} + +const fixEPERM = async (path, options, originalErr, isTop) => { + const force = isTop ? options.force : true + const targetMissing = await fs.chmod(path, 0o666) + .catch((err) => { + if (err.code === 'ENOENT' && force) { + return true + } + + throw originalErr + }) + + // got an ENOENT above, return now. no file = no problem + if (targetMissing) { + return + } + + // this function does its own lstat rather than calling rimraf again to avoid + // infinite recursion for a repeating EPERM + const stat = await fs.lstat(path) + .catch((err) => { + if (err.code === 'ENOENT' && force) { + return + } + + throw originalErr + }) + + if (!stat) { + return + } + + if (stat.isDirectory()) { + return rmdir(path, options, originalErr, isTop) + } + + return fs.unlink(path) +} + +const rmdir = async (path, options, originalErr, isTop) => { + if (!options.recursive && isTop) { + throw originalErr || new ERR_FS_EISDIR(path) + } + const force = isTop ? options.force : true + + return fs.rmdir(path) + .catch(async (err) => { + // in Windows, calling rmdir on a file path will fail with ENOENT rather + // than ENOTDIR. to determine if that's what happened, we have to do + // another lstat on the path. 
if the path isn't actually gone, we throw + // away the ENOENT and replace it with our own ENOTDIR + if (isWindows && err.code === 'ENOENT') { + const stillExists = await fs.lstat(path).then(() => true, () => false) + if (stillExists) { + err = new ENOTDIR(path) + } + } + + // not there, not a problem + if (err.code === 'ENOENT' && force) { + return + } + + // we may not have originalErr if lstat tells us our target is a + // directory but that changes before we actually remove it, so + // only throw it here if it's set + if (originalErr && err.code === 'ENOTDIR') { + throw originalErr + } + + // the directory isn't empty, remove the contents and try again + if (notEmptyCodes.has(err.code)) { + const files = await fs.readdir(path) + await Promise.all(files.map((file) => { + const target = join(path, file) + return rimraf(target, options) + })) + return fs.rmdir(path) + } + + throw err + }) +} + +const rm = async (path, opts) => { + const options = { ...defaultOptions, ...opts } + let retries = 0 + + const errHandler = async (err) => { + if (retryCodes.has(err.code) && ++retries < options.maxRetries) { + const delay = retries * options.retryDelay + await promiseTimeout(delay) + return rimraf(path, options, true).catch(errHandler) + } + + throw err + } + + return rimraf(path, options, true).catch(errHandler) +} + +const promiseTimeout = (ms) => new Promise((r) => setTimeout(r, ms)) + +module.exports = rm diff --git a/node_modules/@npmcli/fs/lib/with-owner-sync.js b/node_modules/@npmcli/fs/lib/with-owner-sync.js new file mode 100644 index 0000000..3597d1c --- /dev/null +++ b/node_modules/@npmcli/fs/lib/with-owner-sync.js @@ -0,0 +1,21 @@ +const getOptions = require('./common/get-options.js') +const owner = require('./common/owner-sync.js') + +const withOwnerSync = (path, fn, opts) => { + const options = getOptions(opts, { + copy: ['owner'], + }) + + const { uid, gid } = owner.validate(path, options.owner) + + const result = fn({ uid, gid }) + + owner.update(path, uid, gid) + if (typeof result === 'string') { + owner.update(result, uid, gid) + } + + return result +} + +module.exports = withOwnerSync diff --git a/node_modules/@npmcli/fs/lib/with-owner.js b/node_modules/@npmcli/fs/lib/with-owner.js new file mode 100644 index 0000000..a679102 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/with-owner.js @@ -0,0 +1,21 @@ +const getOptions = require('./common/get-options.js') +const owner = require('./common/owner.js') + +const withOwner = async (path, fn, opts) => { + const options = getOptions(opts, { + copy: ['owner'], + }) + + const { uid, gid } = await owner.validate(path, options.owner) + + const result = await fn({ uid, gid }) + + await Promise.all([ + owner.update(path, uid, gid), + typeof result === 'string' ? owner.update(result, uid, gid) : null, + ]) + + return result +} + +module.exports = withOwner diff --git a/node_modules/@npmcli/fs/lib/with-temp-dir.js b/node_modules/@npmcli/fs/lib/with-temp-dir.js new file mode 100644 index 0000000..81db59d --- /dev/null +++ b/node_modules/@npmcli/fs/lib/with-temp-dir.js @@ -0,0 +1,41 @@ +const { join, sep } = require('path') + +const getOptions = require('./common/get-options.js') +const mkdir = require('./mkdir.js') +const mkdtemp = require('./mkdtemp.js') +const rm = require('./rm/index.js') + +// create a temp directory, ensure its permissions match its parent, then call +// the supplied function passing it the path to the directory. 
clean up after +// the function finishes, whether it throws or not +const withTempDir = async (root, fn, opts) => { + const options = getOptions(opts, { + copy: ['tmpPrefix'], + }) + // create the directory, and fix its ownership + await mkdir(root, { recursive: true, owner: 'inherit' }) + + const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || ''), { owner: 'inherit' }) + let err + let result + + try { + result = await fn(target) + } catch (_err) { + err = _err + } + + try { + await rm(target, { force: true, recursive: true }) + } catch { + // ignore errors + } + + if (err) { + throw err + } + + return result +} + +module.exports = withTempDir diff --git a/node_modules/@npmcli/fs/lib/write-file.js b/node_modules/@npmcli/fs/lib/write-file.js new file mode 100644 index 0000000..ff90057 --- /dev/null +++ b/node_modules/@npmcli/fs/lib/write-file.js @@ -0,0 +1,14 @@ +const fs = require('./fs.js') +const getOptions = require('./common/get-options.js') +const withOwner = require('./with-owner.js') + +const writeFile = async (file, data, opts) => { + const options = getOptions(opts, { + copy: ['encoding', 'mode', 'flag', 'signal'], + wrap: 'encoding', + }) + + return withOwner(file, () => fs.writeFile(file, data, options), opts) +} + +module.exports = writeFile diff --git a/node_modules/@npmcli/fs/node_modules/.bin/semver b/node_modules/@npmcli/fs/node_modules/.bin/semver new file mode 100644 index 0000000..db6dc54 --- /dev/null +++ b/node_modules/@npmcli/fs/node_modules/.bin/semver @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../../semver/bin/semver.js" "$@" + ret=$? +else + node "$basedir/../../../../semver/bin/semver.js" "$@" + ret=$? 
+fi +exit $ret diff --git a/node_modules/@npmcli/fs/node_modules/.bin/semver.cmd b/node_modules/@npmcli/fs/node_modules/.bin/semver.cmd new file mode 100644 index 0000000..f59d3aa --- /dev/null +++ b/node_modules/@npmcli/fs/node_modules/.bin/semver.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\..\semver\bin\semver.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\..\semver\bin\semver.js" %* +) \ No newline at end of file diff --git a/node_modules/@npmcli/fs/package.json b/node_modules/@npmcli/fs/package.json new file mode 100644 index 0000000..1512fd6 --- /dev/null +++ b/node_modules/@npmcli/fs/package.json @@ -0,0 +1,50 @@ +{ + "name": "@npmcli/fs", + "version": "2.1.2", + "description": "filesystem utilities for the npm cli", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "snap": "tap", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/fs.git" + }, + "keywords": [ + "npm", + "oss" + ], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^3.0.1", + "@npmcli/template-oss": "3.5.0", + "tap": "^16.0.1" + }, + "dependencies": { + "@gar/promisify": "^1.1.3", + "semver": "^7.3.5" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "3.5.0" + } +} diff --git a/node_modules/@npmcli/move-file/LICENSE.md b/node_modules/@npmcli/move-file/LICENSE.md new file mode 100644 index 0000000..072bf20 --- /dev/null +++ b/node_modules/@npmcli/move-file/LICENSE.md @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) +Copyright (c) npm, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/@npmcli/move-file/README.md b/node_modules/@npmcli/move-file/README.md new file mode 100644 index 0000000..8a5a57f --- /dev/null +++ b/node_modules/@npmcli/move-file/README.md @@ -0,0 +1,69 @@ +# @npmcli/move-file + +A fork of [move-file](https://github.com/sindresorhus/move-file) with +compatibility with all node 10.x versions. + +> Move a file (or directory) + +The built-in +[`fs.rename()`](https://nodejs.org/api/fs.html#fs_fs_rename_oldpath_newpath_callback) +is just a JavaScript wrapper for the C `rename(2)` function, which doesn't +support moving files across partitions or devices. This module is what you +would have expected `fs.rename()` to be. + +## Highlights + +- Promise API. +- Supports moving a file across partitions and devices. +- Optionally prevent overwriting an existing file. +- Creates non-existent destination directories for you. +- Support for Node versions that lack built-in recursive `fs.mkdir()` +- Automatically recurses when source is a directory. + +## Install + +``` +$ npm install @npmcli/move-file +``` + +## Usage + +```js +const moveFile = require('@npmcli/move-file'); + +(async () => { + await moveFile('source/unicorn.png', 'destination/unicorn.png'); + console.log('The file has been moved'); +})(); +``` + +## API + +### moveFile(source, destination, options?) + +Returns a `Promise` that resolves when the file has been moved. + +### moveFile.sync(source, destination, options?) + +#### source + +Type: `string` + +File, or directory, you want to move. + +#### destination + +Type: `string` + +Where you want the file or directory moved. + +#### options + +Type: `object` + +##### overwrite + +Type: `boolean`\ +Default: `true` + +Overwrite existing destination file(s). diff --git a/node_modules/@npmcli/move-file/lib/index.js b/node_modules/@npmcli/move-file/lib/index.js new file mode 100644 index 0000000..5789bb1 --- /dev/null +++ b/node_modules/@npmcli/move-file/lib/index.js @@ -0,0 +1,185 @@ +const { dirname, join, resolve, relative, isAbsolute } = require('path') +const rimraf_ = require('rimraf') +const { promisify } = require('util') +const { + access: access_, + accessSync, + copyFile: copyFile_, + copyFileSync, + readdir: readdir_, + readdirSync, + rename: rename_, + renameSync, + stat: stat_, + statSync, + lstat: lstat_, + lstatSync, + symlink: symlink_, + symlinkSync, + readlink: readlink_, + readlinkSync, +} = require('fs') + +const access = promisify(access_) +const copyFile = promisify(copyFile_) +const readdir = promisify(readdir_) +const rename = promisify(rename_) +const stat = promisify(stat_) +const lstat = promisify(lstat_) +const symlink = promisify(symlink_) +const readlink = promisify(readlink_) +const rimraf = promisify(rimraf_) +const rimrafSync = rimraf_.sync + +const mkdirp = require('mkdirp') + +const pathExists = async path => { + try { + await access(path) + return true + } catch (er) { + return er.code !== 'ENOENT' + } +} + +const pathExistsSync = path => { + try { + accessSync(path) + return true + } catch (er) { + return er.code !== 'ENOENT' + } +} + +const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { + if (!source || !destination) { + throw new TypeError('`source` and `destination` file required') + } + + options = { + overwrite: true, + ...options, + } + + if (!options.overwrite && await pathExists(destination)) { + throw new Error(`The destination file exists: ${destination}`) + } + + await mkdirp(dirname(destination)) + + try { + await rename(source, destination) + } 
catch (error) { + if (error.code === 'EXDEV' || error.code === 'EPERM') { + const sourceStat = await lstat(source) + if (sourceStat.isDirectory()) { + const files = await readdir(source) + await Promise.all(files.map((file) => + moveFile(join(source, file), join(destination, file), options, false, symlinks) + )) + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + await copyFile(source, destination) + } + } else { + throw error + } + } + + if (root) { + await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { + let target = await readlink(symSource) + // junction symlinks in windows will be absolute paths, so we need to + // make sure they point to the symlink destination + if (isAbsolute(target)) { + target = resolve(symDestination, relative(symSource, target)) + } + // try to determine what the actual file is so we can create the correct + // type of symlink in windows + let targetStat = 'file' + try { + targetStat = await stat(resolve(dirname(symSource), target)) + if (targetStat.isDirectory()) { + targetStat = 'junction' + } + } catch { + // targetStat remains 'file' + } + await symlink( + target, + symDestination, + targetStat + ) + })) + await rimraf(source) + } +} + +const moveFileSync = (source, destination, options = {}, root = true, symlinks = []) => { + if (!source || !destination) { + throw new TypeError('`source` and `destination` file required') + } + + options = { + overwrite: true, + ...options, + } + + if (!options.overwrite && pathExistsSync(destination)) { + throw new Error(`The destination file exists: ${destination}`) + } + + mkdirp.sync(dirname(destination)) + + try { + renameSync(source, destination) + } catch (error) { + if (error.code === 'EXDEV' || error.code === 'EPERM') { + const sourceStat = lstatSync(source) + if (sourceStat.isDirectory()) { + const files = readdirSync(source) + for (const file of files) { + moveFileSync(join(source, file), join(destination, file), options, false, symlinks) + } + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + copyFileSync(source, destination) + } + } else { + throw error + } + } + + if (root) { + for (const { source: symSource, destination: symDestination } of symlinks) { + let target = readlinkSync(symSource) + // junction symlinks in windows will be absolute paths, so we need to + // make sure they point to the symlink destination + if (isAbsolute(target)) { + target = resolve(symDestination, relative(symSource, target)) + } + // try to determine what the actual file is so we can create the correct + // type of symlink in windows + let targetStat = 'file' + try { + targetStat = statSync(resolve(dirname(symSource), target)) + if (targetStat.isDirectory()) { + targetStat = 'junction' + } + } catch { + // targetStat remains 'file' + } + symlinkSync( + target, + symDestination, + targetStat + ) + } + rimrafSync(source) + } +} + +module.exports = moveFile +module.exports.sync = moveFileSync diff --git a/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp b/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp new file mode 100644 index 0000000..0d39a0f --- /dev/null +++ b/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../../mkdirp/bin/cmd.js" "$@" + ret=$? 
+else + node "$basedir/../../../../mkdirp/bin/cmd.js" "$@" + ret=$? +fi +exit $ret diff --git a/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp.cmd b/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp.cmd new file mode 100644 index 0000000..15d7643 --- /dev/null +++ b/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\..\mkdirp\bin\cmd.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\..\mkdirp\bin\cmd.js" %* +) \ No newline at end of file diff --git a/node_modules/@npmcli/move-file/node_modules/.bin/rimraf b/node_modules/@npmcli/move-file/node_modules/.bin/rimraf new file mode 100644 index 0000000..52877e2 --- /dev/null +++ b/node_modules/@npmcli/move-file/node_modules/.bin/rimraf @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../../rimraf/bin.js" "$@" + ret=$? +else + node "$basedir/../../../../rimraf/bin.js" "$@" + ret=$? +fi +exit $ret diff --git a/node_modules/@npmcli/move-file/node_modules/.bin/rimraf.cmd b/node_modules/@npmcli/move-file/node_modules/.bin/rimraf.cmd new file mode 100644 index 0000000..e405a7c --- /dev/null +++ b/node_modules/@npmcli/move-file/node_modules/.bin/rimraf.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\..\rimraf\bin.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\..\rimraf\bin.js" %* +) \ No newline at end of file diff --git a/node_modules/@npmcli/move-file/package.json b/node_modules/@npmcli/move-file/package.json new file mode 100644 index 0000000..58793b9 --- /dev/null +++ b/node_modules/@npmcli/move-file/package.json @@ -0,0 +1,47 @@ +{ + "name": "@npmcli/move-file", + "version": "2.0.1", + "files": [ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "description": "move a file (fork of move-file)", + "dependencies": { + "mkdirp": "^1.0.4", + "rimraf": "^3.0.2" + }, + "devDependencies": { + "@npmcli/eslint-config": "^3.0.1", + "@npmcli/template-oss": "3.5.0", + "tap": "^16.0.1" + }, + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/move-file.git" + }, + "tap": { + "check-coverage": true + }, + "license": "MIT", + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "3.5.0" + } +} diff --git a/node_modules/@tootallnate/once/LICENSE b/node_modules/@tootallnate/once/LICENSE new file mode 100644 index 0000000..c4c56a2 --- /dev/null +++ b/node_modules/@tootallnate/once/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@tootallnate/once/README.md b/node_modules/@tootallnate/once/README.md new file mode 100644 index 0000000..bc980fd --- /dev/null +++ b/node_modules/@tootallnate/once/README.md @@ -0,0 +1,93 @@ +# @tootallnate/once + +### Creates a Promise that waits for a single event + +## Installation + +Install with `npm`: + +```bash +$ npm install @tootallnate/once +``` + +## API + +### once(emitter: EventEmitter, name: string, opts?: OnceOptions): Promise<[...Args]> + +Creates a Promise that waits for event `name` to occur on `emitter`, and resolves +the promise with an array of the values provided to the event handler. If an +`error` event occurs before the event specified by `name`, then the Promise is +rejected with the error argument. + +```typescript +import once from '@tootallnate/once'; +import { EventEmitter } from 'events'; + +const emitter = new EventEmitter(); + +setTimeout(() => { + emitter.emit('foo', 'bar'); +}, 100); + +const [result] = await once(emitter, 'foo'); +console.log({ result }); +// { result: 'bar' } +``` + +#### Promise Strong Typing + +The main feature that this module provides over other "once" implementations is that +the Promise that is returned is _**strongly typed**_ based on the type of `emitter` +and the `name` of the event. Some examples are shown below. + +_The process "exit" event contains a single number for exit code:_ + +```typescript +const [code] = await once(process, 'exit'); +// ^ number +``` +_A child process "exit" event contains either an exit code or a signal:_ + +```typescript +const child = spawn('echo', []); +const [code, signal] = await once(child, 'exit'); +// ^ number | null +// ^ string | null +``` + +_A forked child process "message" event is type `any`, so you can cast the Promise directly:_ + +```typescript +const child = fork('file.js'); + +// With `await` +const [message, _]: [WorkerPayload, unknown] = await once(child, 'message'); + +// With Promise +const messagePromise: Promise<[WorkerPayload, unknown]> = once(child, 'message'); + +// Better yet would be to leave it as `any`, and validate the payload +// at runtime with i.e. 
`ajv` + `json-schema-to-typescript`
+```
+
+_If the TypeScript definition does not contain an overload for the specified event name, then the Promise will have type `unknown[]` and your code will need to narrow the result manually:_
+
+```typescript
+interface CustomEmitter extends EventEmitter {
+	on(name: 'foo', listener: (a: string, b: number) => void): this;
+}
+
+const emitter: CustomEmitter = new EventEmitter();
+
+// "foo" event is a defined overload, so it's properly typed
+const fooPromise = once(emitter, 'foo');
+// ^ Promise<[a: string, b: number]>
+
+// "bar" event is not a defined overload, so it gets `unknown[]`
+const barPromise = once(emitter, 'bar');
+// ^ Promise<unknown[]>
+```
+
+### OnceOptions
+
+- `signal` - `AbortSignal` instance to unbind event handlers before the Promise has been fulfilled.
diff --git a/node_modules/@tootallnate/once/dist/index.d.ts b/node_modules/@tootallnate/once/dist/index.d.ts
new file mode 100644
index 0000000..93d02a9
--- /dev/null
+++ b/node_modules/@tootallnate/once/dist/index.d.ts
@@ -0,0 +1,7 @@
+/// <reference types="node" />
+import { EventEmitter } from 'events';
+import { EventNames, EventListenerParameters, AbortSignal } from './types';
+export interface OnceOptions {
+    signal?: AbortSignal;
+}
+export default function once<Emitter extends EventEmitter, Event extends EventNames<Emitter>>(emitter: Emitter, name: Event, { signal }?: OnceOptions): Promise<EventListenerParameters<Emitter, Event>>;
diff --git a/node_modules/@tootallnate/once/dist/index.js b/node_modules/@tootallnate/once/dist/index.js
new file mode 100644
index 0000000..ca6385b
--- /dev/null
+++ b/node_modules/@tootallnate/once/dist/index.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+function once(emitter, name, { signal } = {}) {
+    return new Promise((resolve, reject) => {
+        function cleanup() {
+            signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup);
+            emitter.removeListener(name, onEvent);
+            emitter.removeListener('error', onError);
+        }
+        function onEvent(...args) {
+            cleanup();
+            resolve(args);
+        }
+        function onError(err) {
+            cleanup();
+            reject(err);
+        }
+        signal === null || signal === void 0 ?
void 0 : signal.addEventListener('abort', cleanup); + emitter.on(name, onEvent); + emitter.on('error', onError); + }); +} +exports.default = once; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@tootallnate/once/dist/index.js.map b/node_modules/@tootallnate/once/dist/index.js.map new file mode 100644 index 0000000..61708ca --- /dev/null +++ b/node_modules/@tootallnate/once/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAOA,SAAwB,IAAI,CAI3B,OAAgB,EAChB,IAAW,EACX,EAAE,MAAM,KAAkB,EAAE;IAE5B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,SAAS,OAAO;YACf,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,mBAAmB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YAC9C,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YACtC,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC1C,CAAC;QACD,SAAS,OAAO,CAAC,GAAG,IAAW;YAC9B,OAAO,EAAE,CAAC;YACV,OAAO,CAAC,IAA+C,CAAC,CAAC;QAC1D,CAAC;QACD,SAAS,OAAO,CAAC,GAAU;YAC1B,OAAO,EAAE,CAAC;YACV,MAAM,CAAC,GAAG,CAAC,CAAC;QACb,CAAC;QACD,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,gBAAgB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC3C,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;AACJ,CAAC;AA1BD,uBA0BC"} \ No newline at end of file diff --git a/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts b/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts new file mode 100644 index 0000000..eb2bbc6 --- /dev/null +++ b/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts @@ -0,0 +1,231 @@ +export declare type OverloadedParameters = T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; + (...args: infer A18): any; + (...args: infer A19): any; + (...args: infer A20): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 | A19 | A20 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; + (...args: infer A18): any; + (...args: infer A19): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 | A19 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; + (...args: infer A18): any; +} ? 
A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; + (...args: infer A17): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; + (...args: infer A16): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; + (...args: infer A15): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; + (...args: infer A14): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; + (...args: infer A13): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; + (...args: infer A12): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; + (...args: infer A11): any; +} ? 
A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; + (...args: infer A10): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; + (...args: infer A9): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; + (...args: infer A8): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; + (...args: infer A7): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; + (...args: infer A6): any; +} ? A1 | A2 | A3 | A4 | A5 | A6 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; + (...args: infer A5): any; +} ? A1 | A2 | A3 | A4 | A5 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; + (...args: infer A4): any; +} ? A1 | A2 | A3 | A4 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; + (...args: infer A3): any; +} ? A1 | A2 | A3 : T extends { + (...args: infer A1): any; + (...args: infer A2): any; +} ? A1 | A2 : T extends { + (...args: infer A1): any; +} ? A1 : any; diff --git a/node_modules/@tootallnate/once/dist/overloaded-parameters.js b/node_modules/@tootallnate/once/dist/overloaded-parameters.js new file mode 100644 index 0000000..207186d --- /dev/null +++ b/node_modules/@tootallnate/once/dist/overloaded-parameters.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=overloaded-parameters.js.map \ No newline at end of file diff --git a/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map b/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map new file mode 100644 index 0000000..863f146 --- /dev/null +++ b/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map @@ -0,0 +1 @@ +{"version":3,"file":"overloaded-parameters.js","sourceRoot":"","sources":["../src/overloaded-parameters.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@tootallnate/once/dist/types.d.ts b/node_modules/@tootallnate/once/dist/types.d.ts new file mode 100644 index 0000000..58be828 --- /dev/null +++ b/node_modules/@tootallnate/once/dist/types.d.ts @@ -0,0 +1,17 @@ +/// +import { EventEmitter } from 'events'; +import { OverloadedParameters } from './overloaded-parameters'; +export declare type FirstParameter = T extends [infer R, ...any[]] ? R : never; +export declare type EventListener = F extends [ + T, + infer R, + ...any[] +] ? 
R : never; +export declare type EventParameters = OverloadedParameters; +export declare type EventNames = FirstParameter>; +export declare type EventListenerParameters> = WithDefault, Event>>, unknown[]>; +export declare type WithDefault = [T] extends [never] ? D : T; +export interface AbortSignal { + addEventListener: (name: string, listener: (...args: any[]) => any) => void; + removeEventListener: (name: string, listener: (...args: any[]) => any) => void; +} diff --git a/node_modules/@tootallnate/once/dist/types.js b/node_modules/@tootallnate/once/dist/types.js new file mode 100644 index 0000000..11e638d --- /dev/null +++ b/node_modules/@tootallnate/once/dist/types.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@tootallnate/once/dist/types.js.map b/node_modules/@tootallnate/once/dist/types.js.map new file mode 100644 index 0000000..c768b79 --- /dev/null +++ b/node_modules/@tootallnate/once/dist/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@tootallnate/once/package.json b/node_modules/@tootallnate/once/package.json new file mode 100644 index 0000000..69ce947 --- /dev/null +++ b/node_modules/@tootallnate/once/package.json @@ -0,0 +1,52 @@ +{ + "name": "@tootallnate/once", + "version": "2.0.0", + "description": "Creates a Promise that waits for a single event", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "jest", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/once.git" + }, + "keywords": [], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/once/issues" + }, + "devDependencies": { + "@types/jest": "^27.0.2", + "@types/node": "^12.12.11", + "abort-controller": "^3.0.0", + "jest": "^27.2.1", + "rimraf": "^3.0.0", + "ts-jest": "^27.0.5", + "typescript": "^4.4.3" + }, + "engines": { + "node": ">= 10" + }, + "jest": { + "preset": "ts-jest", + "globals": { + "ts-jest": { + "diagnostics": false, + "isolatedModules": true + } + }, + "verbose": false, + "testEnvironment": "node", + "testMatch": [ + "/test/**/*.test.ts" + ] + } +} diff --git a/node_modules/@types/minimist/LICENSE b/node_modules/@types/minimist/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/minimist/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/minimist/README.md b/node_modules/@types/minimist/README.md new file mode 100644 index 0000000..45641a4 --- /dev/null +++ b/node_modules/@types/minimist/README.md @@ -0,0 +1,16 @@ +# Installation +> `npm install --save @types/minimist` + +# Summary +This package contains type definitions for minimist (https://github.com/substack/minimist). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/minimist. + +### Additional Details + * Last updated: Wed, 07 Jul 2021 00:01:41 GMT + * Dependencies: none + * Global values: none + +# Credits +These definitions were written by [Bart van der Schoor](https://github.com/Bartvds), [Necroskillz](https://github.com/Necroskillz), [kamranayub](https://github.com/kamranayub), and [Piotr Błażejewicz](https://github.com/peterblazejewicz). diff --git a/node_modules/@types/minimist/index.d.ts b/node_modules/@types/minimist/index.d.ts new file mode 100644 index 0000000..afc15d5 --- /dev/null +++ b/node_modules/@types/minimist/index.d.ts @@ -0,0 +1,95 @@ +// Type definitions for minimist 1.2 +// Project: https://github.com/substack/minimist +// Definitions by: Bart van der Schoor <https://github.com/Bartvds> +// Necroskillz <https://github.com/Necroskillz> +// kamranayub <https://github.com/kamranayub> +// Piotr Błażejewicz <https://github.com/peterblazejewicz> +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +/** + * Return an argument object populated with the array arguments from args + * + * @param [args] An optional argument array (typically `process.argv.slice(2)`) + * @param [opts] An optional options object to customize the parsing + */ +declare function minimist(args?: string[], opts?: minimist.Opts): minimist.ParsedArgs; + +/** + * Return an argument object populated with the array arguments from args. Strongly-typed + * to be the intersect of type T with minimist.ParsedArgs. + * + * `T` The type that will be intersected with minimist.ParsedArgs to represent the argument object + * + * @param [args] An optional argument array (typically `process.argv.slice(2)`) + * @param [opts] An optional options object to customize the parsing + */ +declare function minimist<T>(args?: string[], opts?: minimist.Opts): T & minimist.ParsedArgs; + +/** + * Return an argument object populated with the array arguments from args. Strongly-typed + * to be the type T which should extend minimist.ParsedArgs + * + * `T` The type that extends minimist.ParsedArgs and represents the argument object + * + * @param [args] An optional argument array (typically `process.argv.slice(2)`) + * @param [opts] An optional options object to customize the parsing + */ +declare function minimist<T extends minimist.ParsedArgs>(args?: string[], opts?: minimist.Opts): T; + +declare namespace minimist { + interface Opts { + /** + * A string or array of strings argument names to always treat as strings + */ + string?: string | string[] | undefined; + + /** + * A boolean, string or array of strings to always treat as booleans. If true will treat + * all double hyphenated arguments without equals signs as boolean (e.g. 
affects `--foo`, not `-f` or `--foo=bar`) + */ + boolean?: boolean | string | string[] | undefined; + + /** + * An object mapping string names to strings or arrays of string argument names to use as aliases + */ + alias?: { [key: string]: string | string[] } | undefined; + + /** + * An object mapping string argument names to default values + */ + default?: { [key: string]: any } | undefined; + + /** + * When true, populate argv._ with everything after the first non-option + */ + stopEarly?: boolean | undefined; + + /** + * A function which is invoked with a command line parameter not defined in the opts + * configuration object. If the function returns false, the unknown option is not added to argv + */ + unknown?: ((arg: string) => boolean) | undefined; + + /** + * When true, populate argv._ with everything before the -- and argv['--'] with everything after the --. + * Note that with -- set, parsing for arguments still stops after the `--`. + */ + '--'?: boolean | undefined; + } + + interface ParsedArgs { + [arg: string]: any; + + /** + * If opts['--'] is true, populated with everything after the -- + */ + '--'?: string[] | undefined; + + /** + * Contains all the arguments that didn't have an option associated with them + */ + _: string[]; + } +} + +export = minimist; diff --git a/node_modules/@types/minimist/package.json b/node_modules/@types/minimist/package.json new file mode 100644 index 0000000..99ea2d2 --- /dev/null +++ b/node_modules/@types/minimist/package.json @@ -0,0 +1,40 @@ +{ + "name": "@types/minimist", + "version": "1.2.2", + "description": "TypeScript definitions for minimist", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/minimist", + "license": "MIT", + "contributors": [ + { + "name": "Bart van der Schoor", + "url": "https://github.com/Bartvds", + "githubUsername": "Bartvds" + }, + { + "name": "Necroskillz", + "url": "https://github.com/Necroskillz", + "githubUsername": "Necroskillz" + }, + { + "name": "kamranayub", + "url": "https://github.com/kamranayub", + "githubUsername": "kamranayub" + }, + { + "name": "Piotr Błażejewicz", + "url": "https://github.com/peterblazejewicz", + "githubUsername": "peterblazejewicz" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/minimist" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "9032205d52417d0f537f1e52af6f7ac447acb4b87dca0ab5840b83ec7818232e", + "typeScriptVersion": "3.6" +} \ No newline at end of file diff --git a/node_modules/@types/normalize-package-data/LICENSE b/node_modules/@types/normalize-package-data/LICENSE new file mode 100644 index 0000000..9e841e7 --- /dev/null +++ b/node_modules/@types/normalize-package-data/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/normalize-package-data/README.md b/node_modules/@types/normalize-package-data/README.md new file mode 100644 index 0000000..3d2a0a0 --- /dev/null +++ b/node_modules/@types/normalize-package-data/README.md @@ -0,0 +1,66 @@ +# Installation +> `npm install --save @types/normalize-package-data` + +# Summary +This package contains type definitions for normalize-package-data (https://github.com/npm/normalize-package-data#readme). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/normalize-package-data. +## [index.d.ts](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/normalize-package-data/index.d.ts) +````ts +// Type definitions for normalize-package-data 2.4 +// Project: https://github.com/npm/normalize-package-data#readme +// Definitions by: Jeff Dickey +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +export = normalize; + +declare function normalize(data: normalize.Input, warn?: normalize.WarnFn, strict?: boolean): void; +declare function normalize(data: normalize.Input, strict?: boolean): void; + +declare namespace normalize { + type WarnFn = (msg: string) => void; + interface Input {[k: string]: any; } + + interface Person { + name?: string | undefined; + email?: string | undefined; + url?: string | undefined; + } + + interface Package { + [k: string]: any; + name: string; + version: string; + files?: string[] | undefined; + bin?: {[k: string]: string } | undefined; + man?: string[] | undefined; + keywords?: string[] | undefined; + author?: Person | undefined; + maintainers?: Person[] | undefined; + contributors?: Person[] | undefined; + bundleDependencies?: {[name: string]: string; } | undefined; + dependencies?: {[name: string]: string; } | undefined; + devDependencies?: {[name: string]: string; } | undefined; + optionalDependencies?: {[name: string]: string; } | undefined; + description?: string | undefined; + engines?: {[type: string]: string } | undefined; + license?: string | undefined; + repository?: { type: string, url: string } | undefined; + bugs?: { url: string, email?: string | undefined } | { url?: string | undefined, email: string } | undefined; + homepage?: string | undefined; + scripts?: {[k: string]: string} | undefined; + readme: string; + _id: string; + } +} + +```` + +### Additional Details + * Last updated: Wed, 07 Jul 2021 16:31:34 GMT + * Dependencies: none + * Global values: none + +# Credits +These definitions were written by [Jeff Dickey](https://github.com/jdxcode). 
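Editorial aside (not part of the vendored files): the typings embedded in the README above are easiest to read next to a concrete call. A minimal sketch of typical usage, assuming `normalize-package-data` itself is installed alongside these typings; the manifest values are invented for illustration:

```js
var normalize = require('normalize-package-data');

// A freshly parsed package.json -- matches the loose `Input` shape.
var data = {
  name: 'example-package',
  version: '1.0.0',
  repository: 'github:user/example-package'
};

// The optional `warn` callback collects non-fatal complaints.
var warnings = [];
normalize(data, function (msg) { warnings.push(msg); });

// `data` is normalized in place and should now approximate the stricter
// `Package` shape: e.g. `_id` is filled in and a string `repository`
// is expanded into a { type, url } object.
console.log(data._id, data.repository, warnings);
```

Note that `normalize` mutates its argument and returns nothing, which is why both overloads above are declared `void`.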
diff --git a/node_modules/@types/normalize-package-data/index.d.ts b/node_modules/@types/normalize-package-data/index.d.ts new file mode 100644 index 0000000..102d073 --- /dev/null +++ b/node_modules/@types/normalize-package-data/index.d.ts @@ -0,0 +1,46 @@ +// Type definitions for normalize-package-data 2.4 +// Project: https://github.com/npm/normalize-package-data#readme +// Definitions by: Jeff Dickey +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +export = normalize; + +declare function normalize(data: normalize.Input, warn?: normalize.WarnFn, strict?: boolean): void; +declare function normalize(data: normalize.Input, strict?: boolean): void; + +declare namespace normalize { + type WarnFn = (msg: string) => void; + interface Input {[k: string]: any; } + + interface Person { + name?: string | undefined; + email?: string | undefined; + url?: string | undefined; + } + + interface Package { + [k: string]: any; + name: string; + version: string; + files?: string[] | undefined; + bin?: {[k: string]: string } | undefined; + man?: string[] | undefined; + keywords?: string[] | undefined; + author?: Person | undefined; + maintainers?: Person[] | undefined; + contributors?: Person[] | undefined; + bundleDependencies?: {[name: string]: string; } | undefined; + dependencies?: {[name: string]: string; } | undefined; + devDependencies?: {[name: string]: string; } | undefined; + optionalDependencies?: {[name: string]: string; } | undefined; + description?: string | undefined; + engines?: {[type: string]: string } | undefined; + license?: string | undefined; + repository?: { type: string, url: string } | undefined; + bugs?: { url: string, email?: string | undefined } | { url?: string | undefined, email: string } | undefined; + homepage?: string | undefined; + scripts?: {[k: string]: string} | undefined; + readme: string; + _id: string; + } +} diff --git a/node_modules/@types/normalize-package-data/package.json b/node_modules/@types/normalize-package-data/package.json new file mode 100644 index 0000000..1f61182 --- /dev/null +++ b/node_modules/@types/normalize-package-data/package.json @@ -0,0 +1,25 @@ +{ + "name": "@types/normalize-package-data", + "version": "2.4.1", + "description": "TypeScript definitions for normalize-package-data", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/normalize-package-data", + "license": "MIT", + "contributors": [ + { + "name": "Jeff Dickey", + "url": "https://github.com/jdxcode", + "githubUsername": "jdxcode" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/normalize-package-data" + }, + "scripts": {}, + "dependencies": {}, + "typesPublisherContentHash": "10653410655e204616118acfbe2900dc09227bc3a80c532a93d44b46be54db36", + "typeScriptVersion": "3.6" +} \ No newline at end of file diff --git a/node_modules/abbrev/LICENSE b/node_modules/abbrev/LICENSE new file mode 100644 index 0000000..9bcfa9d --- /dev/null +++ b/node_modules/abbrev/LICENSE @@ -0,0 +1,46 @@ +This software is dual-licensed under the ISC and MIT licenses. +You may use this software under EITHER of the following licenses. + +---------- + +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------- + +Copyright Isaac Z. Schlueter and Contributors +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/abbrev/README.md b/node_modules/abbrev/README.md new file mode 100644 index 0000000..99746fe --- /dev/null +++ b/node_modules/abbrev/README.md @@ -0,0 +1,23 @@ +# abbrev-js + +Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev). + +Usage: + + var abbrev = require("abbrev"); + abbrev("foo", "fool", "folding", "flop"); + + // returns: + { fl: 'flop' + , flo: 'flop' + , flop: 'flop' + , fol: 'folding' + , fold: 'folding' + , foldi: 'folding' + , foldin: 'folding' + , folding: 'folding' + , foo: 'foo' + , fool: 'fool' + } + +This is handy for command-line scripts, or other cases where you want to be able to accept shorthands. diff --git a/node_modules/abbrev/abbrev.js b/node_modules/abbrev/abbrev.js new file mode 100644 index 0000000..7b1dc5d --- /dev/null +++ b/node_modules/abbrev/abbrev.js @@ -0,0 +1,61 @@ +module.exports = exports = abbrev.abbrev = abbrev + +abbrev.monkeyPatch = monkeyPatch + +function monkeyPatch () { + Object.defineProperty(Array.prototype, 'abbrev', { + value: function () { return abbrev(this) }, + enumerable: false, configurable: true, writable: true + }) + + Object.defineProperty(Object.prototype, 'abbrev', { + value: function () { return abbrev(Object.keys(this)) }, + enumerable: false, configurable: true, writable: true + }) +} + +function abbrev (list) { + if (arguments.length !== 1 || !Array.isArray(list)) { + list = Array.prototype.slice.call(arguments, 0) + } + for (var i = 0, l = list.length, args = [] ; i < l ; i ++) { + args[i] = typeof list[i] === "string" ? 
list[i] : String(list[i]) + } + + // sort them lexicographically, so that they're next to their nearest kin + args = args.sort(lexSort) + + // walk through each, seeing how much it has in common with the next and previous + var abbrevs = {} + , prev = "" + for (var i = 0, l = args.length ; i < l ; i ++) { + var current = args[i] + , next = args[i + 1] || "" + , nextMatches = true + , prevMatches = true + if (current === next) continue + for (var j = 0, cl = current.length ; j < cl ; j ++) { + var curChar = current.charAt(j) + nextMatches = nextMatches && curChar === next.charAt(j) + prevMatches = prevMatches && curChar === prev.charAt(j) + if (!nextMatches && !prevMatches) { + j ++ + break + } + } + prev = current + if (j === cl) { + abbrevs[current] = current + continue + } + for (var a = current.substr(0, j) ; j <= cl ; j ++) { + abbrevs[a] = current + a += current.charAt(j) + } + } + return abbrevs +} + +function lexSort (a, b) { + return a === b ? 0 : a > b ? 1 : -1 +} diff --git a/node_modules/abbrev/package.json b/node_modules/abbrev/package.json new file mode 100644 index 0000000..bf4e801 --- /dev/null +++ b/node_modules/abbrev/package.json @@ -0,0 +1,21 @@ +{ + "name": "abbrev", + "version": "1.1.1", + "description": "Like ruby's abbrev module, but in js", + "author": "Isaac Z. Schlueter ", + "main": "abbrev.js", + "scripts": { + "test": "tap test.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": "http://github.com/isaacs/abbrev-js", + "license": "ISC", + "devDependencies": { + "tap": "^10.1" + }, + "files": [ + "abbrev.js" + ] +} diff --git a/node_modules/agent-base/README.md b/node_modules/agent-base/README.md new file mode 100644 index 0000000..256f1f3 --- /dev/null +++ b/node_modules/agent-base/README.md @@ -0,0 +1,145 @@ +agent-base +========== +### Turn a function into an [`http.Agent`][http.Agent] instance +[![Build Status](https://github.com/TooTallNate/node-agent-base/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-agent-base/actions?workflow=Node+CI) + +This module provides an `http.Agent` generator. That is, you pass it an async +callback function, and it returns a new `http.Agent` instance that will invoke the +given callback function when sending outbound HTTP requests. + +#### Some subclasses: + +Here's some more interesting uses of `agent-base`. +Send a pull request to list yours! + + * [`http-proxy-agent`][http-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTP endpoints + * [`https-proxy-agent`][https-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTPS endpoints + * [`pac-proxy-agent`][pac-proxy-agent]: A PAC file proxy `http.Agent` implementation for HTTP and HTTPS + * [`socks-proxy-agent`][socks-proxy-agent]: A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS + + +Installation +------------ + +Install with `npm`: + +``` bash +$ npm install agent-base +``` + + +Example +------- + +Here's a minimal example that creates a new `net.Socket` connection to the server +for every HTTP request (i.e. the equivalent of `agent: false` option): + +```js +var net = require('net'); +var tls = require('tls'); +var url = require('url'); +var http = require('http'); +var agent = require('agent-base'); + +var endpoint = 'http://nodejs.org/api/'; +var parsed = url.parse(endpoint); + +// This is the important part! 
+parsed.agent = agent(function (req, opts) { + var socket; + // `secureEndpoint` is true when using the https module + if (opts.secureEndpoint) { + socket = tls.connect(opts); + } else { + socket = net.connect(opts); + } + return socket; +}); + +// Everything else works just like normal... +http.get(parsed, function (res) { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + +Returning a Promise or using an `async` function is also supported: + +```js +agent(async function (req, opts) { + await sleep(1000); + // etc… +}); +``` + +Return another `http.Agent` instance to "pass through" the responsibility +for that HTTP request to that agent: + +```js +agent(function (req, opts) { + return opts.secureEndpoint ? https.globalAgent : http.globalAgent; +}); +``` + + +API +--- + +## Agent(Function callback[, Object options]) → [http.Agent][] + +Creates a base `http.Agent` that will execute the callback function `callback` +for every HTTP request that it is used as the `agent` for. The callback function +is responsible for creating a `stream.Duplex` instance of some kind that will be +used as the underlying socket in the HTTP request. + +The `options` object accepts the following properties: + + * `timeout` - Number - Timeout for the `callback()` function in milliseconds. Defaults to Infinity (optional). + +The callback function should have the following signature: + +### callback(http.ClientRequest req, Object options, Function cb) → undefined + +The ClientRequest `req` can be accessed to read request headers and +and the path, etc. The `options` object contains the options passed +to the `http.request()`/`https.request()` function call, and is formatted +to be directly passed to `net.connect()`/`tls.connect()`, or however +else you want a Socket to be created. Pass the created socket to +the callback function `cb` once created, and the HTTP request will +continue to proceed. + +If the `https` module is used to invoke the HTTP request, then the +`secureEndpoint` property on `options` _will be set to `true`_. + + +License +------- + +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
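Editorial aside (not part of the vendored README): the `timeout` option documented in the API section above is the one knob the README examples don't exercise. A minimal sketch reusing the same socket-creating callback; the endpoint and the 1000 ms figure are arbitrary choices for illustration:

```js
var net = require('net');
var tls = require('tls');
var https = require('https');
var agent = require('agent-base');

// Give the callback at most one second to hand back a socket; after
// that the request errors out instead of hanging indefinitely.
var timedAgent = agent(function (req, opts) {
  // `secureEndpoint` is true when the request came in via `https`
  return opts.secureEndpoint ? tls.connect(opts) : net.connect(opts);
}, { timeout: 1000 });

https.get('https://nodejs.org/api/', { agent: timedAgent }, function (res) {
  console.log('status:', res.statusCode);
  res.resume(); // drain the response body
});
```

Per `dist/src/index.js` further down, a callback that misses the deadline causes the request to emit an error whose `code` is `'ETIMEOUT'`.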
+ +[http-proxy-agent]: https://github.com/TooTallNate/node-http-proxy-agent +[https-proxy-agent]: https://github.com/TooTallNate/node-https-proxy-agent +[pac-proxy-agent]: https://github.com/TooTallNate/node-pac-proxy-agent +[socks-proxy-agent]: https://github.com/TooTallNate/node-socks-proxy-agent +[http.Agent]: https://nodejs.org/api/http.html#http_class_http_agent diff --git a/node_modules/agent-base/dist/src/index.d.ts b/node_modules/agent-base/dist/src/index.d.ts new file mode 100644 index 0000000..bc4ab74 --- /dev/null +++ b/node_modules/agent-base/dist/src/index.d.ts @@ -0,0 +1,78 @@ +/// +import net from 'net'; +import http from 'http'; +import https from 'https'; +import { Duplex } from 'stream'; +import { EventEmitter } from 'events'; +declare function createAgent(opts?: createAgent.AgentOptions): createAgent.Agent; +declare function createAgent(callback: createAgent.AgentCallback, opts?: createAgent.AgentOptions): createAgent.Agent; +declare namespace createAgent { + interface ClientRequest extends http.ClientRequest { + _last?: boolean; + _hadError?: boolean; + method: string; + } + interface AgentRequestOptions { + host?: string; + path?: string; + port: number; + } + interface HttpRequestOptions extends AgentRequestOptions, Omit { + secureEndpoint: false; + } + interface HttpsRequestOptions extends AgentRequestOptions, Omit { + secureEndpoint: true; + } + type RequestOptions = HttpRequestOptions | HttpsRequestOptions; + type AgentLike = Pick | http.Agent; + type AgentCallbackReturn = Duplex | AgentLike; + type AgentCallbackCallback = (err?: Error | null, socket?: createAgent.AgentCallbackReturn) => void; + type AgentCallbackPromise = (req: createAgent.ClientRequest, opts: createAgent.RequestOptions) => createAgent.AgentCallbackReturn | Promise; + type AgentCallback = typeof Agent.prototype.callback; + type AgentOptions = { + timeout?: number; + }; + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. + * + * @param {Function} callback + * @api public + */ + class Agent extends EventEmitter { + timeout: number | null; + maxFreeSockets: number; + maxTotalSockets: number; + maxSockets: number; + sockets: { + [key: string]: net.Socket[]; + }; + freeSockets: { + [key: string]: net.Socket[]; + }; + requests: { + [key: string]: http.IncomingMessage[]; + }; + options: https.AgentOptions; + private promisifiedCallback?; + private explicitDefaultPort?; + private explicitProtocol?; + constructor(callback?: createAgent.AgentCallback | createAgent.AgentOptions, _opts?: createAgent.AgentOptions); + get defaultPort(): number; + set defaultPort(v: number); + get protocol(): string; + set protocol(v: string); + callback(req: createAgent.ClientRequest, opts: createAgent.RequestOptions, fn: createAgent.AgentCallbackCallback): void; + callback(req: createAgent.ClientRequest, opts: createAgent.RequestOptions): createAgent.AgentCallbackReturn | Promise; + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. 
+ * + * @api public + */ + addRequest(req: ClientRequest, _opts: RequestOptions): void; + freeSocket(socket: net.Socket, opts: AgentOptions): void; + destroy(): void; + } +} +export = createAgent; diff --git a/node_modules/agent-base/dist/src/index.js b/node_modules/agent-base/dist/src/index.js new file mode 100644 index 0000000..bfd9e22 --- /dev/null +++ b/node_modules/agent-base/dist/src/index.js @@ -0,0 +1,203 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const events_1 = require("events"); +const debug_1 = __importDefault(require("debug")); +const promisify_1 = __importDefault(require("./promisify")); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. + * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. 
+ delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. + req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/agent-base/dist/src/index.js.map b/node_modules/agent-base/dist/src/index.js.map new file mode 100644 index 0000000..bd118ab --- /dev/null +++ b/node_modules/agent-base/dist/src/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;AAIA,mCAAsC;AACtC,kDAAgC;AAChC,4DAAoC;AAEpC,MAAM,KAAK,GAAG,eAAW,CAAC,YAAY,CAAC,CAAC;AAExC,SAAS,OAAO,CAAC,CAAM;IACtB,OAAO,OAAO,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,UAAU,KAAK,UAAU,CAAC;AACzD,CAAC;AAED,SAAS,gBAAgB;IACxB,MAAM,EAAE,KAAK,EAAE,GAAG,IAAI,KAAK,EAAE,CAAC;IAC9B,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,KAAK,CAAC;IAC5C,OAAO,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,IAAK,CAAC,CAAC,OAAO,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACxG,CAAC;AAOD,SAAS,WAAW,CACnB,QAA+D,EAC/D,IAA+B;IAE/B,OAAO,IAAI,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC9C,CAAC;AAED,WAAU,WAAW;IAmDpB;;;;;;OAMG;IACH,MAAa,KAAM,SAAQ,qBAAY;QAmBtC,YACC,QAA+D,EAC/D,KAAgC;YAEhC,KAAK,EAAE,CAAC;YAER,IAAI,IAAI,GAAG,KAAK,CAAC;YACjB,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;gBACnC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;aACzB;iBAAM,IAAI,QAAQ,EAAE;gBACpB,IAAI,GAAG,QAAQ,CAAC;aAChB;YAED,0DAA0D;YAC1D,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,EAAE;gBAC7C,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;aAC5B;YAED,+DAA+D;YAC/D,0DAA0D;YAC1D,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;YACxB,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;YACpB,IAAI,CAAC,eAAe,GAAG,QAAQ,CAAC;YAChC,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;YAClB,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;YACtB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAC;YACnB,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;QACnB,CAAC;QAED,IAAI,WAAW;YACd,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;gBACjD,OAAO,IAAI,CAAC,mBAAmB,CAAC;aAChC;YACD,OAAO,gBAAgB,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;QACtC,CAAC;QAED,IAAI,WAAW,CAAC,CAAS;YACxB,IAAI,CAAC,mBAAmB,GAAG,CAAC,CAAC;QAC9B,CAAC;QAED,IAAI,QAAQ;YACX,IAAI,OAAO,IAAI,CAAC,gBAAgB,KAAK,QAAQ,EAAE;gBAC9C,OAAO,IAAI,CAAC,gBAAgB,CAAC;aAC7B;YACD,OAAO,gBAAgB,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;QAChD,CAAC;QAED,IAAI,QAAQ,CAAC,CAAS;YACrB,IAAI,CAAC,gBAAgB,GAAG,CAAC,CAAC;QAC3B,CAAC;QAaD,QAAQ,CACP,GAA8B,EAC9B,IAA8B,EAC9B,EAAsC;YAKtC,MAAM,IAAI,KAAK,CACd,yFAAyF,CACzF,CAAC;QACH,CAAC;QAED;;;;;WAKG;QACH,UAAU,CAAC,GAAkB,EAAE,KAAqB;YACnD,MAAM,IAAI,qBAAwB,KAAK,CAAE,CAAC;YAE1C,IAAI,OAAO,IAAI,CAAC,cAAc,KAAK,SAAS,EAAE;gBAC7C,IAAI,CAAC,cAAc,GAAG,gBAAgB,EAAE,CAAC;aACzC;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;gBACtB,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;aACxB;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;gBACtB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;aAC3C;YAED,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,EAAE;gBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;aACzD;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE;gBAC3B,2DAA2D;gBAC3D,0DAA0D;gBAC1D,4DAA4D;gBAC5D,8CAA8C;gBAC9C,OAAO,IAAI,CAAC,IAAI,CAAC;aACjB;YAED,OAAO,IAAI,CAAC,KAAK,CAAC;YAClB,OAAO,IAAI,CAAC,QAAQ,CAAC;YACrB,OAAO,IAAI,CAAC,aAAa,CAAC;YAC1B,OAAO,IAAI,CAAC,WAAW,CAAC;YACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC;YAE7B,kCAAkC;YAClC,2CAA2C;YAC3C,GAAG,CAAC,KAAK,GAAG,IAAI,CAAC;YACjB,GAAG,CAAC,eAAe,GAAG,KAAK,CAAC;YAE5B,IAAI,QAAQ,GAAG,KAAK,CAAC;YACrB,IAAI,SAAS,GAAyC,IAAI,CAAC;YAC3D,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAE/C,MAAM,OAAO,GAAG,CAAC,GAA0B,EAAE,EAAE;gBAC9C,IAAI,GAAG,CAAC,SAAS;oBAAE,OAAO;gBAC1B,GAAG,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;gBACvB,yDAAyD;gBACzD,iEAAiE;gBACjE,GAAG,CAAC,SAAS,GAAG,IAAI,CAAC;YACtB,CAAC,CAAC;YAEF,MAAM,SAAS,GAAG,GAAG,EAAE;gBACtB,SAAS,GAAG,IAAI,CAAC;gBACjB,QAAQ,GAAG,IAAI,CAAC;gBAChB,MAAM,GAAG,GAA0B,IAAI,KAAK,CAC3C,sDAAsD,SAAS,IAAI,CACnE,CAAC;gBACF,GAAG,CAAC,IAAI,GAAG,UAAU,CAAC;gBACtB,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,MAAM,a
AAa,GAAG,CAAC,GAA0B,EAAE,EAAE;gBACpD,IAAI,QAAQ;oBAAE,OAAO;gBACrB,IAAI,SAAS,KAAK,IAAI,EAAE;oBACvB,YAAY,CAAC,SAAS,CAAC,CAAC;oBACxB,SAAS,GAAG,IAAI,CAAC;iBACjB;gBACD,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,MAAM,QAAQ,GAAG,CAAC,MAA2B,EAAE,EAAE;gBAChD,IAAI,QAAQ;oBAAE,OAAO;gBACrB,IAAI,SAAS,IAAI,IAAI,EAAE;oBACtB,YAAY,CAAC,SAAS,CAAC,CAAC;oBACxB,SAAS,GAAG,IAAI,CAAC;iBACjB;gBAED,IAAI,OAAO,CAAC,MAAM,CAAC,EAAE;oBACpB,oDAAoD;oBACpD,wDAAwD;oBACxD,eAAe;oBACf,KAAK,CACJ,6CAA6C,EAC7C,MAAM,CAAC,WAAW,CAAC,IAAI,CACvB,CAAC;oBACD,MAA4B,CAAC,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;oBACpD,OAAO;iBACP;gBAED,IAAI,MAAM,EAAE;oBACX,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,EAAE;wBACxB,IAAI,CAAC,UAAU,CAAC,MAAoB,EAAE,IAAI,CAAC,CAAC;oBAC7C,CAAC,CAAC,CAAC;oBACH,GAAG,CAAC,QAAQ,CAAC,MAAoB,CAAC,CAAC;oBACnC,OAAO;iBACP;gBAED,MAAM,GAAG,GAAG,IAAI,KAAK,CACpB,qDAAqD,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,IAAI,IAAI,CAC/E,CAAC;gBACF,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACxC,OAAO,CAAC,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC,CAAC;gBAChD,OAAO;aACP;YAED,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE;gBAC9B,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,IAAI,CAAC,EAAE;oBAC9B,KAAK,CAAC,gDAAgD,CAAC,CAAC;oBACxD,IAAI,CAAC,mBAAmB,GAAG,mBAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;iBACpD;qBAAM;oBACN,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,QAAQ,CAAC;iBACzC;aACD;YAED,IAAI,OAAO,SAAS,KAAK,QAAQ,IAAI,SAAS,GAAG,CAAC,EAAE;gBACnD,SAAS,GAAG,UAAU,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;aAC7C;YAED,IAAI,MAAM,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACpD,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAC9B;YAED,IAAI;gBACH,KAAK,CACJ,qCAAqC,EACrC,IAAI,CAAC,QAAQ,EACb,GAAG,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,IAAI,EAAE,CAC3B,CAAC;gBACF,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,mBAAmB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC,IAAI,CACxD,QAAQ,EACR,aAAa,CACb,CAAC;aACF;YAAC,OAAO,GAAG,EAAE;gBACb,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC;aACzC;QACF,CAAC;QAED,UAAU,CAAC,MAAkB,EAAE,IAAkB;YAChD,KAAK,CAAC,sBAAsB,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;YAC7D,MAAM,CAAC,OAAO,EAAE,CAAC;QAClB,CAAC;QAED,OAAO;YACN,KAAK,CAAC,qBAAqB,EAAE,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QACrD,CAAC;KACD;IAxPY,iBAAK,QAwPjB,CAAA;IAED,uCAAuC;IACvC,WAAW,CAAC,SAAS,GAAG,WAAW,CAAC,KAAK,CAAC,SAAS,CAAC;AACrD,CAAC,EAtTS,WAAW,KAAX,WAAW,QAsTpB;AAED,iBAAS,WAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/agent-base/dist/src/promisify.d.ts b/node_modules/agent-base/dist/src/promisify.d.ts new file mode 100644 index 0000000..0268869 --- /dev/null +++ b/node_modules/agent-base/dist/src/promisify.d.ts @@ -0,0 +1,4 @@ +import { ClientRequest, RequestOptions, AgentCallbackCallback, AgentCallbackPromise } from './index'; +declare type LegacyCallback = (req: ClientRequest, opts: RequestOptions, fn: AgentCallbackCallback) => void; +export default function promisify(fn: LegacyCallback): AgentCallbackPromise; +export {}; diff --git a/node_modules/agent-base/dist/src/promisify.js b/node_modules/agent-base/dist/src/promisify.js new file mode 100644 index 0000000..b2f6132 --- /dev/null +++ b/node_modules/agent-base/dist/src/promisify.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports.default = promisify; +//# sourceMappingURL=promisify.js.map \ No newline at end of file diff --git a/node_modules/agent-base/dist/src/promisify.js.map 
b/node_modules/agent-base/dist/src/promisify.js.map new file mode 100644 index 0000000..4bff9bf --- /dev/null +++ b/node_modules/agent-base/dist/src/promisify.js.map @@ -0,0 +1 @@ +{"version":3,"file":"promisify.js","sourceRoot":"","sources":["../../src/promisify.ts"],"names":[],"mappings":";;AAeA,SAAwB,SAAS,CAAC,EAAkB;IACnD,OAAO,UAAsB,GAAkB,EAAE,IAAoB;QACpE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACtC,EAAE,CAAC,IAAI,CACN,IAAI,EACJ,GAAG,EACH,IAAI,EACJ,CAAC,GAA6B,EAAE,GAAyB,EAAE,EAAE;gBAC5D,IAAI,GAAG,EAAE;oBACR,MAAM,CAAC,GAAG,CAAC,CAAC;iBACZ;qBAAM;oBACN,OAAO,CAAC,GAAG,CAAC,CAAC;iBACb;YACF,CAAC,CACD,CAAC;QACH,CAAC,CAAC,CAAC;IACJ,CAAC,CAAC;AACH,CAAC;AAjBD,4BAiBC"} \ No newline at end of file diff --git a/node_modules/agent-base/package.json b/node_modules/agent-base/package.json new file mode 100644 index 0000000..fadce3a --- /dev/null +++ b/node_modules/agent-base/package.json @@ -0,0 +1,64 @@ +{ + "name": "agent-base", + "version": "6.0.2", + "description": "Turn a function into an `http.Agent` instance", + "main": "dist/src/index", + "typings": "dist/src/index", + "files": [ + "dist/src", + "src" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "postbuild": "cpy --parents src test '!**/*.ts' dist", + "test": "mocha --reporter spec dist/test/*.js", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-agent-base.git" + }, + "keywords": [ + "http", + "agent", + "base", + "barebones", + "https" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/node-agent-base/issues" + }, + "dependencies": { + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/mocha": "^5.2.7", + "@types/node": "^14.0.20", + "@types/semver": "^7.1.0", + "@types/ws": "^6.0.3", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "async-listen": "^1.2.0", + "cpy-cli": "^2.0.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.0", + "rimraf": "^3.0.0", + "semver": "^7.1.2", + "typescript": "^3.5.3", + "ws": "^3.0.0" + }, + "engines": { + "node": ">= 6.0.0" + } +} diff --git a/node_modules/agent-base/src/index.ts b/node_modules/agent-base/src/index.ts new file mode 100644 index 0000000..a47ccd4 --- /dev/null +++ b/node_modules/agent-base/src/index.ts @@ -0,0 +1,345 @@ +import net from 'net'; +import http from 'http'; +import https from 'https'; +import { Duplex } from 'stream'; +import { EventEmitter } from 'events'; +import createDebug from 'debug'; +import promisify from './promisify'; + +const debug = createDebug('agent-base'); + +function isAgent(v: any): v is createAgent.AgentLike { + return Boolean(v) && typeof v.addRequest === 'function'; +} + +function isSecureEndpoint(): boolean { + const { stack } = new Error(); + if (typeof stack !== 'string') return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} + +function createAgent(opts?: createAgent.AgentOptions): createAgent.Agent; +function createAgent( + callback: createAgent.AgentCallback, + opts?: createAgent.AgentOptions +): createAgent.Agent; +function createAgent( + callback?: createAgent.AgentCallback | 
createAgent.AgentOptions, + opts?: createAgent.AgentOptions +) { + return new createAgent.Agent(callback, opts); +} + +namespace createAgent { + export interface ClientRequest extends http.ClientRequest { + _last?: boolean; + _hadError?: boolean; + method: string; + } + + export interface AgentRequestOptions { + host?: string; + path?: string; + // `port` on `http.RequestOptions` can be a string or undefined, + // but `net.TcpNetConnectOpts` expects only a number + port: number; + } + + export interface HttpRequestOptions + extends AgentRequestOptions, + Omit { + secureEndpoint: false; + } + + export interface HttpsRequestOptions + extends AgentRequestOptions, + Omit { + secureEndpoint: true; + } + + export type RequestOptions = HttpRequestOptions | HttpsRequestOptions; + + export type AgentLike = Pick | http.Agent; + + export type AgentCallbackReturn = Duplex | AgentLike; + + export type AgentCallbackCallback = ( + err?: Error | null, + socket?: createAgent.AgentCallbackReturn + ) => void; + + export type AgentCallbackPromise = ( + req: createAgent.ClientRequest, + opts: createAgent.RequestOptions + ) => + | createAgent.AgentCallbackReturn + | Promise; + + export type AgentCallback = typeof Agent.prototype.callback; + + export type AgentOptions = { + timeout?: number; + }; + + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. + * + * @param {Function} callback + * @api public + */ + export class Agent extends EventEmitter { + public timeout: number | null; + public maxFreeSockets: number; + public maxTotalSockets: number; + public maxSockets: number; + public sockets: { + [key: string]: net.Socket[]; + }; + public freeSockets: { + [key: string]: net.Socket[]; + }; + public requests: { + [key: string]: http.IncomingMessage[]; + }; + public options: https.AgentOptions; + private promisifiedCallback?: createAgent.AgentCallbackPromise; + private explicitDefaultPort?: number; + private explicitProtocol?: string; + + constructor( + callback?: createAgent.AgentCallback | createAgent.AgentOptions, + _opts?: createAgent.AgentOptions + ) { + super(); + + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } else if (callback) { + opts = callback; + } + + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + + get defaultPort(): number { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + + set defaultPort(v: number) { + this.explicitDefaultPort = v; + } + + get protocol(): string { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 
'https:' : 'http:'; + } + + set protocol(v: string) { + this.explicitProtocol = v; + } + + callback( + req: createAgent.ClientRequest, + opts: createAgent.RequestOptions, + fn: createAgent.AgentCallbackCallback + ): void; + callback( + req: createAgent.ClientRequest, + opts: createAgent.RequestOptions + ): + | createAgent.AgentCallbackReturn + | Promise; + callback( + req: createAgent.ClientRequest, + opts: createAgent.AgentOptions, + fn?: createAgent.AgentCallbackCallback + ): + | createAgent.AgentCallbackReturn + | Promise + | void { + throw new Error( + '"agent-base" has no default implementation, you must subclass and override `callback()`' + ); + } + + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req: ClientRequest, _opts: RequestOptions): void { + const opts: RequestOptions = { ..._opts }; + + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + + if (opts.host == null) { + opts.host = 'localhost'; + } + + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + + let timedOut = false; + let timeoutId: ReturnType | null = null; + const timeoutMs = opts.timeout || this.timeout; + + const onerror = (err: NodeJS.ErrnoException) => { + if (req._hadError) return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
+ req._hadError = true; + }; + + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err: NodeJS.ErrnoException = new Error( + `A "socket" was not created for HTTP request before ${timeoutMs}ms` + ); + err.code = 'ETIMEOUT'; + onerror(err); + }; + + const callbackError = (err: NodeJS.ErrnoException) => { + if (timedOut) return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + + const onsocket = (socket: AgentCallbackReturn) => { + if (timedOut) return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug( + 'Callback returned another Agent instance %o', + socket.constructor.name + ); + (socket as createAgent.Agent).addRequest(req, opts); + return; + } + + if (socket) { + socket.once('free', () => { + this.freeSocket(socket as net.Socket, opts); + }); + req.onSocket(socket as net.Socket); + return; + } + + const err = new Error( + `no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\`` + ); + onerror(err); + }; + + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify(this.callback); + } else { + this.promisifiedCallback = this.callback; + } + } + + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + + try { + debug( + 'Resolving socket for %o request: %o', + opts.protocol, + `${req.method} ${req.path}` + ); + Promise.resolve(this.promisifiedCallback(req, opts)).then( + onsocket, + callbackError + ); + } catch (err) { + Promise.reject(err).catch(callbackError); + } + } + + freeSocket(socket: net.Socket, opts: AgentOptions) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +} + +export = createAgent; diff --git a/node_modules/agent-base/src/promisify.ts b/node_modules/agent-base/src/promisify.ts new file mode 100644 index 0000000..60cc662 --- /dev/null +++ b/node_modules/agent-base/src/promisify.ts @@ -0,0 +1,33 @@ +import { + Agent, + ClientRequest, + RequestOptions, + AgentCallbackCallback, + AgentCallbackPromise, + AgentCallbackReturn +} from './index'; + +type LegacyCallback = ( + req: ClientRequest, + opts: RequestOptions, + fn: AgentCallbackCallback +) => void; + +export default function promisify(fn: LegacyCallback): AgentCallbackPromise { + return function(this: Agent, req: ClientRequest, opts: RequestOptions) { + return new Promise((resolve, reject) => { + fn.call( + this, + req, + opts, + (err: Error | null | undefined, rtn?: AgentCallbackReturn) => { + if (err) { + reject(err); + } else { + resolve(rtn); + } + } + ); + }); + }; +} diff --git a/node_modules/agentkeepalive/History.md b/node_modules/agentkeepalive/History.md new file mode 100644 index 0000000..6877834 --- /dev/null +++ b/node_modules/agentkeepalive/History.md @@ -0,0 +1,268 @@ + +4.5.0 / 2023-08-06 +================== + +**others** + * 
[[`1e5e312`](http://github.com/node-modules/agentkeepalive/commit/1e5e312f36491243372dbfee0dd47607e7b3d94a)] - deps: remove debug and depd (#114) (fengmk2 <>) + +4.4.0 / 2023-08-05 +================== + +**features** + * [[`c7c1e93`](http://github.com/node-modules/agentkeepalive/commit/c7c1e93beba7310d7c2cc9647dd211a686d21cac)] - feat: return socket from createConnection (#113) (Nabeel Bukhari <>) + +4.3.0 / 2023-03-06 +================== + +**others** + * [[`6f9852b`](http://github.com/node-modules/agentkeepalive/commit/6f9852bf6f674846103e403fd9c84e92fc24f820)] - deps: depd@2.0.0 (#109) (Brian DeHamer <>) + * [[`fd4bd9b`](http://github.com/node-modules/agentkeepalive/commit/fd4bd9b0e0f051de3cb49559d1b0d534a0ded18c)] - test: use npm install (#110) (fengmk2 <>) + * [[`d52822c`](http://github.com/node-modules/agentkeepalive/commit/d52822c1243c689df1c8232a3bb14139cf87fae5)] - chore: update contributors (fengmk2 <>) + +4.2.1 / 2022-02-21 +================== + +**fixes** + * [[`8b13b5c`](http://github.com/node-modules/agentkeepalive/commit/8b13b5ca797f4779a0a8d393ad8ecb622cd27987)] - fix: explicitly set `| undefined` in type definitions (#99) (Benoit Lemoine <>) + +4.2.0 / 2021-12-31 +================== + +**fixes** + * [[`f418c67`](http://github.com/node-modules/agentkeepalive/commit/f418c67a63c061c7261592d4553bc455e0b0d306)] - fix: change `freeSocketTimeout` default value to 4000 (#102) (fengmk2 <>) + +**others** + * [[`bc2a1ce`](http://github.com/node-modules/agentkeepalive/commit/bc2a1cea0884b4d18b0d244bf00006d9107963df)] - doc(readme): making `timeout`'s default clear (#100) (Aaron <>) + +4.1.4 / 2021-02-05 +================== + +**fixes** + * [[`4d04794`](http://github.com/node-modules/agentkeepalive/commit/4d047946b1547b4edff92ea40205aee4f0c8aa46)] - fix(types): correct `Https` constructor argument (#89) (Simen Bekkhus <>) + +4.1.3 / 2020-06-15 +================== + +**fixes** + * [[`4ba9f9c`](http://github.com/node-modules/agentkeepalive/commit/4ba9f9c844f2a6b8037ce56599d25c69ef054d91)] - fix: compatible with node v12.16.3 (#91) (killa <>) + +4.1.2 / 2020-04-25 +================== + +**fixes** + * [[`de66b02`](http://github.com/node-modules/agentkeepalive/commit/de66b0206d064a97129c2c31bcdabd4d64557b91)] - fix: detect http request timeout handler (#88) (fengmk2 <>) + +4.1.1 / 2020-04-25 +================== + +**fixes** + * [[`bbd20c0`](http://github.com/node-modules/agentkeepalive/commit/bbd20c03b8cf7dfb00b3aad1ada26d4ab90d2d6e)] - fix: definition error (#87) (吖猩 <>) + +**others** + * [[`3b01699`](http://github.com/node-modules/agentkeepalive/commit/3b01699b8e90022d5f56898dd709e4fe7ee7cdaa)] - test: run test on node 12 (#84) (Igor Savin <>) + +4.1.0 / 2019-10-12 +================== + +**features** + * [[`fe33b80`](http://github.com/node-modules/agentkeepalive/commit/fe33b800acc09109388bfe65107550952b6fc7b0)] - feat: Add `reusedSocket` property on client request (#82) (Weijia Wang <>) + +**others** + * [[`77ba744`](http://github.com/node-modules/agentkeepalive/commit/77ba744667bb6b9e5986a53e5222f62094db12b9)] - docs: fix grammar in readme (#81) (Herrington Darkholme <<2883231+HerringtonDarkholme@users.noreply.github.com>>) + +4.0.2 / 2019-02-19 +================== + +**fixes** + * [[`56d4a9b`](http://github.com/node-modules/agentkeepalive/commit/56d4a9b2a4499ea28943ddb590358d7831a02cb1)] - fix: HttpAgent export = internal (#74) (Andrew Leedham <>) + +4.0.1 / 2019-02-19 +================== + +**fixes** + * 
[[`bad1ac0`](http://github.com/node-modules/agentkeepalive/commit/bad1ac0e710fbc486717e14e68c59266d35df6a8)] - fix: HttpsAgent Type Definition (#71) (#72) (Andrew Leedham <>) + * [[`f48a4a7`](http://github.com/node-modules/agentkeepalive/commit/f48a4a701ea6fbe43781c91e1c0aaad6e328ac7f)] - fix: export interface (#70) (Vinay <>) + +**others** + * [[`9124343`](http://github.com/node-modules/agentkeepalive/commit/91243437cfdd324cb97f39dee76746d5e5f4cd72)] - chore: add agent.options.keepAlive instead agent.keepAlive (fengmk2 <>) + * [[`d177d40`](http://github.com/node-modules/agentkeepalive/commit/d177d40422fe7296990b4e270cf498e3f33c18fa)] - test: add request timeout bigger than agent timeout cases (fengmk2 <>) + +4.0.0 / 2018-10-23 +================== + +**features** + * [[`5c9f3bb`](http://github.com/node-modules/agentkeepalive/commit/5c9f3bbd60555744edcf777105b148982a1a42b6)] - feat: impl the new Agent extend http.Agent (fengmk2 <>) + +**others** + * [[`498c8f1`](http://github.com/node-modules/agentkeepalive/commit/498c8f13cf76600d3dd6e1c91cdf2d8292355dff)] - chore: move LICENSE from readme to file (fengmk2 <>) + * [[`4f39894`](http://github.com/node-modules/agentkeepalive/commit/4f398942ba2f90cf4501239e56ac4e6344931a01)] - bugfix: support agent.options.timeout on https agent (fengmk2 <>) + +3.5.2 / 2018-10-19 +================== + +**fixes** + * [[`5751fc1`](http://github.com/node-modules/agentkeepalive/commit/5751fc1180ed6544602c681ffbd08ca66a0cb12c)] - fix: sockLen being miscalculated when removing sockets (#60) (Ehden Sinai <>) + +3.5.1 / 2018-07-31 +================== + +**fixes** + * [[`495f1ab`](http://github.com/node-modules/agentkeepalive/commit/495f1ab625d43945d72f68096b97db723d4f0657)] - fix: add the lost npm files (#66) (Henry Zhuang <>) + +3.5.0 / 2018-07-31 +================== + +**features** + * [[`16f5aea`](http://github.com/node-modules/agentkeepalive/commit/16f5aeadfda57f1c602652f1472a63cc83cd05bf)] - feat: add typing define. 
(#65) (Henry Zhuang <>) + +**others** + * [[`28fa062`](http://github.com/node-modules/agentkeepalive/commit/28fa06246fb5103f88ebeeb8563757a9078b8157)] - docs: add "per host" to description of maxFreeSockets (tony-gutierrez <>) + * [[`7df2577`](http://github.com/node-modules/agentkeepalive/commit/7df25774f00a1031ca4daad2878a17e0539072a2)] - test: run test on node 10 (#63) (fengmk2 <>) + +3.4.1 / 2018-03-08 +================== + +**fixes** + * [[`4d3a3b1`](http://github.com/node-modules/agentkeepalive/commit/4d3a3b1f7b16595febbbd39eeed72b2663549014)] - fix: Handle ipv6 addresses in host-header correctly with TLS (#53) (Mattias Holmlund <>) + +**others** + * [[`55a7a5c`](http://github.com/node-modules/agentkeepalive/commit/55a7a5cd33e97f9a8370083dcb041c5552f10ac9)] - test: stop timer after test end (fengmk2 <>) + +3.4.0 / 2018-02-27 +================== + +**features** + * [[`bc7cadb`](http://github.com/node-modules/agentkeepalive/commit/bc7cadb30ecd2071e2b341ac53ae1a2b8155c43d)] - feat: use socket custom freeSocketKeepAliveTimeout first (#59) (fengmk2 <>) + +**others** + * [[`138eda8`](http://github.com/node-modules/agentkeepalive/commit/138eda81e10b632aaa87bea0cb66d8667124c4e8)] - doc: fix `keepAliveMsecs` params description (#55) (Hongcai Deng <>) + +3.3.0 / 2017-06-20 +================== + + * feat: add statusChanged getter (#51) + * chore: format License + +3.2.0 / 2017-06-10 +================== + + * feat: add expiring active sockets + * test: add node 8 (#49) + +3.1.0 / 2017-02-20 +================== + + * feat: timeout support humanize ms (#48) + +3.0.0 / 2016-12-20 +================== + + * fix: emit agent socket close event + * test: add remove excess calls to removeSocket + * test: use egg-ci + * test: refactor test with eslint rules + * feat: merge _http_agent.js from 7.2.1 + +2.2.0 / 2016-06-26 +================== + + * feat: Add browser shim (noop) for isomorphic use. (#39) + * chore: add security check badge + +2.1.1 / 2016-04-06 +================== + + * https: fix ssl socket leak when keepalive is used + * chore: remove circle ci image + +2.1.0 / 2016-04-02 +================== + + * fix: opened sockets number overflow maxSockets + +2.0.5 / 2016-03-16 +================== + + * fix: pick _evictSession to httpsAgent + +2.0.4 / 2016-03-13 +================== + + * test: add Circle ci + * test: add appveyor ci build + * refactor: make sure only one error listener + * chore: use codecov + * fix: handle idle socket error + * test: run on more node versions + +2.0.3 / 2015-08-03 +================== + + * fix: add default error handler to avoid Unhandled error event throw + +2.0.2 / 2015-04-25 +================== + + * fix: remove socket from freeSockets on 'timeout' (@pmalouin) + +2.0.1 / 2015-04-19 +================== + + * fix: add timeoutSocketCount to getCurrentStatus() + * feat(getCurrentStatus): add getCurrentStatus + +2.0.0 / 2015-04-01 +================== + + * fix: socket.destroyed always be undefined on 0.10.x + * Make it compatible with node v0.10.x (@lattmann) + +1.2.1 / 2015-03-23 +================== + + * patch from iojs: don't overwrite servername option + * patch commits from joyent/node + * add max sockets test case + * add nagle algorithm delayed link + +1.2.0 / 2014-09-02 +================== + + * allow set keepAliveTimeout = 0 + * support timeout on working socket. 
fixed #6

1.1.0 / 2014-08-28
==================

 * add some socket counter for deep monitor

1.0.0 / 2014-08-13
==================

 * update _http_agent, only support 0.11+, only support node 0.11.0+

0.2.2 / 2013-11-19
==================

 * support node 0.8 and node 0.10

0.2.1 / 2013-11-08
==================

 * fix socket does not timeout bug, it will hang on life, must use 0.2.x on node 0.11

0.2.0 / 2013-11-06
==================

 * use keepalive agent on node 0.11+ impl

0.1.5 / 2013-06-24
==================

 * support coveralls
 * add node 0.10 test
 * add 0.8.22 original https.js
 * add original http.js module to diff
 * update jscover
 * mv pem to fixtures
 * add https agent usage
diff --git a/node_modules/agentkeepalive/LICENSE b/node_modules/agentkeepalive/LICENSE
new file mode 100644
index 0000000..941258c
--- /dev/null
+++ b/node_modules/agentkeepalive/LICENSE
@@ -0,0 +1,23 @@
+The MIT License
+
+Copyright(c) node-modules and other contributors.
+Copyright(c) 2012 - 2015 fengmk2 <fengmk2@gmail.com>
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/agentkeepalive/README.md b/node_modules/agentkeepalive/README.md
new file mode 100644
index 0000000..91bd68c
--- /dev/null
+++ b/node_modules/agentkeepalive/README.md
@@ -0,0 +1,256 @@
+# agentkeepalive
+
+[![NPM version][npm-image]][npm-url]
+[![Known Vulnerabilities][snyk-image]][snyk-url]
+[![Node.js CI](https://github.com/node-modules/agentkeepalive/actions/workflows/nodejs.yml/badge.svg)](https://github.com/node-modules/agentkeepalive/actions/workflows/nodejs.yml)
+[![npm download][download-image]][download-url]
+
+[npm-image]: https://img.shields.io/npm/v/agentkeepalive.svg?style=flat
+[npm-url]: https://npmjs.org/package/agentkeepalive
+[snyk-image]: https://snyk.io/test/npm/agentkeepalive/badge.svg?style=flat-square
+[snyk-url]: https://snyk.io/test/npm/agentkeepalive
+[download-image]: https://img.shields.io/npm/dm/agentkeepalive.svg?style=flat-square
+[download-url]: https://npmjs.org/package/agentkeepalive
+
+An enhanced `http.Agent` with keep-alive enabled by default. Supports both `http` and `https`.
+
+## What's different from original `http.Agent`?
+
+- `keepAlive=true` by default
+- Disable Nagle's algorithm: `socket.setNoDelay(true)`
+- Add free socket timeout: avoid leaking sockets that sit inactive in the free-sockets queue for a long time.
+- Add active socket timeout: avoid leaking sockets that sit inactive in the active-sockets queue for a long time.
+- TTL for active sockets.
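+
+For illustration, here is a minimal sketch spelling out what a bare `new Agent()`
+gives you (the values are taken from the option defaults documented below;
+passing them explicitly is not required):
+
+```js
+const Agent = require('agentkeepalive');
+
+// A bare `new Agent()` behaves roughly like this explicit configuration:
+const agent = new Agent({
+  keepAlive: true,         // keep-alive is on by default
+  freeSocketTimeout: 4000, // idle free sockets are closed after 4s
+  timeout: 8000,           // working sockets: max(freeSocketTimeout * 2, 8000) ms
+});
+```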
+
+## Node.js version required
+
+Supports Node.js >= `8.0.0`.
+
+## Install
+
+```bash
+$ npm install agentkeepalive --save
+```
+
+## new Agent([options])
+
+* `options` {Object} Set of configurable options to set on the agent.
+  Can have the following fields:
+  * `keepAlive` {Boolean} Keep sockets around in a pool to be used by
+    other requests in the future. Default = `true`.
+  * `keepAliveMsecs` {Number} When using the keepAlive option, specifies the initial delay
+    for TCP Keep-Alive packets. Ignored when the keepAlive option is false or undefined.
+    Default = `1000`. Only relevant if `keepAlive` is set to `true`.
+  * `freeSocketTimeout` {Number} Sets the free socket to timeout
+    after `freeSocketTimeout` milliseconds of inactivity on the free socket.
+    The default [server-side timeout](https://nodejs.org/api/http.html#serverkeepalivetimeout) is 5000 milliseconds; to [avoid ECONNRESET exceptions](https://medium.com/ssense-tech/reduce-networking-errors-in-nodejs-23b4eb9f2d83), we set the default value to `4000` milliseconds.
+    Only relevant if `keepAlive` is set to `true`.
+  * `timeout` {Number} Sets the working socket to timeout
+    after `timeout` milliseconds of inactivity on the working socket.
+    Default is `freeSocketTimeout * 2`, as long as that value is greater than or equal to 8 seconds; otherwise the default is 8 seconds.
+  * `maxSockets` {Number} Maximum number of sockets to allow per
+    host. Default = `Infinity`.
+  * `maxFreeSockets` {Number} Maximum number of sockets (per host) to leave open
+    in a free state. Only relevant if `keepAlive` is set to `true`.
+    Default = `256`.
+  * `socketActiveTTL` {Number} Sets the socket active time to live, even if it's in use.
+    If not set, the behaviour stays the same (the socket is released only when it is free).
+    Default = `null`.
+
+## Usage
+
+```js
+const http = require('http');
+const Agent = require('agentkeepalive');
+
+const keepaliveAgent = new Agent({
+  maxSockets: 100,
+  maxFreeSockets: 10,
+  timeout: 60000, // active socket keepalive for 60 seconds
+  freeSocketTimeout: 30000, // free socket keepalive for 30 seconds
+});
+
+const options = {
+  host: 'cnodejs.org',
+  port: 80,
+  path: '/',
+  method: 'GET',
+  agent: keepaliveAgent,
+};
+
+const req = http.request(options, res => {
+  console.log('STATUS: ' + res.statusCode);
+  console.log('HEADERS: ' + JSON.stringify(res.headers));
+  res.setEncoding('utf8');
+  res.on('data', function (chunk) {
+    console.log('BODY: ' + chunk);
+  });
+});
+req.on('error', e => {
+  console.log('problem with request: ' + e.message);
+});
+req.end();
+
+setTimeout(() => {
+  if (keepaliveAgent.statusChanged) {
+    console.log('[%s] agent status changed: %j', Date(), keepaliveAgent.getCurrentStatus());
+  }
+}, 2000);
+```
+
+### `getter agent.statusChanged`
+
+Whether any of the counters have changed since the last checkpoint.
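+
+A small monitoring sketch (the interval length is an arbitrary choice for
+illustration): since reading `statusChanged` also resets its checkpoint, a
+timer can log only when something actually happened:
+
+```js
+const Agent = require('agentkeepalive');
+const agent = new Agent();
+
+// Poll the agent and log only when a counter moved since the last check.
+setInterval(() => {
+  if (agent.statusChanged) {
+    console.log('agent status changed: %j', agent.getCurrentStatus());
+  }
+}, 5000);
+```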
+
+### `agent.getCurrentStatus()`
+
+`agent.getCurrentStatus()` will return an object describing the current status of this agent:
+
+```js
+{
+  createSocketCount: 10,
+  closeSocketCount: 5,
+  timeoutSocketCount: 0,
+  requestCount: 5,
+  freeSockets: { 'localhost:57479:': 3 },
+  sockets: { 'localhost:57479:': 5 },
+  requests: {}
+}
+```
+
+### Support `https`
+
+```js
+const https = require('https');
+const HttpsAgent = require('agentkeepalive').HttpsAgent;
+
+const keepaliveAgent = new HttpsAgent();
+// https://www.google.com/search?q=nodejs&sugexp=chrome,mod=12&sourceid=chrome&ie=UTF-8
+const options = {
+  host: 'www.google.com',
+  port: 443,
+  path: '/search?q=nodejs&sugexp=chrome,mod=12&sourceid=chrome&ie=UTF-8',
+  method: 'GET',
+  agent: keepaliveAgent,
+};
+
+const req = https.request(options, res => {
+  console.log('STATUS: ' + res.statusCode);
+  console.log('HEADERS: ' + JSON.stringify(res.headers));
+  res.setEncoding('utf8');
+  res.on('data', chunk => {
+    console.log('BODY: ' + chunk);
+  });
+});
+
+req.on('error', e => {
+  console.log('problem with request: ' + e.message);
+});
+req.end();
+
+setTimeout(() => {
+  console.log('agent status: %j', keepaliveAgent.getCurrentStatus());
+}, 2000);
+```
+
+### Support `req.reusedSocket`
+
+This agent sets the `req.reusedSocket` property, which tells you whether a request was sent through a reused socket.
+
+When the server closes the connection at an unfortunate time ([keep-alive race](https://code-examples.net/en/q/28a8069)), the HTTP client will throw an `ECONNRESET` error. In this situation, `req.reusedSocket` is useful for deciding whether to retry the request automatically.
+
+```js
+const http = require('http');
+const Agent = require('agentkeepalive');
+const agent = new Agent();
+
+const req = http
+  .get('http://localhost:3000', { agent }, (res) => {
+    // ...
+  })
+  .on('error', (err) => {
+    if (req.reusedSocket && err.code === 'ECONNRESET') {
+      // retry the request or anything else...
+    }
+  })
+```
+
+This behavior is consistent with Node.js core, but through `agentkeepalive` you can use this feature on older Node.js versions as well.
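+
+A common way to apply this check is a small retry wrapper. The sketch below is
+illustrative and not part of the library's API; `getWithRetry` and the local
+URL are assumptions:
+
+```js
+const http = require('http');
+const Agent = require('agentkeepalive');
+const agent = new Agent();
+
+// Retry once when a reused socket was reset by the server (keep-alive race).
+function getWithRetry(url, onResponse, retried = false) {
+  const req = http.get(url, { agent }, onResponse);
+  req.on('error', err => {
+    if (!retried && req.reusedSocket && err.code === 'ECONNRESET') {
+      getWithRetry(url, onResponse, true);
+    } else {
+      console.error('request failed: %s', err.message);
+    }
+  });
+}
+
+getWithRetry('http://localhost:3000', res => {
+  console.log('STATUS: %s', res.statusCode);
+});
+```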
+ +## [Benchmark](https://github.com/node-modules/agentkeepalive/tree/master/benchmark) + +run the benchmark: + +```bash +cd benchmark +sh start.sh +``` + +Intel(R) Core(TM)2 Duo CPU P8600 @ 2.40GHz + +node@v0.8.9 + +50 maxSockets, 60 concurrent, 1000 requests per concurrent, 5ms delay + +Keep alive agent (30 seconds): + +```js +Transactions: 60000 hits +Availability: 100.00 % +Elapsed time: 29.70 secs +Data transferred: 14.88 MB +Response time: 0.03 secs +Transaction rate: 2020.20 trans/sec +Throughput: 0.50 MB/sec +Concurrency: 59.84 +Successful transactions: 60000 +Failed transactions: 0 +Longest transaction: 0.15 +Shortest transaction: 0.01 +``` + +Normal agent: + +```js +Transactions: 60000 hits +Availability: 100.00 % +Elapsed time: 46.53 secs +Data transferred: 14.88 MB +Response time: 0.05 secs +Transaction rate: 1289.49 trans/sec +Throughput: 0.32 MB/sec +Concurrency: 59.81 +Successful transactions: 60000 +Failed transactions: 0 +Longest transaction: 0.45 +Shortest transaction: 0.00 +``` + +Socket created: + +```bash +[proxy.js:120000] keepalive, 50 created, 60000 requestFinished, 1200 req/socket, 0 requests, 0 sockets, 0 unusedSockets, 50 timeout +{" <10ms":662," <15ms":17825," <20ms":20552," <30ms":17646," <40ms":2315," <50ms":567," <100ms":377," <150ms":56," <200ms":0," >=200ms+":0} +---------------------------------------------------------------- +[proxy.js:120000] normal , 53866 created, 84260 requestFinished, 1.56 req/socket, 0 requests, 0 sockets +{" <10ms":75," <15ms":1112," <20ms":10947," <30ms":32130," <40ms":8228," <50ms":3002," <100ms":4274," <150ms":181," <200ms":18," >=200ms+":33} +``` + +## License + +[MIT](LICENSE) + + + +## Contributors + +|[
fengmk2](https://github.com/fengmk2)|[dead-horse](https://github.com/dead-horse)|[AndrewLeedham](https://github.com/AndrewLeedham)|[ngot](https://github.com/ngot)|[wrynearson](https://github.com/wrynearson)|[aaronArinder](https://github.com/aaronArinder)|
+| :---: | :---: | :---: | :---: | :---: | :---: |
+|[alexpenev-s](https://github.com/alexpenev-s)|[blemoine](https://github.com/blemoine)|[bdehamer](https://github.com/bdehamer)|[DylanPiercey](https://github.com/DylanPiercey)|[cixel](https://github.com/cixel)|[HerringtonDarkholme](https://github.com/HerringtonDarkholme)|
+|[denghongcai](https://github.com/denghongcai)|[kibertoad](https://github.com/kibertoad)|[pangorgo](https://github.com/pangorgo)|[mattiash](https://github.com/mattiash)|[nabeelbukhari](https://github.com/nabeelbukhari)|[pmalouin](https://github.com/pmalouin)|
+|[SimenB](https://github.com/SimenB)|[vinaybedre](https://github.com/vinaybedre)|[starkwang](https://github.com/starkwang)|[killagu](https://github.com/killagu)|[tony-gutierrez](https://github.com/tony-gutierrez)|[whxaxes](https://github.com/whxaxes)|
+ +This project follows the git-contributor [spec](https://github.com/xudafeng/git-contributor), auto updated at `Sat Aug 05 2023 02:36:31 GMT+0800`. + + diff --git a/node_modules/agentkeepalive/browser.js b/node_modules/agentkeepalive/browser.js new file mode 100644 index 0000000..29c9398 --- /dev/null +++ b/node_modules/agentkeepalive/browser.js @@ -0,0 +1,5 @@ +module.exports = noop; +module.exports.HttpsAgent = noop; + +// Noop function for browser since native api's don't use agents. +function noop () {} diff --git a/node_modules/agentkeepalive/index.d.ts b/node_modules/agentkeepalive/index.d.ts new file mode 100644 index 0000000..2bc70a6 --- /dev/null +++ b/node_modules/agentkeepalive/index.d.ts @@ -0,0 +1,65 @@ +import * as http from 'http'; +import * as https from 'https'; +import * as net from 'net'; + +interface PlainObject { + [key: string]: any; +} + +declare class HttpAgent extends http.Agent { + constructor(opts?: AgentKeepAlive.HttpOptions); + readonly statusChanged: boolean; + createConnection(options: net.NetConnectOpts, cb?: Function): net.Socket; + createSocket(req: http.IncomingMessage, options: http.RequestOptions, cb: Function): void; + getCurrentStatus(): AgentKeepAlive.AgentStatus; +} + +interface Constants { + CURRENT_ID: Symbol; + CREATE_ID: Symbol; + INIT_SOCKET: Symbol; + CREATE_HTTPS_CONNECTION: Symbol; + SOCKET_CREATED_TIME: Symbol; + SOCKET_NAME: Symbol; + SOCKET_REQUEST_COUNT: Symbol; + SOCKET_REQUEST_FINISHED_COUNT: Symbol; +} + +declare class AgentKeepAlive extends HttpAgent {} + +declare namespace AgentKeepAlive { + export interface AgentStatus { + createSocketCount: number; + createSocketErrorCount: number; + closeSocketCount: number; + errorSocketCount: number; + timeoutSocketCount: number; + requestCount: number; + freeSockets: PlainObject; + sockets: PlainObject; + requests: PlainObject; + } + + interface CommonHttpOption { + keepAlive?: boolean | undefined; + freeSocketTimeout?: number | undefined; + freeSocketKeepAliveTimeout?: number | undefined; + timeout?: number | undefined; + socketActiveTTL?: number | undefined; + } + + export interface HttpOptions extends http.AgentOptions, CommonHttpOption { } + export interface HttpsOptions extends https.AgentOptions, CommonHttpOption { } + + export class HttpsAgent extends https.Agent { + constructor(opts?: HttpsOptions); + readonly statusChanged: boolean; + createConnection(options: net.NetConnectOpts, cb?: Function): net.Socket; + createSocket(req: http.IncomingMessage, options: http.RequestOptions, cb: Function): void; + getCurrentStatus(): AgentStatus; + } + + export const constants: Constants; +} + +export = AgentKeepAlive; diff --git a/node_modules/agentkeepalive/index.js b/node_modules/agentkeepalive/index.js new file mode 100644 index 0000000..6ca1513 --- /dev/null +++ b/node_modules/agentkeepalive/index.js @@ -0,0 +1,5 @@ +'use strict'; + +module.exports = require('./lib/agent'); +module.exports.HttpsAgent = require('./lib/https_agent'); +module.exports.constants = require('./lib/constants'); diff --git a/node_modules/agentkeepalive/lib/agent.js b/node_modules/agentkeepalive/lib/agent.js new file mode 100644 index 0000000..8bd354e --- /dev/null +++ b/node_modules/agentkeepalive/lib/agent.js @@ -0,0 +1,402 @@ +'use strict'; + +const OriginalAgent = require('http').Agent; +const ms = require('humanize-ms'); +const debug = require('util').debuglog('agentkeepalive'); +const { + INIT_SOCKET, + CURRENT_ID, + CREATE_ID, + SOCKET_CREATED_TIME, + SOCKET_NAME, + SOCKET_REQUEST_COUNT, + 
SOCKET_REQUEST_FINISHED_COUNT, +} = require('./constants'); + +// OriginalAgent come from +// - https://github.com/nodejs/node/blob/v8.12.0/lib/_http_agent.js +// - https://github.com/nodejs/node/blob/v10.12.0/lib/_http_agent.js + +// node <= 10 +let defaultTimeoutListenerCount = 1; +const majorVersion = parseInt(process.version.split('.', 1)[0].substring(1)); +if (majorVersion >= 11 && majorVersion <= 12) { + defaultTimeoutListenerCount = 2; +} else if (majorVersion >= 13) { + defaultTimeoutListenerCount = 3; +} + +function deprecate(message) { + console.log('[agentkeepalive:deprecated] %s', message); +} + +class Agent extends OriginalAgent { + constructor(options) { + options = options || {}; + options.keepAlive = options.keepAlive !== false; + // default is keep-alive and 4s free socket timeout + // see https://medium.com/ssense-tech/reduce-networking-errors-in-nodejs-23b4eb9f2d83 + if (options.freeSocketTimeout === undefined) { + options.freeSocketTimeout = 4000; + } + // Legacy API: keepAliveTimeout should be rename to `freeSocketTimeout` + if (options.keepAliveTimeout) { + deprecate('options.keepAliveTimeout is deprecated, please use options.freeSocketTimeout instead'); + options.freeSocketTimeout = options.keepAliveTimeout; + delete options.keepAliveTimeout; + } + // Legacy API: freeSocketKeepAliveTimeout should be rename to `freeSocketTimeout` + if (options.freeSocketKeepAliveTimeout) { + deprecate('options.freeSocketKeepAliveTimeout is deprecated, please use options.freeSocketTimeout instead'); + options.freeSocketTimeout = options.freeSocketKeepAliveTimeout; + delete options.freeSocketKeepAliveTimeout; + } + + // Sets the socket to timeout after timeout milliseconds of inactivity on the socket. + // By default is double free socket timeout. + if (options.timeout === undefined) { + // make sure socket default inactivity timeout >= 8s + options.timeout = Math.max(options.freeSocketTimeout * 2, 8000); + } + + // support humanize format + options.timeout = ms(options.timeout); + options.freeSocketTimeout = ms(options.freeSocketTimeout); + options.socketActiveTTL = options.socketActiveTTL ? ms(options.socketActiveTTL) : 0; + + super(options); + + this[CURRENT_ID] = 0; + + // create socket success counter + this.createSocketCount = 0; + this.createSocketCountLastCheck = 0; + + this.createSocketErrorCount = 0; + this.createSocketErrorCountLastCheck = 0; + + this.closeSocketCount = 0; + this.closeSocketCountLastCheck = 0; + + // socket error event count + this.errorSocketCount = 0; + this.errorSocketCountLastCheck = 0; + + // request finished counter + this.requestCount = 0; + this.requestCountLastCheck = 0; + + // including free socket timeout counter + this.timeoutSocketCount = 0; + this.timeoutSocketCountLastCheck = 0; + + this.on('free', socket => { + // https://github.com/nodejs/node/pull/32000 + // Node.js native agent will check socket timeout eqs agent.options.timeout. + // Use the ttl or freeSocketTimeout to overwrite. 
+ const timeout = this.calcSocketTimeout(socket); + if (timeout > 0 && socket.timeout !== timeout) { + socket.setTimeout(timeout); + } + }); + } + + get freeSocketKeepAliveTimeout() { + deprecate('agent.freeSocketKeepAliveTimeout is deprecated, please use agent.options.freeSocketTimeout instead'); + return this.options.freeSocketTimeout; + } + + get timeout() { + deprecate('agent.timeout is deprecated, please use agent.options.timeout instead'); + return this.options.timeout; + } + + get socketActiveTTL() { + deprecate('agent.socketActiveTTL is deprecated, please use agent.options.socketActiveTTL instead'); + return this.options.socketActiveTTL; + } + + calcSocketTimeout(socket) { + /** + * return <= 0: should free socket + * return > 0: should update socket timeout + * return undefined: not find custom timeout + */ + let freeSocketTimeout = this.options.freeSocketTimeout; + const socketActiveTTL = this.options.socketActiveTTL; + if (socketActiveTTL) { + // check socketActiveTTL + const aliveTime = Date.now() - socket[SOCKET_CREATED_TIME]; + const diff = socketActiveTTL - aliveTime; + if (diff <= 0) { + return diff; + } + if (freeSocketTimeout && diff < freeSocketTimeout) { + freeSocketTimeout = diff; + } + } + // set freeSocketTimeout + if (freeSocketTimeout) { + // set free keepalive timer + // try to use socket custom freeSocketTimeout first, support headers['keep-alive'] + // https://github.com/node-modules/urllib/blob/b76053020923f4d99a1c93cf2e16e0c5ba10bacf/lib/urllib.js#L498 + const customFreeSocketTimeout = socket.freeSocketTimeout || socket.freeSocketKeepAliveTimeout; + return customFreeSocketTimeout || freeSocketTimeout; + } + } + + keepSocketAlive(socket) { + const result = super.keepSocketAlive(socket); + // should not keepAlive, do nothing + if (!result) return result; + + const customTimeout = this.calcSocketTimeout(socket); + if (typeof customTimeout === 'undefined') { + return true; + } + if (customTimeout <= 0) { + debug('%s(requests: %s, finished: %s) free but need to destroy by TTL, request count %s, diff is %s', + socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT], customTimeout); + return false; + } + if (socket.timeout !== customTimeout) { + socket.setTimeout(customTimeout); + } + return true; + } + + // only call on addRequest + reuseSocket(...args) { + // reuseSocket(socket, req) + super.reuseSocket(...args); + const socket = args[0]; + const req = args[1]; + req.reusedSocket = true; + const agentTimeout = this.options.timeout; + if (getSocketTimeout(socket) !== agentTimeout) { + // reset timeout before use + socket.setTimeout(agentTimeout); + debug('%s reset timeout to %sms', socket[SOCKET_NAME], agentTimeout); + } + socket[SOCKET_REQUEST_COUNT]++; + debug('%s(requests: %s, finished: %s) reuse on addRequest, timeout %sms', + socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT], + getSocketTimeout(socket)); + } + + [CREATE_ID]() { + const id = this[CURRENT_ID]++; + if (this[CURRENT_ID] === Number.MAX_SAFE_INTEGER) this[CURRENT_ID] = 0; + return id; + } + + [INIT_SOCKET](socket, options) { + // bugfix here. 
+ // https on node 8, 10 won't set agent.options.timeout by default + // TODO: need to fix on node itself + if (options.timeout) { + const timeout = getSocketTimeout(socket); + if (!timeout) { + socket.setTimeout(options.timeout); + } + } + + if (this.options.keepAlive) { + // Disable Nagle's algorithm: http://blog.caustik.com/2012/04/08/scaling-node-js-to-100k-concurrent-connections/ + // https://fengmk2.com/benchmark/nagle-algorithm-delayed-ack-mock.html + socket.setNoDelay(true); + } + this.createSocketCount++; + if (this.options.socketActiveTTL) { + socket[SOCKET_CREATED_TIME] = Date.now(); + } + // don't show the hole '-----BEGIN CERTIFICATE----' key string + socket[SOCKET_NAME] = `sock[${this[CREATE_ID]()}#${options._agentKey}]`.split('-----BEGIN', 1)[0]; + socket[SOCKET_REQUEST_COUNT] = 1; + socket[SOCKET_REQUEST_FINISHED_COUNT] = 0; + installListeners(this, socket, options); + } + + createConnection(options, oncreate) { + let called = false; + const onNewCreate = (err, socket) => { + if (called) return; + called = true; + + if (err) { + this.createSocketErrorCount++; + return oncreate(err); + } + this[INIT_SOCKET](socket, options); + oncreate(err, socket); + }; + + const newSocket = super.createConnection(options, onNewCreate); + if (newSocket) onNewCreate(null, newSocket); + return newSocket; + } + + get statusChanged() { + const changed = this.createSocketCount !== this.createSocketCountLastCheck || + this.createSocketErrorCount !== this.createSocketErrorCountLastCheck || + this.closeSocketCount !== this.closeSocketCountLastCheck || + this.errorSocketCount !== this.errorSocketCountLastCheck || + this.timeoutSocketCount !== this.timeoutSocketCountLastCheck || + this.requestCount !== this.requestCountLastCheck; + if (changed) { + this.createSocketCountLastCheck = this.createSocketCount; + this.createSocketErrorCountLastCheck = this.createSocketErrorCount; + this.closeSocketCountLastCheck = this.closeSocketCount; + this.errorSocketCountLastCheck = this.errorSocketCount; + this.timeoutSocketCountLastCheck = this.timeoutSocketCount; + this.requestCountLastCheck = this.requestCount; + } + return changed; + } + + getCurrentStatus() { + return { + createSocketCount: this.createSocketCount, + createSocketErrorCount: this.createSocketErrorCount, + closeSocketCount: this.closeSocketCount, + errorSocketCount: this.errorSocketCount, + timeoutSocketCount: this.timeoutSocketCount, + requestCount: this.requestCount, + freeSockets: inspect(this.freeSockets), + sockets: inspect(this.sockets), + requests: inspect(this.requests), + }; + } +} + +// node 8 don't has timeout attribute on socket +// https://github.com/nodejs/node/pull/21204/files#diff-e6ef024c3775d787c38487a6309e491dR408 +function getSocketTimeout(socket) { + return socket.timeout || socket._idleTimeout; +} + +function installListeners(agent, socket, options) { + debug('%s create, timeout %sms', socket[SOCKET_NAME], getSocketTimeout(socket)); + + // listener socket events: close, timeout, error, free + function onFree() { + // create and socket.emit('free') logic + // https://github.com/nodejs/node/blob/master/lib/_http_agent.js#L311 + // no req on the socket, it should be the new socket + if (!socket._httpMessage && socket[SOCKET_REQUEST_COUNT] === 1) return; + + socket[SOCKET_REQUEST_FINISHED_COUNT]++; + agent.requestCount++; + debug('%s(requests: %s, finished: %s) free', + socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT]); + + // should reuse on pedding requests? 
+ const name = agent.getName(options); + if (socket.writable && agent.requests[name] && agent.requests[name].length) { + // will be reuse on agent free listener + socket[SOCKET_REQUEST_COUNT]++; + debug('%s(requests: %s, finished: %s) will be reuse on agent free event', + socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT]); + } + } + socket.on('free', onFree); + + function onClose(isError) { + debug('%s(requests: %s, finished: %s) close, isError: %s', + socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT], isError); + agent.closeSocketCount++; + } + socket.on('close', onClose); + + // start socket timeout handler + function onTimeout() { + // onTimeout and emitRequestTimeout(_http_client.js) + // https://github.com/nodejs/node/blob/v12.x/lib/_http_client.js#L711 + const listenerCount = socket.listeners('timeout').length; + // node <= 10, default listenerCount is 1, onTimeout + // 11 < node <= 12, default listenerCount is 2, onTimeout and emitRequestTimeout + // node >= 13, default listenerCount is 3, onTimeout, + // onTimeout(https://github.com/nodejs/node/pull/32000/files#diff-5f7fb0850412c6be189faeddea6c5359R333) + // and emitRequestTimeout + const timeout = getSocketTimeout(socket); + const req = socket._httpMessage; + const reqTimeoutListenerCount = req && req.listeners('timeout').length || 0; + debug('%s(requests: %s, finished: %s) timeout after %sms, listeners %s, defaultTimeoutListenerCount %s, hasHttpRequest %s, HttpRequest timeoutListenerCount %s', + socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT], + timeout, listenerCount, defaultTimeoutListenerCount, !!req, reqTimeoutListenerCount); + if (debug.enabled) { + debug('timeout listeners: %s', socket.listeners('timeout').map(f => f.name).join(', ')); + } + agent.timeoutSocketCount++; + const name = agent.getName(options); + if (agent.freeSockets[name] && agent.freeSockets[name].indexOf(socket) !== -1) { + // free socket timeout, destroy quietly + socket.destroy(); + // Remove it from freeSockets list immediately to prevent new requests + // from being sent through this socket. + agent.removeSocket(socket, options); + debug('%s is free, destroy quietly', socket[SOCKET_NAME]); + } else { + // if there is no any request socket timeout handler, + // agent need to handle socket timeout itself. + // + // custom request socket timeout handle logic must follow these rules: + // 1. Destroy socket first + // 2. Must emit socket 'agentRemove' event tell agent remove socket + // from freeSockets list immediately. + // Otherise you may be get 'socket hang up' error when reuse + // free socket and timeout happen in the same time. + if (reqTimeoutListenerCount === 0) { + const error = new Error('Socket timeout'); + error.code = 'ERR_SOCKET_TIMEOUT'; + error.timeout = timeout; + // must manually call socket.end() or socket.destroy() to end the connection. 
+ // https://nodejs.org/dist/latest-v10.x/docs/api/net.html#net_socket_settimeout_timeout_callback + socket.destroy(error); + agent.removeSocket(socket, options); + debug('%s destroy with timeout error', socket[SOCKET_NAME]); + } + } + } + socket.on('timeout', onTimeout); + + function onError(err) { + const listenerCount = socket.listeners('error').length; + debug('%s(requests: %s, finished: %s) error: %s, listenerCount: %s', + socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT], + err, listenerCount); + agent.errorSocketCount++; + if (listenerCount === 1) { + // if socket don't contain error event handler, don't catch it, emit it again + debug('%s emit uncaught error event', socket[SOCKET_NAME]); + socket.removeListener('error', onError); + socket.emit('error', err); + } + } + socket.on('error', onError); + + function onRemove() { + debug('%s(requests: %s, finished: %s) agentRemove', + socket[SOCKET_NAME], + socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT]); + // We need this function for cases like HTTP 'upgrade' + // (defined by WebSockets) where we need to remove a socket from the + // pool because it'll be locked up indefinitely + socket.removeListener('close', onClose); + socket.removeListener('error', onError); + socket.removeListener('free', onFree); + socket.removeListener('timeout', onTimeout); + socket.removeListener('agentRemove', onRemove); + } + socket.on('agentRemove', onRemove); +} + +module.exports = Agent; + +function inspect(obj) { + const res = {}; + for (const key in obj) { + res[key] = obj[key].length; + } + return res; +} diff --git a/node_modules/agentkeepalive/lib/constants.js b/node_modules/agentkeepalive/lib/constants.js new file mode 100644 index 0000000..ca7ab97 --- /dev/null +++ b/node_modules/agentkeepalive/lib/constants.js @@ -0,0 +1,14 @@ +'use strict'; + +module.exports = { + // agent + CURRENT_ID: Symbol('agentkeepalive#currentId'), + CREATE_ID: Symbol('agentkeepalive#createId'), + INIT_SOCKET: Symbol('agentkeepalive#initSocket'), + CREATE_HTTPS_CONNECTION: Symbol('agentkeepalive#createHttpsConnection'), + // socket + SOCKET_CREATED_TIME: Symbol('agentkeepalive#socketCreatedTime'), + SOCKET_NAME: Symbol('agentkeepalive#socketName'), + SOCKET_REQUEST_COUNT: Symbol('agentkeepalive#socketRequestCount'), + SOCKET_REQUEST_FINISHED_COUNT: Symbol('agentkeepalive#socketRequestFinishedCount'), +}; diff --git a/node_modules/agentkeepalive/lib/https_agent.js b/node_modules/agentkeepalive/lib/https_agent.js new file mode 100644 index 0000000..344fb32 --- /dev/null +++ b/node_modules/agentkeepalive/lib/https_agent.js @@ -0,0 +1,51 @@ +'use strict'; + +const OriginalHttpsAgent = require('https').Agent; +const HttpAgent = require('./agent'); +const { + INIT_SOCKET, + CREATE_HTTPS_CONNECTION, +} = require('./constants'); + +class HttpsAgent extends HttpAgent { + constructor(options) { + super(options); + + this.defaultPort = 443; + this.protocol = 'https:'; + this.maxCachedSessions = this.options.maxCachedSessions; + /* istanbul ignore next */ + if (this.maxCachedSessions === undefined) { + this.maxCachedSessions = 100; + } + + this._sessionCache = { + map: {}, + list: [], + }; + } + + createConnection(options, oncreate) { + const socket = this[CREATE_HTTPS_CONNECTION](options, oncreate); + this[INIT_SOCKET](socket, options); + return socket; + } +} + +// https://github.com/nodejs/node/blob/master/lib/https.js#L89 +HttpsAgent.prototype[CREATE_HTTPS_CONNECTION] = OriginalHttpsAgent.prototype.createConnection; + +[ + 
'getName', + '_getSession', + '_cacheSession', + // https://github.com/nodejs/node/pull/4982 + '_evictSession', +].forEach(function(method) { + /* istanbul ignore next */ + if (typeof OriginalHttpsAgent.prototype[method] === 'function') { + HttpsAgent.prototype[method] = OriginalHttpsAgent.prototype[method]; + } +}); + +module.exports = HttpsAgent; diff --git a/node_modules/agentkeepalive/package.json b/node_modules/agentkeepalive/package.json new file mode 100644 index 0000000..d8e9aa7 --- /dev/null +++ b/node_modules/agentkeepalive/package.json @@ -0,0 +1,56 @@ +{ + "name": "agentkeepalive", + "version": "4.5.0", + "description": "Missing keepalive http.Agent", + "main": "index.js", + "browser": "browser.js", + "files": [ + "index.js", + "index.d.ts", + "browser.js", + "lib" + ], + "scripts": { + "contributor": "git-contributor", + "test": "npm run lint && egg-bin test --full-trace", + "test-local": "egg-bin test --full-trace", + "cov": "cross-env NODE_DEBUG=agentkeepalive egg-bin cov --full-trace", + "ci": "npm run lint && npm run cov", + "lint": "eslint lib test index.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/node-modules/agentkeepalive.git" + }, + "bugs": { + "url": "https://github.com/node-modules/agentkeepalive/issues" + }, + "keywords": [ + "http", + "https", + "agent", + "keepalive", + "agentkeepalive", + "HttpAgent", + "HttpsAgent" + ], + "dependencies": { + "humanize-ms": "^1.2.1" + }, + "devDependencies": { + "coffee": "^5.3.0", + "cross-env": "^6.0.3", + "egg-bin": "^4.9.0", + "eslint": "^5.7.0", + "eslint-config-egg": "^7.1.0", + "git-contributor": "^2.0.0", + "mm": "^2.4.1", + "pedding": "^1.1.0", + "typescript": "^3.8.3" + }, + "engines": { + "node": ">= 8.0.0" + }, + "author": "fengmk2 (https://github.com/fengmk2)", + "license": "MIT" +} diff --git a/node_modules/aggregate-error/index.d.ts b/node_modules/aggregate-error/index.d.ts new file mode 100644 index 0000000..502bf7a --- /dev/null +++ b/node_modules/aggregate-error/index.d.ts @@ -0,0 +1,51 @@ +/** +Create an error from multiple errors. +*/ +declare class AggregateError extends Error implements Iterable { + readonly name: 'AggregateError'; + + /** + @param errors - If a string, a new `Error` is created with the string as the error message. If a non-Error object, a new `Error` is created with all properties from the object copied over. + @returns An Error that is also an [`Iterable`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators#Iterables) for the individual errors. + + @example + ``` + import AggregateError = require('aggregate-error'); + + const error = new AggregateError([new Error('foo'), 'bar', {message: 'baz'}]); + + throw error; + + // AggregateError: + // Error: foo + // at Object. (/Users/sindresorhus/dev/aggregate-error/example.js:3:33) + // Error: bar + // at Object. (/Users/sindresorhus/dev/aggregate-error/example.js:3:13) + // Error: baz + // at Object. (/Users/sindresorhus/dev/aggregate-error/example.js:3:13) + // at AggregateError (/Users/sindresorhus/dev/aggregate-error/index.js:19:3) + // at Object. 
(/Users/sindresorhus/dev/aggregate-error/example.js:3:13) + // at Module._compile (module.js:556:32) + // at Object.Module._extensions..js (module.js:565:10) + // at Module.load (module.js:473:32) + // at tryModuleLoad (module.js:432:12) + // at Function.Module._load (module.js:424:3) + // at Module.runMain (module.js:590:10) + // at run (bootstrap_node.js:394:7) + // at startup (bootstrap_node.js:149:9) + + + for (const individualError of error) { + console.log(individualError); + } + //=> [Error: foo] + //=> [Error: bar] + //=> [Error: baz] + ``` + */ + constructor(errors: ReadonlyArray); + + [Symbol.iterator](): IterableIterator; +} + +export = AggregateError; diff --git a/node_modules/aggregate-error/index.js b/node_modules/aggregate-error/index.js new file mode 100644 index 0000000..ba5bf02 --- /dev/null +++ b/node_modules/aggregate-error/index.js @@ -0,0 +1,47 @@ +'use strict'; +const indentString = require('indent-string'); +const cleanStack = require('clean-stack'); + +const cleanInternalStack = stack => stack.replace(/\s+at .*aggregate-error\/index.js:\d+:\d+\)?/g, ''); + +class AggregateError extends Error { + constructor(errors) { + if (!Array.isArray(errors)) { + throw new TypeError(`Expected input to be an Array, got ${typeof errors}`); + } + + errors = [...errors].map(error => { + if (error instanceof Error) { + return error; + } + + if (error !== null && typeof error === 'object') { + // Handle plain error objects with message property and/or possibly other metadata + return Object.assign(new Error(error.message), error); + } + + return new Error(error); + }); + + let message = errors + .map(error => { + // The `stack` property is not standardized, so we can't assume it exists + return typeof error.stack === 'string' ? cleanInternalStack(cleanStack(error.stack)) : String(error); + }) + .join('\n'); + message = '\n' + indentString(message, 4); + super(message); + + this.name = 'AggregateError'; + + Object.defineProperty(this, '_errors', {value: errors}); + } + + * [Symbol.iterator]() { + for (const error of this._errors) { + yield error; + } + } +} + +module.exports = AggregateError; diff --git a/node_modules/aggregate-error/license b/node_modules/aggregate-error/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/aggregate-error/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
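As a usage sketch of the `AggregateError` API declared above (the constructor accepts an array of `Error`s, strings, or plain objects; instances are iterable over the individual errors), the helper below collects failures from several async tasks and throws them together. `runAll` is a hypothetical helper, not part of the package:

```js
const AggregateError = require('aggregate-error');

// Run async tasks sequentially, collect every failure, and surface them together.
async function runAll(tasks) {
  const errors = [];
  const results = [];
  for (const task of tasks) {
    try {
      results.push(await task());
    } catch (error) {
      errors.push(error);
    }
  }
  if (errors.length > 0) {
    // Each collected error stays individually reachable via iteration.
    throw new AggregateError(errors);
  }
  return results;
}

runAll([
  async () => 'ok',
  async () => { throw new Error('boom'); },
]).catch(error => {
  for (const individualError of error) {
    console.log(individualError.message);
  }
});
```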
diff --git a/node_modules/aggregate-error/package.json b/node_modules/aggregate-error/package.json new file mode 100644 index 0000000..74fcc37 --- /dev/null +++ b/node_modules/aggregate-error/package.json @@ -0,0 +1,41 @@ +{ + "name": "aggregate-error", + "version": "3.1.0", + "description": "Create an error from multiple errors", + "license": "MIT", + "repository": "sindresorhus/aggregate-error", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "aggregate", + "error", + "combine", + "multiple", + "many", + "collection", + "iterable", + "iterator" + ], + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "devDependencies": { + "ava": "^2.4.0", + "tsd": "^0.7.1", + "xo": "^0.25.3" + } +} diff --git a/node_modules/aggregate-error/readme.md b/node_modules/aggregate-error/readme.md new file mode 100644 index 0000000..850de98 --- /dev/null +++ b/node_modules/aggregate-error/readme.md @@ -0,0 +1,61 @@ +# aggregate-error [![Build Status](https://travis-ci.org/sindresorhus/aggregate-error.svg?branch=master)](https://travis-ci.org/sindresorhus/aggregate-error) + +> Create an error from multiple errors + + +## Install + +``` +$ npm install aggregate-error +``` + + +## Usage + +```js +const AggregateError = require('aggregate-error'); + +const error = new AggregateError([new Error('foo'), 'bar', {message: 'baz'}]); + +throw error; +/* +AggregateError: + Error: foo + at Object. (/Users/sindresorhus/dev/aggregate-error/example.js:3:33) + Error: bar + at Object. (/Users/sindresorhus/dev/aggregate-error/example.js:3:13) + Error: baz + at Object. (/Users/sindresorhus/dev/aggregate-error/example.js:3:13) + at AggregateError (/Users/sindresorhus/dev/aggregate-error/index.js:19:3) + at Object. (/Users/sindresorhus/dev/aggregate-error/example.js:3:13) + at Module._compile (module.js:556:32) + at Object.Module._extensions..js (module.js:565:10) + at Module.load (module.js:473:32) + at tryModuleLoad (module.js:432:12) + at Function.Module._load (module.js:424:3) + at Module.runMain (module.js:590:10) + at run (bootstrap_node.js:394:7) + at startup (bootstrap_node.js:149:9) +*/ + +for (const individualError of error) { + console.log(individualError); +} +//=> [Error: foo] +//=> [Error: bar] +//=> [Error: baz] +``` + + +## API + +### AggregateError(errors) + +Returns an `Error` that is also an [`Iterable`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Iterators_and_Generators#Iterables) for the individual errors. + +#### errors + +Type: `Array` + +If a string, a new `Error` is created with the string as the error message.
+If a non-Error object, a new `Error` is created with all properties from the object copied over. diff --git a/node_modules/ansi-regex/index.d.ts b/node_modules/ansi-regex/index.d.ts new file mode 100644 index 0000000..2dbf6af --- /dev/null +++ b/node_modules/ansi-regex/index.d.ts @@ -0,0 +1,37 @@ +declare namespace ansiRegex { + interface Options { + /** + Match only the first ANSI escape. + + @default false + */ + onlyFirst: boolean; + } +} + +/** +Regular expression for matching ANSI escape codes. + +@example +``` +import ansiRegex = require('ansi-regex'); + +ansiRegex().test('\u001B[4mcake\u001B[0m'); +//=> true + +ansiRegex().test('cake'); +//=> false + +'\u001B[4mcake\u001B[0m'.match(ansiRegex()); +//=> ['\u001B[4m', '\u001B[0m'] + +'\u001B[4mcake\u001B[0m'.match(ansiRegex({onlyFirst: true})); +//=> ['\u001B[4m'] + +'\u001B]8;;https://github.com\u0007click\u001B]8;;\u0007'.match(ansiRegex()); +//=> ['\u001B]8;;https://github.com\u0007', '\u001B]8;;\u0007'] +``` +*/ +declare function ansiRegex(options?: ansiRegex.Options): RegExp; + +export = ansiRegex; diff --git a/node_modules/ansi-regex/index.js b/node_modules/ansi-regex/index.js new file mode 100644 index 0000000..616ff83 --- /dev/null +++ b/node_modules/ansi-regex/index.js @@ -0,0 +1,10 @@ +'use strict'; + +module.exports = ({onlyFirst = false} = {}) => { + const pattern = [ + '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)', + '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))' + ].join('|'); + + return new RegExp(pattern, onlyFirst ? undefined : 'g'); +}; diff --git a/node_modules/ansi-regex/license b/node_modules/ansi-regex/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/ansi-regex/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
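As a usage note for the `ansi-regex` implementation above: a typical application is stripping escape sequences from terminal output. A minimal sketch, assuming the package is installed (this is essentially what the separate `strip-ansi` package does with this regex):

```js
const ansiRegex = require('ansi-regex');

// Remove every ANSI escape sequence; the default regex carries the global flag.
const stripAnsi = string => string.replace(ansiRegex(), '');

console.log(stripAnsi('\u001B[4mUnicorn\u001B[0m'));
//=> 'Unicorn'
```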
diff --git a/node_modules/ansi-regex/package.json b/node_modules/ansi-regex/package.json new file mode 100644 index 0000000..017f531 --- /dev/null +++ b/node_modules/ansi-regex/package.json @@ -0,0 +1,55 @@ +{ + "name": "ansi-regex", + "version": "5.0.1", + "description": "Regular expression for matching ANSI escape codes", + "license": "MIT", + "repository": "chalk/ansi-regex", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd", + "view-supported": "node fixtures/view-codes.js" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "text", + "regex", + "regexp", + "re", + "match", + "test", + "find", + "pattern" + ], + "devDependencies": { + "ava": "^2.4.0", + "tsd": "^0.9.0", + "xo": "^0.25.3" + } +} diff --git a/node_modules/ansi-regex/readme.md b/node_modules/ansi-regex/readme.md new file mode 100644 index 0000000..4d848bc --- /dev/null +++ b/node_modules/ansi-regex/readme.md @@ -0,0 +1,78 @@ +# ansi-regex + +> Regular expression for matching [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) + + +## Install + +``` +$ npm install ansi-regex +``` + + +## Usage + +```js +const ansiRegex = require('ansi-regex'); + +ansiRegex().test('\u001B[4mcake\u001B[0m'); +//=> true + +ansiRegex().test('cake'); +//=> false + +'\u001B[4mcake\u001B[0m'.match(ansiRegex()); +//=> ['\u001B[4m', '\u001B[0m'] + +'\u001B[4mcake\u001B[0m'.match(ansiRegex({onlyFirst: true})); +//=> ['\u001B[4m'] + +'\u001B]8;;https://github.com\u0007click\u001B]8;;\u0007'.match(ansiRegex()); +//=> ['\u001B]8;;https://github.com\u0007', '\u001B]8;;\u0007'] +``` + + +## API + +### ansiRegex(options?) + +Returns a regex for matching ANSI escape codes. + +#### options + +Type: `object` + +##### onlyFirst + +Type: `boolean`
+Default: `false` *(Matches any ANSI escape codes in a string)* + +Match only the first ANSI escape. + + +## FAQ + +### Why do you test for codes not in the ECMA 48 standard? + +Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. We test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them. + +On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out. + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +--- + +
+Get professional support for this package with a Tidelift subscription. + +Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies. +
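Because `onlyFirst` drops the `g` flag, the regex it returns also works well with `exec()` for locating where the first escape sequence begins. A small sketch under that assumption:

```js
const ansiRegex = require('ansi-regex');

// Without the `g` flag, exec() reports the index of the first match.
const match = ansiRegex({onlyFirst: true}).exec('plain \u001B[31mred\u001B[39m');
console.log(match.index);
//=> 6
```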
diff --git a/node_modules/ansi-styles/index.d.ts b/node_modules/ansi-styles/index.d.ts new file mode 100644 index 0000000..44a907e --- /dev/null +++ b/node_modules/ansi-styles/index.d.ts @@ -0,0 +1,345 @@ +declare type CSSColor = + | 'aliceblue' + | 'antiquewhite' + | 'aqua' + | 'aquamarine' + | 'azure' + | 'beige' + | 'bisque' + | 'black' + | 'blanchedalmond' + | 'blue' + | 'blueviolet' + | 'brown' + | 'burlywood' + | 'cadetblue' + | 'chartreuse' + | 'chocolate' + | 'coral' + | 'cornflowerblue' + | 'cornsilk' + | 'crimson' + | 'cyan' + | 'darkblue' + | 'darkcyan' + | 'darkgoldenrod' + | 'darkgray' + | 'darkgreen' + | 'darkgrey' + | 'darkkhaki' + | 'darkmagenta' + | 'darkolivegreen' + | 'darkorange' + | 'darkorchid' + | 'darkred' + | 'darksalmon' + | 'darkseagreen' + | 'darkslateblue' + | 'darkslategray' + | 'darkslategrey' + | 'darkturquoise' + | 'darkviolet' + | 'deeppink' + | 'deepskyblue' + | 'dimgray' + | 'dimgrey' + | 'dodgerblue' + | 'firebrick' + | 'floralwhite' + | 'forestgreen' + | 'fuchsia' + | 'gainsboro' + | 'ghostwhite' + | 'gold' + | 'goldenrod' + | 'gray' + | 'green' + | 'greenyellow' + | 'grey' + | 'honeydew' + | 'hotpink' + | 'indianred' + | 'indigo' + | 'ivory' + | 'khaki' + | 'lavender' + | 'lavenderblush' + | 'lawngreen' + | 'lemonchiffon' + | 'lightblue' + | 'lightcoral' + | 'lightcyan' + | 'lightgoldenrodyellow' + | 'lightgray' + | 'lightgreen' + | 'lightgrey' + | 'lightpink' + | 'lightsalmon' + | 'lightseagreen' + | 'lightskyblue' + | 'lightslategray' + | 'lightslategrey' + | 'lightsteelblue' + | 'lightyellow' + | 'lime' + | 'limegreen' + | 'linen' + | 'magenta' + | 'maroon' + | 'mediumaquamarine' + | 'mediumblue' + | 'mediumorchid' + | 'mediumpurple' + | 'mediumseagreen' + | 'mediumslateblue' + | 'mediumspringgreen' + | 'mediumturquoise' + | 'mediumvioletred' + | 'midnightblue' + | 'mintcream' + | 'mistyrose' + | 'moccasin' + | 'navajowhite' + | 'navy' + | 'oldlace' + | 'olive' + | 'olivedrab' + | 'orange' + | 'orangered' + | 'orchid' + | 'palegoldenrod' + | 'palegreen' + | 'paleturquoise' + | 'palevioletred' + | 'papayawhip' + | 'peachpuff' + | 'peru' + | 'pink' + | 'plum' + | 'powderblue' + | 'purple' + | 'rebeccapurple' + | 'red' + | 'rosybrown' + | 'royalblue' + | 'saddlebrown' + | 'salmon' + | 'sandybrown' + | 'seagreen' + | 'seashell' + | 'sienna' + | 'silver' + | 'skyblue' + | 'slateblue' + | 'slategray' + | 'slategrey' + | 'snow' + | 'springgreen' + | 'steelblue' + | 'tan' + | 'teal' + | 'thistle' + | 'tomato' + | 'turquoise' + | 'violet' + | 'wheat' + | 'white' + | 'whitesmoke' + | 'yellow' + | 'yellowgreen'; + +declare namespace ansiStyles { + interface ColorConvert { + /** + The RGB color space. + + @param red - (`0`-`255`) + @param green - (`0`-`255`) + @param blue - (`0`-`255`) + */ + rgb(red: number, green: number, blue: number): string; + + /** + The RGB HEX color space. + + @param hex - A hexadecimal string containing RGB data. + */ + hex(hex: string): string; + + /** + @param keyword - A CSS color name. + */ + keyword(keyword: CSSColor): string; + + /** + The HSL color space. + + @param hue - (`0`-`360`) + @param saturation - (`0`-`100`) + @param lightness - (`0`-`100`) + */ + hsl(hue: number, saturation: number, lightness: number): string; + + /** + The HSV color space. + + @param hue - (`0`-`360`) + @param saturation - (`0`-`100`) + @param value - (`0`-`100`) + */ + hsv(hue: number, saturation: number, value: number): string; + + /** + The HSV color space. 
+ + @param hue - (`0`-`360`) + @param whiteness - (`0`-`100`) + @param blackness - (`0`-`100`) + */ + hwb(hue: number, whiteness: number, blackness: number): string; + + /** + Use a [4-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4-bit) to set text color. + */ + ansi(ansi: number): string; + + /** + Use an [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color. + */ + ansi256(ansi: number): string; + } + + interface CSPair { + /** + The ANSI terminal control sequence for starting this style. + */ + readonly open: string; + + /** + The ANSI terminal control sequence for ending this style. + */ + readonly close: string; + } + + interface ColorBase { + readonly ansi: ColorConvert; + readonly ansi256: ColorConvert; + readonly ansi16m: ColorConvert; + + /** + The ANSI terminal control sequence for ending this color. + */ + readonly close: string; + } + + interface Modifier { + /** + Resets the current color chain. + */ + readonly reset: CSPair; + + /** + Make text bold. + */ + readonly bold: CSPair; + + /** + Emitting only a small amount of light. + */ + readonly dim: CSPair; + + /** + Make text italic. (Not widely supported) + */ + readonly italic: CSPair; + + /** + Make text underline. (Not widely supported) + */ + readonly underline: CSPair; + + /** + Inverse background and foreground colors. + */ + readonly inverse: CSPair; + + /** + Prints the text, but makes it invisible. + */ + readonly hidden: CSPair; + + /** + Puts a horizontal line through the center of the text. (Not widely supported) + */ + readonly strikethrough: CSPair; + } + + interface ForegroundColor { + readonly black: CSPair; + readonly red: CSPair; + readonly green: CSPair; + readonly yellow: CSPair; + readonly blue: CSPair; + readonly cyan: CSPair; + readonly magenta: CSPair; + readonly white: CSPair; + + /** + Alias for `blackBright`. + */ + readonly gray: CSPair; + + /** + Alias for `blackBright`. + */ + readonly grey: CSPair; + + readonly blackBright: CSPair; + readonly redBright: CSPair; + readonly greenBright: CSPair; + readonly yellowBright: CSPair; + readonly blueBright: CSPair; + readonly cyanBright: CSPair; + readonly magentaBright: CSPair; + readonly whiteBright: CSPair; + } + + interface BackgroundColor { + readonly bgBlack: CSPair; + readonly bgRed: CSPair; + readonly bgGreen: CSPair; + readonly bgYellow: CSPair; + readonly bgBlue: CSPair; + readonly bgCyan: CSPair; + readonly bgMagenta: CSPair; + readonly bgWhite: CSPair; + + /** + Alias for `bgBlackBright`. + */ + readonly bgGray: CSPair; + + /** + Alias for `bgBlackBright`. 
+ */ + readonly bgGrey: CSPair; + + readonly bgBlackBright: CSPair; + readonly bgRedBright: CSPair; + readonly bgGreenBright: CSPair; + readonly bgYellowBright: CSPair; + readonly bgBlueBright: CSPair; + readonly bgCyanBright: CSPair; + readonly bgMagentaBright: CSPair; + readonly bgWhiteBright: CSPair; + } +} + +declare const ansiStyles: { + readonly modifier: ansiStyles.Modifier; + readonly color: ansiStyles.ForegroundColor & ansiStyles.ColorBase; + readonly bgColor: ansiStyles.BackgroundColor & ansiStyles.ColorBase; + readonly codes: ReadonlyMap; +} & ansiStyles.BackgroundColor & ansiStyles.ForegroundColor & ansiStyles.Modifier; + +export = ansiStyles; diff --git a/node_modules/ansi-styles/index.js b/node_modules/ansi-styles/index.js new file mode 100644 index 0000000..5d82581 --- /dev/null +++ b/node_modules/ansi-styles/index.js @@ -0,0 +1,163 @@ +'use strict'; + +const wrapAnsi16 = (fn, offset) => (...args) => { + const code = fn(...args); + return `\u001B[${code + offset}m`; +}; + +const wrapAnsi256 = (fn, offset) => (...args) => { + const code = fn(...args); + return `\u001B[${38 + offset};5;${code}m`; +}; + +const wrapAnsi16m = (fn, offset) => (...args) => { + const rgb = fn(...args); + return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`; +}; + +const ansi2ansi = n => n; +const rgb2rgb = (r, g, b) => [r, g, b]; + +const setLazyProperty = (object, property, get) => { + Object.defineProperty(object, property, { + get: () => { + const value = get(); + + Object.defineProperty(object, property, { + value, + enumerable: true, + configurable: true + }); + + return value; + }, + enumerable: true, + configurable: true + }); +}; + +/** @type {typeof import('color-convert')} */ +let colorConvert; +const makeDynamicStyles = (wrap, targetSpace, identity, isBackground) => { + if (colorConvert === undefined) { + colorConvert = require('color-convert'); + } + + const offset = isBackground ? 10 : 0; + const styles = {}; + + for (const [sourceSpace, suite] of Object.entries(colorConvert)) { + const name = sourceSpace === 'ansi16' ? 
'ansi' : sourceSpace; + if (sourceSpace === targetSpace) { + styles[name] = wrap(identity, offset); + } else if (typeof suite === 'object') { + styles[name] = wrap(suite[targetSpace], offset); + } + } + + return styles; +}; + +function assembleStyles() { + const codes = new Map(); + const styles = { + modifier: { + reset: [0, 0], + // 21 isn't widely supported and 22 does the same thing + bold: [1, 22], + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29] + }, + color: { + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + + // Bright color + blackBright: [90, 39], + redBright: [91, 39], + greenBright: [92, 39], + yellowBright: [93, 39], + blueBright: [94, 39], + magentaBright: [95, 39], + cyanBright: [96, 39], + whiteBright: [97, 39] + }, + bgColor: { + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49], + + // Bright color + bgBlackBright: [100, 49], + bgRedBright: [101, 49], + bgGreenBright: [102, 49], + bgYellowBright: [103, 49], + bgBlueBright: [104, 49], + bgMagentaBright: [105, 49], + bgCyanBright: [106, 49], + bgWhiteBright: [107, 49] + } + }; + + // Alias bright black as gray (and grey) + styles.color.gray = styles.color.blackBright; + styles.bgColor.bgGray = styles.bgColor.bgBlackBright; + styles.color.grey = styles.color.blackBright; + styles.bgColor.bgGrey = styles.bgColor.bgBlackBright; + + for (const [groupName, group] of Object.entries(styles)) { + for (const [styleName, style] of Object.entries(group)) { + styles[styleName] = { + open: `\u001B[${style[0]}m`, + close: `\u001B[${style[1]}m` + }; + + group[styleName] = styles[styleName]; + + codes.set(style[0], style[1]); + } + + Object.defineProperty(styles, groupName, { + value: group, + enumerable: false + }); + } + + Object.defineProperty(styles, 'codes', { + value: codes, + enumerable: false + }); + + styles.color.close = '\u001B[39m'; + styles.bgColor.close = '\u001B[49m'; + + setLazyProperty(styles.color, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, false)); + setLazyProperty(styles.color, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, false)); + setLazyProperty(styles.color, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, false)); + setLazyProperty(styles.bgColor, 'ansi', () => makeDynamicStyles(wrapAnsi16, 'ansi16', ansi2ansi, true)); + setLazyProperty(styles.bgColor, 'ansi256', () => makeDynamicStyles(wrapAnsi256, 'ansi256', ansi2ansi, true)); + setLazyProperty(styles.bgColor, 'ansi16m', () => makeDynamicStyles(wrapAnsi16m, 'rgb', rgb2rgb, true)); + + return styles; +} + +// Make the export immutable +Object.defineProperty(module, 'exports', { + enumerable: true, + get: assembleStyles +}); diff --git a/node_modules/ansi-styles/license b/node_modules/ansi-styles/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/ansi-styles/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit 
persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/ansi-styles/package.json b/node_modules/ansi-styles/package.json new file mode 100644 index 0000000..7539328 --- /dev/null +++ b/node_modules/ansi-styles/package.json @@ -0,0 +1,56 @@ +{ + "name": "ansi-styles", + "version": "4.3.0", + "description": "ANSI escape codes for styling strings in the terminal", + "license": "MIT", + "repository": "chalk/ansi-styles", + "funding": "https://github.com/chalk/ansi-styles?sponsor=1", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd", + "screenshot": "svg-term --command='node screenshot' --out=screenshot.svg --padding=3 --width=55 --height=3 --at=1000 --no-cursor" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "color-convert": "^2.0.1" + }, + "devDependencies": { + "@types/color-convert": "^1.9.0", + "ava": "^2.3.0", + "svg-term-cli": "^2.1.1", + "tsd": "^0.11.0", + "xo": "^0.25.3" + } +} diff --git a/node_modules/ansi-styles/readme.md b/node_modules/ansi-styles/readme.md new file mode 100644 index 0000000..24883de --- /dev/null +++ b/node_modules/ansi-styles/readme.md @@ -0,0 +1,152 @@ +# ansi-styles [![Build Status](https://travis-ci.org/chalk/ansi-styles.svg?branch=master)](https://travis-ci.org/chalk/ansi-styles) + +> [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code#Colors_and_Styles) for styling strings in the terminal + +You probably want the higher-level [chalk](https://github.com/chalk/chalk) module for styling your strings. + + + +## Install + +``` +$ npm install ansi-styles +``` + +## Usage + +```js +const style = require('ansi-styles'); + +console.log(`${style.green.open}Hello world!${style.green.close}`); + + +// Color conversion between 16/256/truecolor +// NOTE: If conversion goes to 16 colors or 256 colors, the original color +// may be degraded to fit that color palette. This means terminals +// that do not support 16 million colors will best-match the +// original color. +console.log(style.bgColor.ansi.hsl(120, 80, 72) + 'Hello world!' + style.bgColor.close); +console.log(style.color.ansi256.rgb(199, 20, 250) + 'Hello world!' + style.color.close); +console.log(style.color.ansi16m.hex('#abcdef') + 'Hello world!' + style.color.close); +``` + +## API + +Each style has an `open` and `close` property. 
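To make the `open`/`close` pairing concrete, here is a minimal sketch; the `emphasize` helper is illustrative only, not part of the package:

```js
const style = require('ansi-styles');

// Styles nest by concatenation: emit open sequences before the text
// and the matching close sequences after it, in reverse order.
const emphasize = text =>
  style.bold.open + style.red.open + text + style.red.close + style.bold.close;

console.log(emphasize('important'));
```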
+ +## Styles + +### Modifiers + +- `reset` +- `bold` +- `dim` +- `italic` *(Not widely supported)* +- `underline` +- `inverse` +- `hidden` +- `strikethrough` *(Not widely supported)* + +### Colors + +- `black` +- `red` +- `green` +- `yellow` +- `blue` +- `magenta` +- `cyan` +- `white` +- `blackBright` (alias: `gray`, `grey`) +- `redBright` +- `greenBright` +- `yellowBright` +- `blueBright` +- `magentaBright` +- `cyanBright` +- `whiteBright` + +### Background colors + +- `bgBlack` +- `bgRed` +- `bgGreen` +- `bgYellow` +- `bgBlue` +- `bgMagenta` +- `bgCyan` +- `bgWhite` +- `bgBlackBright` (alias: `bgGray`, `bgGrey`) +- `bgRedBright` +- `bgGreenBright` +- `bgYellowBright` +- `bgBlueBright` +- `bgMagentaBright` +- `bgCyanBright` +- `bgWhiteBright` + +## Advanced usage + +By default, you get a map of styles, but the styles are also available as groups. They are non-enumerable so they don't show up unless you access them explicitly. This makes it easier to expose only a subset in a higher-level module. + +- `style.modifier` +- `style.color` +- `style.bgColor` + +###### Example + +```js +console.log(style.color.green.open); +``` + +Raw escape codes (i.e. without the CSI escape prefix `\u001B[` and render mode postfix `m`) are available under `style.codes`, which returns a `Map` with the open codes as keys and close codes as values. + +###### Example + +```js +console.log(style.codes.get(36)); +//=> 39 +``` + +## [256 / 16 million (TrueColor) support](https://gist.github.com/XVilka/8346728) + +`ansi-styles` uses the [`color-convert`](https://github.com/Qix-/color-convert) package to allow for converting between various colors and ANSI escapes, with support for 256 and 16 million colors. + +The following color spaces from `color-convert` are supported: + +- `rgb` +- `hex` +- `keyword` +- `hsl` +- `hsv` +- `hwb` +- `ansi` +- `ansi256` + +To use these, call the associated conversion function with the intended output, for example: + +```js +style.color.ansi.rgb(100, 200, 15); // RGB to 16 color ansi foreground code +style.bgColor.ansi.rgb(100, 200, 15); // RGB to 16 color ansi background code + +style.color.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code +style.bgColor.ansi256.hsl(120, 100, 60); // HSL to 256 color ansi foreground code + +style.color.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color foreground code +style.bgColor.ansi16m.hex('#C0FFEE'); // Hex (RGB) to 16 million color background code +``` + +## Related + +- [ansi-escapes](https://github.com/sindresorhus/ansi-escapes) - ANSI escape codes for manipulating the terminal + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + +## For enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of `ansi-styles` and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-ansi-styles?utm_source=npm-ansi-styles&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) diff --git a/node_modules/aproba/CHANGELOG.md b/node_modules/aproba/CHANGELOG.md new file mode 100644 index 0000000..bab30ec --- /dev/null +++ b/node_modules/aproba/CHANGELOG.md @@ -0,0 +1,4 @@ +2.0.0 + * Drop support for 0.10 and 0.12. 
They haven't been in travis but still, + since we _know_ we'll break with them now it's only polite to do a + major bump. diff --git a/node_modules/aproba/LICENSE b/node_modules/aproba/LICENSE new file mode 100644 index 0000000..f4be44d --- /dev/null +++ b/node_modules/aproba/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/aproba/README.md b/node_modules/aproba/README.md new file mode 100644 index 0000000..0bfc594 --- /dev/null +++ b/node_modules/aproba/README.md @@ -0,0 +1,94 @@ +aproba +====== + +A ridiculously light-weight function argument validator + +``` +var validate = require("aproba") + +function myfunc(a, b, c) { + // `a` must be a string, `b` a number, `c` a function + validate('SNF', arguments) // [a,b,c] is also valid +} + +myfunc('test', 23, function () {}) // ok +myfunc(123, 23, function () {}) // type error +myfunc('test', 23) // missing arg error +myfunc('test', 23, function () {}, true) // too many args error + +``` + +Valid types are: + +| type | description +| :--: | :---------- +| * | matches any type +| A | `Array.isArray` OR an `arguments` object +| S | typeof == string +| N | typeof == number +| F | typeof == function +| O | typeof == object and not type A and not type E +| B | typeof == boolean +| E | `instanceof Error` OR `null` **(special: see below)** +| Z | == `null` + +Validation failures throw one of three exception types, distinguished by a +`code` property of `EMISSINGARG`, `EINVALIDTYPE` or `ETOOMANYARGS`. + +If you pass in an invalid type then it will throw with a code of +`EUNKNOWNTYPE`. + +If an **error** argument is found and is not null then the remaining +arguments are optional. That is, if you say `ESO` then that's like using a +non-magical `E` in: `E|ESO|ZSO`. + +### But I have optional arguments?! + +You can provide more than one signature by separating them with pipes `|`. +If any signature matches the arguments then they'll be considered valid. + +So for example, say you wanted to write a signature for +`fs.createWriteStream`. The docs for it describe it thusly: + +``` +fs.createWriteStream(path[, options]) +``` + +This would be a signature of `SO|S`. That is, a string and an object, or +just a string. + +Now, if you read the full `fs` docs, you'll see that actually path can ALSO +be a buffer. And options can be a string, that is: +``` +path <String> | <Buffer> +options <String> | <Object> +``` + +To reproduce this you have to fully enumerate all of the possible +combinations and that implies a signature of `SO|SS|OO|OS|S|O`. The +awkwardness is a feature: It reminds you of the complexity you're adding to +your API when you do this sort of thing. + + +### Browser support + +This has no dependencies and should work in browsers, though you'll have +noisier stack traces. + +### Why this exists + +I wanted a very simple argument validator.
It needed to do two things: + +1. Be more concise and easier to use than assertions + +2. Not encourage an infinite bikeshed of DSLs + +This is why types are specified by a single character and there's no such +thing as an optional argument. + +This is not intended to validate user data. This is specifically about +asserting the interface of your functions. + +If you need greater validation, I encourage you to write them by hand or +look elsewhere. + diff --git a/node_modules/aproba/index.js b/node_modules/aproba/index.js new file mode 100644 index 0000000..fd94748 --- /dev/null +++ b/node_modules/aproba/index.js @@ -0,0 +1,105 @@ +'use strict' +module.exports = validate + +function isArguments (thingy) { + return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee') +} + +const types = { + '*': {label: 'any', check: () => true}, + A: {label: 'array', check: _ => Array.isArray(_) || isArguments(_)}, + S: {label: 'string', check: _ => typeof _ === 'string'}, + N: {label: 'number', check: _ => typeof _ === 'number'}, + F: {label: 'function', check: _ => typeof _ === 'function'}, + O: {label: 'object', check: _ => typeof _ === 'object' && _ != null && !types.A.check(_) && !types.E.check(_)}, + B: {label: 'boolean', check: _ => typeof _ === 'boolean'}, + E: {label: 'error', check: _ => _ instanceof Error}, + Z: {label: 'null', check: _ => _ == null} +} + +function addSchema (schema, arity) { + const group = arity[schema.length] = arity[schema.length] || [] + if (group.indexOf(schema) === -1) group.push(schema) +} + +function validate (rawSchemas, args) { + if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length) + if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas') + if (!args) throw missingRequiredArg(1, 'args') + if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas) + if (!types.A.check(args)) throw invalidType(1, ['array'], args) + const schemas = rawSchemas.split('|') + const arity = {} + + schemas.forEach(schema => { + for (let ii = 0; ii < schema.length; ++ii) { + const type = schema[ii] + if (!types[type]) throw unknownType(ii, type) + } + if (/E.*E/.test(schema)) throw moreThanOneError(schema) + addSchema(schema, arity) + if (/E/.test(schema)) { + addSchema(schema.replace(/E.*$/, 'E'), arity) + addSchema(schema.replace(/E/, 'Z'), arity) + if (schema.length === 1) addSchema('', arity) + } + }) + let matching = arity[args.length] + if (!matching) { + throw wrongNumberOfArgs(Object.keys(arity), args.length) + } + for (let ii = 0; ii < args.length; ++ii) { + let newMatching = matching.filter(schema => { + const type = schema[ii] + const typeCheck = types[type].check + return typeCheck(args[ii]) + }) + if (!newMatching.length) { + const labels = matching.map(_ => types[_[ii]].label).filter(_ => _ != null) + throw invalidType(ii, labels, args[ii]) + } + matching = newMatching + } +} + +function missingRequiredArg (num) { + return newException('EMISSINGARG', 'Missing required argument #' + (num + 1)) +} + +function unknownType (num, type) { + return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1)) +} + +function invalidType (num, expectedTypes, value) { + let valueType + Object.keys(types).forEach(typeCode => { + if (types[typeCode].check(value)) valueType = types[typeCode].label + }) + return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' + + englishList(expectedTypes) + ' but got ' + valueType) +} + +function englishList (list) { + return list.join(', 
').replace(/, ([^,]+)$/, ' or $1') +} + +function wrongNumberOfArgs (expected, got) { + const english = englishList(expected) + const args = expected.every(ex => ex.length === 1) + ? 'argument' + : 'arguments' + return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got) +} + +function moreThanOneError (schema) { + return newException('ETOOMANYERRORTYPES', + 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"') +} + +function newException (code, msg) { + const err = new Error(msg) + err.code = code + /* istanbul ignore else */ + if (Error.captureStackTrace) Error.captureStackTrace(err, validate) + return err +} diff --git a/node_modules/aproba/package.json b/node_modules/aproba/package.json new file mode 100644 index 0000000..d2212d3 --- /dev/null +++ b/node_modules/aproba/package.json @@ -0,0 +1,35 @@ +{ + "name": "aproba", + "version": "2.0.0", + "description": "A ridiculously light-weight argument validator (now browser friendly)", + "main": "index.js", + "directories": { + "test": "test" + }, + "dependencies": {}, + "devDependencies": { + "standard": "^11.0.1", + "tap": "^12.0.1" + }, + "files": [ + "index.js" + ], + "scripts": { + "pretest": "standard", + "test": "tap --100 -J test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/aproba" + }, + "keywords": [ + "argument", + "validate" + ], + "author": "Rebecca Turner ", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/aproba/issues" + }, + "homepage": "https://github.com/iarna/aproba" +} diff --git a/node_modules/are-we-there-yet/LICENSE.md b/node_modules/are-we-there-yet/LICENSE.md new file mode 100644 index 0000000..845be76 --- /dev/null +++ b/node_modules/are-we-there-yet/LICENSE.md @@ -0,0 +1,18 @@ +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/are-we-there-yet/README.md b/node_modules/are-we-there-yet/README.md new file mode 100644 index 0000000..caae19b --- /dev/null +++ b/node_modules/are-we-there-yet/README.md @@ -0,0 +1,208 @@ +are-we-there-yet +---------------- + +Track complex hierarchies of asynchronous task completion statuses. This is +intended to give you a way of recording and reporting the progress of the big +recursive fan-out and gather type workflows that are so common in async. + +What you do with this completion data is up to you, but the most common use case is to +feed it to one of the many progress bar modules. + +Most progress bar modules include a rudimentary version of this, but my +needs were more complex. 
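Before the full usage example below, here is a minimal sketch of the typical wiring (a console readout stands in for a real progress-bar module; the names are illustrative):

```javascript
var TrackerGroup = require("are-we-there-yet").TrackerGroup

var top = new TrackerGroup("install")

// Re-render an overall percentage on every change event.
top.on("change", function (name, completed) {
  process.stdout.write("\r" + name + ": " + Math.round(completed * 100) + "%")
})

var download = top.newItem("download", 200)
download.completeWork(50) // prints "download: 25%"
```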
+ +Usage +===== + +```javascript +var TrackerGroup = require("are-we-there-yet").TrackerGroup + +var top = new TrackerGroup("program") + +var single = top.newItem("one thing", 100) +single.completeWork(20) + +console.log(top.completed()) // 0.2 + +fs.stat("file", function(er, stat) { + if (er) throw er + var stream = top.newStream("file", stat.size) + console.log(top.completed()) // now 0.1 as single is 50% of the job and is 20% complete + // and 50% * 20% == 10% + fs.createReadStream("file").pipe(stream).on("data", function (chunk) { + // do stuff with chunk + }) + top.on("change", function (name) { + // called each time a chunk is read from "file" + // top.completed() will start at 0.1 and fill up to 0.6 as the file is read + }) +}) +``` + +Shared Methods +============== + +* var completed = tracker.completed() + +Implemented in: `Tracker`, `TrackerGroup`, `TrackerStream` + +Returns the ratio of completed work to work to be done. Range of 0 to 1. + +* tracker.finish() + +Implemented in: `Tracker`, `TrackerGroup` + +Marks the tracker as completed. With a TrackerGroup this marks all of its +components as completed. + +Marks all of the components of this tracker as finished, which in turn means +that `tracker.completed()` for this will now be 1. + +This will result in one or more `change` events being emitted. + +Events +====== + +All tracker objects emit `change` events with the following arguments: + +``` +function (name, completed, tracker) +``` + +`name` is the name of the tracker that originally emitted the event, +or if it didn't have one, the first containing tracker group that had one. + +`completed` is the percent complete (as returned by `tracker.completed()` method). + +`tracker` is the tracker object that you are listening for events on. + +TrackerGroup +============ + +* var tracker = new TrackerGroup(**name**) + + * **name** *(optional)* - The name of this tracker group, used in change + notifications if the component updating didn't have a name. Defaults to undefined. + +Creates a new empty tracker aggregation group. These are trackers whose +completion status is determined by the completion status of other trackers added to this aggregation group. + +Ex. + +```javascript +var tracker = new TrackerGroup("parent") +var foo = tracker.newItem("firstChild", 100) +var bar = tracker.newItem("secondChild", 100) + +foo.finish() +console.log(tracker.completed()) // 0.5 +bar.finish() +console.log(tracker.completed()) // 1 +``` + +* tracker.addUnit(**otherTracker**, **weight**) + + * **otherTracker** - Any of the other are-we-there-yet tracker objects + * **weight** *(optional)* - The weight to give the tracker, defaults to 1. + +Adds the **otherTracker** to this aggregation group. The weight determines +how long you expect this tracker to take to complete in proportion to other +units. So for instance, if you add one tracker with a weight of 1 and +another with a weight of 2, you're saying the second will take twice as long +to complete as the first. As such, the first will account for 33% of the +completion of this tracker and the second will account for the other 67%. + +Returns **otherTracker**. 
+ +* var subGroup = tracker.newGroup(**name**, **weight**) + +The above is exactly equivalent to: + +```javascript + var subGroup = tracker.addUnit(new TrackerGroup(name), weight) +``` + +* var subItem = tracker.newItem(**name**, **todo**, **weight**) + +The above is exactly equivalent to: + +```javascript + var subItem = tracker.addUnit(new Tracker(name, todo), weight) +``` + +* var subStream = tracker.newStream(**name**, **todo**, **weight**) + +The above is exactly equivalent to: + +```javascript + var subStream = tracker.addUnit(new TrackerStream(name, todo), weight) +``` + +* console.log( tracker.debug() ) + +Returns a tree showing the completion of this tracker group and all of its +children, including recursively entering all of the children. + +Tracker +======= + +* var tracker = new Tracker(**name**, **todo**) + + * **name** *(optional)* The name of this counter to report in change + events. Defaults to undefined. + * **todo** *(optional)* The amount of work todo (a number). Defaults to 0. + +Ordinarily these are constructed as a part of a tracker group (via +`newItem`). + +* var completed = tracker.completed() + +Returns the ratio of completed work to work to be done. Range of 0 to 1. If +total work to be done is 0 then it will return 0. + +* tracker.addWork(**todo**) + + * **todo** A number to add to the amount of work to be done. + +Increases the amount of work to be done, thus decreasing the completion +percentage. Triggers a `change` event. + +* tracker.completeWork(**completed**) + + * **completed** A number to add to the work complete + +Increase the amount of work complete, thus increasing the completion percentage. +Will never increase the work completed past the amount of work todo. That is, +percentages > 100% are not allowed. Triggers a `change` event. + +* tracker.finish() + +Marks this tracker as finished, tracker.completed() will now be 1. Triggers +a `change` event. + +TrackerStream +============= + +* var tracker = new TrackerStream(**name**, **size**, **options**) + + * **name** *(optional)* The name of this counter to report in change + events. Defaults to undefined. + * **size** *(optional)* The number of bytes being sent through this stream. + * **options** *(optional)* A hash of stream options + +The tracker stream object is a pass through stream that updates an internal +tracker object each time a block passes through. It's intended to track +downloads, file extraction and other related activities. You use it by piping +your data source into it and then using it as your data source. + +If your data has a length attribute then that's used as the amount of work +completed when the chunk is passed through. If it does not (eg, object +streams) then each chunk counts as completing 1 unit of work, so your size +should be the total number of objects being streamed. + +* tracker.addWork(**todo**) + + * **todo** Increase the expected overall size by **todo** bytes. + +Increases the amount of work to be done, thus decreasing the completion +percentage. Triggers a `change` event. 
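Complementing the `TrackerStream` documentation above, a short sketch of byte-level progress while copying a file; "input.txt" is an assumed path used only for illustration:

```javascript
var fs = require("fs")
var TrackerStream = require("are-we-there-yet").TrackerStream

fs.stat("input.txt", function (er, stat) {
  if (er) throw er
  // The file's size seeds the expected amount of work.
  var tracker = new TrackerStream("copy", stat.size)
  tracker.on("change", function (name, completed) {
    console.log(name + ": " + (completed * 100).toFixed(1) + "%")
  })
  fs.createReadStream("input.txt")
    .pipe(tracker)
    .pipe(fs.createWriteStream("input-copy.txt"))
})
```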
diff --git a/node_modules/are-we-there-yet/lib/index.js b/node_modules/are-we-there-yet/lib/index.js new file mode 100644 index 0000000..57d8743 --- /dev/null +++ b/node_modules/are-we-there-yet/lib/index.js @@ -0,0 +1,4 @@ +'use strict' +exports.TrackerGroup = require('./tracker-group.js') +exports.Tracker = require('./tracker.js') +exports.TrackerStream = require('./tracker-stream.js') diff --git a/node_modules/are-we-there-yet/lib/tracker-base.js b/node_modules/are-we-there-yet/lib/tracker-base.js new file mode 100644 index 0000000..6f43687 --- /dev/null +++ b/node_modules/are-we-there-yet/lib/tracker-base.js @@ -0,0 +1,11 @@ +'use strict' +var EventEmitter = require('events').EventEmitter +var util = require('util') + +var trackerId = 0 +var TrackerBase = module.exports = function (name) { + EventEmitter.call(this) + this.id = ++trackerId + this.name = name +} +util.inherits(TrackerBase, EventEmitter) diff --git a/node_modules/are-we-there-yet/lib/tracker-group.js b/node_modules/are-we-there-yet/lib/tracker-group.js new file mode 100644 index 0000000..a3c7af8 --- /dev/null +++ b/node_modules/are-we-there-yet/lib/tracker-group.js @@ -0,0 +1,116 @@ +'use strict' +var util = require('util') +var TrackerBase = require('./tracker-base.js') +var Tracker = require('./tracker.js') +var TrackerStream = require('./tracker-stream.js') + +var TrackerGroup = module.exports = function (name) { + TrackerBase.call(this, name) + this.parentGroup = null + this.trackers = [] + this.completion = {} + this.weight = {} + this.totalWeight = 0 + this.finished = false + this.bubbleChange = bubbleChange(this) +} +util.inherits(TrackerGroup, TrackerBase) + +function bubbleChange (trackerGroup) { + return function (name, completed, tracker) { + trackerGroup.completion[tracker.id] = completed + if (trackerGroup.finished) { + return + } + trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup) + } +} + +TrackerGroup.prototype.nameInTree = function () { + var names = [] + var from = this + while (from) { + names.unshift(from.name) + from = from.parentGroup + } + return names.join('/') +} + +TrackerGroup.prototype.addUnit = function (unit, weight) { + if (unit.addUnit) { + var toTest = this + while (toTest) { + if (unit === toTest) { + throw new Error( + 'Attempted to add tracker group ' + + unit.name + ' to tree that already includes it ' + + this.nameInTree(this)) + } + toTest = toTest.parentGroup + } + unit.parentGroup = this + } + this.weight[unit.id] = weight || 1 + this.totalWeight += this.weight[unit.id] + this.trackers.push(unit) + this.completion[unit.id] = unit.completed() + unit.on('change', this.bubbleChange) + if (!this.finished) { + this.emit('change', unit.name, this.completion[unit.id], unit) + } + return unit +} + +TrackerGroup.prototype.completed = function () { + if (this.trackers.length === 0) { + return 0 + } + var valPerWeight = 1 / this.totalWeight + var completed = 0 + for (var ii = 0; ii < this.trackers.length; ii++) { + var trackerId = this.trackers[ii].id + completed += + valPerWeight * this.weight[trackerId] * this.completion[trackerId] + } + return completed +} + +TrackerGroup.prototype.newGroup = function (name, weight) { + return this.addUnit(new TrackerGroup(name), weight) +} + +TrackerGroup.prototype.newItem = function (name, todo, weight) { + return this.addUnit(new Tracker(name, todo), weight) +} + +TrackerGroup.prototype.newStream = function (name, todo, weight) { + return this.addUnit(new TrackerStream(name, todo), weight) +} + 
+TrackerGroup.prototype.finish = function () { + this.finished = true + if (!this.trackers.length) { + this.addUnit(new Tracker(), 1, true) + } + for (var ii = 0; ii < this.trackers.length; ii++) { + var tracker = this.trackers[ii] + tracker.finish() + tracker.removeListener('change', this.bubbleChange) + } + this.emit('change', this.name, 1, this) +} + +var buffer = ' ' +TrackerGroup.prototype.debug = function (depth) { + depth = depth || 0 + var indent = depth ? buffer.slice(0, depth) : '' + var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n' + this.trackers.forEach(function (tracker) { + if (tracker instanceof TrackerGroup) { + output += tracker.debug(depth + 1) + } else { + output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n' + } + }) + return output +} diff --git a/node_modules/are-we-there-yet/lib/tracker-stream.js b/node_modules/are-we-there-yet/lib/tracker-stream.js new file mode 100644 index 0000000..e1cf850 --- /dev/null +++ b/node_modules/are-we-there-yet/lib/tracker-stream.js @@ -0,0 +1,36 @@ +'use strict' +var util = require('util') +var stream = require('readable-stream') +var delegate = require('delegates') +var Tracker = require('./tracker.js') + +var TrackerStream = module.exports = function (name, size, options) { + stream.Transform.call(this, options) + this.tracker = new Tracker(name, size) + this.name = name + this.id = this.tracker.id + this.tracker.on('change', delegateChange(this)) +} +util.inherits(TrackerStream, stream.Transform) + +function delegateChange (trackerStream) { + return function (name, completion, tracker) { + trackerStream.emit('change', name, completion, trackerStream) + } +} + +TrackerStream.prototype._transform = function (data, encoding, cb) { + this.tracker.completeWork(data.length ? data.length : 1) + this.push(data) + cb() +} + +TrackerStream.prototype._flush = function (cb) { + this.tracker.finish() + cb() +} + +delegate(TrackerStream.prototype, 'tracker') + .method('completed') + .method('addWork') + .method('finish') diff --git a/node_modules/are-we-there-yet/lib/tracker.js b/node_modules/are-we-there-yet/lib/tracker.js new file mode 100644 index 0000000..a8f8b3b --- /dev/null +++ b/node_modules/are-we-there-yet/lib/tracker.js @@ -0,0 +1,32 @@ +'use strict' +var util = require('util') +var TrackerBase = require('./tracker-base.js') + +var Tracker = module.exports = function (name, todo) { + TrackerBase.call(this, name) + this.workDone = 0 + this.workTodo = todo || 0 +} +util.inherits(Tracker, TrackerBase) + +Tracker.prototype.completed = function () { + return this.workTodo === 0 ? 
0 : this.workDone / this.workTodo +} + +Tracker.prototype.addWork = function (work) { + this.workTodo += work + this.emit('change', this.name, this.completed(), this) +} + +Tracker.prototype.completeWork = function (work) { + this.workDone += work + if (this.workDone > this.workTodo) { + this.workDone = this.workTodo + } + this.emit('change', this.name, this.completed(), this) +} + +Tracker.prototype.finish = function () { + this.workTodo = this.workDone = 1 + this.emit('change', this.name, 1, this) +} diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/are-we-there-yet/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000..f478d58 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/GOVERNANCE.md b/node_modules/are-we-there-yet/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000..16ffb93 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. 
+ +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. 
Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/LICENSE b/node_modules/are-we-there-yet/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..2873b3b --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/README.md b/node_modules/are-we-there-yet/node_modules/readable-stream/README.md new file mode 100644 index 0000000..19117c1 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.18.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +## Version 3.x.x + +v3.x.x of `readable-stream` is a cut from Node 10. This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: + +1. Error codes: https://github.com/nodejs/node/pull/13310, + https://github.com/nodejs/node/pull/13291, + https://github.com/nodejs/node/pull/16589, + https://github.com/nodejs/node/pull/15042, + https://github.com/nodejs/node/pull/15665, + https://github.com/nodejs/readable-stream/pull/344 +2. 'readable' have precedence over flowing + https://github.com/nodejs/node/pull/18994 +3. make virtual methods errors consistent + https://github.com/nodejs/node/pull/18813 +4. updated streams error handling + https://github.com/nodejs/node/pull/18438 +5. writable.end should return this. + https://github.com/nodejs/node/pull/18780 +6. readable continues to read when push('') + https://github.com/nodejs/node/pull/18211 +7. add custom inspect to BufferList + https://github.com/nodejs/node/pull/17907 +8. always defer 'readable' with nextTick + https://github.com/nodejs/node/pull/17979 + +## Version 2.x.x +v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. 
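Among the Node 10 APIs this v3 line mirrors are `pipeline` and `finished`. A minimal sketch of `pipeline`, which forwards an error from any stage to a single callback; the file names are assumed for illustration:

```js
const { pipeline, Transform } = require('readable-stream')
const fs = require('fs')

// A tiny transform stage: upper-cases each chunk as it passes through.
const upper = new Transform({
  transform (chunk, encoding, callback) {
    callback(null, chunk.toString().toUpperCase())
  }
})

// An error from any of the three streams arrives in one callback.
pipeline(
  fs.createReadStream('in.txt'),
  upper,
  fs.createWriteStream('out.txt'),
  err => {
    if (err) console.error('pipeline failed:', err)
  }
)
```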
+
+### Big Thanks
+
+Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce]
+
+# Usage
+
+You can swap your `require('stream')` with `require('readable-stream')`
+without any changes, if you are just using one of the main classes and
+functions.
+
+```js
+const {
+  Readable,
+  Writable,
+  Transform,
+  Duplex,
+  pipeline,
+  finished
+} = require('readable-stream')
+```
+
+Note that `require('stream')` will return `Stream`, while
+`require('readable-stream')` will return `Readable`. We discourage using
+whatever is exported directly; instead, use one of the named properties,
+as shown in the example above.
+
+# Streams Working Group
+
+`readable-stream` is maintained by the Streams Working Group, which
+oversees the development and maintenance of the Streams API within
+Node.js. The responsibilities of the Streams Working Group include:
+
+* Addressing stream issues on the Node.js issue tracker.
+* Authoring and editing stream documentation within the Node.js project.
+* Reviewing changes to stream subclasses within the Node.js project.
+* Redirecting changes to streams from the Node.js project to this
+  project.
+* Assisting in the implementation of stream providers within Node.js.
+* Recommending versions of `readable-stream` to be included in Node.js.
+* Messaging about the future of streams to give the community advance
+  notice of changes.
+
+
+## Team Members
+
+* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com>
+  - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
+* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com>
+* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com>
+  - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
+* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com>
+* **Yoshua Wuyts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com>
+
+[sauce]: https://saucelabs.com
diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/errors-browser.js b/node_modules/are-we-there-yet/node_modules/readable-stream/errors-browser.js
new file mode 100644
index 0000000..fb8e73e
--- /dev/null
+++ b/node_modules/are-we-there-yet/node_modules/readable-stream/errors-browser.js
@@ -0,0 +1,127 @@
+'use strict';
+
+function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
+
+var codes = {};
+
+function createErrorType(code, message, Base) {
+  if (!Base) {
+    Base = Error;
+  }
+
+  function getMessage(arg1, arg2, arg3) {
+    if (typeof message === 'string') {
+      return message;
+    } else {
+      return message(arg1, arg2, arg3);
+    }
+  }
+
+  var NodeError =
+  /*#__PURE__*/
+  function (_Base) {
+    _inheritsLoose(NodeError, _Base);
+
+    function NodeError(arg1, arg2, arg3) {
+      return _Base.call(this, getMessage(arg1, arg2, arg3)) || this;
+    }
+
+    return NodeError;
+  }(Base);
+
+  NodeError.prototype.name = Base.name;
+  NodeError.prototype.code = code;
+  codes[code] = NodeError;
+} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js
+
+
+function oneOf(expected, thing) {
+  if (Array.isArray(expected)) {
+    var len = expected.length;
+    expected = expected.map(function (i) {
+      return String(i);
+    });
+
+    if (len > 2) {
+      return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1];
+    } else if (len ===
2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return "of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". 
Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/errors.js b/node_modules/are-we-there-yet/node_modules/readable-stream/errors.js new file mode 100644 index 0000000..8471526 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/experimentalWarning.js b/node_modules/are-we-there-yet/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 0000000..78e8414 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? 
emitExperimentalWarning + : noop; diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..19abfa6 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,126 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); +require('inherits')(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. 
+ var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..24a6bdd --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,37 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; +var Transform = require('./_stream_transform'); +require('inherits')(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..df1f608 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1027 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
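+
+// A minimal usage sketch of the public interface implemented in this file,
+// assuming only the exported `Readable` class; the string chunk below is a
+// placeholder value:
+//
+//   const { Readable } = require('readable-stream')
+//   const r = new Readable({
+//     read () {              // called whenever the consumer wants more data
+//       this.push('chunk')   // queue a chunk for the consumer
+//       this.push(null)      // null signals end-of-stream
+//     }
+//   })
+//   r.on('data', (chunk) => console.log(String(chunk)))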
+ +'use strict'; + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +var debugUtil = require('util'); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + +var BufferList = require('./internal/streams/buffer_list'); +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +require('inherits')(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag. 
Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. 
+ this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
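+// For example (a sketch only; `nextChunk` and `isDone` are hypothetical
+// helpers standing in for a real data source):
+//
+//   const ok = stream.push(nextChunk())
+//   if (!ok) {
+//     // highWaterMark reached: stop pushing until _read() is called again
+//   }
+//   if (isDone()) stream.push(null) // pushing null ends the readable side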
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. 
+ // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. + var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. + // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. 
It is below the highWaterMark, so we can schedule
+//    another readable later.
+  state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark;
+  flow(stream);
+}
+
+// at this point, the user has presumably seen the 'readable' event,
+// and called read() to consume some data. that may have triggered
+// in turn another _read(n) call, in which case reading = true if
+// it's in progress.
+// However, if we're not ended, or reading, and the length < hwm,
+// then go ahead and try to read some more preemptively.
+function maybeReadMore(stream, state) {
+  if (!state.readingMore) {
+    state.readingMore = true;
+    process.nextTick(maybeReadMore_, stream, state);
+  }
+}
+function maybeReadMore_(stream, state) {
+  // Attempt to read more data if we should.
+  //
+  // The conditions for reading more data are (one of):
+  // - Not enough data buffered (state.length < state.highWaterMark). The loop
+  //   is responsible for filling the buffer with enough data if such data
+  //   is available. If highWaterMark is 0 and we are not in the flowing mode
+  //   we should _not_ attempt to buffer any extra data. We'll get more data
+  //   when the stream consumer calls read() instead.
+  // - No data in the buffer, and the stream is in flowing mode. In this mode
+  //   the loop below is responsible for ensuring read() is called. Failing to
+  //   call read here would abort the flow and there's no other mechanism for
+  //   continuing the flow if the stream consumer has just subscribed to the
+  //   'data' event.
+  //
+  // In addition to the above conditions to keep reading data, the following
+  // conditions prevent the data from being read:
+  // - The stream has ended (state.ended).
+  // - There is already a pending 'read' operation (state.reading). This is a
+  //   case where the stream has called the implementation-defined _read()
+  //   method, but they are processing the call asynchronously and have _not_
+  //   called push() with new data. In this case we skip performing more
+  //   read()s. The execution ends in this method again after the _read() ends
+  //   up calling push() with more data.
+  while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) {
+    var len = state.length;
+    debug('maybeReadMore read 0');
+    stream.read(0);
+    if (len === state.length)
+      // didn't get any data, stop spinning.
+      break;
+  }
+  state.readingMore = false;
+}
+
+// abstract method. to be overridden in specific implementation classes.
+// call cb(er, data) where data is <= n in length.
+// for virtual (non-string, non-buffer) streams, "length" is somewhat
+// arbitrary, and perhaps not very meaningful.
+Readable.prototype._read = function (n) {
+  errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()'));
+};
+Readable.prototype.pipe = function (dest, pipeOpts) {
+  var src = this;
+  var state = this._readableState;
+  switch (state.pipesCount) {
+    case 0:
+      state.pipes = dest;
+      break;
+    case 1:
+      state.pipes = [state.pipes, dest];
+      break;
+    default:
+      state.pipes.push(dest);
+      break;
+  }
+  state.pipesCount += 1;
+  debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
+  var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
+  var endFn = doEnd ?
onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. 
+ if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. 
However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. +Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. 
+ for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + return createReadableStreamAsyncIterator(this); + }; +} +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..1ccb715 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,190 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. 
If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = require('./_stream_duplex'); +require('inherits')(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. 
+ this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..292415e --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,641 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +require('inherits')(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. 
options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. 
+ this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + + // legacy. + this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. 
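+//
+// In practice (an illustrative sketch, not part of the implementation):
+//
+//   const w = new Writable({ write(chunk, encoding, cb) { cb(); } });
+//   w.write(null); // emits ERR_STREAM_NULL_VALUES
+//   w.write(42);   // emits ERR_INVALID_ARG_TYPE: not a string/Buffer, not object mode
+//
+//   const ow = new Writable({ objectMode: true, write(chunk, encoding, cb) { cb(); } });
+//   ow.write(42);  // ok: any non-null value is accepted in object mode
+//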
+function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. 
+ if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
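+//
+// This ordering is what makes the usual consumer-side backpressure pattern
+// safe (sketch; `writeMore` is an arbitrary continuation):
+//
+//   if (!writable.write(chunk)) {
+//     // 'drain' cannot have been emitted yet, so this listener is never missed
+//     writable.once('drain', writeMore);
+//   }
+//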
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. 
+ state.corkedRequestsFree.next = corkReq; +} +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 0000000..742c5a4 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,180 @@ +'use strict'; + +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var finished = require('./end-of-stream'); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. 
+ return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } + + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { + _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/buffer_list.js 
b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/buffer_list.js new file mode 100644 index 0000000..69bda49 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,183 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var _require = require('buffer'), + Buffer = _require.Buffer; +var _require2 = require('util'), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. + ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? 
buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. + customInspect: false + })); + } + }]); + return BufferList; +}(); \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000..31a17c4 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,96 @@ +'use strict'; + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if (self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so 
changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. + + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 0000000..59c671b --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). + +'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', 
onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/from-browser.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 0000000..a4ce56f --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/from.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 0000000..0a34ee9 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,52 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. + var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/pipeline.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 0000000..e6f3924 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). 
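+//
+// Typical usage (a sketch; the file names are illustrative):
+//
+//   pipeline(
+//     fs.createReadStream('archive.gz'),
+//     zlib.createGunzip(),
+//     fs.createWriteStream('archive'),
+//     function (err) {
+//       if (err) console.error('pipeline failed', err);
+//     }
+//   );
+//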
+ +'use strict'; + +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; + + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/state.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/state.js new file mode 100644 index 0000000..3fbf892 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,22 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 
16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000..9332a3f --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000..ce2ad5b --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/package.json b/node_modules/are-we-there-yet/node_modules/readable-stream/package.json new file mode 100644 index 0000000..ade59e7 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/package.json @@ -0,0 +1,68 @@ +{ + "name": "readable-stream", + "version": "3.6.2", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "engines": { + "node": ">= 6" + }, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "scripts": { + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "test-browsers": "airtap --sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/readable-browser.js b/node_modules/are-we-there-yet/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000..adbf60d --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = 
require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/node_modules/are-we-there-yet/node_modules/readable-stream/readable.js b/node_modules/are-we-there-yet/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..9e0ca12 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/node_modules/are-we-there-yet/node_modules/safe-buffer/LICENSE b/node_modules/are-we-there-yet/node_modules/safe-buffer/LICENSE new file mode 100644 index 0000000..0c068ce --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/are-we-there-yet/node_modules/safe-buffer/README.md b/node_modules/are-we-there-yet/node_modules/safe-buffer/README.md new file mode 100644 index 0000000..e9a81af --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: <Buffer 88 13 a0 0f> + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: <Buffer 88 13 70 17> +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`.
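Since both views wrap the same `ArrayBuffer`, the sharing shown above works in the other direction as well; a quick sketch (the printed value assumes a little-endian machine, where byte 0 is the low byte):

```js
const arr = new Uint16Array(2);
const buf = Buffer.from(arr.buffer);

buf[0] = 0xff;       // write through the Buffer view...
console.log(arr[0]); // ...and read it back through the TypedArray: 255
```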
+ +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // <Buffer 00 00 00 00 00> +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // <Buffer 61 61 61 61 61> +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // <Buffer 68 65 6c 6c 6f 20 77 6f 72 6c 64> +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // <Buffer 78 e0 82 02 01> + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // <Buffer 00 00 00 00 00> +``` + +A `TypeError` will be thrown if `size` is not a number.
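To make the alloc/allocUnsafe trade-off above concrete, here is a small sketch: both buffers end up zeroed, but only `Buffer.alloc` guarantees it; the `allocUnsafe` buffer is safe solely because we overwrite it ourselves:

```js
const safe = Buffer.alloc(8);               // guaranteed zero-filled
const fast = Buffer.allocUnsafe(8).fill(0); // zeroed only because we call fill()

console.log(safe.equals(fast)); // true -- but drop the fill() and `fast`
                                // may contain whatever was left in the pool
```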
+ +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and clean up as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +`Buffer.allocUnsafeSlow()` should be used only as a last resort, *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`.
+ +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer`, you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequence of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it.
We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)`, called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory; otherwise, performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory.
But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht`, which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for `bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT.
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/are-we-there-yet/node_modules/safe-buffer/index.d.ts b/node_modules/are-we-there-yet/node_modules/safe-buffer/index.d.ts new file mode 100644 index 0000000..e9fed80 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): 
number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. 
+ * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initializing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/safe-buffer/index.js b/node_modules/are-we-there-yet/node_modules/safe-buffer/index.js new file mode 100644 index 0000000..f8d3ec9 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/safe-buffer/index.js @@ -0,0 +1,65 @@ +/*! safe-buffer. MIT License.
Feross Aboukhadijeh */ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.prototype = Object.create(Buffer.prototype) + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/are-we-there-yet/node_modules/safe-buffer/package.json b/node_modules/are-we-there-yet/node_modules/safe-buffer/package.json new file mode 100644 index 0000000..f2869e2 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/safe-buffer/package.json @@ -0,0 +1,51 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.2.1", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "https://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^5.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] +} diff --git a/node_modules/are-we-there-yet/node_modules/string_decoder/LICENSE b/node_modules/are-we-there-yet/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000..778edb2 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/node_modules/are-we-there-yet/node_modules/string_decoder/README.md b/node_modules/are-we-there-yet/node_modules/string_decoder/README.md new file mode 100644 index 0000000..5fd5831 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. + +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. 
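A minimal usage sketch of the decoder this package exposes (the API mirrors the core module: `write()` buffers a trailing partial character instead of emitting replacement garbage):

```js
const { StringDecoder } = require('string_decoder');
const decoder = new StringDecoder('utf8');

// '€' is three UTF-8 bytes (e2 82 ac); feed them split across two chunks.
console.log(decoder.write(Buffer.from([0xe2, 0x82]))); // '' (incomplete, buffered)
console.log(decoder.write(Buffer.from([0xac])));       // '€'
```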
+ +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/are-we-there-yet/node_modules/string_decoder/lib/string_decoder.js b/node_modules/are-we-there-yet/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000..2e89e63 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. 
+function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/are-we-there-yet/node_modules/string_decoder/package.json b/node_modules/are-we-there-yet/node_modules/string_decoder/package.json new file mode 100644 index 0000000..b2bb141 --- /dev/null +++ b/node_modules/are-we-there-yet/node_modules/string_decoder/package.json @@ -0,0 +1,34 @@ +{ + "name": "string_decoder", + "version": "1.3.0", + "description": "The string_decoder module from Node core", + "main": "lib/string_decoder.js", + "files": [ + "lib" + ], + "dependencies": { + "safe-buffer": "~5.2.0" + }, + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "scripts": { + "test": "tap test/parallel/*.js && node test/verify-dependencies", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT" +} diff --git a/node_modules/are-we-there-yet/package.json b/node_modules/are-we-there-yet/package.json new file mode 100644 index 0000000..cc3d750 --- /dev/null +++ b/node_modules/are-we-there-yet/package.json @@ -0,0 +1,56 @@ +{ + "name": "are-we-there-yet", + "version": "3.0.1", + "description": "Keep track of the overall completion of many disparate processes", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "snap": "tap", + "postlint": "template-oss-check", + 
"template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/are-we-there-yet.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/are-we-there-yet/issues" + }, + "homepage": "https://github.com/npm/are-we-there-yet", + "devDependencies": { + "@npmcli/eslint-config": "^3.0.1", + "@npmcli/template-oss": "3.5.0", + "tap": "^16.0.1" + }, + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + }, + "tap": { + "branches": 68, + "statements": 92, + "functions": 86, + "lines": 92 + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "3.5.0" + } +} diff --git a/node_modules/arrify/index.js b/node_modules/arrify/index.js new file mode 100644 index 0000000..2a2fdee --- /dev/null +++ b/node_modules/arrify/index.js @@ -0,0 +1,8 @@ +'use strict'; +module.exports = function (val) { + if (val === null || val === undefined) { + return []; + } + + return Array.isArray(val) ? val : [val]; +}; diff --git a/node_modules/arrify/license b/node_modules/arrify/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/arrify/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/arrify/package.json b/node_modules/arrify/package.json new file mode 100644 index 0000000..1e63e59 --- /dev/null +++ b/node_modules/arrify/package.json @@ -0,0 +1,33 @@ +{ + "name": "arrify", + "version": "1.0.1", + "description": "Convert a value to an array", + "license": "MIT", + "repository": "sindresorhus/arrify", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "array", + "arr", + "arrify", + "arrayify", + "convert", + "value" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/arrify/readme.md b/node_modules/arrify/readme.md new file mode 100644 index 0000000..183d075 --- /dev/null +++ b/node_modules/arrify/readme.md @@ -0,0 +1,36 @@ +# arrify [![Build Status](https://travis-ci.org/sindresorhus/arrify.svg?branch=master)](https://travis-ci.org/sindresorhus/arrify) + +> Convert a value to an array + + +## Install + +``` +$ npm install --save arrify +``` + + +## Usage + +```js +const arrify = require('arrify'); + +arrify('unicorn'); +//=> ['unicorn'] + +arrify(['unicorn']); +//=> ['unicorn'] + +arrify(null); +//=> [] + +arrify(undefined); +//=> [] +``` + +*Supplying `null` or `undefined` results in an empty array.* + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/async-foreach/LICENSE-MIT b/node_modules/async-foreach/LICENSE-MIT new file mode 100644 index 0000000..7505716 --- /dev/null +++ b/node_modules/async-foreach/LICENSE-MIT @@ -0,0 +1,22 @@ +Copyright (c) 2011 "Cowboy" Ben Alman + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/async-foreach/README.md b/node_modules/async-foreach/README.md new file mode 100644 index 0000000..f574204 --- /dev/null +++ b/node_modules/async-foreach/README.md @@ -0,0 +1,195 @@ +# JavaScript Sync/Async forEach + +An optionally-asynchronous forEach with an interesting interface. 
+ +## Getting Started + +This code should work just fine in Node.js: + +First, install the module with: `npm install async-foreach` + +```javascript +var forEach = require('async-foreach').forEach; +forEach(["a", "b", "c"], function(item, index, arr) { + console.log("each", item, index, arr); +}); +// logs: +// each a 0 ["a", "b", "c"] +// each b 1 ["a", "b", "c"] +// each c 2 ["a", "b", "c"] +``` + +Or in the browser: + +```html +<script src="dist/ba-foreach.min.js"></script> +<script> +forEach(["a", "b", "c"], function(item, index, arr) { + console.log("each", item, index, arr); +}); +</script> +``` + +In the browser, you can attach the forEach method to any object. + +```html +<script> +// expose the library on a custom object by defining a global `exports` first +var exports = myLib = {}; +</script> +<script src="dist/ba-foreach.min.js"></script> +<script> +myLib.forEach(["a", "b", "c"], function(item, index, arr) { + console.log("each", item, index, arr); +}); +</script> +``` + +## The General Idea (Why I thought this was worth sharing) + +The idea is to allow the callback to decide _at runtime_ whether the loop will be synchronous or asynchronous. By using `this` in a creative way (in situations where that value isn't already spoken for), an entire control API can be offered without over-complicating function signatures. + +```javascript +forEach(arr, function(item, index) { + // Synchronous. +}); + +forEach(arr, function(item, index) { + // Only when `this.async` is called does iteration become asynchronous. The + // loop won't be continued until the `done` function is executed. + var done = this.async(); + // Continue in one second. + setTimeout(done, 1000); +}); + +forEach(arr, function(item, index) { + // Break out of synchronous iteration early by returning false. + return index !== 1; +}); + +forEach(arr, function(item, index) { + // Break out of asynchronous iteration early... + var done = this.async(); + // ...by passing false to the done function. + setTimeout(function() { + done(index !== 1); + }); +}); +``` + +## Examples +See the unit tests for more examples. + +```javascript +// Generic "done" callback. +function allDone(notAborted, arr) { + console.log("done", notAborted, arr); +} + +// Synchronous. +forEach(["a", "b", "c"], function(item, index, arr) { + console.log("each", item, index, arr); +}, allDone); +// logs: +// each a 0 ["a", "b", "c"] +// each b 1 ["a", "b", "c"] +// each c 2 ["a", "b", "c"] +// done true ["a", "b", "c"] + +// Synchronous with early abort. +forEach(["a", "b", "c"], function(item, index, arr) { + console.log("each", item, index, arr); + if (item === "b") { return false; } +}, allDone); +// logs: +// each a 0 ["a", "b", "c"] +// each b 1 ["a", "b", "c"] +// done false ["a", "b", "c"] + +// Asynchronous. +forEach(["a", "b", "c"], function(item, index, arr) { + console.log("each", item, index, arr); + var done = this.async(); + setTimeout(function() { + done(); + }, 500); +}, allDone); +// logs: +// each a 0 ["a", "b", "c"] +// each b 1 ["a", "b", "c"] +// each c 2 ["a", "b", "c"] +// done true ["a", "b", "c"] + +// Asynchronous with early abort. +forEach(["a", "b", "c"], function(item, index, arr) { + console.log("each", item, index, arr); + var done = this.async(); + setTimeout(function() { + done(item !== "b"); + }, 500); +}, allDone); +// logs: +// each a 0 ["a", "b", "c"] +// each b 1 ["a", "b", "c"] +// done false ["a", "b", "c"] + +// Not actually asynchronous. +forEach(["a", "b", "c"], function(item, index, arr) { + console.log("each", item, index, arr); + var done = this.async(); + done(); +}, allDone); +// logs: +// each a 0 ["a", "b", "c"] +// each b 1 ["a", "b", "c"] +// each c 2 ["a", "b", "c"] +// done true ["a", "b", "c"] + +// Not actually asynchronous with early abort.
+forEach(["a", "b", "c"], function(item, index, arr) { + console.log("each", item, index, arr); + var done = this.async(); + done(item !== "b"); +}, allDone); +// logs: +// each a 0 ["a", "b", "c"] +// each b 1 ["a", "b", "c"] +// done false ["a", "b", "c"] +``` + +## Contributing +In lieu of a formal styleguide, take care to maintain the existing coding style. Add unit tests for any new or changed functionality. Lint and test your code using [grunt](https://github.com/cowboy/grunt). + +_Also, please don't edit files in the "dist" subdirectory as they are generated via grunt. You'll find source code in the "lib" subdirectory!_ + +## Release History + +04/29/2013 +v0.1.3 +Removed hard Node.js version dependency. + +11/17/2011 +v0.1.2 +Adding sparse array support. +Invalid length properties are now sanitized. +This closes issue #1 (like a boss). + +11/11/2011 +v0.1.1 +Refactored code to be much simpler. Yay for unit tests! + +11/11/2011 +v0.1.0 +Initial Release. + +## License +Copyright (c) 2012 "Cowboy" Ben Alman +Licensed under the MIT license. + diff --git a/node_modules/async-foreach/dist/ba-foreach.js b/node_modules/async-foreach/dist/ba-foreach.js new file mode 100644 index 0000000..64c6eb8 --- /dev/null +++ b/node_modules/async-foreach/dist/ba-foreach.js @@ -0,0 +1,58 @@ +/* JavaScript Sync/Async forEach - v0.1.2 - 1/10/2012 + * http://github.com/cowboy/javascript-sync-async-foreach + * Copyright (c) 2012 "Cowboy" Ben Alman; Licensed MIT */ + +(function(exports) { + + // Iterate synchronously or asynchronously. + exports.forEach = function(arr, eachFn, doneFn) { + var i = -1; + // Resolve array length to a valid (ToUint32) number. + var len = arr.length >>> 0; + + // This IIFE is called once now, and then again, by name, for each loop + // iteration. + (function next(result) { + // This flag will be set to true if `this.async` is called inside the + // eachFn` callback. + var async; + // Was false returned from the `eachFn` callback or passed to the + // `this.async` done function? + var abort = result === false; + + // Increment counter variable and skip any indices that don't exist. This + // allows sparse arrays to be iterated. + do { ++i; } while (!(i in arr) && i !== len); + + // Exit if result passed to `this.async` done function or returned from + // the `eachFn` callback was false, or when done iterating. + if (abort || i === len) { + // If a `doneFn` callback was specified, invoke that now. Pass in a + // boolean value representing "not aborted" state along with the array. + if (doneFn) { + doneFn(!abort, arr); + } + return; + } + + // Invoke the `eachFn` callback, setting `this` inside the callback to a + // custom object that contains one method, and passing in the array item, + // index, and the array. + result = eachFn.call({ + // If `this.async` is called inside the `eachFn` callback, set the async + // flag and return a function that can be used to continue iterating. + async: function() { + async = true; + return next; + } + }, arr[i], i, arr); + + // If the async flag wasn't set, continue by calling `next` synchronously, + // passing in the result of the `eachFn` callback. 
+ if (!async) { + next(result); + } + }()); + }; + +}(typeof exports === "object" && exports || this)); \ No newline at end of file diff --git a/node_modules/async-foreach/dist/ba-foreach.min.js b/node_modules/async-foreach/dist/ba-foreach.min.js new file mode 100644 index 0000000..c218a91 --- /dev/null +++ b/node_modules/async-foreach/dist/ba-foreach.min.js @@ -0,0 +1,4 @@ +/* JavaScript Sync/Async forEach - v0.1.2 - 1/10/2012 + * http://github.com/cowboy/javascript-sync-async-foreach + * Copyright (c) 2012 "Cowboy" Ben Alman; Licensed MIT */ +(function(a){a.forEach=function(a,b,c){var d=-1,e=a.length>>>0;(function f(g){var h,j=g===!1;do++d;while(!(d in a)&&d!==e);if(j||d===e){c&&c(!j,a);return}g=b.call({async:function(){return h=!0,f}},a[d],d,a),h||f(g)})()}})(typeof exports=="object"&&exports||this) \ No newline at end of file diff --git a/node_modules/async-foreach/grunt.js b/node_modules/async-foreach/grunt.js new file mode 100644 index 0000000..079a23a --- /dev/null +++ b/node_modules/async-foreach/grunt.js @@ -0,0 +1,48 @@ +/*global config:true, task:true*/ +config.init({ + pkg: '', + meta: { + title: 'JavaScript Sync/Async forEach', + license: ['MIT'], + copyright: 'Copyright (c) 2012 "Cowboy" Ben Alman', + banner: '/* {{meta.title}} - v{{pkg.version}} - {{today "m/d/yyyy"}}\n' + + ' * {{pkg.homepage}}\n' + + ' * {{{meta.copyright}}}; Licensed {{join meta.license}} */' + }, + concat: { + 'dist/ba-foreach.js': ['', ''] + }, + min: { + 'dist/ba-foreach.min.js': ['', 'dist/ba-foreach.js'] + }, + test: { + files: ['test/**/*.js'] + }, + lint: { + files: ['grunt.js', 'lib/**/*.js', 'test/**/*.js'] + }, + watch: { + files: '', + tasks: 'lint:files test:files' + }, + jshint: { + options: { + curly: true, + eqeqeq: true, + immed: true, + latedef: true, + newcap: true, + noarg: true, + sub: true, + undef: true, + eqnull: true + }, + globals: { + exports: true + } + }, + uglify: {} +}); + +// Default task. +task.registerTask('default', 'lint:files test:files concat min'); diff --git a/node_modules/async-foreach/lib/foreach.js b/node_modules/async-foreach/lib/foreach.js new file mode 100644 index 0000000..9ee98b0 --- /dev/null +++ b/node_modules/async-foreach/lib/foreach.js @@ -0,0 +1,63 @@ +/*! + * Sync/Async forEach + * https://github.com/cowboy/javascript-sync-async-foreach + * + * Copyright (c) 2012 "Cowboy" Ben Alman + * Licensed under the MIT license. + * http://benalman.com/about/license/ + */ + +(function(exports) { + + // Iterate synchronously or asynchronously. + exports.forEach = function(arr, eachFn, doneFn) { + var i = -1; + // Resolve array length to a valid (ToUint32) number. + var len = arr.length >>> 0; + + // This IIFE is called once now, and then again, by name, for each loop + // iteration. + (function next(result) { + // This flag will be set to true if `this.async` is called inside the + // eachFn` callback. + var async; + // Was false returned from the `eachFn` callback or passed to the + // `this.async` done function? + var abort = result === false; + + // Increment counter variable and skip any indices that don't exist. This + // allows sparse arrays to be iterated. + do { ++i; } while (!(i in arr) && i !== len); + + // Exit if result passed to `this.async` done function or returned from + // the `eachFn` callback was false, or when done iterating. + if (abort || i === len) { + // If a `doneFn` callback was specified, invoke that now. Pass in a + // boolean value representing "not aborted" state along with the array. 
+ if (doneFn) { + doneFn(!abort, arr); + } + return; + } + + // Invoke the `eachFn` callback, setting `this` inside the callback to a + // custom object that contains one method, and passing in the array item, + // index, and the array. + result = eachFn.call({ + // If `this.async` is called inside the `eachFn` callback, set the async + // flag and return a function that can be used to continue iterating. + async: function() { + async = true; + return next; + } + }, arr[i], i, arr); + + // If the async flag wasn't set, continue by calling `next` synchronously, + // passing in the result of the `eachFn` callback. + if (!async) { + next(result); + } + }()); + }; + +}(typeof exports === "object" && exports || this)); \ No newline at end of file diff --git a/node_modules/async-foreach/package.json b/node_modules/async-foreach/package.json new file mode 100644 index 0000000..5615dce --- /dev/null +++ b/node_modules/async-foreach/package.json @@ -0,0 +1,25 @@ +{ + "author": "\"Cowboy\" Ben Alman (http://benalman.com/)", + "name": "async-foreach", + "description": "An optionally-asynchronous forEach with an interesting interface.", + "version": "0.1.3", + "homepage": "http://github.com/cowboy/javascript-sync-async-foreach", + "bugs": "https://github.com/cowboy/javascript-sync-async-foreach/issues", + "repository": { + "type": "git", + "url": "git://github.com/cowboy/javascript-sync-async-foreach.git" + }, + "main": "lib/foreach", + "engines": { + "node": "*" + }, + "keywords": [ + "array", + "loop", + "sync", + "async", + "foreach" + ], + "dependencies": {}, + "devDependencies": {} +} diff --git a/node_modules/async-foreach/test/foreach_test.js b/node_modules/async-foreach/test/foreach_test.js new file mode 100644 index 0000000..be18c43 --- /dev/null +++ b/node_modules/async-foreach/test/foreach_test.js @@ -0,0 +1,200 @@ +/*global require:true, setTimeout:true */ +var forEach = require('../lib/foreach').forEach; + +exports['foreach'] = { + setUp: function(done) { + this.order = []; + this.track = function() { + [].push.apply(this.order, arguments); + }; + done(); + }, + 'Synchronous': function(test) { + test.expect(1); + var that = this; + + var arr = ["a", "b", "c"]; + forEach(arr, function(item, index, arr) { + that.track("each", item, index, arr); + }); + + test.deepEqual(that.order, [ + "each", "a", 0, arr, + "each", "b", 1, arr, + "each", "c", 2, arr + ], "should call eachFn for each array item, in order."); + test.done(); + }, + 'Synchronous, done': function(test) { + test.expect(1); + var that = this; + + var arr = ["a", "b", "c"]; + forEach(arr, function(item, index, arr) { + that.track("each", item, index, arr); + }, function(notAborted, arr) { + that.track("done", notAborted, arr); + }); + + test.deepEqual(that.order, [ + "each", "a", 0, arr, + "each", "b", 1, arr, + "each", "c", 2, arr, + "done", true, arr + ], "should call eachFn for each array item, in order, followed by doneFn."); + test.done(); + }, + 'Synchronous, early abort': function(test) { + test.expect(1); + var that = this; + + var arr = ["a", "b", "c"]; + forEach(arr, function(item, index, arr) { + that.track("each", item, index, arr); + if (item === "b") { return false; } + }, function(notAborted, arr) { + that.track("done", notAborted, arr); + }); + + test.deepEqual(that.order, [ + "each", "a", 0, arr, + "each", "b", 1, arr, + "done", false, arr + ], "should call eachFn for each array item, in order, followed by doneFn."); + test.done(); + }, + 'Asynchronous': function(test) { + test.expect(1); + var that = this; + + var 
arr = ["a", "b", "c"]; + forEach(arr, function(item, index, arr) { + that.track("each", item, index, arr); + var done = this.async(); + setTimeout(done, 10); + }); + + setTimeout(function() { + test.deepEqual(that.order, [ + "each", "a", 0, arr, + "each", "b", 1, arr, + "each", "c", 2, arr + ], "should call eachFn for each array item, in order."); + test.done(); + }, 100); + }, + 'Asynchronous, done': function(test) { + test.expect(1); + var that = this; + + var arr = ["a", "b", "c"]; + forEach(arr, function(item, index, arr) { + that.track("each", item, index, arr); + var done = this.async(); + setTimeout(done, 10); + }, function(notAborted, arr) { + that.track("done", notAborted, arr); + test.deepEqual(that.order, [ + "each", "a", 0, arr, + "each", "b", 1, arr, + "each", "c", 2, arr, + "done", true, arr + ], "should call eachFn for each array item, in order, followed by doneFn."); + test.done(); + }); + }, + 'Asynchronous, early abort': function(test) { + test.expect(1); + var that = this; + + var arr = ["a", "b", "c"]; + forEach(arr, function(item, index, arr) { + that.track("each", item, index, arr); + var done = this.async(); + setTimeout(function() { + done(item !== "b"); + }, 10); + }, function(notAborted, arr) { + that.track("done", notAborted, arr); + test.deepEqual(that.order, [ + "each", "a", 0, arr, + "each", "b", 1, arr, + "done", false, arr + ], "should call eachFn for each array item, in order, followed by doneFn."); + test.done(); + }); + }, + 'Not actually asynchronous': function(test) { + test.expect(1); + var that = this; + + var arr = ["a", "b", "c"]; + forEach(arr, function(item, index, arr) { + that.track("each", item, index, arr); + var done = this.async(); + done(); + }, function(notAborted, arr) { + that.track("done", notAborted, arr); + test.deepEqual(that.order, [ + "each", "a", 0, arr, + "each", "b", 1, arr, + "each", "c", 2, arr, + "done", true, arr + ], "should call eachFn for each array item, in order, followed by doneFn."); + test.done(); + }); + }, + 'Not actually asynchronous, early abort': function(test) { + test.expect(1); + var that = this; + + var arr = ["a", "b", "c"]; + forEach(arr, function(item, index, arr) { + that.track("each", item, index, arr); + var done = this.async(); + done(item !== "b"); + }, function(notAborted, arr) { + that.track("done", notAborted, arr); + test.deepEqual(that.order, [ + "each", "a", 0, arr, + "each", "b", 1, arr, + "done", false, arr + ], "should call eachFn for each array item, in order, followed by doneFn."); + test.done(); + }); + }, + 'Sparse array support': function(test) { + test.expect(1); + var that = this; + + var arr = []; + arr[0] = "a"; + arr[9] = "z"; + + forEach(arr, function(item, index, arr) { + that.track("each", item, index, arr); + }); + + test.deepEqual(that.order, [ + "each", "a", 0, arr, + "each", "z", 9, arr + ], "should skip nonexistent array items."); + test.done(); + }, + 'Invalid length sanitization': function(test) { + test.expect(1); + var that = this; + + var obj = {length: 4294967299, 0: "a", 2: "b", 3: "c" }; + + forEach(obj, function(item, index, arr) { + that.track("each", item, index, arr); + }); + + test.deepEqual(that.order, [ + "each", "a", 0, obj, + "each", "b", 2, obj + ], "should sanitize length property (ToUint32)."); + test.done(); + } +}; diff --git a/node_modules/balanced-match/.github/FUNDING.yml b/node_modules/balanced-match/.github/FUNDING.yml new file mode 100644 index 0000000..cea8b16 --- /dev/null +++ b/node_modules/balanced-match/.github/FUNDING.yml @@ -0,0 +1,2 @@ 
+tidelift: "npm/balanced-match"
+patreon: juliangruber
diff --git a/node_modules/balanced-match/LICENSE.md b/node_modules/balanced-match/LICENSE.md
new file mode 100644
index 0000000..2cdc8e4
--- /dev/null
+++ b/node_modules/balanced-match/LICENSE.md
@@ -0,0 +1,21 @@
+(MIT)
+
+Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md
new file mode 100644
index 0000000..d2a48b6
--- /dev/null
+++ b/node_modules/balanced-match/README.md
@@ -0,0 +1,97 @@
+# balanced-match
+
+Match balanced string pairs, like `{` and `}` or `<b>` and `</b>`. Supports regular expressions as well!
+
+[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match)
+[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match)
+
+[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match)
+
+## Example
+
+Get the first matching pair of braces:
+
+```js
+var balanced = require('balanced-match');
+
+console.log(balanced('{', '}', 'pre{in{nested}}post'));
+console.log(balanced('{', '}', 'pre{first}between{second}post'));
+console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post'));
+```
+
+The matches are:
+
+```bash
+$ node example.js
+{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' }
+{ start: 3,
+  end: 9,
+  pre: 'pre',
+  body: 'first',
+  post: 'between{second}post' }
+{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' }
+```
+
+## API
+
+### var m = balanced(a, b, str)
+
+For the first non-nested matching pair of `a` and `b` in `str`, return an
+object with those keys:
+
+* **start** the index of the first match of `a`
+* **end** the index of the matching `b`
+* **pre** the preamble, `a` and `b` not included
+* **body** the match, `a` and `b` not included
+* **post** the postscript, `a` and `b` not included
+
+If there's no match, `undefined` will be returned.
+
+If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`.
+
+### var r = balanced.range(a, b, str)
+
+For the first non-nested matching pair of `a` and `b` in `str`, return an
+array with indexes: `[ <a index>, <b index> ]`.
+
+If there's no match, `undefined` will be returned.
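+
+A quick sketch of `range` in action (same input strings as the `balanced()` example above; the expected outputs follow from the `start`/`end` fields shown there):
+
+```js
+var balanced = require('balanced-match');
+
+// Only the indexes of the first non-nested pair are returned.
+console.log(balanced.range('{', '}', 'pre{in{nested}}post'));
+// => [ 3, 14 ]
+console.log(balanced.range('{', '}', 'pre{first}between{second}post'));
+// => [ 3, 9 ]
+```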
+ +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install balanced-match +``` + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js new file mode 100644 index 0000000..c67a646 --- /dev/null +++ b/node_modules/balanced-match/index.js @@ -0,0 +1,62 @@ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? 
ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json new file mode 100644 index 0000000..ce6073e --- /dev/null +++ b/node_modules/balanced-match/package.json @@ -0,0 +1,48 @@ +{ + "name": "balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "1.0.2", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/balanced-match.git" + }, + "homepage": "https://github.com/juliangruber/balanced-match", + "main": "index.js", + "scripts": { + "test": "tape test/test.js", + "bench": "matcha test/bench.js" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE new file mode 100644 index 0000000..de32266 --- /dev/null +++ b/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md new file mode 100644 index 0000000..6b4e0e1 --- /dev/null +++ b/node_modules/brace-expansion/README.md @@ -0,0 +1,129 @@ +# brace-expansion + +[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), +as known from sh/bash, in JavaScript. 
+
+[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion)
+[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion)
+[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/)
+
+[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion)
+
+## Example
+
+```js
+var expand = require('brace-expansion');
+
+expand('file-{a,b,c}.jpg')
+// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
+
+expand('-v{,,}')
+// => ['-v', '-v', '-v']
+
+expand('file{0..2}.jpg')
+// => ['file0.jpg', 'file1.jpg', 'file2.jpg']
+
+expand('file-{a..c}.jpg')
+// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
+
+expand('file{2..0}.jpg')
+// => ['file2.jpg', 'file1.jpg', 'file0.jpg']
+
+expand('file{0..4..2}.jpg')
+// => ['file0.jpg', 'file2.jpg', 'file4.jpg']
+
+expand('file-{a..e..2}.jpg')
+// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']
+
+expand('file{00..10..5}.jpg')
+// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
+
+expand('{{A..C},{a..c}}')
+// => ['A', 'B', 'C', 'a', 'b', 'c']
+
+expand('ppp{,config,oe{,conf}}')
+// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
+```
+
+## API
+
+```js
+var expand = require('brace-expansion');
+```
+
+### var expanded = expand(str)
+
+Return an array of all possible and valid expansions of `str`. If none are
+found, `[str]` is returned.
+
+Valid expansions are:
+
+```js
+/^(.*,)+(.+)?$/
+// {a,b,...}
+```
+
+A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.
+
+```js
+/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
+// {x..y[..incr]}
+```
+
+A numeric sequence from `x` to `y` inclusive, with optional increment.
+If `x` or `y` start with a leading `0`, all the numbers will be padded
+to have equal length. Negative numbers and backwards iteration work too.
+
+```js
+/^[a-zA-Z]\.\.[a-zA-Z](\.\.-?\d+)?$/
+// {x..y[..incr]}
+```
+
+An alphabetic sequence from `x` to `y` inclusive, with optional increment.
+`x` and `y` must be exactly one character, and if given, `incr` must be a
+number.
+
+For compatibility reasons, the string `${` is not eligible for brace expansion.
+
+## Installation
+
+With [npm](https://npmjs.org) do:
+
+```bash
+npm install brace-expansion
+```
+
+## Contributors
+
+- [Julian Gruber](https://github.com/juliangruber)
+- [Isaac Z. Schlueter](https://github.com/isaacs)
+
+## Sponsors
+
+This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)!
+
+Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)!
+ +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js new file mode 100644 index 0000000..0478be8 --- /dev/null +++ b/node_modules/brace-expansion/index.js @@ -0,0 +1,201 @@ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json new file mode 100644 index 0000000..a18faa8 --- /dev/null +++ b/node_modules/brace-expansion/package.json @@ -0,0 +1,47 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "1.1.11", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/cacache/LICENSE.md b/node_modules/cacache/LICENSE.md new file mode 100644 index 0000000..8d28acf --- /dev/null +++ b/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/cacache/README.md b/node_modules/cacache/README.md new file mode 100644 index 0000000..cd39b37 --- /dev/null +++ b/node_modules/cacache/README.md @@ -0,0 +1,716 @@ +# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/npm/cacache.svg)](https://travis-ci.org/npm/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/cacache?svg=true)](https://ci.appveyor.com/project/npm/cacache) [![Coverage Status](https://coveralls.io/repos/github/npm/cacache/badge.svg?branch=latest)](https://coveralls.io/github/npm/cacache?branch=latest) + +[`cacache`](https://github.com/npm/cacache) is a Node.js library for managing +local key and content address caches. It's really fast, really good at +concurrency, and it will never give you corrupted data, even if cache files +get corrupted or manipulated. + +On systems that support user and group settings on files, cacache will +match the `uid` and `gid` values to the folder where the cache lives, even +when running as `root`. + +It was written to be used as [npm](https://npm.im)'s local cache, but can +just as easily be used on its own. + +## Install + +`$ npm install --save cacache` + +## Table of Contents + +* [Example](#example) +* [Features](#features) +* [Contributing](#contributing) +* [API](#api) + * [Using localized APIs](#localized-api) + * Reading + * [`ls`](#ls) + * [`ls.stream`](#ls-stream) + * [`get`](#get-data) + * [`get.stream`](#get-stream) + * [`get.info`](#get-info) + * [`get.hasContent`](#get-hasContent) + * Writing + * [`put`](#put-data) + * [`put.stream`](#put-stream) + * [`rm.all`](#rm-all) + * [`rm.entry`](#rm-entry) + * [`rm.content`](#rm-content) + * [`index.compact`](#index-compact) + * [`index.insert`](#index-insert) + * Utilities + * [`clearMemoized`](#clear-memoized) + * [`tmp.mkdir`](#tmp-mkdir) + * [`tmp.withTmp`](#with-tmp) + * Integrity + * [Subresource Integrity](#integrity) + * [`verify`](#verify) + * [`verify.lastRun`](#verify-last-run) + +### Example + +```javascript +const cacache = require('cacache') +const fs = require('fs') + +const tarball = '/path/to/mytar.tgz' +const cachePath = '/tmp/my-toy-cache' +const key = 'my-unique-key-1234' + +// Cache it! Use `cachePath` as the root of the content cache +cacache.put(cachePath, key, '10293801983029384').then(integrity => { + console.log(`Saved content to ${cachePath}.`) +}) + +const destination = '/tmp/mytar.tgz' + +// Copy the contents out of the cache and into their destination! +// But this time, use stream instead! +cacache.get.stream( + cachePath, key +).pipe( + fs.createWriteStream(destination) +).on('finish', () => { + console.log('done extracting!') +}) + +// The same thing, but skip the key index. 
+cacache.get.byDigest(cachePath, integrityHash).then(data => { + fs.writeFile(destination, data, err => { + console.log('tarball data fetched based on its sha512sum and written out!') + }) +}) +``` + +### Features + +* Extraction by key or by content address (shasum, etc) +* [Subresource Integrity](#integrity) web standard support +* Multi-hash support - safely host sha1, sha512, etc, in a single cache +* Automatic content deduplication +* Fault tolerance (immune to corruption, partial writes, process races, etc) +* Consistency guarantees on read and write (full data verification) +* Lockless, high-concurrency cache access +* Streaming support +* Promise support +* Fast -- sub-millisecond reads and writes including verification +* Arbitrary metadata storage +* Garbage collection and additional offline verification +* Thorough test coverage +* There's probably a bloom filter in there somewhere. Those are cool, right? 🤔 + +### Contributing + +The cacache team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear. + +All participants and maintainers in this project are expected to follow [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other. + +Please refer to the [Changelog](CHANGELOG.md) for project history details, too. + +Happy hacking! + +### API + +#### `> cacache.ls(cache) -> Promise` + +Lists info for all entries currently in the cache as a single large object. Each +entry in the object will be keyed by the unique index key, with corresponding +[`get.info`](#get-info) objects as the values. + +##### Example + +```javascript +cacache.ls(cachePath).then(console.log) +// Output +{ + 'my-thing': { + key: 'my-thing', + integrity: 'sha512-BaSe64/EnCoDED+HAsh==' + path: '.testcache/content/deadbeef', // joined with `cachePath` + time: 12345698490, + size: 4023948, + metadata: { + name: 'blah', + version: '1.2.3', + description: 'this was once a package but now it is my-thing' + } + }, + 'other-thing': { + key: 'other-thing', + integrity: 'sha1-ANothER+hasH=', + path: '.testcache/content/bada55', + time: 11992309289, + size: 111112 + } +} +``` + +#### `> cacache.ls.stream(cache) -> Readable` + +Lists info for all entries currently in the cache as a single large object. + +This works just like [`ls`](#ls), except [`get.info`](#get-info) entries are +returned as `'data'` events on the returned stream. + +##### Example + +```javascript +cacache.ls.stream(cachePath).on('data', console.log) +// Output +{ + key: 'my-thing', + integrity: 'sha512-BaSe64HaSh', + path: '.testcache/content/deadbeef', // joined with `cachePath` + time: 12345698490, + size: 13423, + metadata: { + name: 'blah', + version: '1.2.3', + description: 'this was once a package but now it is my-thing' + } +} + +{ + key: 'other-thing', + integrity: 'whirlpool-WoWSoMuchSupport', + path: '.testcache/content/bada55', + time: 11992309289, + size: 498023984029 +} + +{ + ... +} +``` + +#### `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, integrity})` + +Returns an object with the cached data, digest, and metadata identified by +`key`. The `data` property of this object will be a `Buffer` instance that +presumably holds some data that means something to you. 
I'm sure you know what
+to do with it! cacache just won't care.
+
+`integrity` is a [Subresource
+Integrity](#integrity)
+string. That is, a string that can be used to verify `data`, which looks like
+`<hash-algorithm>-<base64-integrity-hash>`.
+
+If there is no content identified by `key`, or if the locally-stored data does
+not pass the validity checksum, the promise will be rejected.
+
+A sub-function, `get.byDigest` may be used for identical behavior, except lookup
+will happen by integrity hash, bypassing the index entirely. This version of the
+function *only* returns `data` itself, without any wrapper.
+
+See: [options](#get-options)
+
+##### Note
+
+This function loads the entire cache entry into memory before returning it. If
+you're dealing with Very Large data, consider using [`get.stream`](#get-stream)
+instead.
+
+##### Example
+
+```javascript
+// Look up by key
+cache.get(cachePath, 'my-thing').then(console.log)
+// Output:
+{
+  metadata: {
+    thingName: 'my'
+  },
+  integrity: 'sha512-BaSe64HaSh',
+  data: Buffer#<deadbeef>,
+  size: 9320
+}
+
+// Look up by digest
+cache.get.byDigest(cachePath, 'sha512-BaSe64HaSh').then(console.log)
+// Output:
+Buffer#<deadbeef>
+```
+
+#### `> cacache.get.stream(cache, key, [opts]) -> Readable`
+
+Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams) of the cached data identified by `key`.
+
+If there is no content identified by `key`, or if the locally-stored data does
+not pass the validity checksum, an error will be emitted.
+
+`metadata` and `integrity` events will be emitted before the stream closes, if
+you need to collect that extra data about the cached entry.
+
+A sub-function, `get.stream.byDigest` may be used for identical behavior,
+except lookup will happen by integrity hash, bypassing the index entirely. This
+version does not emit the `metadata` and `integrity` events at all.
+
+See: [options](#get-options)
+
+##### Example
+
+```javascript
+// Look up by key
+cache.get.stream(
+  cachePath, 'my-thing'
+).on('metadata', metadata => {
+  console.log('metadata:', metadata)
+}).on('integrity', integrity => {
+  console.log('integrity:', integrity)
+}).pipe(
+  fs.createWriteStream('./x.tgz')
+)
+// Outputs:
+metadata: { ... }
+integrity: 'sha512-SoMeDIGest+64=='
+
+// Look up by digest
+cache.get.stream.byDigest(
+  cachePath, 'sha512-SoMeDIGest+64=='
+).pipe(
+  fs.createWriteStream('./x.tgz')
+)
+```
+
+#### `> cacache.get.info(cache, key) -> Promise`
+
+Looks up `key` in the cache index, returning information about the entry if
+one exists.
+
+##### Fields
+
+* `key` - Key the entry was looked up under. Matches the `key` argument.
+* `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to.
+* `path` - Filesystem path where content is stored, joined with `cache` argument.
+* `time` - Timestamp the entry was first added on.
+* `metadata` - User-assigned metadata associated with the entry/content.
+
+##### Example
+
+```javascript
+cacache.get.info(cachePath, 'my-thing').then(console.log)
+
+// Output
+{
+  key: 'my-thing',
+  integrity: 'sha256-MUSTVERIFY+ALL/THINGS==',
+  path: '.testcache/content/deadbeef',
+  time: 12345698490,
+  size: 849234,
+  metadata: {
+    name: 'blah',
+    version: '1.2.3',
+    description: 'this was once a package but now it is my-thing'
+  }
+}
+```
+
+#### `> cacache.get.hasContent(cache, integrity) -> Promise`
+
+Looks up a [Subresource Integrity hash](#integrity) in the cache.
If content +exists for this `integrity`, it will return an object, with the specific single integrity hash +that was found in `sri` key, and the size of the found content as `size`. If no content exists for this integrity, it will return `false`. + +##### Example + +```javascript +cacache.get.hasContent(cachePath, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log) + +// Output +{ + sri: { + source: 'sha256-MUSTVERIFY+ALL/THINGS==', + algorithm: 'sha256', + digest: 'MUSTVERIFY+ALL/THINGS==', + options: [] + }, + size: 9001 +} + +cacache.get.hasContent(cachePath, 'sha521-NOT+IN/CACHE==').then(console.log) + +// Output +false +``` + +##### Options + +##### `opts.integrity` +If present, the pre-calculated digest for the inserted content. If this option +is provided and does not match the post-insertion digest, insertion will fail +with an `EINTEGRITY` error. + +##### `opts.memoize` + +Default: null + +If explicitly truthy, cacache will read from memory and memoize data on bulk read. If `false`, cacache will read from disk data. Reader functions by default read from in-memory cache. + +##### `opts.size` +If provided, the data stream will be verified to check that enough data was +passed through. If there's more or less data than expected, insertion will fail +with an `EBADSIZE` error. + + +#### `> cacache.put(cache, key, data, [opts]) -> Promise` + +Inserts data passed to it into the cache. The returned Promise resolves with a +digest (generated according to [`opts.algorithms`](#optsalgorithms)) after the +cache entry has been successfully written. + +See: [options](#put-options) + +##### Example + +```javascript +fetch( + 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz' +).then(data => { + return cacache.put(cachePath, 'registry.npmjs.org|cacache@1.0.0', data) +}).then(integrity => { + console.log('integrity hash is', integrity) +}) +``` + +#### `> cacache.put.stream(cache, key, [opts]) -> Writable` + +Returns a [Writable +Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts +data written to it into the cache. Emits an `integrity` event with the digest of +written contents when it succeeds. + +See: [options](#put-options) + +##### Example + +```javascript +request.get( + 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz' +).pipe( + cacache.put.stream( + cachePath, 'registry.npmjs.org|cacache@1.0.0' + ).on('integrity', d => console.log(`integrity digest is ${d}`)) +) +``` + +##### Options + +##### `opts.metadata` + +Arbitrary metadata to be attached to the inserted key. + +##### `opts.size` + +If provided, the data stream will be verified to check that enough data was +passed through. If there's more or less data than expected, insertion will fail +with an `EBADSIZE` error. + +##### `opts.integrity` + +If present, the pre-calculated digest for the inserted content. If this option +is provided and does not match the post-insertion digest, insertion will fail +with an `EINTEGRITY` error. + +`algorithms` has no effect if this option is present. + +##### `opts.integrityEmitter` + +*Streaming only* If present, uses the provided event emitter as a source of +truth for both integrity and size. This allows use cases where integrity is +already being calculated outside of cacache to reuse that data instead of +calculating it a second time. + +The emitter must emit both the `'integrity'` and `'size'` events. 
+
+NOTE: If this option is provided, you must verify that you receive the correct
+integrity value yourself and emit an `'error'` event if there is a mismatch.
+[ssri Integrity Streams](https://github.com/npm/ssri#integrity-stream) do this for you when given an expected integrity.
+
+##### `opts.algorithms`
+
+Default: ['sha512']
+
+Hashing algorithms to use when calculating the [subresource integrity
+digest](#integrity)
+for inserted data. Can use any algorithm listed in `crypto.getHashes()` or
+`'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You
+may also use any anagram of `'modnar'` to use this feature.
+
+Currently only supports one algorithm at a time (i.e., an array length of
+exactly `1`). Has no effect if `opts.integrity` is present.
+
+##### `opts.memoize`
+
+Default: null
+
+If provided, cacache will memoize the given cache insertion in memory, bypassing
+any filesystem checks for that key or digest in future cache fetches. Nothing
+will be written to the in-memory cache unless this option is explicitly truthy.
+
+If `opts.memoize` is an object or a `Map`-like (that is, an object with `get`
+and `set` methods), it will be written to instead of the global memoization
+cache.
+
+Reading from disk data can be forced by explicitly passing `memoize: false` to
+the reader functions, but their default will be to read from memory.
+
+##### `opts.tmpPrefix`
+
+Default: null
+
+Prefix to append on the temporary directory name inside the cache's tmp dir.
+
+#### `> cacache.rm.all(cache) -> Promise`
+
+Clears the entire cache. Mainly by blowing away the cache directory itself.
+
+##### Example
+
+```javascript
+cacache.rm.all(cachePath).then(() => {
+  console.log('THE APOCALYPSE IS UPON US 😱')
+})
+```
+
+#### `> cacache.rm.entry(cache, key, [opts]) -> Promise`
+
+Alias: `cacache.rm`
+
+Removes the index entry for `key`. Content will still be accessible if
+requested directly by content address ([`get.stream.byDigest`](#get-stream)).
+
+By default, this appends a new entry to the index with an integrity of `null`.
+If `opts.removeFully` is set to `true` then the index file itself will be
+physically deleted rather than appending a `null`.
+
+To remove the content itself (which might still be used by other entries), use
+[`rm.content`](#rm-content). Or, to safely vacuum any unused content, use
+[`verify`](#verify).
+
+##### Example
+
+```javascript
+cacache.rm.entry(cachePath, 'my-thing').then(() => {
+  console.log('I did not like it anyway')
+})
+```
+
+#### `> cacache.rm.content(cache, integrity) -> Promise`
+
+Removes the content identified by `integrity`. Any index entries referring to it
+will not be usable again until the content is re-added to the cache with an
+identical digest.
+
+##### Example
+
+```javascript
+cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
+  console.log('data for my-thing is gone!')
+})
+```
+
+#### `> cacache.index.compact(cache, key, matchFn, [opts]) -> Promise`
+
+Uses `matchFn`, which must be a synchronous function that accepts two entries
+and returns a boolean indicating whether or not the two entries match, to
+deduplicate all entries in the cache for the given `key`.
+
+If `opts.validateEntry` is provided, it will be called as a function with the
+only parameter being a single index entry. The function must return a Boolean;
+if it returns `true` the entry is considered valid and will be kept in the index,
+and if it returns `false` the entry will be removed from the index.
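+
+As a quick sketch (the cache path, key, and both callbacks here are illustrative assumptions, not fixed API values):
+
+```javascript
+// Treat entries with the same integrity as duplicates, and drop any
+// entry whose integrity was nulled out by an earlier rm.entry call.
+cacache.index.compact(
+  cachePath,
+  'my-thing',
+  (a, b) => a.integrity === b.integrity,
+  { validateEntry: (entry) => entry.integrity !== null }
+).then(entries => {
+  console.log('entries kept in the index:', entries)
+})
+```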
+
+If `opts.validateEntry` is not provided, however, every entry in the index will
+be deduplicated and kept until the first `null` integrity is reached, removing
+all entries that were written before the `null`.
+
+The deduplicated list of entries is both written to the index, replacing the
+existing content, and returned in the Promise.
+
+#### `> cacache.index.insert(cache, key, integrity, opts) -> Promise`
+
+Writes an index entry to the cache for the given `key` without writing content.
+
+It is assumed that if you are using this method, you have already stored the content
+some other way and you only wish to add a new index to that content. The `metadata`
+and `size` properties are read from `opts` and used as part of the index entry.
+
+Returns a Promise resolving to the newly added entry.
+
+#### `> cacache.clearMemoized()`
+
+Completely resets the in-memory entry cache.
+
+#### `> tmp.mkdir(cache, opts) -> Promise`
+
+Returns a unique temporary directory inside the cache's `tmp` dir. This
+directory will use the same safe user assignment that all the other stuff uses.
+
+Once the directory is made, it's the user's responsibility that all files
+within are given the appropriate `gid`/`uid` ownership settings to match
+the rest of the cache. If not, you can ask cacache to do it for you by
+calling [`tmp.fix()`](#tmp-fix), which will fix all tmp directory
+permissions.
+
+If you want automatic cleanup of this directory, use
+[`tmp.withTmp()`](#with-tmp).
+
+See: [options](#tmp-options)
+
+##### Example
+
+```javascript
+cacache.tmp.mkdir(cache).then(dir => {
+  fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
+})
+```
+
+#### `> tmp.fix(cache) -> Promise`
+
+Sets the `uid` and `gid` properties on all files and folders within the tmp
+folder to match the rest of the cache.
+
+Use this after manually writing files into [`tmp.mkdir`](#tmp-mkdir) or
+[`tmp.withTmp`](#with-tmp).
+
+##### Example
+
+```javascript
+cacache.tmp.mkdir(cache).then(dir => {
+  writeFile(path.join(dir, 'file'), someData).then(() => {
+    // make sure we didn't just put a root-owned file in the cache
+    cacache.tmp.fix().then(() => {
+      // all uids and gids match now
+    })
+  })
+})
+```
+
+#### `> tmp.withTmp(cache, opts, cb) -> Promise`
+
+Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb`
+with it. The created temporary directory will be removed when the return value
+of `cb()` resolves; that is, the tmp directory will be automatically deleted
+once that promise completes.
+
+The same caveats apply when it comes to managing permissions for the tmp dir's
+contents.
+
+See: [options](#tmp-options)
+
+##### Example
+
+```javascript
+cacache.tmp.withTmp(cache, dir => {
+  return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
+}).then(() => {
+  // `dir` no longer exists
+})
+```
+
+##### Options
+
+##### `opts.tmpPrefix`
+
+Default: null
+
+Prefix to append on the temporary directory name inside the cache's tmp dir.
+
+#### Subresource Integrity Digests
+
+For content verification and addressing, cacache uses strings following the
+[Subresource
+Integrity spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
+That is, any time cacache expects an `integrity` argument or option, it
+should be in the format `<hash-algorithm>-<base64-hash>`.
+
+One deviation from the current spec is that cacache will support any hash
+algorithms supported by the underlying Node.js process. You can use
+`crypto.getHashes()` to see which ones you can use.
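+
+For instance, a one-line sketch to check whether a given algorithm is available before handing it to cacache:
+
+```javascript
+const crypto = require('crypto')
+
+// Stock Node.js builds include sha512, so this logs true.
+console.log(crypto.getHashes().includes('sha512'))
+```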
+ +##### Generating Digests Yourself + +If you have an existing content shasum, they are generally formatted as a +hexadecimal string (that is, a sha1 would look like: +`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`). In order to be compatible with +cacache, you'll need to convert this to an equivalent subresource integrity +string. For this example, the corresponding hash would be: +`sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`. + +If you want to generate an integrity string yourself for existing data, you can +use something like this: + +```javascript +const crypto = require('crypto') +const hashAlgorithm = 'sha512' +const data = 'foobarbaz' + +const integrity = ( + hashAlgorithm + + '-' + + crypto.createHash(hashAlgorithm).update(data).digest('base64') +) +``` + +You can also use [`ssri`](https://npm.im/ssri) to have a richer set of functionality +around SRI strings, including generation, parsing, and translating from existing +hex-formatted strings. + +#### `> cacache.verify(cache, opts) -> Promise` + +Checks out and fixes up your cache: + +* Cleans up corrupted or invalid index entries. +* Custom entry filtering options. +* Garbage collects any content entries not referenced by the index. +* Checks integrity for all content entries and removes invalid content. +* Fixes cache ownership. +* Removes the `tmp` directory in the cache and all its contents. + +When it's done, it'll return an object with various stats about the verification +process, including amount of storage reclaimed, number of valid entries, number +of entries removed, etc. + +##### Options + +##### `opts.concurrency` + +Default: 20 + +Number of concurrently read files in the filesystem while doing clean up. + +##### `opts.filter` +Receives a formatted entry. Return false to remove it. +Note: might be called more than once on the same entry. + +##### `opts.log` +Custom logger function: +``` + log: { silly () {} } + log.silly('verify', 'verifying cache at', cache) +``` + +##### Example + +```sh +echo somegarbage >> $CACHEPATH/content/deadbeef +``` + +```javascript +cacache.verify(cachePath).then(stats => { + // deadbeef collected, because of invalid checksum. + console.log('cache is much nicer now! stats:', stats) +}) +``` + +#### `> cacache.verify.lastRun(cache) -> Promise` + +Returns a `Date` representing the last time `cacache.verify` was run on `cache`. 
+ +##### Example + +```javascript +cacache.verify(cachePath).then(() => { + cacache.verify.lastRun(cachePath).then(lastTime => { + console.log('cacache.verify was last called on' + lastTime) + }) +}) +``` diff --git a/node_modules/cacache/lib/content/path.js b/node_modules/cacache/lib/content/path.js new file mode 100644 index 0000000..ad5a76a --- /dev/null +++ b/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/node_modules/cacache/lib/content/read.js b/node_modules/cacache/lib/content/read.js new file mode 100644 index 0000000..7c20c75 --- /dev/null +++ b/node_modules/cacache/lib/content/read.js @@ -0,0 +1,241 @@ +'use strict' + +const fs = require('@npmcli/fs') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.sync = readSync + +function readSync (cache, integrity, opts = {}) { + const { size } = opts + return withContentSriSync(cache, integrity, (cpath, sri) => { + const data = fs.readFileSync(cpath, { encoding: null }) + if (typeof size === 'number' && size !== data.length) { + throw sizeError(size, data.length) + } + + if (ssri.checkData(data, sri)) { + return data + } + + throw integrityError(sri, cpath) + }) +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // just stat to ensure it exists + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && size !== stat.size) { + return stream.emit('error', 
sizeError(size, stat.size)) + } + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy +module.exports.copy.sync = copySync + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath, sri) => { + return fs.copyFile(cpath, dest) + }) +} + +function copySync (cache, integrity, dest) { + return withContentSriSync(cache, integrity, (cpath, sri) => { + return fs.copyFileSync(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if (!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +module.exports.hasContent.sync = hasContentSync + +function hasContentSync (cache, integrity) { + if (!integrity) { + return false + } + + return withContentSriSync(cache, integrity, (cpath, sri) => { + try { + const stat = fs.statSync(cpath) + return { size: stat.size, sri, stat } + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } + }) +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function withContentSriSync (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. 
+ const algo = sri.pickAlgorithm() + const digests = sri[algo] + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + let lastErr = null + for (const meta of digests) { + try { + return withContentSriSync(cache, meta, fn) + } catch (err) { + lastErr = err + } + } + throw lastErr + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/node_modules/cacache/lib/content/rm.js b/node_modules/cacache/lib/content/rm.js new file mode 100644 index 0000000..f733305 --- /dev/null +++ b/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,20 @@ +'use strict' + +const util = require('util') + +const contentPath = require('./path') +const { hasContent } = require('./read') +const rimraf = util.promisify(require('rimraf')) + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await rimraf(contentPath(cache, content.sri)) + return true + } else { + return false + } +} diff --git a/node_modules/cacache/lib/content/write.js b/node_modules/cacache/lib/content/write.js new file mode 100644 index 0000000..0e8c0f4 --- /dev/null +++ b/node_modules/cacache/lib/content/write.js @@ -0,0 +1,189 @@ +'use strict' + +const events = require('events') +const util = require('util') + +const contentPath = require('./path') +const fixOwner = require('../util/fix-owner') +const fs = require('@npmcli/fs') +const moveFile = require('../util/move-file') +const Minipass = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + if (algorithms && algorithms.length > 1) { + throw new Error('opts.algorithms only supports a single algorithm for now') + } + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + const tmp = await makeTmp(cache, opts) + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, sri, opts) + return { integrity: sri, size: data.length } + } finally { + if (!tmp.moved) { + await rimraf(tmp.target) + } + } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. 
+class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. + return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await rimraf(tmp.target) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fixOwner.mkdirfix(cache, path.dirname(tmpTarget)) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri, opts) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + + await fixOwner.mkdirfix(cache, destDir) + await moveFile(tmp.target, destination) + tmp.moved = true + await fixOwner.chownr(cache, destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git 
a/node_modules/cacache/lib/entry-index.js b/node_modules/cacache/lib/entry-index.js new file mode 100644 index 0000000..1dc73a9 --- /dev/null +++ b/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,404 @@ +'use strict' + +const util = require('util') +const crypto = require('crypto') +const fs = require('@npmcli/fs') +const Minipass = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const fixOwner = require('./util/fix-owner') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const moveFile = require('@npmcli/move-file') +const _rimraf = require('rimraf') +const rimraf = util.promisify(_rimraf) +rimraf.sync = _rimraf.sync + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. 
+ if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fixOwner.mkdirfix(cache, path.dirname(target)) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rimraf(tmp.target) + } + } + + const write = async (tmp) => { + await fs.writeFile(tmp.target, newIndex, { flag: 'wx' }) + await fixOwner.mkdirfix(cache, path.dirname(bucket)) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + try { + await fixOwner.chownr(cache, bucket) + } catch (err) { + if (err.code !== 'ENOENT') { + throw err + } + } + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: Date.now(), + size, + metadata, + } + try { + await fixOwner.mkdirfix(cache, path.dirname(bucket)) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + await fs.appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + await fixOwner.chownr(cache, bucket) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + // There's a class of race conditions that happen when things get deleted + // during fixOwner, or between the two mkdirfix/chownr calls. + // + // It's perfectly fine to just not bother in those cases and lie + // that the index entry was written. Because it's a cache. 
+ } + return formatEntry(cache, entry) +} + +module.exports.insert.sync = insertSync + +function insertSync (cache, key, integrity, opts = {}) { + const { metadata, size } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: Date.now(), + size, + metadata, + } + fixOwner.mkdirfix.sync(cache, path.dirname(bucket)) + const stringified = JSON.stringify(entry) + fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + try { + fixOwner.chownr.sync(cache, bucket) + } catch (err) { + if (err.code !== 'ENOENT') { + throw err + } + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.find.sync = findSync + +function findSync (cache, key) { + const bucket = bucketPath(cache, key) + try { + return bucketEntriesSync(bucket).reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rimraf(bucket) +} + +module.exports.delete.sync = delSync + +function delSync (cache, key, opts = {}) { + if (!opts.removeFully) { + return insertSync(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rimraf.sync(bucket) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await Promise.all(buckets.map(async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await Promise.all(subbuckets.map(async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await Promise.all(subbucketEntries.map(async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? 
+ const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + })) + })) + })) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await fs.readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +module.exports.bucketEntries.sync = bucketEntriesSync + +function bucketEntriesSync (bucket, filter) { + const data = fs.readFileSync(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! + // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (e) { + // Entry is corrupted! + return + } + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return fs.readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/node_modules/cacache/lib/get.js b/node_modules/cacache/lib/get.js new file mode 100644 index 0000000..254b4ec --- /dev/null +++ b/node_modules/cacache/lib/get.js @@ -0,0 +1,225 @@ +'use strict' + +const Collect = require('minipass-collect') +const Minipass = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +function getDataSync (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + const entry = index.find.sync(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = read.sync(cache, entry.integrity, { + integrity: integrity, + size: size, + }) + const res = { + metadata: entry.metadata, + data: data, + size: entry.size, + integrity: entry.integrity, + } + if (memoize) { + memo.put(cache, entry, res.data, opts) + } + + return res +} + +module.exports.sync = getDataSync + +function getDataByDigestSync (cache, digest, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, digest, opts) + + if (memoized && memoize !== false) { + return memoized + } + + const res = read.sync(cache, digest, { + integrity: integrity, + size: size, + }) + if (memoize) { + memo.put.byDigest(cache, digest, res, opts) + } + + return res +} +module.exports.sync.byDigest = getDataByDigestSync + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) { + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize 
!== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/node_modules/cacache/lib/index.js b/node_modules/cacache/lib/index.js new file mode 100644 index 0000000..1c56be6 --- /dev/null +++ b/node_modules/cacache/lib/index.js @@ -0,0 +1,45 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.sync = get.sync +module.exports.get.sync.byDigest = get.sync.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = 
get.info +module.exports.get.hasContent = get.hasContent +module.exports.get.hasContent.sync = get.hasContent.sync + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/cacache/lib/memoization.js b/node_modules/cacache/lib/memoization.js new file mode 100644 index 0000000..0ff604a --- /dev/null +++ b/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const LRU = require('lru-cache') + +const MEMOIZED = new LRU({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/node_modules/cacache/lib/put.js b/node_modules/cacache/lib/put.js new file mode 100644 index 0000000..9fc932d --- /dev/null +++ b/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. 
+ if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/node_modules/cacache/lib/rm.js b/node_modules/cacache/lib/rm.js new file mode 100644 index 0000000..5f00071 --- /dev/null +++ b/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const util = require('util') + +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +function all (cache) { + memo.clearMemoized() + return rimraf(path.join(cache, '*(content-*|index-*)')) +} diff --git a/node_modules/cacache/lib/util/fix-owner.js b/node_modules/cacache/lib/util/fix-owner.js new file mode 100644 index 0000000..182fcb0 --- /dev/null +++ b/node_modules/cacache/lib/util/fix-owner.js @@ -0,0 +1,145 @@ +'use strict' + +const util = require('util') + +const chownr = util.promisify(require('chownr')) +const mkdirp = require('mkdirp') +const inflight = require('promise-inflight') +const inferOwner = require('infer-owner') + +// Memoize getuid()/getgid() calls. +// patch process.setuid/setgid to invalidate cached value on change +const self = { uid: null, gid: null } +const getSelf = () => { + if (typeof self.uid !== 'number') { + self.uid = process.getuid() + const setuid = process.setuid + process.setuid = (uid) => { + self.uid = null + process.setuid = setuid + return process.setuid(uid) + } + } + if (typeof self.gid !== 'number') { + self.gid = process.getgid() + const setgid = process.setgid + process.setgid = (gid) => { + self.gid = null + process.setgid = setgid + return process.setgid(gid) + } + } +} + +module.exports.chownr = fixOwner + +async function fixOwner (cache, filepath) { + if (!process.getuid) { + // This platform doesn't need ownership fixing + return + } + + getSelf() + if (self.uid !== 0) { + // almost certainly can't chown anyway + return + } + + const { uid, gid } = await inferOwner(cache) + + // No need to override if it's already what we used. + if (self.uid === uid && self.gid === gid) { + return + } + + return inflight('fixOwner: fixing ownership on ' + filepath, () => + chownr( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? 
gid : self.gid + ).catch((err) => { + if (err.code === 'ENOENT') { + return null + } + + throw err + }) + ) +} + +module.exports.chownr.sync = fixOwnerSync + +function fixOwnerSync (cache, filepath) { + if (!process.getuid) { + // This platform doesn't need ownership fixing + return + } + const { uid, gid } = inferOwner.sync(cache) + getSelf() + if (self.uid !== 0) { + // almost certainly can't chown anyway + return + } + + if (self.uid === uid && self.gid === gid) { + // No need to override if it's already what we used. + return + } + try { + chownr.sync( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ) + } catch (err) { + // only catch ENOENT, any other error is a problem. + if (err.code === 'ENOENT') { + return null + } + + throw err + } +} + +module.exports.mkdirfix = mkdirfix + +async function mkdirfix (cache, p, cb) { + // we have to infer the owner _before_ making the directory, even though + // we aren't going to use the results, since the cache itself might not + // exist yet. If we mkdirp it, then our current uid/gid will be assumed + // to be correct if it creates the cache folder in the process. + await inferOwner(cache) + try { + const made = await mkdirp(p) + if (made) { + await fixOwner(cache, made) + return made + } + } catch (err) { + if (err.code === 'EEXIST') { + await fixOwner(cache, p) + return null + } + throw err + } +} + +module.exports.mkdirfix.sync = mkdirfixSync + +function mkdirfixSync (cache, p) { + try { + inferOwner.sync(cache) + const made = mkdirp.sync(p) + if (made) { + fixOwnerSync(cache, made) + return made + } + } catch (err) { + if (err.code === 'EEXIST') { + fixOwnerSync(cache, p) + return null + } else { + throw err + } + } +} diff --git a/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 0000000..445599b --- /dev/null +++ b/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/node_modules/cacache/lib/util/move-file.js b/node_modules/cacache/lib/util/move-file.js new file mode 100644 index 0000000..a0b4041 --- /dev/null +++ b/node_modules/cacache/lib/util/move-file.js @@ -0,0 +1,56 @@ +'use strict' + +const fs = require('@npmcli/fs') +const move = require('@npmcli/move-file') +const pinflight = require('promise-inflight') + +module.exports = moveFile + +async function moveFile (src, dest) { + const isWindows = process.platform === 'win32' + + // This isn't quite an fs.rename -- the assumption is that + // if `dest` already exists, and we get certain errors while + // trying to move it, we should just not bother. + // + // In the case of cache corruption, users will receive an + // EINTEGRITY error elsewhere, and can remove the offending + // content their own way. + // + // Note that, as the name suggests, this strictly only supports file moves. + try { + await fs.link(src, dest) + } catch (err) { + if (isWindows && err.code === 'EPERM') { + // XXX This is a really weird way to handle this situation, as it + // results in the src file being deleted even though the dest + // might not exist. Since we pretty much always write files to + // deterministic locations based on content hash, this is likely + // ok (or at worst, just ends in a future cache miss). 
But it would + // be worth investigating at some time in the future if this is + // really what we want to do here. + } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { + // file already exists, so whatever + } else { + throw err + } + } + try { + await Promise.all([ + fs.unlink(src), + !isWindows && fs.chmod(dest, '0444'), + ]) + } catch (e) { + return pinflight('cacache-move-file:' + dest, async () => { + await fs.stat(dest).catch((err) => { + if (err.code !== 'ENOENT') { + // Something else is wrong here. Bail bail bail + throw err + } + }) + // file doesn't already exist! let's try a rename -> copy fallback + // only delete if it successfully copies + return move(src, dest) + }) + } +} diff --git a/node_modules/cacache/lib/util/tmp.js b/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 0000000..b4437cf --- /dev/null +++ b/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,33 @@ +'use strict' + +const fs = require('@npmcli/fs') + +const fixOwner = require('./fix-owner') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return fs.withTempDir(path.join(cache, 'tmp'), cb, opts) +} + +module.exports.fix = fixtmpdir + +function fixtmpdir (cache) { + return fixOwner(cache, path.join(cache, 'tmp')) +} diff --git a/node_modules/cacache/lib/verify.js b/node_modules/cacache/lib/verify.js new file mode 100644 index 0000000..52692a0 --- /dev/null +++ b/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const util = require('util') + +const pMap = require('p-map') +const contentPath = require('./content/path') +const fixOwner = require('./util/fix-owner') +const fs = require('@npmcli/fs') +const fsm = require('fs-minipass') +const glob = util.promisify(require('glob')) +const index = require('./entry-index') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const ssri = require('ssri') + +const globify = pattern => pattern.split('\\').join('/') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime (cache, opts) { + return { startTime: new Date() } +} + +async function markEndTime (cache, opts) { + return 
{ endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await fixOwner.mkdirfix(cache, cache) + // TODO - fix file permissions too + await fixOwner.chownr(cache, cache) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rimraf it. +// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + liveContent.add(entry.integrity.toString()) + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(globify(path.join(contentDir, '**')), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. 
+ stats.reclaimedCount++ + const s = await fs.stat(f) + await rimraf(f) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await fs.stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rimraf(filepath) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats, opts) { + await fs.truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. + for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await fs.stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rimraf(path.join(cache, 'tmp')) +} + +function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + try { + return fs.writeFile(verifile, `${Date.now()}`) + } finally { + fixOwner.chownr.sync(cache, verifile) + } +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await fs.readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/node_modules/cacache/node_modules/.bin/mkdirp b/node_modules/cacache/node_modules/.bin/mkdirp new file mode 100644 index 0000000..1c12a6c --- /dev/null +++ b/node_modules/cacache/node_modules/.bin/mkdirp @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../mkdirp/bin/cmd.js" "$@" + ret=$? +else + node "$basedir/../../../mkdirp/bin/cmd.js" "$@" + ret=$? 
+fi +exit $ret diff --git a/node_modules/cacache/node_modules/.bin/mkdirp.cmd b/node_modules/cacache/node_modules/.bin/mkdirp.cmd new file mode 100644 index 0000000..ecac552 --- /dev/null +++ b/node_modules/cacache/node_modules/.bin/mkdirp.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\mkdirp\bin\cmd.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\mkdirp\bin\cmd.js" %* +) \ No newline at end of file diff --git a/node_modules/cacache/node_modules/.bin/rimraf b/node_modules/cacache/node_modules/.bin/rimraf new file mode 100644 index 0000000..96e5c25 --- /dev/null +++ b/node_modules/cacache/node_modules/.bin/rimraf @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../rimraf/bin.js" "$@" + ret=$? +else + node "$basedir/../../../rimraf/bin.js" "$@" + ret=$? +fi +exit $ret diff --git a/node_modules/cacache/node_modules/.bin/rimraf.cmd b/node_modules/cacache/node_modules/.bin/rimraf.cmd new file mode 100644 index 0000000..93b1691 --- /dev/null +++ b/node_modules/cacache/node_modules/.bin/rimraf.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\rimraf\bin.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\rimraf\bin.js" %* +) \ No newline at end of file diff --git a/node_modules/cacache/node_modules/brace-expansion/.github/FUNDING.yml b/node_modules/cacache/node_modules/brace-expansion/.github/FUNDING.yml new file mode 100644 index 0000000..79d1eaf --- /dev/null +++ b/node_modules/cacache/node_modules/brace-expansion/.github/FUNDING.yml @@ -0,0 +1,2 @@ +tidelift: "npm/brace-expansion" +patreon: juliangruber diff --git a/node_modules/cacache/node_modules/brace-expansion/LICENSE b/node_modules/cacache/node_modules/brace-expansion/LICENSE new file mode 100644 index 0000000..de32266 --- /dev/null +++ b/node_modules/cacache/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/cacache/node_modules/brace-expansion/README.md b/node_modules/cacache/node_modules/brace-expansion/README.md
new file mode 100644
index 0000000..e55c583
--- /dev/null
+++ b/node_modules/cacache/node_modules/brace-expansion/README.md
@@ -0,0 +1,135 @@
+# brace-expansion
+
+[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
+as known from sh/bash, in JavaScript.
+
+[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion)
+[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion)
+[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/)
+
+[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion)
+
+## Example
+
+```js
+var expand = require('brace-expansion');
+
+expand('file-{a,b,c}.jpg')
+// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
+
+expand('-v{,,}')
+// => ['-v', '-v', '-v']
+
+expand('file{0..2}.jpg')
+// => ['file0.jpg', 'file1.jpg', 'file2.jpg']
+
+expand('file-{a..c}.jpg')
+// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
+
+expand('file{2..0}.jpg')
+// => ['file2.jpg', 'file1.jpg', 'file0.jpg']
+
+expand('file{0..4..2}.jpg')
+// => ['file0.jpg', 'file2.jpg', 'file4.jpg']
+
+expand('file-{a..e..2}.jpg')
+// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']
+
+expand('file{00..10..5}.jpg')
+// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
+
+expand('{{A..C},{a..c}}')
+// => ['A', 'B', 'C', 'a', 'b', 'c']
+
+expand('ppp{,config,oe{,conf}}')
+// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
+```
+
+## API
+
+```js
+var expand = require('brace-expansion');
+```
+
+### var expanded = expand(str)
+
+Return an array of all possible and valid expansions of `str`. If none are
+found, `[str]` is returned.
+
+Valid expansions are:
+
+```js
+/^(.*,)+(.+)?$/
+// {a,b,...}
+```
+
+A comma-separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.
+
+```js
+/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
+// {x..y[..incr]}
+```
+
+A numeric sequence from `x` to `y` inclusive, with optional increment.
+If `x` or `y` start with a leading `0`, all the numbers will be padded
+to have equal length. Negative numbers and backwards iteration work too.
+
+```js
+/^[a-zA-Z]\.\.[a-zA-Z](\.\.-?\d+)?$/
+// {x..y[..incr]}
+```
+
+An alphabetic sequence from `x` to `y` inclusive, with optional increment.
+`x` and `y` must be exactly one character, and if given, `incr` must be a
+number.
+
+For compatibility reasons, the string `${` is not eligible for brace expansion.
+
+## Installation
+
+With [npm](https://npmjs.org) do:
+
+```bash
+npm install brace-expansion
+```
+
+## Contributors
+
+- [Julian Gruber](https://github.com/juliangruber)
+- [Isaac Z. Schlueter](https://github.com/isaacs)
+
+## Sponsors
+
+This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)!
+
+Do you want to support modules like this, help improve their quality and stability, and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)!
+
+## Security contact information
+
+To report a security vulnerability, please use the
+[Tidelift security contact](https://tidelift.com/security).
+Tidelift will coordinate the fix and disclosure.
+ +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/cacache/node_modules/brace-expansion/index.js b/node_modules/cacache/node_modules/brace-expansion/index.js new file mode 100644 index 0000000..4af9dde --- /dev/null +++ b/node_modules/cacache/node_modules/brace-expansion/index.js @@ -0,0 +1,203 @@ +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m) return [str]; + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + if (/\$$/.test(m.pre)) { + for (var k = 0; k < post.length; k++) { + var expansion = pre+ '{' + m.body + '}' + post[k]; + expansions.push(expansion); + } + } else { + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = []; + + for (var j = 0; j < n.length; j++) { + N.push.apply(N, expand(n[j], false)); + } + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + } + + return expansions; +} + diff --git a/node_modules/cacache/node_modules/brace-expansion/package.json b/node_modules/cacache/node_modules/brace-expansion/package.json new file mode 100644 index 0000000..7097d41 --- /dev/null +++ b/node_modules/cacache/node_modules/brace-expansion/package.json @@ -0,0 +1,46 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "2.0.1", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0" + }, + "devDependencies": { + "@c4312/matcha": "^1.3.1", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/cacache/node_modules/glob/LICENSE b/node_modules/cacache/node_modules/glob/LICENSE new file mode 100644 index 0000000..39e8fe1 --- /dev/null +++ b/node_modules/cacache/node_modules/glob/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2009-2022 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/cacache/node_modules/glob/README.md b/node_modules/cacache/node_modules/glob/README.md new file mode 100644 index 0000000..6293911 --- /dev/null +++ b/node_modules/cacache/node_modules/glob/README.md @@ -0,0 +1,399 @@ +# Glob + +Match files using the patterns the shell uses, like stars and stuff. 
+ +[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Build Status](https://ci.appveyor.com/api/projects/status/kd7f3yftf7unxlsx?svg=true)](https://ci.appveyor.com/project/isaacs/node-glob) [![Coverage Status](https://coveralls.io/repos/isaacs/node-glob/badge.svg?branch=master&service=github)](https://coveralls.io/github/isaacs/node-glob?branch=master) + +This is a glob implementation in JavaScript. It uses the `minimatch` +library to do its matching. + +![a fun cartoon logo made of glob characters](logo/glob.png) + +## Usage + +Install with npm + +``` +npm i glob +``` + +```javascript +var glob = require("glob") + +// options is optional +glob("**/*.js", options, function (er, files) { + // files is an array of filenames. + // If the `nonull` option is set, and nothing + // was found, then files is ["**/*.js"] + // er is an error object or null. +}) +``` + +## Glob Primer + +"Globs" are the patterns you type when you do stuff like `ls *.js` on +the command line, or put `build/*` in a `.gitignore` file. + +Before parsing the path part patterns, braced sections are expanded +into a set. Braced sections start with `{` and end with `}`, with any +number of comma-delimited sections within. Braced sections may contain +slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`. + +The following characters have special magic meaning when used in a +path portion: + +* `*` Matches 0 or more characters in a single path portion +* `?` Matches 1 character +* `[...]` Matches a range of characters, similar to a RegExp range. + If the first character of the range is `!` or `^` then it matches + any character not in the range. +* `!(pattern|pattern|pattern)` Matches anything that does not match + any of the patterns provided. +* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the + patterns provided. +* `+(pattern|pattern|pattern)` Matches one or more occurrences of the + patterns provided. +* `*(a|b|c)` Matches zero or more occurrences of the patterns provided +* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns + provided +* `**` If a "globstar" is alone in a path portion, then it matches + zero or more directories and subdirectories searching for matches. + It does not crawl symlinked directories. + +### Dots + +If a file or directory path portion has a `.` as the first character, +then it will not match any glob pattern unless that pattern's +corresponding path part also has a `.` as its first character. + +For example, the pattern `a/.*/c` would match the file at `a/.b/c`. +However the pattern `a/*/c` would not, because `*` does not start with +a dot character. + +You can make glob treat dots as normal characters by setting +`dot:true` in the options. + +### Basename Matching + +If you set `matchBase:true` in the options, and the pattern has no +slashes in it, then it will seek for any file anywhere in the tree +with a matching basename. For example, `*.js` would match +`test/simple/basic.js`. + +### Empty Sets + +If no matching files are found, then an empty array is returned. This +differs from the shell, where the pattern itself is returned. For +example: + + $ echo a*s*d*f + a*s*d*f + +To get the bash-style behavior, set the `nonull:true` in the options. 
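+
+For illustration, here is a small sketch combining these options
+(the patterns and paths are hypothetical):
+
+```javascript
+var glob = require("glob")
+
+// dot:true also matches entries whose names start with "."
+glob("a/*/c", { dot: true }, function (er, files) {
+  // may now include "a/.b/c", which the default skips
+})
+
+// nonull:true echoes the pattern back instead of an empty array
+glob("no/such/path/*.js", { nonull: true }, function (er, files) {
+  // files is ["no/such/path/*.js"] when nothing matched
+})
+```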
+ +### See Also: + +* `man sh` +* `man bash` (Search for "Pattern Matching") +* `man 3 fnmatch` +* `man 5 gitignore` +* [minimatch documentation](https://github.com/isaacs/minimatch) + +## glob.hasMagic(pattern, [options]) + +Returns `true` if there are any special characters in the pattern, and +`false` otherwise. + +Note that the options affect the results. If `noext:true` is set in +the options object, then `+(a|b)` will not be considered a magic +pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}` +then that is considered magical, unless `nobrace:true` is set in the +options. + +## glob(pattern, [options], cb) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* `cb` `{Function}` + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Perform an asynchronous glob search. + +## glob.sync(pattern, [options]) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* return: `{Array}` filenames found matching the pattern + +Perform a synchronous glob search. + +## Class: glob.Glob + +Create a Glob object by instantiating the `glob.Glob` class. + +```javascript +var Glob = require("glob").Glob +var mg = new Glob(pattern, options, cb) +``` + +It's an EventEmitter, and starts walking the filesystem to find matches +immediately. + +### new glob.Glob(pattern, [options], [cb]) + +* `pattern` `{String}` pattern to search for +* `options` `{Object}` +* `cb` `{Function}` Called when an error occurs, or matches are found + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Note that if the `sync` flag is set in the options, then matches will +be immediately available on the `g.found` member. + +### Properties + +* `minimatch` The minimatch object that the glob uses. +* `options` The options object passed in. +* `aborted` Boolean which is set to true when calling `abort()`. There + is no way at this time to continue a glob search after aborting, but + you can re-use the statCache to avoid having to duplicate syscalls. +* `cache` Convenience object. Each field has the following possible + values: + * `false` - Path does not exist + * `true` - Path exists + * `'FILE'` - Path exists, and is not a directory + * `'DIR'` - Path exists, and is a directory + * `[file, entries, ...]` - Path exists, is a directory, and the + array value is the results of `fs.readdir` +* `statCache` Cache of `fs.stat` results, to prevent statting the same + path multiple times. +* `symlinks` A record of which paths are symbolic links, which is + relevant in resolving `**` patterns. +* `realpathCache` An optional object which is passed to `fs.realpath` + to minimize unnecessary syscalls. It is stored on the instantiated + Glob object, and may be re-used. + +### Events + +* `end` When the matching is finished, this is emitted with all the + matches found. If the `nonull` option is set, and no match was found, + then the `matches` list contains the original pattern. The matches + are sorted, unless the `nosort` flag is set. +* `match` Every time a match is found, this is emitted with the specific + thing that matched. It is not deduplicated or resolved to a realpath. +* `error` Emitted when an unexpected error is encountered, or whenever + any fs error occurs if `options.strict` is set. +* `abort` When `abort()` is called, this event is raised. 
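+
+A short sketch of wiring these events up (the pattern is
+illustrative):
+
+```javascript
+var Glob = require("glob").Glob
+
+var g = new Glob("**/*.js")
+g.on("match", function (file) {
+  // fired once per match, before dedup or realpath resolution
+  console.log("found", file)
+})
+g.on("error", function (er) {
+  console.error("glob error", er)
+})
+g.on("end", function (matches) {
+  // the complete result set, sorted unless nosort is set
+  console.log(matches.length + " matches")
+})
+```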
+ +### Methods + +* `pause` Temporarily stop the search +* `resume` Resume the search +* `abort` Stop the search forever + +### Options + +All the options that can be passed to Minimatch can also be passed to +Glob to change pattern matching behavior. Also, some have been added, +or have glob-specific ramifications. + +All options are false by default, unless otherwise noted. + +All options are added to the Glob object, as well. + +If you are running many `glob` operations, you can pass a Glob object +as the `options` argument to a subsequent operation to shortcut some +`stat` and `readdir` calls. At the very least, you may pass in shared +`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that +parallel glob operations will be sped up by sharing information about +the filesystem. + +* `cwd` The current working directory in which to search. Defaults + to `process.cwd()`. This option is always coerced to use + forward-slashes as a path separator, because it is not tested + as a glob pattern, so there is no need to escape anything. +* `root` The place where patterns starting with `/` will be mounted + onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix + systems, and `C:\` or some such on Windows.) This option is + always coerced to use forward-slashes as a path separator, + because it is not tested as a glob pattern, so there is no need + to escape anything. +* `windowsPathsNoEscape` Use `\\` as a path separator _only_, and + _never_ as an escape character. If set, all `\\` characters + are replaced with `/` in the pattern. Note that this makes it + **impossible** to match against paths containing literal glob + pattern characters, but allows matching with patterns constructed + using `path.join()` and `path.resolve()` on Windows platforms, + mimicking the (buggy!) behavior of Glob v7 and before on + Windows. Please use with caution, and be mindful of [the caveat + below about Windows paths](#windows). (For legacy reasons, + this is also set if `allowWindowsEscape` is set to the exact + value `false`.) +* `dot` Include `.dot` files in normal matches and `globstar` matches. + Note that an explicit dot in a portion of the pattern will always + match dot files. +* `nomount` By default, a pattern starting with a forward-slash will be + "mounted" onto the root setting, so that a valid filesystem path is + returned. Set this flag to disable that behavior. +* `mark` Add a `/` character to directory matches. Note that this + requires additional stat calls. +* `nosort` Don't sort the results. +* `stat` Set to true to stat *all* results. This reduces performance + somewhat, and is completely unnecessary, unless `readdir` is presumed + to be an untrustworthy indicator of file existence. +* `silent` When an unusual error is encountered when attempting to + read a directory, a warning will be printed to stderr. Set the + `silent` option to true to suppress these warnings. +* `strict` When an unusual error is encountered when attempting to + read a directory, the process will just continue on in search of + other matches. Set the `strict` option to raise an error in these + cases. +* `cache` See `cache` property above. Pass in a previously generated + cache object to save some fs calls. +* `statCache` A cache of results of filesystem information, to prevent + unnecessary stat calls. 
While it should not normally be necessary + to set this, you may pass the statCache from one glob() call to the + options object of another, if you know that the filesystem will not + change between calls. (See "Race Conditions" below.) +* `symlinks` A cache of known symbolic links. You may pass in a + previously generated `symlinks` object to save `lstat` calls when + resolving `**` matches. +* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead. +* `nounique` In some cases, brace-expanded patterns can result in the + same file showing up multiple times in the result set. By default, + this implementation prevents duplicates in the result set. Set this + flag to disable that behavior. +* `nonull` Set to never return an empty set, instead returning a set + containing the pattern itself. This is the default in glob(3). +* `debug` Set to enable debug logging in minimatch and glob. +* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets. +* `noglobstar` Do not match `**` against multiple filenames. (Ie, + treat it as a normal `*` instead.) +* `noext` Do not match `+(a|b)` "extglob" patterns. +* `nocase` Perform a case-insensitive match. Note: on + case-insensitive filesystems, non-magic patterns will match by + default, since `stat` and `readdir` will not raise errors. +* `matchBase` Perform a basename-only match if the pattern does not + contain any slash characters. That is, `*.js` would be treated as + equivalent to `**/*.js`, matching all js files in all directories. +* `nodir` Do not match directories, only files. (Note: to match + *only* directories, simply put a `/` at the end of the pattern.) +* `ignore` Add a pattern or an array of glob patterns to exclude matches. + Note: `ignore` patterns are *always* in `dot:true` mode, regardless + of any other settings. +* `follow` Follow symlinked directories when expanding `**` patterns. + Note that this can result in a lot of duplicate references in the + presence of cyclic links. +* `realpath` Set to true to call `fs.realpath` on all of the results. + In the case of a symlink that cannot be resolved, the full absolute + path to the matched entry is returned (though it will usually be a + broken symlink) +* `absolute` Set to true to always receive absolute paths for matched + files. Unlike `realpath`, this also affects the values returned in + the `match` event. +* `fs` File-system object with Node's `fs` API. By default, the built-in + `fs` module will be used. Set to a volume provided by a library like + `memfs` to avoid using the "real" file-system. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between node-glob and other +implementations, and are intentional. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.3, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +Note that symlinked directories are not crawled as part of a `**`, +though their contents may match against subsequent portions of the +pattern. This prevents infinite loops and duplicates and the like. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then glob returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. 
This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +### Comments and Negation + +Previously, this module let you mark a pattern as a "comment" if it +started with a `#` character, or a "negated" pattern if it started +with a `!` character. + +These options were deprecated in version 5, and removed in version 6. + +To specify things that should not match, use the `ignore` option. + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes will always +be interpreted as escape characters, not path separators. + +Results from absolute patterns such as `/foo/*` are mounted onto the +root setting using `path.join`. On windows, this will by default result +in `/foo/*` matching `C:\foo\bar.txt`. + +To automatically coerce all `\` characters to `/` in pattern +strings, **thus making it impossible to escape literal glob +characters**, you may set the `windowsPathsNoEscape` option to +`true`. + +## Race Conditions + +Glob searching, by its very nature, is susceptible to race conditions, +since it relies on directory walking and such. + +As a result, it is possible that a file that exists when glob looks for +it may have been deleted or modified by the time it returns the result. + +As part of its internal implementation, this program caches all stat +and readdir calls that it makes, in order to cut down on system +overhead. However, this also makes it even more susceptible to races, +especially if the cache or statCache objects are reused between glob +calls. + +Users are thus advised not to use a glob result as a guarantee of +filesystem state in the face of rapid changes. For the vast majority +of operations, this is never a problem. + +## Glob Logo +Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo). + +The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/). + +## Contributing + +Any change to behavior (including bugfixes) must come with a test. + +Patches that fail tests or reduce performance will be rejected. 
+ +``` +# to run tests +npm test + +# to re-generate test fixtures +npm run test-regen + +# to benchmark against bash/zsh +npm run bench + +# to profile javascript +npm run prof +``` + +![](oh-my-glob.gif) diff --git a/node_modules/cacache/node_modules/glob/common.js b/node_modules/cacache/node_modules/glob/common.js new file mode 100644 index 0000000..61a4452 --- /dev/null +++ b/node_modules/cacache/node_modules/glob/common.js @@ -0,0 +1,244 @@ +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var fs = require("fs") +var path = require("path") +var minimatch = require("minimatch") +var isAbsolute = require("path").isAbsolute +var Minimatch = minimatch.Minimatch + +function alphasort (a, b) { + return a.localeCompare(b, 'en') +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) + } + + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. + if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.windowsPathsNoEscape = !!options.windowsPathsNoEscape || + options.allowWindowsEscape === false + if (self.windowsPathsNoEscape) { + pattern = pattern.replace(/\\/g, '/') + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute + self.fs = options.fs || fs + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = path.resolve(cwd) + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? 
self.cwd : makeAbs(self, self.cwd) + self.nomount = !!options.nomount + + if (process.platform === "win32") { + self.root = self.root.replace(/\\/g, "/") + self.cwd = self.cwd.replace(/\\/g, "/") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + } + + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. + options.nonegate = true + options.nocomment = true + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +function finish (self) { + var nou = self.nounique + var all = nou ? [] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') + + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} diff --git a/node_modules/cacache/node_modules/glob/glob.js b/node_modules/cacache/node_modules/glob/glob.js new file mode 100644 index 0000000..2112a95 --- /dev/null +++ b/node_modules/cacache/node_modules/glob/glob.js @@ -0,0 +1,790 @@ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. 
Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. + +module.exports = glob + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var inherits = require('inherits') +var EE = require('events').EventEmitter +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path').isAbsolute +var globSync = require('./sync.js') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = require('inflight') +var util = require('util') +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = require('once') + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin +} + +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + + if (!pattern) + return false + + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so 
that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. + this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + this._processing = 0 + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false + + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } + } + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. 
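+  // (each `pattern` here is one entry of the minimatch set: an array
+  // whose elements are literal strings or compiled regex objects,
+  // e.g. roughly ['src', <regex for "*.js">] for "src/*.js")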
+ var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
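+  // e.g. if remain was [<regex for "*">, 'x.js'] and 'lib' matched,
+  // recurse on ['lib', 'x.js'] with the wildcard resolved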
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = isAbsolute(e) ? e : this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) + e = abs + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + self.fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + self.fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
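+      // (readdir on a non-directory fails with ENOTDIR, which proves
+      // the path exists but is a plain file, so cache it as 'FILE')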
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
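+  // (a "simple" pattern has no magic characters left, so a single
+  // stat of the joined path decides whether it matches)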
+ var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + self.fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return self.fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) +} diff --git a/node_modules/cacache/node_modules/glob/package.json b/node_modules/cacache/node_modules/glob/package.json new file mode 100644 index 0000000..ca0fd91 --- /dev/null +++ b/node_modules/cacache/node_modules/glob/package.json @@ -0,0 +1,55 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "glob", + "description": "a little globber", + "version": "8.1.0", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "glob.js", + "files": [ + "glob.js", + "sync.js", + "common.js" + ], + "engines": { + "node": ">=12" + }, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^5.0.1", + "once": "^1.3.0" + }, + "devDependencies": { + "memfs": "^3.2.0", + "mkdirp": "0", + "rimraf": "^2.2.8", + "tap": "^16.0.1", + "tick": "0.0.6" + }, + "tap": { + "before": "test/00-setup.js", + "after": "test/zz-cleanup.js", + "statements": 90, + "branches": 90, + "functions": 90, + "lines": 90, + "jobs": 1 + }, + "scripts": { + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test": "tap", + "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", + "bench": "bash benchmark.sh", + "prof": "bash prof.sh && cat profile.txt", + "benchclean": "node benchclean.js" + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } +} diff --git a/node_modules/cacache/node_modules/glob/sync.js b/node_modules/cacache/node_modules/glob/sync.js new file mode 100644 index 0000000..af4600d --- /dev/null +++ b/node_modules/cacache/node_modules/glob/sync.js @@ -0,0 +1,486 @@ +module.exports = globSync +globSync.GlobSync = GlobSync + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var Glob = require('./glob.js').Glob +var util = require('util') +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path').isAbsolute +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert.ok(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert.ok(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. 
+ var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return + + var abs = this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) { + e = abs + } + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null + } + } + + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, this.fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. 
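+  // e.g. with prefix 'a', remain [**, 'b'], and a child entry 'x':
+  //   noGlobStar: ['a', 'b']          (the ** matches nothing)
+  //   instead:    ['a', 'x', 'b']     (the ** consumes exactly 'x')
+  //   below:      ['a', 'x', **, 'b'] (the ** keeps matching past 'x')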
+ var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } + + if (lstat && lstat.isSymbolicLink()) { + try { + stat = this.fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} diff --git a/node_modules/cacache/node_modules/lru-cache/LICENSE b/node_modules/cacache/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000..f785757 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/cacache/node_modules/lru-cache/README.md b/node_modules/cacache/node_modules/lru-cache/README.md new file mode 100644 index 0000000..f128330 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/README.md @@ -0,0 +1,1117 @@ +# lru-cache + +A cache object that deletes the least-recently-used items. + +Specify a max number of the most recently used items that you +want to keep, and this cache will keep that many of the most +recently accessed items. + +This is not primarily a TTL cache, and does not make strong TTL +guarantees. There is no preemptive pruning of expired items by +default, but you _may_ set a TTL on the cache or on a single +`set`. If you do so, it will treat expired items as missing, and +delete them when fetched. If you are more interested in TTL +caching than LRU caching, check out +[@isaacs/ttlcache](http://npm.im/@isaacs/ttlcache). + +As of version 7, this is one of the most performant LRU +implementations available in JavaScript, and supports a wide +diversity of use cases. However, note that using some of the +features will necessarily impact performance, by causing the +cache to have to do more work. See the "Performance" section +below. + +## Installation + +```bash +npm install lru-cache --save +``` + +## Usage + +```js +// hybrid module, either works +import LRUCache from 'lru-cache' +// or: +const LRUCache = require('lru-cache') + +// At least one of 'max', 'ttl', or 'maxSize' is required, to prevent +// unsafe unbounded storage. +// +// In most cases, it's best to specify a max for performance, so all +// the required memory allocation is done up-front. +// +// All the other options are optional, see the sections below for +// documentation on what each one does. Most of them can be +// overridden for specific items in get()/set() +const options = { + max: 500, + + // for use with tracking overall storage size + maxSize: 5000, + sizeCalculation: (value, key) => { + return 1 + }, + + // for use when you need to clean up something when objects + // are evicted from the cache + dispose: (value, key) => { + freeFromMemoryOrWhatever(value) + }, + + // how long to live in ms + ttl: 1000 * 60 * 5, + + // return stale items before removing from cache? + allowStale: false, + + updateAgeOnGet: false, + updateAgeOnHas: false, + + // async method to use for cache.fetch(), for + // stale-while-revalidate type of behavior + fetchMethod: async (key, staleValue, { options, signal }) => {}, +} + +const cache = new LRUCache(options) + +cache.set('key', 'value') +cache.get('key') // "value" + +// non-string keys ARE fully supported +// but note that it must be THE SAME object, not +// just a JSON-equivalent object. 
+var someObject = { a: 1 }
+cache.set(someObject, 'a value')
+// Object keys are not toString()-ed
+cache.set('[object Object]', 'a different value')
+assert.equal(cache.get(someObject), 'a value')
+// A similar object with same keys/values won't work,
+// because it's a different object identity
+assert.equal(cache.get({ a: 1 }), undefined)
+
+cache.clear() // empty the cache
+```
+
+If you put more stuff in it, then items will fall out.
+
+## Options
+
+### `max`
+
+The maximum number of items that remain in the cache (assuming no
+TTL pruning or explicit deletions). Note that fewer items may be
+stored if size calculation is used, and `maxSize` is exceeded.
+This must be a positive finite integer.
+
+At least one of `max`, `maxSize`, or `ttl` is required. This
+must be a positive integer if set.
+
+**It is strongly recommended to set a `max` to prevent unbounded
+growth of the cache.** See "Storage Bounds Safety" below.
+
+### `maxSize`
+
+Set to a positive integer to track the sizes of items added to
+the cache, and automatically evict items in order to stay below
+this size. Note that this may result in fewer than `max` items
+being stored.
+
+Attempting to add an item to the cache whose calculated size is
+greater than this amount will be a no-op. The item will not be
+cached, and no other items will be evicted.
+
+Optional, must be a positive integer if provided.
+
+Sets `maxEntrySize` to the same value, unless a different value
+is provided for `maxEntrySize`.
+
+At least one of `max`, `maxSize`, or `ttl` is required. This
+must be a positive integer if set.
+
+Even if size tracking is enabled, **it is strongly recommended to
+set a `max` to prevent unbounded growth of the cache.** See
+"Storage Bounds Safety" below.
+
+### `maxEntrySize`
+
+Set to a positive integer to track the sizes of items added to
+the cache, and prevent caching any item over a given size.
+Attempting to add an item whose calculated size is greater than
+this amount will be a no-op. The item will not be cached, and no
+other items will be evicted.
+
+Optional, must be a positive integer if provided. Defaults to
+the value of `maxSize` if provided.
+
+### `sizeCalculation`
+
+Function used to calculate the size of stored items. If you're
+storing strings or buffers, then you probably want to do
+something like `n => n.length`. The item is passed as the first
+argument, and the key is passed as the second argument.
+
+This may be overridden by passing an options object to
+`cache.set()`.
+
+Requires `maxSize` to be set.
+
+If the `size` (or return value of `sizeCalculation`) for a given
+entry is greater than `maxEntrySize`, then the item will not be
+added to the cache.
+
+Deprecated alias: `length`
+
+### `fetchMethod`
+
+Function that is used to make background asynchronous fetches.
+Called with `fetchMethod(key, staleValue, { signal, options,
+context })`. May return a Promise.
+
+If `fetchMethod` is not provided, then `cache.fetch(key)` is
+equivalent to `Promise.resolve(cache.get(key))`.
+
+The `signal` object is an `AbortSignal` if that's available in
+the global object, otherwise it's a pretty close polyfill.
+
+If at any time, `signal.aborted` is set to `true`, or if the
+`signal.onabort` method is called, or if it emits an `'abort'`
+event which you can listen to with `addEventListener`, then that
+means that the fetch should be abandoned. This may be passed
+along to async functions aware of AbortController/AbortSignal
+behavior.
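+
+For instance, a minimal sketch of an abort-aware `fetchMethod`,
+where `fetchUserFromDb` is a hypothetical async helper standing
+in for a real data source:
+
+```js
+const cache = new LRUCache({
+  max: 100,
+  fetchMethod: async (key, staleValue, { signal }) => {
+    // hypothetical helper; pass the signal along so the backend
+    // request can be canceled if the cache aborts the fetch
+    const result = await fetchUserFromDb(key, { signal })
+    // only return undefined if the fetch was abandoned
+    return signal.aborted ? undefined : result
+  },
+})
+```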
+
+The `fetchMethod` should **only** return `undefined` or a Promise
+resolving to `undefined` if the AbortController signaled an
+`abort` event. In all other cases, it should return or resolve
+to a value suitable for adding to the cache.
+
+The `options` object is a union of the options that may be
+provided to `set()` and `get()`. If they are modified, then that
+will result in modifying the settings passed to `cache.set()`
+when the value is resolved, and, in the case of
+`noDeleteOnFetchRejection` and `allowStaleOnFetchRejection`, the
+handling of `fetchMethod` failures.
+
+For example, a DNS cache may update the TTL based on the value
+returned from a remote DNS server by changing `options.ttl` in
+the `fetchMethod`.
+
+### `fetchContext`
+
+Arbitrary data that can be passed to the `fetchMethod` as the
+`context` option.
+
+Note that this will only be relevant when the `cache.fetch()`
+call needs to call `fetchMethod()`. Thus, any data which will
+meaningfully vary the fetch response needs to be present in the
+key. This is primarily intended for including `x-request-id`
+headers and the like for debugging purposes, which do not affect
+the `fetchMethod()` response.
+
+### `noDeleteOnFetchRejection`
+
+If a `fetchMethod` throws an error or returns a rejected promise,
+then by default, any existing stale value will be removed from
+the cache.
+
+If `noDeleteOnFetchRejection` is set to `true`, then this
+behavior is suppressed, and the stale value remains in the cache
+in the case of a rejected `fetchMethod`.
+
+This is important in cases where a `fetchMethod` is _only_ called
+as a background update while the stale value is returned, when
+`allowStale` is used.
+
+This is implicitly in effect when `allowStaleOnFetchRejection` is
+set.
+
+This may be set in calls to `fetch()`, or defaulted on the
+constructor, or overridden by modifying the options object in the
+`fetchMethod`.
+
+### `allowStaleOnFetchRejection`
+
+Set to true to return a stale value from the cache when a
+`fetchMethod` throws an error or returns a rejected Promise.
+
+If a `fetchMethod` fails, and there is no stale value available,
+the `fetch()` will resolve to `undefined`. Ie, all `fetchMethod`
+errors are suppressed.
+
+Implies `noDeleteOnFetchRejection`.
+
+This may be set in calls to `fetch()`, or defaulted on the
+constructor, or overridden by modifying the options object in the
+`fetchMethod`.
+
+### `allowStaleOnFetchAbort`
+
+Set to true to return a stale value from the cache when the
+`AbortSignal` passed to the `fetchMethod` dispatches an `'abort'`
+event, whether user-triggered, or due to internal cache behavior.
+
+Unless `ignoreFetchAbort` is also set, the underlying
+`fetchMethod` will still be considered canceled, and its return
+value will be ignored and not cached.
+
+### `ignoreFetchAbort`
+
+Set to true to ignore the `abort` event emitted by the
+`AbortSignal` object passed to `fetchMethod`, and still cache the
+resulting resolution value, as long as it is not `undefined`.
+
+When used on its own, this means aborted `fetch()` calls are not
+immediately resolved or rejected when they are aborted, and
+instead take the full time to await.
+ +When used with `allowStaleOnFetchAbort`, aborted `fetch()` calls +will resolve immediately to their stale cached value or +`undefined`, and will continue to process and eventually update +the cache when they resolve, as long as the resulting value is +not `undefined`, thus supporting a "return stale on timeout while +refreshing" mechanism by passing `AbortSignal.timeout(n)` as the +signal. + +For example: + +```js +const c = new LRUCache({ + ttl: 100, + ignoreFetchAbort: true, + allowStaleOnFetchAbort: true, + fetchMethod: async (key, oldValue, { signal }) => { + // note: do NOT pass the signal to fetch()! + // let's say this fetch can take a long time. + const res = await fetch(`https://slow-backend-server/${key}`) + return await res.json() + }, +}) + +// this will return the stale value after 100ms, while still +// updating in the background for next time. +const val = await c.fetch('key', { signal: AbortSignal.timeout(100) }) +``` + +**Note**: regardless of this setting, an `abort` event _is still +emitted on the `AbortSignal` object_, so may result in invalid +results when passed to other underlying APIs that use +AbortSignals. + +This may be overridden on the `fetch()` call or in the +`fetchMethod` itself. + +### `dispose` + +Function that is called on items when they are dropped from the +cache, as `this.dispose(value, key, reason)`. + +This can be handy if you want to close file descriptors or do +other cleanup tasks when items are no longer stored in the cache. + +**NOTE**: It is called _before_ the item has been fully removed +from the cache, so if you want to put it right back in, you need +to wait until the next tick. If you try to add it back in during +the `dispose()` function call, it will break things in subtle and +weird ways. + +Unlike several other options, this may _not_ be overridden by +passing an option to `set()`, for performance reasons. If +disposal functions may vary between cache entries, then the +entire list must be scanned on every cache swap, even if no +disposal function is in use. + +The `reason` will be one of the following strings, corresponding +to the reason for the item's deletion: + +- `evict` Item was evicted to make space for a new addition +- `set` Item was overwritten by a new value +- `delete` Item was removed by explicit `cache.delete(key)` or by + calling `cache.clear()`, which deletes everything. + +The `dispose()` method is _not_ called for canceled calls to +`fetchMethod()`. If you wish to handle evictions, overwrites, +and deletes of in-flight asynchronous fetches, you must use the +`AbortSignal` provided. + +Optional, must be a function. + +### `disposeAfter` + +The same as `dispose`, but called _after_ the entry is completely +removed and the cache is once again in a clean state. + +It is safe to add an item right back into the cache at this +point. However, note that it is _very_ easy to inadvertently +create infinite recursion in this way. + +The `disposeAfter()` method is _not_ called for canceled calls to +`fetchMethod()`. If you wish to handle evictions, overwrites, +and deletes of in-flight asynchronous fetches, you must use the +`AbortSignal` provided. + +### `noDisposeOnSet` + +Set to `true` to suppress calling the `dispose()` function if the +entry key is still accessible within the cache. + +This may be overridden by passing an options object to +`cache.set()`. + +Boolean, default `false`. Only relevant if `dispose` or +`disposeAfter` options are set. 
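+
+Putting the disposal options together, a minimal sketch (the
+`console.log` stands in for real cleanup work such as closing a
+file descriptor held by the value):
+
+```js
+const cache = new LRUCache({
+  max: 2,
+  dispose: (value, key, reason) => {
+    // reason is one of 'evict', 'set', or 'delete'
+    console.log(`dropped ${key} (${reason})`)
+  },
+})
+cache.set('a', 1)
+cache.set('b', 2)
+cache.set('c', 3)  // evicts 'a'  -> dispose(1, 'a', 'evict')
+cache.set('b', 20) // overwrite   -> dispose(2, 'b', 'set')
+cache.delete('c')  //             -> dispose(3, 'c', 'delete')
+```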
+
+### `ttl`
+
+Max time to live for items before they are considered stale.
+Note that stale items are NOT preemptively removed by default,
+and MAY live in the cache, contributing to its LRU max, long
+after they have expired.
+
+Also, as this cache is optimized for LRU/MRU operations, some of
+the staleness/TTL checks will reduce performance.
+
+This is not primarily a TTL cache, and does not make strong TTL
+guarantees. There is no preemptive pruning of expired items,
+but you _may_ set a TTL on the cache, and it will treat expired
+items as missing when they are fetched, and delete them.
+
+Optional, but must be a positive integer in ms if specified.
+
+This may be overridden by passing an options object to
+`cache.set()`.
+
+At least one of `max`, `maxSize`, or `ttl` is required. This
+must be a positive integer if set.
+
+Even if ttl tracking is enabled, **it is strongly recommended to
+set a `max` to prevent unbounded growth of the cache.** See
+"Storage Bounds Safety" below.
+
+If ttl tracking is enabled, and `max` and `maxSize` are not set,
+and `ttlAutopurge` is not set, then a warning will be emitted
+cautioning about the potential for unbounded memory consumption.
+
+Deprecated alias: `maxAge`
+
+### `noUpdateTTL`
+
+Boolean flag to tell the cache to not update the TTL when setting
+a new value for an existing key (ie, when updating a value rather
+than inserting a new value). Note that the TTL value is _always_
+set (if provided) when adding a new entry into the cache.
+
+This may be passed as an option to `cache.set()`.
+
+Boolean, default false.
+
+### `ttlResolution`
+
+Minimum amount of time in ms in which to check for staleness.
+Defaults to `1`, which means that the current time is checked at
+most once per millisecond.
+
+Set to `0` to check the current time every time staleness is
+tested.
+
+Note that setting this to a higher value _will_ improve
+performance somewhat while using ttl tracking, albeit at the
+expense of keeping stale items around a bit longer than intended.
+
+### `ttlAutopurge`
+
+Preemptively remove stale items from the cache.
+
+Note that this may _significantly_ degrade performance,
+especially if the cache is storing a large number of items. It
+is almost always best to just leave the stale items in the cache,
+and let them fall out as new items are added.
+
+Note that this means that `allowStale` is a bit pointless, as
+stale items will be deleted almost as soon as they expire.
+
+Use with caution!
+
+Boolean, default `false`.
+
+### `allowStale`
+
+By default, if you set `ttl`, it'll only delete stale items from
+the cache when you `get(key)`. That is, it's not preemptively
+pruning items.
+
+If you set `allowStale:true`, it'll return the stale value and
+delete it from the cache. If you don't set this, then it'll
+return `undefined` when you try to get a stale entry.
+
+Note that when a stale entry is fetched, _even if it is returned
+due to `allowStale` being set_, it is removed from the cache
+immediately. You can immediately put it back in the cache if you
+wish, thus resetting the TTL.
+
+This may be overridden by passing an options object to
+`cache.get()`. The `cache.has()` method will always return
+`false` for stale items.
+
+Boolean, default false, only relevant if `ttl` is set.
+
+Deprecated alias: `stale`
+
+### `noDeleteOnStaleGet`
+
+When using time-expiring entries with `ttl`, by default stale
+items will be removed from the cache when the key is accessed
+with `cache.get()`.
+
+Setting `noDeleteOnStaleGet` to `true` will cause stale items to
+remain in the cache, until they are explicitly deleted with
+`cache.delete(key)`, or retrieved with `noDeleteOnStaleGet` set
+to `false`.
+
+This may be overridden by passing an options object to
+`cache.get()`.
+
+Boolean, default false, only relevant if `ttl` is set.
+
+### `updateAgeOnGet`
+
+When using time-expiring entries with `ttl`, setting this to
+`true` will make each item's age reset to 0 whenever it is
+retrieved from cache with `get()`, causing it to not expire. (It
+can still fall out of cache based on recency of use, of course.)
+
+This may be overridden by passing an options object to
+`cache.get()`.
+
+Boolean, default false, only relevant if `ttl` is set.
+
+### `updateAgeOnHas`
+
+When using time-expiring entries with `ttl`, setting this to
+`true` will make each item's age reset to 0 whenever its presence
+in the cache is checked with `has()`, causing it to not expire.
+(It can still fall out of cache based on recency of use, of
+course.)
+
+This may be overridden by passing an options object to
+`cache.has()`.
+
+Boolean, default false, only relevant if `ttl` is set.
+
+## API
+
+### `new LRUCache(options)`
+
+Create a new LRUCache. All options are documented above, and are
+on the cache as public members.
+
+### `cache.max`, `cache.maxSize`, `cache.allowStale`, `cache.noDisposeOnSet`, `cache.sizeCalculation`, `cache.dispose`, `cache.ttl`, `cache.updateAgeOnGet`, `cache.updateAgeOnHas`
+
+All option names are exposed as public members on the cache
+object.
+
+These are intended for read access only. Changing them during
+program operation can cause undefined behavior.
+
+### `cache.size`
+
+The total number of items held in the cache at the current
+moment.
+
+### `cache.calculatedSize`
+
+The total size of items in cache when using size tracking.
+
+### `set(key, value, [{ size, sizeCalculation, ttl, noDisposeOnSet, start, status }])`
+
+Add a value to the cache.
+
+Optional options object may contain `ttl` and `sizeCalculation`
+as described above, which default to the settings on the cache
+object.
+
+If `start` is provided, then that will set the effective start
+time for the TTL calculation. Note that this must be a previous
+value of `performance.now()` if supported, or a previous value of
+`Date.now()` if not.
+
+Options object may also include `size`, which will prevent
+calling the `sizeCalculation` function and just use the specified
+number if it is a positive integer, and `noDisposeOnSet` which
+will prevent calling a `dispose` function in the case of
+overwrites.
+
+If the `size` (or return value of `sizeCalculation`) for a given
+entry is greater than `maxEntrySize`, then the item will not be
+added to the cache.
+
+Will update the recency of the entry.
+
+Returns the cache object.
+
+For the usage of the `status` option, see **Status Tracking**
+below.
+
+### `get(key, { updateAgeOnGet, allowStale, status } = {}) => value`
+
+Return a value from the cache.
+
+Will update the recency of the cache entry found.
+
+If the key is not found, `get()` will return `undefined`. This
+can be confusing when setting values specifically to `undefined`,
+as in `cache.set(key, undefined)`. Use `cache.has()` to
+determine whether a key is present in the cache at all.
+
+For the usage of the `status` option, see **Status Tracking**
+below.
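+
+A minimal sketch of `set()` and `get()` together (the option
+values here are arbitrary):
+
+```js
+const cache = new LRUCache({ max: 100, ttl: 1000 * 60 })
+
+cache.set('a', 1) // uses the default ttl
+cache.set('b', 2, { ttl: 1000 }) // per-entry ttl override
+
+cache.get('a') // 1, and refreshes the recency of 'a'
+cache.get('nope') // undefined
+
+// distinguish "missing" from "stored undefined"
+cache.set('c', undefined)
+cache.get('c') // undefined
+cache.has('c') // true
+```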
+ +### `async fetch(key, options = {}) => Promise` + +The following options are supported: + +- `updateAgeOnGet` +- `allowStale` +- `size` +- `sizeCalculation` +- `ttl` +- `noDisposeOnSet` +- `forceRefresh` +- `status` - See **Status Tracking** below. +- `signal` - AbortSignal can be used to cancel the `fetch()`. + Note that the `signal` option provided to the `fetchMethod` is + a different object, because it must also respond to internal + cache state changes, but aborting this signal will abort the + one passed to `fetchMethod` as well. +- `fetchContext` - sets the `context` option passed to the + underlying `fetchMethod`. + +If the value is in the cache and not stale, then the returned +Promise resolves to the value. + +If not in the cache, or beyond its TTL staleness, then +`fetchMethod(key, staleValue, { options, signal, context })` is +called, and the value returned will be added to the cache once +resolved. + +If called with `allowStale`, and an asynchronous fetch is +currently in progress to reload a stale value, then the former +stale value will be returned. + +If called with `forceRefresh`, then the cached item will be +re-fetched, even if it is not stale. However, if `allowStale` is +set, then the old value will still be returned. This is useful +in cases where you want to force a reload of a cached value. If +a background fetch is already in progress, then `forceRefresh` +has no effect. + +Multiple fetches for the same `key` will only call `fetchMethod` +a single time, and all will be resolved when the value is +resolved, even if different options are used. + +If `fetchMethod` is not specified, then this is effectively an +alias for `Promise.resolve(cache.get(key))`. + +When the fetch method resolves to a value, if the fetch has not +been aborted due to deletion, eviction, or being overwritten, +then it is added to the cache using the options provided. + +If the key is evicted or deleted before the `fetchMethod` +resolves, then the AbortSignal passed to the `fetchMethod` will +receive an `abort` event, and the promise returned by `fetch()` +will reject with the reason for the abort. + +If a `signal` is passed to the `fetch()` call, then aborting the +signal will abort the fetch and cause the `fetch()` promise to +reject with the reason provided. + +### `peek(key, { allowStale } = {}) => value` + +Like `get()` but doesn't update recency or delete stale items. + +Returns `undefined` if the item is stale, unless `allowStale` is +set either on the cache or in the options object. + +### `has(key, { updateAgeOnHas, status } = {}) => Boolean` + +Check if a key is in the cache, without updating the recency of +use. Age is updated if `updateAgeOnHas` is set to `true` in +either the options or the constructor. + +Will return `false` if the item is stale, even though it is +technically in the cache. The difference can be determined (if +it matters) by using a `status` argument, and inspecting the +`has` field. + +For the usage of the `status` option, see **Status Tracking** +below. + +### `delete(key)` + +Deletes a key out of the cache. + +Returns `true` if the key was deleted, `false` otherwise. + +### `clear()` + +Clear the cache entirely, throwing away all values. + +Deprecated alias: `reset()` + +### `keys()` + +Return a generator yielding the keys in the cache, in order from +most recently used to least recently used. + +### `rkeys()` + +Return a generator yielding the keys in the cache, in order from +least recently used to most recently used. 
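+
+As a quick illustration of the ordering (a minimal sketch):
+
+```js
+const c = new LRUCache({ max: 10 })
+c.set('a', 1)
+c.set('b', 2) // 'b' is now most recently used
+c.get('a') // touching 'a' makes it most recently used again
+
+console.log([...c.keys()]) // ['a', 'b']
+console.log([...c.rkeys()]) // ['b', 'a']
+```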
+ +### `values()` + +Return a generator yielding the values in the cache, in order +from most recently used to least recently used. + +### `rvalues()` + +Return a generator yielding the values in the cache, in order +from least recently used to most recently used. + +### `entries()` + +Return a generator yielding `[key, value]` pairs, in order from +most recently used to least recently used. + +### `rentries()` + +Return a generator yielding `[key, value]` pairs, in order from +least recently used to most recently used. + +### `find(fn, [getOptions])` + +Find a value for which the supplied `fn` method returns a truthy +value, similar to `Array.find()`. + +`fn` is called as `fn(value, key, cache)`. + +The optional `getOptions` are applied to the resulting `get()` of +the item found. + +### `dump()` + +Return an array of `[key, entry]` objects which can be passed to +`cache.load()` + +The `start` fields are calculated relative to a portable +`Date.now()` timestamp, even if `performance.now()` is available. + +Stale entries are always included in the `dump`, even if +`allowStale` is false. + +Note: this returns an actual array, not a generator, so it can be +more easily passed around. + +### `load(entries)` + +Reset the cache and load in the items in `entries` in the order +listed. Note that the shape of the resulting cache may be +different if the same options are not used in both caches. + +The `start` fields are assumed to be calculated relative to a +portable `Date.now()` timestamp, even if `performance.now()` is +available. + +### `purgeStale()` + +Delete any stale entries. Returns `true` if anything was +removed, `false` otherwise. + +Deprecated alias: `prune` + +### `getRemainingTTL(key)` + +Return the number of ms left in the item's TTL. If item is not +in cache, returns `0`. Returns `Infinity` if item is in cache +without a defined TTL. + +### `forEach(fn, [thisp])` + +Call the `fn` function with each set of `fn(value, key, cache)` +in the LRU cache, from most recent to least recently used. + +Does not affect recency of use. + +If `thisp` is provided, function will be called in the +`this`-context of the provided object. + +### `rforEach(fn, [thisp])` + +Same as `cache.forEach(fn, thisp)`, but in order from least +recently used to most recently used. + +### `pop()` + +Evict the least recently used item, returning its value. + +Returns `undefined` if cache is empty. + +### Internal Methods and Properties + +In order to optimize performance as much as possible, "private" +members and methods are exposed on the object as normal +properties, rather than being accessed via Symbols, private +members, or closure variables. + +**Do not use or rely on these.** They will change or be removed +without notice. They will cause undefined behavior if used +inappropriately. There is no need or reason to ever call them +directly. + +This documentation is here so that it is especially clear that +this not "undocumented" because someone forgot; it _is_ +documented, and the documentation is telling you not to do it. + +**Do not report bugs that stem from using these properties.** +They will be ignored. + +- `initializeTTLTracking()` Set up the cache for tracking TTLs +- `updateItemAge(index)` Called when an item age is updated, by + internal ID +- `setItemTTL(index)` Called when an item ttl is updated, by + internal ID +- `isStale(index)` Called to check an item's staleness, by + internal ID +- `initializeSizeTracking()` Set up the cache for tracking item + size. 
Called automatically when a size is specified.
+- `removeItemSize(index)` Updates the internal size calculation
+  when an item is removed or modified, by internal ID
+- `addItemSize(index)` Updates the internal size calculation when
+  an item is added or modified, by internal ID
+- `indexes()` An iterator over the non-stale internal IDs, from
+  most recently to least recently used.
+- `rindexes()` An iterator over the non-stale internal IDs, from
+  least recently to most recently used.
+- `newIndex()` Create a new internal ID, either reusing a deleted
+  ID, evicting the least recently used ID, or walking to the end
+  of the allotted space.
+- `evict()` Evict the least recently used internal ID, returning
+  its ID. Does not do any bounds checking.
+- `connect(p, n)` Connect the `p` and `n` internal IDs in the
+  linked list.
+- `moveToTail(index)` Move the specified internal ID to the most
+  recently used position.
+- `keyMap` Map of keys to internal IDs
+- `keyList` List of keys by internal ID
+- `valList` List of values by internal ID
+- `sizes` List of calculated sizes by internal ID
+- `ttls` List of TTL values by internal ID
+- `starts` List of start time values by internal ID
+- `next` Array of "next" pointers by internal ID
+- `prev` Array of "previous" pointers by internal ID
+- `head` Internal ID of least recently used item
+- `tail` Internal ID of most recently used item
+- `free` Stack of deleted internal IDs
+
+## Status Tracking
+
+Occasionally, it may be useful to track the internal behavior of
+the cache, particularly for logging, debugging, or for behavior
+within the `fetchMethod`. To do this, you can pass a `status`
+object to the `get()`, `set()`, `has()`, and `fetch()` methods.
+
+The `status` option should be a plain JavaScript object.
+
+The following fields will be set appropriately:
+
+```ts
+interface Status<V> {
+  /**
+   * The status of a set() operation.
+   *
+   * - add: the item was not found in the cache, and was added
+   * - update: the item was in the cache, with the same value provided
+   * - replace: the item was in the cache, and replaced
+   * - miss: the item was not added to the cache for some reason
+   */
+  set?: 'add' | 'update' | 'replace' | 'miss'
+
+  /**
+   * the ttl stored for the item, or undefined if ttls are not used.
+   */
+  ttl?: LRUMilliseconds
+
+  /**
+   * the start time for the item, or undefined if ttls are not used.
+   */
+  start?: LRUMilliseconds
+
+  /**
+   * The timestamp used for TTL calculation
+   */
+  now?: LRUMilliseconds
+
+  /**
+   * the remaining ttl for the item, or undefined if ttls are not used.
+   */
+  remainingTTL?: LRUMilliseconds
+
+  /**
+   * The calculated size for the item, if sizes are used.
+   */
+  size?: LRUSize
+
+  /**
+   * A flag indicating that the item was not stored, due to exceeding the
+   * {@link maxEntrySize}
+   */
+  maxEntrySizeExceeded?: true
+
+  /**
+   * The old value, specified in the case of `set:'update'` or
+   * `set:'replace'`
+   */
+  oldValue?: V
+
+  /**
+   * The results of a {@link has} operation
+   *
+   * - hit: the item was found in the cache
+   * - stale: the item was found in the cache, but is stale
+   * - miss: the item was not found in the cache
+   */
+  has?: 'hit' | 'stale' | 'miss'
+
+  /**
+   * The status of a {@link fetch} operation.
+   * Note that this can change as the underlying fetch() moves through
+   * various states.
+   *
+   * - inflight: there is another fetch() for this key which is in process
+   * - get: there is no fetchMethod, so {@link get} was called.
+   * - miss: the item is not in cache, and will be fetched.
+   * - hit: the item is in the cache, and was resolved immediately.
+   * - stale: the item is in the cache, but stale.
+   * - refresh: the item is in the cache, and not stale, but
+   *   {@link forceRefresh} was specified.
+   */
+  fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh'
+
+  /**
+   * The {@link fetchMethod} was called
+   */
+  fetchDispatched?: true
+
+  /**
+   * The cached value was updated after a successful call to fetchMethod,
+   * ie the results of the fetchMethod promise were stored in the cache
+   */
+  fetchUpdated?: true
+
+  /**
+   * The reason for a fetch() rejection. Either the error raised by the
+   * {@link fetchMethod}, or the reason for an AbortSignal.
+   */
+  fetchError?: Error
+
+  /**
+   * The fetch received an abort signal
+   */
+  fetchAborted?: true
+
+  /**
+   * The abort signal received was ignored, and the fetch was allowed to
+   * continue.
+   */
+  fetchAbortIgnored?: true
+
+  /**
+   * The fetchMethod promise resolved successfully
+   */
+  fetchResolved?: true
+
+  /**
+   * The fetchMethod promise was rejected
+   */
+  fetchRejected?: true
+
+  /**
+   * The status of a {@link get} operation.
+   *
+   * - fetching: The item is currently being fetched. If a previous value is
+   *   present and allowed, that will be returned.
+   * - stale: The item is in the cache, and is stale.
+   * - hit: the item is in the cache
+   * - miss: the item is not in the cache
+   */
+  get?: 'stale' | 'hit' | 'miss'
+
+  /**
+   * A fetch or get operation returned a stale value.
+   */
+  returnedStale?: true
+}
+```
+
+## Storage Bounds Safety
+
+This implementation aims to be as flexible as possible, within
+the limits of safe memory consumption and optimal performance.
+
+At initial object creation, storage is allocated for `max` items.
+If `max` is set to zero, then some performance is lost, and item
+count is unbounded. Either `maxSize` or `ttl` _must_ be set if
+`max` is not specified.
+
+If `maxSize` is set, then this creates a safe limit on the
+maximum storage consumed, but without the performance benefits of
+pre-allocation. When `maxSize` is set, every item _must_ provide
+a size, either via the `sizeCalculation` method provided to the
+constructor, or via a `size` or `sizeCalculation` option provided
+to `cache.set()`. The size of every item _must_ be a positive
+integer.
+
+If neither `max` nor `maxSize` are set, then `ttl` tracking must
+be enabled. Note that, even when tracking item `ttl`, items are
+_not_ preemptively deleted when they become stale, unless
+`ttlAutopurge` is enabled. Instead, they are only purged the
+next time the key is requested. Thus, if `ttlAutopurge`, `max`,
+and `maxSize` are all not set, then the cache will potentially
+grow unbounded.
+
+In this case, a warning is printed to standard error. Future
+versions may require the use of `ttlAutopurge` if `max` and
+`maxSize` are not specified.
+
+If you truly wish to use a cache that is bound _only_ by TTL
+expiration, consider using a `Map` object, and calling
+`setTimeout` to delete entries when they expire. It will perform
+much better than an LRU cache.
+ +Here is an implementation you may use, under the same +[license](./LICENSE) as this package: + +```js +// a storage-unbounded ttl cache that is not an lru-cache +const cache = { + data: new Map(), + timers: new Map(), + set: (k, v, ttl) => { + if (cache.timers.has(k)) { + clearTimeout(cache.timers.get(k)) + } + cache.timers.set( + k, + setTimeout(() => cache.delete(k), ttl) + ) + cache.data.set(k, v) + }, + get: k => cache.data.get(k), + has: k => cache.data.has(k), + delete: k => { + if (cache.timers.has(k)) { + clearTimeout(cache.timers.get(k)) + } + cache.timers.delete(k) + return cache.data.delete(k) + }, + clear: () => { + cache.data.clear() + for (const v of cache.timers.values()) { + clearTimeout(v) + } + cache.timers.clear() + }, +} +``` + +If that isn't to your liking, check out +[@isaacs/ttlcache](http://npm.im/@isaacs/ttlcache). + +## Performance + +As of January 2022, version 7 of this library is one of the most +performant LRU cache implementations in JavaScript. + +Benchmarks can be extremely difficult to get right. In +particular, the performance of set/get/delete operations on +objects will vary _wildly_ depending on the type of key used. V8 +is highly optimized for objects with keys that are short strings, +especially integer numeric strings. Thus any benchmark which +tests _solely_ using numbers as keys will tend to find that an +object-based approach performs the best. + +Note that coercing _anything_ to strings to use as object keys is +unsafe, unless you can be 100% certain that no other type of +value will be used. For example: + +```js +const myCache = {} +const set = (k, v) => (myCache[k] = v) +const get = k => myCache[k] + +set({}, 'please hang onto this for me') +set('[object Object]', 'oopsie') +``` + +Also beware of "Just So" stories regarding performance. Garbage +collection of large (especially: deep) object graphs can be +incredibly costly, with several "tipping points" where it +increases exponentially. As a result, putting that off until +later can make it much worse, and less predictable. If a library +performs well, but only in a scenario where the object graph is +kept shallow, then that won't help you if you are using large +objects as keys. + +In general, when attempting to use a library to improve +performance (such as a cache like this one), it's best to choose +an option that will perform well in the sorts of scenarios where +you'll actually use it. + +This library is optimized for repeated gets and minimizing +eviction time, since that is the expected need of a LRU. Set +operations are somewhat slower on average than a few other +options, in part because of that optimization. It is assumed +that you'll be caching some costly operation, ideally as rarely +as possible, so optimizing set over get would be unwise. + +If performance matters to you: + +1. If it's at all possible to use small integer values as keys, + and you can guarantee that no other types of values will be + used as keys, then do that, and use a cache such as + [lru-fast](https://npmjs.com/package/lru-fast), or + [mnemonist's + LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache) + which uses an Object as its data store. +2. Failing that, if at all possible, use short non-numeric + strings (ie, less than 256 characters) as your keys, and use + [mnemonist's + LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache). +3. 
If the types of your keys will be long strings, strings that
+   look like floats, `null`, objects, or some mix of types, or if
+   you aren't sure, then this library will work well for you.
+4. Do not use a `dispose` function, size tracking, or especially
+   ttl behavior, unless absolutely needed. These features are
+   convenient, and necessary in some use cases, and every attempt
+   has been made to make the performance impact minimal, but it
+   isn't nothing.
+
+## Breaking Changes in Version 7
+
+This library changed to a different algorithm and internal data
+structure in version 7, yielding significantly better
+performance, albeit with some subtle changes as a result.
+
+If you were relying on the internals of LRUCache in version 6 or
+before, it probably will not work in version 7 and above.
+
+For more info, see the [change log](CHANGELOG.md).
diff --git a/node_modules/cacache/node_modules/lru-cache/index.d.ts b/node_modules/cacache/node_modules/lru-cache/index.d.ts
new file mode 100644
index 0000000..b58395e
--- /dev/null
+++ b/node_modules/cacache/node_modules/lru-cache/index.d.ts
@@ -0,0 +1,869 @@
+// Project: https://github.com/isaacs/node-lru-cache
+// Based initially on @types/lru-cache
+// https://github.com/DefinitelyTyped/DefinitelyTyped
+// used under the terms of the MIT License, shown below.
+//
+// DefinitelyTyped license:
+// ------
+// MIT License
+//
+// Copyright (c) Microsoft Corporation.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the "Software"),
+// to deal in the Software without restriction, including without limitation
+// the rights to use, copy, modify, merge, publish, distribute, sublicense,
+// and/or sell copies of the Software, and to permit persons to whom the
+// Software is furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE
+// ------
+//
+// Changes by Isaac Z. Schlueter released under the terms found in the
+// LICENSE file within this project.
+
+/**
+ * Integer greater than 0, representing some number of milliseconds, or the
+ * time at which a TTL started counting from.
+ */
+declare type LRUMilliseconds = number
+
+/**
+ * An integer greater than 0, reflecting the calculated size of items
+ */
+declare type LRUSize = number
+
+/**
+ * An integer greater than 0, reflecting a number of items
+ */
+declare type LRUCount = number
+
+declare class LRUCache<K, V> implements Iterable<[K, V]> {
+  constructor(options: LRUCache.Options<K, V>)
+
+  /**
+   * Number of items in the cache.
+   * Alias for {@link size}
+   *
+   * @deprecated since 7.0 use {@link size} instead
+   */
+  public readonly length: LRUCount
+
+  public readonly max: LRUCount
+  public readonly maxSize: LRUSize
+  public readonly maxEntrySize: LRUSize
+  public readonly sizeCalculation:
+    | LRUCache.SizeCalculator<K, V>
+    | undefined
+  public readonly dispose: LRUCache.Disposer<K, V>
+  /**
+   * @since 7.4.0
+   */
+  public readonly disposeAfter: LRUCache.Disposer<K, V> | null
+  public readonly noDisposeOnSet: boolean
+  public readonly ttl: LRUMilliseconds
+  public readonly ttlResolution: LRUMilliseconds
+  public readonly ttlAutopurge: boolean
+  public readonly allowStale: boolean
+  public readonly updateAgeOnGet: boolean
+  /**
+   * @since 7.11.0
+   */
+  public readonly noDeleteOnStaleGet: boolean
+  /**
+   * @since 7.6.0
+   */
+  public readonly fetchMethod: LRUCache.Fetcher<K, V> | null
+
+  /**
+   * The total number of items held in the cache at the current moment.
+   */
+  public readonly size: LRUCount
+
+  /**
+   * The total size of items in cache when using size tracking.
+   */
+  public readonly calculatedSize: LRUSize
+
+  /**
+   * Add a value to the cache.
+   */
+  public set(
+    key: K,
+    value: V,
+    options?: LRUCache.SetOptions<K, V>
+  ): this
+
+  /**
+   * Return a value from the cache. Will update the recency of the cache entry
+   * found.
+   *
+   * If the key is not found, {@link get} will return `undefined`. This can be
+   * confusing when setting values specifically to `undefined`, as in
+   * `cache.set(key, undefined)`. Use {@link has} to determine whether a key is
+   * present in the cache at all.
+   */
+  public get(key: K, options?: LRUCache.GetOptions<K, V>): V | undefined
+
+  /**
+   * Like {@link get} but doesn't update recency or delete stale items.
+   * Returns `undefined` if the item is stale, unless {@link allowStale} is set
+   * either on the cache or in the options object.
+   */
+  public peek(key: K, options?: LRUCache.PeekOptions): V | undefined
+
+  /**
+   * Check if a key is in the cache, without updating the recency of use.
+   * Will return false if the item is stale, even though it is technically
+   * in the cache.
+   *
+   * Will not update item age unless {@link updateAgeOnHas} is set in the
+   * options or constructor.
+   */
+  public has(key: K, options?: LRUCache.HasOptions<K, V>): boolean
+
+  /**
+   * Deletes a key out of the cache.
+   * Returns true if the key was deleted, false otherwise.
+   */
+  public delete(key: K): boolean
+
+  /**
+   * Clear the cache entirely, throwing away all values.
+   */
+  public clear(): void
+
+  /**
+   * Delete any stale entries. Returns true if anything was removed, false
+   * otherwise.
+   */
+  public purgeStale(): boolean
+
+  /**
+   * Find a value for which the supplied fn method returns a truthy value,
+   * similar to Array.find(). fn is called as fn(value, key, cache).
+   */
+  public find(
+    callbackFn: (
+      value: V,
+      key: K,
+      cache: this
+    ) => boolean | undefined | void,
+    options?: LRUCache.GetOptions<K, V>
+  ): V | undefined
+
+  /**
+   * Call the supplied function on each item in the cache, in order from
+   * most recently used to least recently used. fn is called as
+   * fn(value, key, cache). Does not update age or recency of use.
+   */
+  public forEach<T = this>(
+    callbackFn: (this: T, value: V, key: K, cache: this) => void,
+    thisArg?: T
+  ): void
+
+  /**
+   * The same as {@link forEach} but items are iterated over in reverse
+   * order. (ie, less recently used items are iterated over first.)
+   */
+  public rforEach<T = this>(
+    callbackFn: (this: T, value: V, key: K, cache: this) => void,
+    thisArg?: T
+  ): void
+
+  /**
+   * Return a generator yielding the keys in the cache,
+   * in order from most recently used to least recently used.
+   */
+  public keys(): Generator<K, void, void>
+
+  /**
+   * Inverse order version of {@link keys}
+   *
+   * Return a generator yielding the keys in the cache,
+   * in order from least recently used to most recently used.
+   */
+  public rkeys(): Generator<K, void, void>
+
+  /**
+   * Return a generator yielding the values in the cache,
+   * in order from most recently used to least recently used.
+   */
+  public values(): Generator<V, void, void>
+
+  /**
+   * Inverse order version of {@link values}
+   *
+   * Return a generator yielding the values in the cache,
+   * in order from least recently used to most recently used.
+   */
+  public rvalues(): Generator<V, void, void>
+
+  /**
+   * Return a generator yielding `[key, value]` pairs,
+   * in order from most recently used to least recently used.
+   */
+  public entries(): Generator<[K, V], void, void>
+
+  /**
+   * Inverse order version of {@link entries}
+   *
+   * Return a generator yielding `[key, value]` pairs,
+   * in order from least recently used to most recently used.
+   */
+  public rentries(): Generator<[K, V], void, void>
+
+  /**
+   * Iterating over the cache itself yields the same results as
+   * {@link entries}
+   */
+  public [Symbol.iterator](): Generator<[K, V], void, void>
+
+  /**
+   * Return an array of [key, entry] objects which can be passed to
+   * cache.load()
+   */
+  public dump(): Array<[K, LRUCache.Entry<V>]>
+
+  /**
+   * Reset the cache and load in the items in entries in the order listed.
+   * Note that the shape of the resulting cache may be different if the
+   * same options are not used in both caches.
+   */
+  public load(
+    cacheEntries: ReadonlyArray<[K, LRUCache.Entry<V>]>
+  ): void
+
+  /**
+   * Evict the least recently used item, returning its value or `undefined`
+   * if cache is empty.
+   */
+  public pop(): V | undefined
+
+  /**
+   * Deletes a key out of the cache.
+   *
+   * @deprecated since 7.0 use delete() instead
+   */
+  public del(key: K): boolean
+
+  /**
+   * Clear the cache entirely, throwing away all values.
+   *
+   * @deprecated since 7.0 use clear() instead
+   */
+  public reset(): void
+
+  /**
+   * Manually iterates over the entire cache proactively pruning old entries.
+   *
+   * @deprecated since 7.0 use purgeStale() instead
+   */
+  public prune(): boolean
+
+  /**
+   * Make an asynchronous cached fetch using the {@link fetchMethod} function.
+   *
+   * If multiple fetches for the same key are issued, then they will all be
+   * coalesced into a single call to fetchMethod.
+   *
+   * Note that this means that handling options such as
+   * {@link allowStaleOnFetchAbort}, {@link signal}, and
+   * {@link allowStaleOnFetchRejection} will be determined by the FIRST fetch()
+   * call for a given key.
+   *
+   * This is a known (fixable) shortcoming which will be addressed when
+   * someone complains about it, as the fix would involve added complexity and
+   * may not be worth the costs for this edge case.
+   *
+   * since: 7.6.0
+   */
+  public fetch(
+    key: K,
+    options?: LRUCache.FetchOptions<K, V>
+  ): Promise<V | undefined>
+
+  /**
+   * since: 7.6.0
+   */
+  public getRemainingTTL(key: K): LRUMilliseconds
+}
+
+declare namespace LRUCache {
+  type DisposeReason = 'evict' | 'set' | 'delete'
+
+  type SizeCalculator<K, V> = (value: V, key: K) => LRUSize
+  type Disposer<K, V> = (
+    value: V,
+    key: K,
+    reason: DisposeReason
+  ) => void
+  type Fetcher<K, V> = (
+    key: K,
+    staleValue: V | undefined,
+    options: FetcherOptions<K, V>
+  ) => Promise<V | void | undefined> | V | void | undefined
+
+  interface DeprecatedOptions<K, V> {
+    /**
+     * alias for ttl
+     *
+     * @deprecated since 7.0 use options.ttl instead
+     */
+    maxAge?: LRUMilliseconds
+
+    /**
+     * alias for {@link sizeCalculation}
+     *
+     * @deprecated since 7.0 use {@link sizeCalculation} instead
+     */
+    length?: SizeCalculator<K, V>
+
+    /**
+     * alias for allowStale
+     *
+     * @deprecated since 7.0 use options.allowStale instead
+     */
+    stale?: boolean
+  }
+
+  interface LimitedByCount {
+    /**
+     * The number of most recently used items to keep.
+     * Note that we may store fewer items than this if maxSize is hit.
+     */
+    max: LRUCount
+  }
+
+  type MaybeMaxEntrySizeLimit<K, V> =
+    | {
+        /**
+         * The maximum allowed size for any single item in the cache.
+         *
+         * If a larger item is passed to {@link set} or returned by a
+         * {@link fetchMethod}, then it will not be stored in the cache.
+         */
+        maxEntrySize: LRUSize
+        sizeCalculation?: SizeCalculator<K, V>
+      }
+    | {}
+
+  interface LimitedBySize<K, V> {
+    /**
+     * If you wish to track item size, you must provide a maxSize
+     * note that we still will only keep up to max *actual items*,
+     * if max is set, so size tracking may cause fewer than max items
+     * to be stored. At the extreme, a single item of maxSize size
+     * will cause everything else in the cache to be dropped when it
+     * is added. Use with caution!
+     *
+     * Note also that size tracking can negatively impact performance,
+     * though for most cases, only minimally.
+     */
+    maxSize: LRUSize
+
+    /**
+     * Function to calculate size of items. Useful if storing strings or
+     * buffers or other items where memory size depends on the object itself.
+     *
+     * Items larger than {@link maxEntrySize} will not be stored in the cache.
+     *
+     * Note that when {@link maxSize} or {@link maxEntrySize} are set, every
+     * item added MUST have a size specified, either via a `sizeCalculation` in
+     * the constructor, or `sizeCalculation` or {@link size} options to
+     * {@link set}.
+     */
+    sizeCalculation?: SizeCalculator<K, V>
+  }
+
+  interface LimitedByTTL {
+    /**
+     * Max time in milliseconds for items to live in cache before they are
+     * considered stale. Note that stale items are NOT preemptively removed
+     * by default, and MAY live in the cache, contributing to its LRU max,
+     * long after they have expired.
+     *
+     * Also, as this cache is optimized for LRU/MRU operations, some of
+     * the staleness/TTL checks will reduce performance, as they will incur
+     * overhead by deleting items.
+     *
+     * Must be an integer number of ms, defaults to 0, which means "no TTL"
+     */
+    ttl: LRUMilliseconds
+
+    /**
+     * Boolean flag to tell the cache to not update the TTL when
+     * setting a new value for an existing key (ie, when updating a value
+     * rather than inserting a new value). Note that the TTL value is
+     * _always_ set (if provided) when adding a new entry into the cache.
+     *
+     * @default false
+     * @since 7.4.0
+     */
+    noUpdateTTL?: boolean
+
+    /**
+     * Minimum amount of time in ms in which to check for staleness.
+     * Defaults to 1, which means that the current time is checked
+     * at most once per millisecond.
+     *
+     * Set to 0 to check the current time every time staleness is tested.
+     * (This reduces performance, and is theoretically unnecessary.)
+     *
+     * Setting this to a higher value will improve performance somewhat
+     * while using ttl tracking, albeit at the expense of keeping stale
+     * items around a bit longer than their TTLs would indicate.
+     *
+     * @default 1
+     * @since 7.1.0
+     */
+    ttlResolution?: LRUMilliseconds
+
+    /**
+     * Preemptively remove stale items from the cache.
+     * Note that this may significantly degrade performance,
+     * especially if the cache is storing a large number of items.
+     * It is almost always best to just leave the stale items in
+     * the cache, and let them fall out as new items are added.
+     *
+     * Note that this means that {@link allowStale} is a bit pointless,
+     * as stale items will be deleted almost as soon as they expire.
+     *
+     * Use with caution!
+     *
+     * @default false
+     * @since 7.1.0
+     */
+    ttlAutopurge?: boolean
+
+    /**
+     * Return stale items from {@link get} before disposing of them.
+     * Return stale values from {@link fetch} while performing a call
+     * to the {@link fetchMethod} in the background.
+     *
+     * @default false
+     */
+    allowStale?: boolean
+
+    /**
+     * Update the age of items on {@link get}, renewing their TTL
+     *
+     * @default false
+     */
+    updateAgeOnGet?: boolean
+
+    /**
+     * Do not delete stale items when they are retrieved with {@link get}.
+     * Note that the {@link get} return value will still be `undefined` unless
+     * allowStale is true.
+     *
+     * @default false
+     * @since 7.11.0
+     */
+    noDeleteOnStaleGet?: boolean
+
+    /**
+     * Update the age of items on {@link has}, renewing their TTL
+     *
+     * @default false
+     */
+    updateAgeOnHas?: boolean
+  }
+
+  type SafetyBounds<K, V> =
+    | LimitedByCount
+    | LimitedBySize<K, V>
+    | LimitedByTTL
+
+  // options shared by all three of the limiting scenarios
+  interface SharedOptions<K, V> {
+    /**
+     * Function that is called on items when they are dropped from the cache.
+     * This can be handy if you want to close file descriptors or do other
+     * cleanup tasks when items are no longer accessible. Called as
+     * `dispose(value, key, reason)`. It's called before actually removing
+     * the item from the internal cache, so it is *NOT* safe to re-add them.
+     * Use {@link disposeAfter} if you wish to dispose items after they have
+     * been fully removed, when it is safe to add them back to the cache.
+     */
+    dispose?: Disposer<K, V>
+
+    /**
+     * The same as dispose, but called *after* the entry is completely
+     * removed and the cache is once again in a clean state. It is safe to
+     * add an item right back into the cache at this point.
+     * However, note that it is *very* easy to inadvertently create infinite
+     * recursion this way.
+     *
+     * @since 7.3.0
+     */
+    disposeAfter?: Disposer<K, V>
+
+    /**
+     * Set to true to suppress calling the dispose() function if the entry
+     * key is still accessible within the cache.
+     * This may be overridden by passing an options object to {@link set}.
+     *
+     * @default false
+     */
+    noDisposeOnSet?: boolean
+
+    /**
+     * Function that is used to make background asynchronous fetches. Called
+     * with `fetchMethod(key, staleValue, { signal, options, context })`.
+     *
+     * If `fetchMethod` is not provided, then {@link fetch} is
+     * equivalent to `Promise.resolve(cache.get(key))`.
+     *
+     * The `fetchMethod` should ONLY return `undefined` in cases where the
+     * abort controller has sent an abort signal.
+     *
+     * @since 7.6.0
+     */
+    fetchMethod?: LRUCache.Fetcher<K, V>
+
+    /**
+     * Set to true to suppress the deletion of stale data when a
+     * {@link fetchMethod} throws an error or returns a rejected promise
+     *
+     * This may be overridden in the {@link fetchMethod}.
+     *
+     * @default false
+     * @since 7.10.0
+     */
+    noDeleteOnFetchRejection?: boolean
+
+    /**
+     * Set to true to allow returning stale data when a {@link fetchMethod}
+     * throws an error or returns a rejected promise. Note that this
+     * differs from using {@link allowStale} in that stale data will
+     * ONLY be returned in the case that the fetch fails, not any other
+     * times.
+     *
+     * This may be overridden in the {@link fetchMethod}.
+     *
+     * @default false
+     * @since 7.16.0
+     */
+    allowStaleOnFetchRejection?: boolean
+
+    /**
+     *
+     * Set to true to ignore the `abort` event emitted by the `AbortSignal`
+     * object passed to {@link fetchMethod}, and still cache the
+     * resulting resolution value, as long as it is not `undefined`.
+     *
+     * When used on its own, this means aborted {@link fetch} calls are not
+     * immediately resolved or rejected when they are aborted, and instead take
+     * the full time to await.
+     *
+     * When used with {@link allowStaleOnFetchAbort}, aborted {@link fetch}
+     * calls will resolve immediately to their stale cached value or
+     * `undefined`, and will continue to process and eventually update the
+     * cache when they resolve, as long as the resulting value is not
+     * `undefined`, thus supporting a "return stale on timeout while
+     * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal.
+     *
+     * **Note**: regardless of this setting, an `abort` event _is still emitted
+     * on the `AbortSignal` object_, so may result in invalid results when
+     * passed to other underlying APIs that use AbortSignals.
+     *
+     * This may be overridden in the {@link fetchMethod} or the call to
+     * {@link fetch}.
+     *
+     * @default false
+     * @since 7.17.0
+     */
+    ignoreFetchAbort?: boolean
+
+    /**
+     * Set to true to return a stale value from the cache when the
+     * `AbortSignal` passed to the {@link fetchMethod} dispatches an `'abort'`
+     * event, whether user-triggered, or due to internal cache behavior.
+     *
+     * Unless {@link ignoreFetchAbort} is also set, the underlying
+     * {@link fetchMethod} will still be considered canceled, and its return
+     * value will be ignored and not cached.
+     *
+     * This may be overridden in the {@link fetchMethod} or the call to
+     * {@link fetch}.
+     *
+     * @default false
+     * @since 7.17.0
+     */
+    allowStaleOnFetchAbort?: boolean
+
+    /**
+     * Set to any value in the constructor or {@link fetch} options to
+     * pass arbitrary data to the {@link fetchMethod} in the {@link context}
+     * options field.
+     *
+     * @since 7.12.0
+     */
+    fetchContext?: any
+  }
+
+  type Options<K, V> = SharedOptions<K, V> &
+    DeprecatedOptions<K, V> &
+    SafetyBounds<K, V> &
+    MaybeMaxEntrySizeLimit<K, V>
+
+  /**
+   * options which override the options set in the LRUCache constructor
+   * when calling {@link set}.
+   */
+  interface SetOptions<K, V> {
+    /**
+     * A value for the size of the entry, prevents calls to
+     * {@link sizeCalculation}.
+     *
+     * Items larger than {@link maxEntrySize} will not be stored in the cache.
+     *
+     * Note that when {@link maxSize} or {@link maxEntrySize} are set, every
+     * item added MUST have a size specified, either via a `sizeCalculation` in
+     * the constructor, or {@link sizeCalculation} or `size` options to
+     * {@link set}.
+     */
+    size?: LRUSize
+    /**
+     * Overrides the {@link sizeCalculation} method set in the constructor.
+     *
+     * Items larger than {@link maxEntrySize} will not be stored in the cache.
+     *
+     * Note that when {@link maxSize} or {@link maxEntrySize} are set, every
+     * item added MUST have a size specified, either via a `sizeCalculation` in
+     * the constructor, or `sizeCalculation` or {@link size} options to
+     * {@link set}.
+     */
+    sizeCalculation?: SizeCalculator
+    ttl?: LRUMilliseconds
+    start?: LRUMilliseconds
+    noDisposeOnSet?: boolean
+    noUpdateTTL?: boolean
+    status?: Status
+  }
+
+  /**
+   * options which override the options set in the LRUCache constructor
+   * when calling {@link has}.
+   */
+  interface HasOptions {
+    updateAgeOnHas?: boolean
+    status?: Status
+  }
+
+  /**
+   * options which override the options set in the LRUCache constructor
+   * when calling {@link get}.
+   */
+  interface GetOptions {
+    allowStale?: boolean
+    updateAgeOnGet?: boolean
+    noDeleteOnStaleGet?: boolean
+    status?: Status
+  }
+
+  /**
+   * options which override the options set in the LRUCache constructor
+   * when calling {@link peek}.
+   */
+  interface PeekOptions {
+    allowStale?: boolean
+  }
+
+  /**
+   * Options object passed to the {@link fetchMethod}
+   *
+   * May be mutated by the {@link fetchMethod} to affect the behavior of the
+   * resulting {@link set} operation on resolution, or in the case of
+   * {@link noDeleteOnFetchRejection}, {@link ignoreFetchAbort}, and
+   * {@link allowStaleOnFetchRejection}, the handling of failure.
+   */
+  interface FetcherFetchOptions {
+    allowStale?: boolean
+    updateAgeOnGet?: boolean
+    noDeleteOnStaleGet?: boolean
+    size?: LRUSize
+    sizeCalculation?: SizeCalculator
+    ttl?: LRUMilliseconds
+    noDisposeOnSet?: boolean
+    noUpdateTTL?: boolean
+    noDeleteOnFetchRejection?: boolean
+    allowStaleOnFetchRejection?: boolean
+    ignoreFetchAbort?: boolean
+    allowStaleOnFetchAbort?: boolean
+    status?: Status
+  }
+
+  /**
+   * Status object that may be passed to {@link fetch}, {@link get},
+   * {@link set}, and {@link has}.
+   */
+  interface Status {
+    /**
+     * The status of a set() operation.
+     *
+     * - add: the item was not found in the cache, and was added
+     * - update: the item was in the cache, with the same value provided
+     * - replace: the item was in the cache, and replaced
+     * - miss: the item was not added to the cache for some reason
+     */
+    set?: 'add' | 'update' | 'replace' | 'miss'
+
+    /**
+     * the ttl stored for the item, or undefined if ttls are not used.
+     */
+    ttl?: LRUMilliseconds
+
+    /**
+     * the start time for the item, or undefined if ttls are not used.
+     */
+    start?: LRUMilliseconds
+
+    /**
+     * The timestamp used for TTL calculation
+     */
+    now?: LRUMilliseconds
+
+    /**
+     * the remaining ttl for the item, or undefined if ttls are not used.
+     */
+    remainingTTL?: LRUMilliseconds
+
+    /**
+     * The calculated size for the item, if sizes are used.
+     */
+    size?: LRUSize
+
+    /**
+     * A flag indicating that the item was not stored, due to exceeding the
+     * {@link maxEntrySize}
+     */
+    maxEntrySizeExceeded?: true
+
+    /**
+     * The old value, specified in the case of `set:'update'` or
+     * `set:'replace'`
+     */
+    oldValue?: V
+
+    /**
+     * The results of a {@link has} operation
+     *
+     * - hit: the item was found in the cache
+     * - stale: the item was found in the cache, but is stale
+     * - miss: the item was not found in the cache
+     */
+    has?: 'hit' | 'stale' | 'miss'
+
+    /**
+     * The status of a {@link fetch} operation.
+     * Note that this can change as the underlying fetch() moves through
+     * various states.
+ * + * - inflight: there is another fetch() for this key which is in process + * - get: there is no fetchMethod, so {@link get} was called. + * - miss: the item is not in cache, and will be fetched. + * - hit: the item is in the cache, and was resolved immediately. + * - stale: the item is in the cache, but stale. + * - refresh: the item is in the cache, and not stale, but + * {@link forceRefresh} was specified. + */ + fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh' + + /** + * The {@link fetchMethod} was called + */ + fetchDispatched?: true + + /** + * The cached value was updated after a successful call to fetchMethod + */ + fetchUpdated?: true + + /** + * The reason for a fetch() rejection. Either the error raised by the + * {@link fetchMethod}, or the reason for an AbortSignal. + */ + fetchError?: Error + + /** + * The fetch received an abort signal + */ + fetchAborted?: true + + /** + * The abort signal received was ignored, and the fetch was allowed to + * continue. + */ + fetchAbortIgnored?: true + + /** + * The fetchMethod promise resolved successfully + */ + fetchResolved?: true + + /** + * The fetchMethod promise was rejected + */ + fetchRejected?: true + + /** + * The status of a {@link get} operation. + * + * - fetching: The item is currently being fetched. If a previous value is + * present and allowed, that will be returned. + * - stale: The item is in the cache, and is stale. + * - hit: the item is in the cache + * - miss: the item is not in the cache + */ + get?: 'stale' | 'hit' | 'miss' + + /** + * A fetch or get operation returned a stale value. + */ + returnedStale?: true + } + + /** + * options which override the options set in the LRUCache constructor + * when calling {@link fetch}. + * + * This is the union of GetOptions and SetOptions, plus + * {@link noDeleteOnFetchRejection}, {@link allowStaleOnFetchRejection}, + * {@link forceRefresh}, and {@link fetchContext} + */ + interface FetchOptions extends FetcherFetchOptions { + forceRefresh?: boolean + fetchContext?: any + signal?: AbortSignal + status?: Status + } + + interface FetcherOptions { + signal: AbortSignal + options: FetcherFetchOptions + /** + * Object provided in the {@link fetchContext} option + */ + context: any + } + + interface Entry { + value: V + ttl?: LRUMilliseconds + size?: LRUSize + start?: LRUMilliseconds + } +} + +export = LRUCache diff --git a/node_modules/cacache/node_modules/lru-cache/index.js b/node_modules/cacache/node_modules/lru-cache/index.js new file mode 100644 index 0000000..48e99fe --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/index.js @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? 
AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
+  emitWarning(msg, 'DeprecationWarning', code, fn)
+}
+
+const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
+
+/* istanbul ignore next - This is a little bit ridiculous, tbh.
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls.
+ * And well before that point, you're caching the entire world, I mean,
+ * that's ~32GB of just integers for the next/prev links, plus whatever
+ * else to hold that many keys and values. Just filling the memory with
+ * zeroes at init time is brutal when you get that big.
+ * But why not be complete?
+ * Maybe in the future, these limits will have expanded. */
+const getUintArray = max =>
+  !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+    ? Uint8Array
+    : max <= Math.pow(2, 16)
+    ? Uint16Array
+    : max <= Math.pow(2, 32)
+    ? Uint32Array
+    : max <= Number.MAX_SAFE_INTEGER
+    ?
ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? 
ttlResolution
+        : 1
+    this.ttlAutopurge = !!ttlAutopurge
+    this.ttl = ttl || maxAge || 0
+    if (this.ttl) {
+      if (!isPosInt(this.ttl)) {
+        throw new TypeError(
+          'ttl must be a positive integer if specified'
+        )
+      }
+      this.initializeTTLTracking()
+    }
+
+    // do not allow completely unbounded caches
+    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
+      throw new TypeError(
+        'At least one of max, maxSize, or ttl is required'
+      )
+    }
+    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
+      const code = 'LRU_CACHE_UNBOUNDED'
+      if (shouldWarn(code)) {
+        warned.add(code)
+        const msg =
+          'TTL caching without ttlAutopurge, max, or maxSize can ' +
+          'result in unbounded memory consumption.'
+        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
+      }
+    }
+
+    if (stale) {
+      deprecatedOption('stale', 'allowStale')
+    }
+    if (maxAge) {
+      deprecatedOption('maxAge', 'ttl')
+    }
+    if (length) {
+      deprecatedOption('length', 'sizeCalculation')
+    }
+  }
+
+  getRemainingTTL(key) {
+    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.start + status.ttl - status.now
+      }
+    }
+
+    // debounce calls to perf.now() by ttlResolution ms (1ms by default)
+    // so we're not hitting that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
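+      // editor's usage sketch (not upstream source): with a size-bounded
+      // cache, every entry must resolve to a positive integer size, either
+      // computed by sizeCalculation or passed explicitly to set(), e.g.
+      //   const c = new LRUCache({ maxSize: 1024, sizeCalculation: v => v.length })
+      //   c.set('a', 'hello')                       // size 5, computed
+      //   c.set('b', 'x'.repeat(16), { size: 16 })  // size given explicitly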
+ if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' + ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
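+      // (editor's note: `p` was decorated just above with __abortController,
+      // __staleWhileFetching, and __returned so that isBackgroundFetch() can
+      // recognize it, and so a stale value can be served while it settles)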
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
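// reset the list pointers, free stack, and size bookkeeping
+    // back to the initial empty state
+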
this.head = 0
+    this.tail = 0
+    this.initialFill = 1
+    this.free.length = 0
+    this.calculatedSize = 0
+    this.size = 0
+    if (this.disposed) {
+      while (this.disposed.length) {
+        this.disposeAfter(...this.disposed.shift())
+      }
+    }
+  }
+
+  get reset() {
+    deprecatedMethod('reset', 'clear')
+    return this.clear
+  }
+
+  get length() {
+    deprecatedProperty('length', 'size')
+    return this.size
+  }
+
+  static get AbortController() {
+    return AC
+  }
+  static get AbortSignal() {
+    return AS
+  }
+}
+
+module.exports = LRUCache
diff --git a/node_modules/cacache/node_modules/lru-cache/index.mjs b/node_modules/cacache/node_modules/lru-cache/index.mjs
new file mode 100644
index 0000000..4a0b481
--- /dev/null
+++ b/node_modules/cacache/node_modules/lru-cache/index.mjs
@@ -0,0 +1,1227 @@
+const perf =
+  typeof performance === 'object' &&
+  performance &&
+  typeof performance.now === 'function'
+    ? performance
+    : Date
+
+const hasAbortController = typeof AbortController === 'function'
+
+// minimal backwards-compatibility polyfill
+// this doesn't have nearly all the checks and whatnot that
+// actual AbortController/Signal has, but it's enough for
+// our purposes, and if used properly, behaves the same.
+const AC = hasAbortController
+  ? AbortController
+  : class AbortController {
+      constructor() {
+        this.signal = new AS()
+      }
+      abort(reason = new Error('This operation was aborted')) {
+        this.signal.reason = this.signal.reason || reason
+        this.signal.aborted = true
+        this.signal.dispatchEvent({
+          type: 'abort',
+          target: this.signal,
+        })
+      }
+    }
+
+const hasAbortSignal = typeof AbortSignal === 'function'
+// Some polyfills put this on the AC class, not global
+const hasACAbortSignal = typeof AC.AbortSignal === 'function'
+const AS = hasAbortSignal
+  ? AbortSignal
+  : hasACAbortSignal
+  ? AC.AbortSignal
+  : class AbortSignal {
+      constructor() {
+        this.reason = undefined
+        this.aborted = false
+        this._listeners = []
+      }
+      dispatchEvent(e) {
+        if (e.type === 'abort') {
+          this.aborted = true
+          this.onabort(e)
+          this._listeners.forEach(f => f(e), this)
+        }
+      }
+      onabort() {}
+      addEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners.push(fn)
+        }
+      }
+      removeEventListener(ev, fn) {
+        if (ev === 'abort') {
+          this._listeners = this._listeners.filter(f => f !== fn)
+        }
+      }
+    }
+
+const warned = new Set()
+const deprecatedOption = (opt, instead) => {
+  const code = `LRU_CACHE_OPTION_${opt}`
+  if (shouldWarn(code)) {
+    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
+  }
+}
+const deprecatedMethod = (method, instead) => {
+  const code = `LRU_CACHE_METHOD_${method}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
+    warn(code, `${method} method`, `cache.${instead}()`, get)
+  }
+}
+const deprecatedProperty = (field, instead) => {
+  const code = `LRU_CACHE_PROPERTY_${field}`
+  if (shouldWarn(code)) {
+    const { prototype } = LRUCache
+    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
+    warn(code, `${field} property`, `cache.${instead}`, get)
+  }
+}
+
+const emitWarning = (...a) => {
+  typeof process === 'object' &&
+  process &&
+  typeof process.emitWarning === 'function'
+    ? process.emitWarning(...a)
+    : console.error(...a)
+}
+
+const shouldWarn = code => !warned.has(code)
+
+const warn = (code, what, instead, fn) => {
+  warned.add(code)
+  const msg = `The ${what} is deprecated. 
Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? 
getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0
+  }
+
+  initializeTTLTracking() {
+    this.ttls = new ZeroArray(this.max)
+    this.starts = new ZeroArray(this.max)
+
+    this.setItemTTL = (index, ttl, start = perf.now()) => {
+      this.starts[index] = ttl !== 0 ? start : 0
+      this.ttls[index] = ttl
+      if (ttl !== 0 && this.ttlAutopurge) {
+        const t = setTimeout(() => {
+          if (this.isStale(index)) {
+            this.delete(this.keyList[index])
+          }
+        }, ttl + 1)
+        /* istanbul ignore else - unref() not supported on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+    }
+
+    this.updateItemAge = index => {
+      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
+    }
+
+    this.statusTTL = (status, index) => {
+      if (status) {
+        status.ttl = this.ttls[index]
+        status.start = this.starts[index]
+        status.now = cachedNow || getNow()
+        status.remainingTTL = status.start + status.ttl - status.now
+      }
+    }
+
+    // debounce calls to perf.now() by ttlResolution ms (1ms by default)
+    // so we're not hitting that costly call repeatedly.
+    let cachedNow = 0
+    const getNow = () => {
+      const n = perf.now()
+      if (this.ttlResolution > 0) {
+        cachedNow = n
+        const t = setTimeout(
+          () => (cachedNow = 0),
+          this.ttlResolution
+        )
+        /* istanbul ignore else - not available on all platforms */
+        if (t.unref) {
+          t.unref()
+        }
+      }
+      return n
+    }
+
+    this.getRemainingTTL = key => {
+      const index = this.keyMap.get(key)
+      if (index === undefined) {
+        return 0
+      }
+      return this.ttls[index] === 0 || this.starts[index] === 0
+        ? Infinity
+        : this.starts[index] +
+            this.ttls[index] -
+            (cachedNow || getNow())
+    }
+
+    this.isStale = index => {
+      return (
+        this.ttls[index] !== 0 &&
+        this.starts[index] !== 0 &&
+        (cachedNow || getNow()) - this.starts[index] >
+          this.ttls[index]
+      )
+    }
+  }
+  updateItemAge(_index) {}
+  statusTTL(_status, _index) {}
+  setItemTTL(_index, _ttl, _start) {}
+  isStale(_index) {
+    return false
+  }
+
+  initializeSizeTracking() {
+    this.calculatedSize = 0
+    this.sizes = new ZeroArray(this.max)
+    this.removeItemSize = index => {
+      this.calculatedSize -= this.sizes[index]
+      this.sizes[index] = 0
+    }
+    this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
+      if (!isPosInt(size)) {
+        if (sizeCalculation) {
+          if (typeof sizeCalculation !== 'function') {
+            throw new TypeError('sizeCalculation must be a function')
+          }
+          size = sizeCalculation(v, k)
+          if (!isPosInt(size)) {
+            throw new TypeError(
+              'sizeCalculation return invalid (expect positive integer)'
+            )
+          }
+        } else {
+          throw new TypeError(
+            'invalid size value (must be positive integer). ' +
+              'When maxSize or maxEntrySize is used, sizeCalculation or size ' +
+              'must be set.'
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
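+      // (editor's note: `p` was decorated just above with __abortController,
+      // __staleWhileFetching, and __returned so that isBackgroundFetch() can
+      // recognize it, and so a stale value can be served while it settles)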
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
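+    // reset the list pointers, free-index stack, and size bookkeeping to
+    // the initial empty state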
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +export default LRUCache diff --git a/node_modules/cacache/node_modules/lru-cache/package.json b/node_modules/cacache/node_modules/lru-cache/package.json new file mode 100644 index 0000000..9684991 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/package.json @@ -0,0 +1,96 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "7.18.3", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "size": "size-limit", + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc ./index.d.ts" + }, + "type": "commonjs", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "@types/node": "^17.0.31", + "@types/tap": "^15.0.6", + "benchmark": "^2.1.4", + "c8": "^7.11.2", + "clock-mock": "^1.0.6", + "eslint-config-prettier": "^8.5.0", + "prettier": "^2.6.2", + "size-limit": "^7.0.8", + "tap": "^16.3.4", + "ts-node": "^10.7.0", + "tslib": "^2.4.0", + "typedoc": "^0.23.24", + "typescript": "^4.6.4" + }, + "license": "ISC", + "files": [ + "index.js", + "index.mjs", + "index.d.ts" + ], + "engines": { + "node": ">=12" + }, + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "nyc-arg": [ + "--include=index.js" + ], + "node-arg": [ + "--expose-gc", + "--require", + "ts-node/register" + ], + "ts": false + }, + "size-limit": [ + { + "path": "./index.js" + } + ] +} diff --git a/node_modules/cacache/node_modules/minimatch/LICENSE b/node_modules/cacache/node_modules/minimatch/LICENSE new file mode 100644 index 0000000..1493534 --- /dev/null +++ b/node_modules/cacache/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/cacache/node_modules/minimatch/README.md b/node_modules/cacache/node_modules/minimatch/README.md new file mode 100644 index 0000000..a5bdefa --- /dev/null +++ b/node_modules/cacache/node_modules/minimatch/README.md @@ -0,0 +1,259 @@ +# minimatch + +A minimal matching utility. + +[![Build Status](https://travis-ci.org/isaacs/minimatch.svg?branch=master)](http://travis-ci.org/isaacs/minimatch) + + +This is the matching library used internally by npm. + +It works by converting glob expressions into JavaScript `RegExp` +objects. + +## Usage + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.foo", "*.foo") // true! +minimatch("bar.foo", "*.bar") // false! +minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy! +``` + +## Features + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes in patterns +will always be interpreted as escape characters, not path separators. + +Note that `\` or `/` _will_ be interpreted as path separators in paths on +Windows, and will match against `/` in glob expressions. + +So just always use `/` in patterns. + +## Minimatch Class + +Create a minimatch object by instantiating the `minimatch.Minimatch` class. + +```javascript +var Minimatch = require("minimatch").Minimatch +var mm = new Minimatch(pattern, options) +``` + +### Properties + +* `pattern` The original pattern the minimatch object represents. +* `options` The options supplied to the constructor. +* `set` A 2-dimensional array of regexp or string expressions. + Each row in the + array corresponds to a brace-expanded pattern. Each item in the row + corresponds to a single path-part. For example, the pattern + `{a,b/c}/d` would expand to a set of patterns like: + + [ [ a, d ] + , [ b, c, d ] ] + + If a portion of the pattern doesn't have any "magic" in it + (that is, it's something like `"foo"` rather than `fo*o?`), then it + will be left as a string rather than converted to a regular + expression. + +* `regexp` Created by the `makeRe` method. A single regular expression + expressing the entire pattern. This is useful in cases where you wish + to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. +* `negate` True if the pattern is negated. +* `comment` True if the pattern is a comment. +* `empty` True if the pattern is `""`. + +### Methods + +* `makeRe` Generate the `regexp` member if necessary, and return it. + Will return `false` if the pattern is invalid. +* `match(fname)` Return true if the filename matches the pattern, or + false otherwise. +* `matchOne(fileArray, patternArray, partial)` Take a `/`-split + filename, and match it against a single row in the `regExpSet`. This + method is mainly for internal use, but is exposed so that it can be + used by a glob-walker that needs to avoid excessive filesystem calls. 
+ +All other methods are internal, and will be called as necessary. + +### minimatch(path, pattern, options) + +Main export. Tests a path against the pattern using the options. + +```javascript +var isJS = minimatch(file, "*.js", { matchBase: true }) +``` + +### minimatch.filter(pattern, options) + +Returns a function that tests its +supplied argument, suitable for use with `Array.filter`. Example: + +```javascript +var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) +``` + +### minimatch.match(list, pattern, options) + +Match against the list of +files, in the style of fnmatch or glob. If nothing is matched, and +options.nonull is set, then return a list containing the pattern itself. + +```javascript +var javascripts = minimatch.match(fileList, "*.js", {matchBase: true}) +``` + +### minimatch.makeRe(pattern, options) + +Make a regular expression object from the pattern. + +## Options + +All options are `false` by default. + +### debug + +Dump a ton of stuff to stderr. + +### nobrace + +Do not expand `{a,b}` and `{1..3}` brace sets. + +### noglobstar + +Disable `**` matching against multiple folder names. + +### dot + +Allow patterns to match filenames starting with a period, even if +the pattern does not explicitly have a period in that spot. + +Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` +is set. + +### noext + +Disable "extglob" style patterns like `+(a|b)`. + +### nocase + +Perform a case-insensitive match. + +### nonull + +When a match is not found by `minimatch.match`, return a list containing +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. + +### matchBase + +If set, then patterns without slashes will be matched +against the basename of the path if it contains slashes. For example, +`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + +### nocomment + +Suppress the behavior of treating `#` at the start of a pattern as a +comment. + +### nonegate + +Suppress the behavior of treating a leading `!` character as negation. + +### flipNegate + +Returns from negate expressions the same as if they were not negated. +(Ie, true on a hit, false on a miss.) + +### partial + +Compare a partial path to a pattern. As long as the parts of the path that +are present are not contradicted by the pattern, it will be treated as a +match. This is useful in applications where you're walking through a +folder structure, and don't yet have the full path, but want to ensure that +you do not walk down paths that can never be a match. + +For example, + +```js +minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d +minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d +minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a +``` + +### windowsPathsNoEscape + +Use `\\` as a path separator _only_, and _never_ as an escape +character. If set, all `\\` characters are replaced with `/` in +the pattern. Note that this makes it **impossible** to match +against paths containing literal glob pattern characters, but +allows matching with patterns constructed using `path.join()` and +`path.resolve()` on Windows platforms, mimicking the (buggy!) +behavior of earlier versions on Windows. Please use with +caution, and be mindful of [the caveat about Windows +paths](#windows). + +For legacy reasons, this is also set if +`options.allowWindowsEscape` is set to the exact value `false`. 
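+
+A short sketch tying a few of these options together; the expected values
+follow directly from the descriptions above:
+
+```javascript
+var minimatch = require("minimatch")
+
+// matchBase: a slash-less pattern is tested against the basename
+minimatch("/xyz/123/acb", "a?b", { matchBase: true }) // true
+
+// dot: by default `a/**/b` does not match `a/.d/b`
+minimatch("a/.d/b", "a/**/b") // false
+minimatch("a/.d/b", "a/**/b", { dot: true }) // true
+
+// nonull: match() returns the pattern itself when nothing matches
+minimatch.match([], "*.js") // []
+minimatch.match([], "*.js", { nonull: true }) // ["*.js"]
+```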
+ +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between minimatch and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then minimatch.match returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +Note that `fnmatch(3)` in libc is an extremely naive string comparison +matcher, which does not do anything special for slashes. This library is +designed to be used in glob searching and file walkers, and so it does do +special things with `/`. Thus, `foo*` will not match `foo/bar` in this +library, even though it would in `fnmatch(3)`. diff --git a/node_modules/cacache/node_modules/minimatch/lib/path.js b/node_modules/cacache/node_modules/minimatch/lib/path.js new file mode 100644 index 0000000..ffe453d --- /dev/null +++ b/node_modules/cacache/node_modules/minimatch/lib/path.js @@ -0,0 +1,4 @@ +const isWindows = typeof process === 'object' && + process && + process.platform === 'win32' +module.exports = isWindows ? { sep: '\\' } : { sep: '/' } diff --git a/node_modules/cacache/node_modules/minimatch/minimatch.js b/node_modules/cacache/node_modules/minimatch/minimatch.js new file mode 100644 index 0000000..6c8bfc3 --- /dev/null +++ b/node_modules/cacache/node_modules/minimatch/minimatch.js @@ -0,0 +1,944 @@ +const minimatch = module.exports = (p, pattern, options = {}) => { + assertValidPattern(pattern) + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + return new Minimatch(pattern, options).match(p) +} + +module.exports = minimatch + +const path = require('./lib/path.js') +minimatch.sep = path.sep + +const GLOBSTAR = Symbol('globstar **') +minimatch.GLOBSTAR = GLOBSTAR +const expand = require('brace-expansion') + +const plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' 
}, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +const qmark = '[^/]' + +// * => any number of characters +const star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +const twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +const twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// "abc" -> { a:true, b:true, c:true } +const charSet = s => s.split('').reduce((set, c) => { + set[c] = true + return set +}, {}) + +// characters that need to be escaped in RegExp. +const reSpecials = charSet('().*{}+?[]^$\\!') + +// characters that indicate we have to add the pattern start +const addPatternStartSet = charSet('[.(') + +// normalizes slashes. +const slashSplit = /\/+/ + +minimatch.filter = (pattern, options = {}) => + (p, i, list) => minimatch(p, pattern, options) + +const ext = (a, b = {}) => { + const t = {} + Object.keys(a).forEach(k => t[k] = a[k]) + Object.keys(b).forEach(k => t[k] = b[k]) + return t +} + +minimatch.defaults = def => { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch + } + + const orig = minimatch + + const m = (p, pattern, options) => orig(p, pattern, ext(def, options)) + m.Minimatch = class Minimatch extends orig.Minimatch { + constructor (pattern, options) { + super(pattern, ext(def, options)) + } + } + m.Minimatch.defaults = options => orig.defaults(ext(def, options)).Minimatch + m.filter = (pattern, options) => orig.filter(pattern, ext(def, options)) + m.defaults = options => orig.defaults(ext(def, options)) + m.makeRe = (pattern, options) => orig.makeRe(pattern, ext(def, options)) + m.braceExpand = (pattern, options) => orig.braceExpand(pattern, ext(def, options)) + m.match = (list, pattern, options) => orig.match(list, pattern, ext(def, options)) + + return m +} + + + + + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = (pattern, options) => braceExpand(pattern, options) + +const braceExpand = (pattern, options = {}) => { + assertValidPattern(pattern) + + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +const MAX_PATTERN_LENGTH = 1024 * 64 +const assertValidPattern = pattern => { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } + + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. 
+// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +const SUBPARSE = Symbol('subparse') + +minimatch.makeRe = (pattern, options) => + new Minimatch(pattern, options || {}).makeRe() + +minimatch.match = (list, pattern, options = {}) => { + const mm = new Minimatch(pattern, options) + list = list.filter(f => mm.match(f)) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +// replace stuff like \* with * +const globUnescape = s => s.replace(/\\(.)/g, '$1') +const charUnescape = s => s.replace(/\\([^-\]])/g, '$1') +const regExpEscape = s => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +const braExpEscape = s => s.replace(/[[\]\\]/g, '\\$&') + +class Minimatch { + constructor (pattern, options) { + assertValidPattern(pattern) + + if (!options) options = {} + + this.options = options + this.set = [] + this.pattern = pattern + this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || + options.allowWindowsEscape === false + if (this.windowsPathsNoEscape) { + this.pattern = this.pattern.replace(/\\/g, '/') + } + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + this.partial = !!options.partial + + // make the set of regexps etc. + this.make() + } + + debug () {} + + make () { + const pattern = this.pattern + const options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + let set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = (...args) => console.error(...args) + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(s => s.split(slashSplit)) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map((s, si, set) => s.map(this.parse, this)) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(s => s.indexOf(false) === -1) + + this.debug(this.pattern, set) + + this.set = set + } + + parseNegate () { + if (this.options.nonegate) return + + const pattern = this.pattern + let negate = false + let negateOffset = 0 + + for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.slice(negateOffset) + this.negate = negate + } + + // set partial to true to test if, for example, + // "/a/b" matches the start of "/*/b/*/d" + // Partial means, if you run out of file before you run + // out of pattern, then that's fine, as long as all + // the parts match. 
+ matchOne (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + /* istanbul ignore if */ + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + hit = f === p + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? 
pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') + } + + // should be unreachable. + /* istanbul ignore next */ + throw new Error('wtf?') + } + + braceExpand () { + return braceExpand(this.pattern, this.options) + } + + parse (pattern, isSub) { + assertValidPattern(pattern) + + const options = this.options + + // shortcuts + if (pattern === '**') { + if (!options.noglobstar) + return GLOBSTAR + else + pattern = '*' + } + if (pattern === '') return '' + + let re = '' + let hasMagic = false + let escaping = false + // ? => one single character + const patternListStack = [] + const negativeLists = [] + let stateChar + let inClass = false + let reClassStart = -1 + let classStart = -1 + let cs + let pl + let sp + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. However, if the pattern + // starts with ., then traversal patterns can match. + let dotTravAllowed = pattern.charAt(0) === '.' + let dotFileAllowed = options.dot || dotTravAllowed + const patternStart = () => + dotTravAllowed + ? '' + : dotFileAllowed + ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))' + : '(?!\\.)' + const subPatternStart = (p) => + p.charAt(0) === '.' + ? '' + : options.dot + ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))' + : '(?!\\.)' + + + const clearStateChar = () => { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + this.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (let i = 0, c; (i < pattern.length) && (c = pattern.charAt(i)); i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping) { + /* istanbul ignore next - completely not allowed, even escaped. */ + if (c === '/') { + return false + } + + if (reSpecials[c]) { + re += '\\' + } + re += c + escaping = false + continue + } + + switch (c) { + /* istanbul ignore next */ + case '/': { + // Should already be path-split by now. + return false + } + + case '\\': + if (inClass && pattern.charAt(i + 1) === '-') { + re += c + continue + } + + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. 
+ // Handle the stateChar, then proceed with this one. + this.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': { + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + const plEntry = { + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close, + } + this.debug(this.pattern, '\t', plEntry) + patternListStack.push(plEntry) + // negation is (?:(?!(?:js)(?:))[^/]*) + re += plEntry.open + // next entry starts with a dot maybe? + if (plEntry.start === 0 && plEntry.type !== '!') { + dotTravAllowed = true + re += subPatternStart(pattern.slice(i + 1)) + } + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + } + + case ')': { + const plEntry = patternListStack[patternListStack.length - 1] + if (inClass || !plEntry) { + re += '\\)' + continue + } + patternListStack.pop() + + // closing an extglob + clearStateChar() + hasMagic = true + pl = plEntry + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(Object.assign(pl, { reEnd: re.length })) + } + continue + } + + case '|': { + const plEntry = patternListStack[patternListStack.length - 1] + if (inClass || !plEntry) { + re += '\\|' + continue + } + + clearStateChar() + re += '|' + // next subpattern can start with a dot? + if (plEntry.start === 0 && plEntry.type !== '!') { + dotTravAllowed = true + re += subPatternStart(pattern.slice(i + 1)) + } + continue + } + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + continue + } + + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + braExpEscape(charUnescape(cs)) + ']') + // looks good, finish up the class. + re += c + } catch (er) { + // out of order ranges in JS are errors, but in glob syntax, + // they're just a range that matches nothing. + re = re.substring(0, reClassStart) + '(?:$.)' // match nothing ever + } + hasMagic = true + inClass = false + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (reSpecials[c] && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + break + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. 
We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.slice(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substring(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + let tail + tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, (_, $1, $2) => { + /* istanbul ignore else - should already be done */ + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + const t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + const addPatternStart = addPatternStartSet[re.charAt(0)] + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (let n = negativeLists.length - 1; n > -1; n--) { + const nl = negativeLists[n] + + const nlBefore = re.slice(0, nl.reStart) + const nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + let nlAfter = re.slice(nl.reEnd) + const nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + const closeParensBefore = nlBefore.split(')').length + const openParensBefore = nlBefore.split('(').length - closeParensBefore + let cleanAfter = nlAfter + for (let i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + const dollar = nlAfter === '' && isSub !== SUBPARSE ? '(?:$|\\/)' : '' + + re = nlBefore + nlFirst + nlAfter + dollar + nlLast + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart() + re + } + + // parsing just a piece of a larger pattern. 
+ if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // if it's nocase, and the lcase/uppercase don't match, it's magic + if (options.nocase && !hasMagic) { + hasMagic = pattern.toUpperCase() !== pattern.toLowerCase() + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + const flags = options.nocase ? 'i' : '' + try { + return Object.assign(new RegExp('^' + re + '$', flags), { + _glob: pattern, + _src: re, + }) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + } + + makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + const set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + const options = this.options + + const twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + const flags = options.nocase ? 'i' : '' + + // coalesce globstars and regexpify non-globstar patterns + // if it's the only item, then we just do one twoStar + // if it's the first, and there are more, prepend (\/|twoStar\/)? to next + // if it's the last, append (\/twoStar|) to previous + // if it's in the middle, append (\/|\/twoStar\/) to previous + // then filter out GLOBSTAR symbols + let re = set.map(pattern => { + pattern = pattern.map(p => + typeof p === 'string' ? regExpEscape(p) + : p === GLOBSTAR ? GLOBSTAR + : p._src + ).reduce((set, p) => { + if (!(set[set.length - 1] === GLOBSTAR && p === GLOBSTAR)) { + set.push(p) + } + return set + }, []) + pattern.forEach((p, i) => { + if (p !== GLOBSTAR || pattern[i-1] === GLOBSTAR) { + return + } + if (i === 0) { + if (pattern.length > 1) { + pattern[i+1] = '(?:\\\/|' + twoStar + '\\\/)?' + pattern[i+1] + } else { + pattern[i] = twoStar + } + } else if (i === pattern.length - 1) { + pattern[i-1] += '(?:\\\/|' + twoStar + ')?' + } else { + pattern[i-1] += '(?:\\\/|\\\/' + twoStar + '\\\/)' + pattern[i+1] + pattern[i+1] = GLOBSTAR + } + }) + return pattern.filter(p => p !== GLOBSTAR).join('/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false + } + return this.regexp + } + + match (f, partial = this.partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + const options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. 
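+    // e.g. 'a/b//c' -> ['a', 'b', 'c'] (slashSplit collapses repeated slashes)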
+ f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + const set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + let filename + for (let i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (let i = 0; i < set.length; i++) { + const pattern = set[i] + let file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + const hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate + } + + static defaults (def) { + return minimatch.defaults(def).Minimatch + } +} + +minimatch.Minimatch = Minimatch diff --git a/node_modules/cacache/node_modules/minimatch/package.json b/node_modules/cacache/node_modules/minimatch/package.json new file mode 100644 index 0000000..c8809db --- /dev/null +++ b/node_modules/cacache/node_modules/minimatch/package.json @@ -0,0 +1,35 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "publishConfig": { + "tag": "legacy-v5" + }, + "version": "5.1.6", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "engines": { + "node": ">=10" + }, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "devDependencies": { + "tap": "^16.3.2" + }, + "license": "ISC", + "files": [ + "minimatch.js", + "lib" + ] +} diff --git a/node_modules/cacache/node_modules/ssri/LICENSE.md b/node_modules/cacache/node_modules/ssri/LICENSE.md new file mode 100644 index 0000000..e335388 --- /dev/null +++ b/node_modules/cacache/node_modules/ssri/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright 2021 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/cacache/node_modules/ssri/README.md b/node_modules/cacache/node_modules/ssri/README.md new file mode 100644 index 0000000..6f46aa5 --- /dev/null +++ b/node_modules/cacache/node_modules/ssri/README.md @@ -0,0 +1,528 @@ +# ssri [![npm version](https://img.shields.io/npm/v/ssri.svg)](https://npm.im/ssri) [![license](https://img.shields.io/npm/l/ssri.svg)](https://npm.im/ssri) [![Travis](https://img.shields.io/travis/npm/ssri.svg)](https://travis-ci.org/npm/ssri) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/ssri?svg=true)](https://ci.appveyor.com/project/npm/ssri) [![Coverage Status](https://coveralls.io/repos/github/npm/ssri/badge.svg?branch=latest)](https://coveralls.io/github/npm/ssri?branch=latest) + +[`ssri`](https://github.com/npm/ssri), short for Standard Subresource +Integrity, is a Node.js utility for parsing, manipulating, serializing, +generating, and verifying [Subresource +Integrity](https://w3c.github.io/webappsec/specs/subresourceintegrity/) hashes. + +## Install + +`$ npm install --save ssri` + +## Table of Contents + +* [Example](#example) +* [Features](#features) +* [Contributing](#contributing) +* [API](#api) + * Parsing & Serializing + * [`parse`](#parse) + * [`stringify`](#stringify) + * [`Integrity#concat`](#integrity-concat) + * [`Integrity#merge`](#integrity-merge) + * [`Integrity#toString`](#integrity-to-string) + * [`Integrity#toJSON`](#integrity-to-json) + * [`Integrity#match`](#integrity-match) + * [`Integrity#pickAlgorithm`](#integrity-pick-algorithm) + * [`Integrity#hexDigest`](#integrity-hex-digest) + * Integrity Generation + * [`fromHex`](#from-hex) + * [`fromData`](#from-data) + * [`fromStream`](#from-stream) + * [`create`](#create) + * Integrity Verification + * [`checkData`](#check-data) + * [`checkStream`](#check-stream) + * [`integrityStream`](#integrity-stream) + +### Example + +```javascript +const ssri = require('ssri') + +const integrity = 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo' + +// Parsing and serializing +const parsed = ssri.parse(integrity) +ssri.stringify(parsed) // === integrity (works on non-Integrity objects) +parsed.toString() // === integrity + +// Async stream functions +ssri.checkStream(fs.createReadStream('./my-file'), integrity).then(...) +ssri.fromStream(fs.createReadStream('./my-file')).then(sri => { + sri.toString() === integrity +}) +fs.createReadStream('./my-file').pipe(ssri.createCheckerStream(sri)) + +// Sync data functions +ssri.fromData(fs.readFileSync('./my-file')) // === parsed +ssri.checkData(fs.readFileSync('./my-file'), integrity) // => 'sha512' +``` + +### Features + +* Parses and stringifies SRI strings. +* Generates SRI strings from raw data or Streams. +* Strict standard compliance. +* `?foo` metadata option support. +* Multiple entries for the same algorithm. +* Object-based integrity hash manipulation. +* Small footprint: no dependencies, concise implementation. +* Full test coverage. +* Customizable algorithm picker. + +### Contributing + +The ssri team enthusiastically welcomes contributions and project participation! +There's a bunch of things you can do if you want to contribute! The [Contributor +Guide](CONTRIBUTING.md) has all the information you need for everything from +reporting bugs to contributing entire new features. Please don't hesitate to +jump in if you'd like to, or even ask us questions if something isn't clear. 
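+
+As a bridge into the API reference below, here is a small sketch of
+generating and then verifying a multi-algorithm integrity string (the
+`algorithms` option of `fromData` and the `Hash`-or-`false` return value of
+`checkData` are covered in the generation and verification sections):
+
+```javascript
+const ssri = require('ssri')
+const fs = require('fs')
+
+const data = fs.readFileSync('./my-file')
+
+// one Integrity object carrying entries for two algorithms
+const sri = ssri.fromData(data, { algorithms: ['sha256', 'sha512'] })
+sri.toString() // => 'sha256-... sha512-...'
+
+// checkData returns the matched Hash (truthy) on success, false on mismatch
+ssri.checkData(data, sri) // => Hash
+ssri.checkData(Buffer.from('tampered'), sri) // => false
+```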
+
+### API
+
+#### `> ssri.parse(sri, [opts]) -> Integrity`
+
+Parses `sri` into an `Integrity` data structure. `sri` can be an integrity
+string, a `Hash`-like with `digest` and `algorithm` fields and an optional
+`options` field, or an `Integrity`-like object. The resulting object will be an
+`Integrity` instance that has this shape:
+
+```javascript
+{
+  'sha1': [{algorithm: 'sha1', digest: 'deadbeef', options: []}],
+  'sha512': [
+    {algorithm: 'sha512', digest: 'c0ffee', options: []},
+    {algorithm: 'sha512', digest: 'bad1dea', options: ['foo']}
+  ],
+}
+```
+
+If `opts.single` is truthy, a single `Hash` object will be returned. That is, a
+single object that looks like `{algorithm, digest, options}`, as opposed to a
+larger object with multiple of these.
+
+If `opts.strict` is truthy, the resulting object will be filtered such that
+it strictly follows the Subresource Integrity spec, throwing away any entries
+with any invalid components. This also means a restricted set of algorithms
+will be used -- the spec limits them to `sha256`, `sha384`, and `sha512`.
+
+Strict mode is recommended if the integrity strings are intended for use in
+browsers, or in other situations where strict adherence to the spec is needed.
+
+##### Example
+
+```javascript
+ssri.parse('sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo') // -> Integrity object
+```
+
+#### `> ssri.stringify(sri, [opts]) -> String`
+
+This function is identical to [`Integrity#toString()`](#integrity-to-string),
+except it can be used on _any_ object that [`parse`](#parse) can handle -- that
+is, a string, a `Hash`-like, or an `Integrity`-like.
+
+The `opts.sep` option defines the string to use when joining multiple entries
+together. To be spec-compliant, this _must_ be whitespace. The default is a
+single space (`' '`).
+
+If `opts.strict` is true, the integrity string will be created using strict
+parsing rules. See [`ssri.parse`](#parse).
+
+##### Example
+
+```javascript
+// Useful for cleaning up input SRI strings:
+ssri.stringify('\n\rsha512-foo\n\t\tsha384-bar')
+// -> 'sha512-foo sha384-bar'
+
+// Hash-like: only a single entry.
+ssri.stringify({
+  algorithm: 'sha512',
+  digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
+  options: ['foo']
+})
+// ->
+// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
+
+// Integrity-like: full multi-entry syntax. Similar to output of `ssri.parse`
+ssri.stringify({
+  'sha512': [
+    {
+      algorithm: 'sha512',
+      digest:'9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==',
+      options: ['foo']
+    }
+  ]
+})
+// ->
+// 'sha512-9KhgCRIx/AmzC8xqYJTZRrnO8OW2Pxyl2DIMZSBOr0oDvtEFyht3xpp71j/r/pAe1DM+JI/A+line3jUBgzQ7A==?foo'
+```
+
+#### `> Integrity#concat(otherIntegrity, [opts]) -> Integrity`
+
+Concatenates an `Integrity` object with another IntegrityLike, or an integrity
+string.
+
+This is functionally equivalent to concatenating the string format of both
+integrity arguments, and calling [`ssri.parse`](#ssri-parse) on the new string.
+
+If `opts.strict` is true, the new `Integrity` will be created using strict
+parsing rules. See [`ssri.parse`](#parse).
+
+##### Example
+
+```javascript
+// This will combine the integrity checks for two different versions of
+// your index.js file so you can use a single integrity string and serve
+// either of these to clients, from a single `<script>` tag.
+```
+
+## API
+
+See: https://cryptojs.gitbook.io/docs/
+
+### AES Encryption
+
+#### Plain text encryption
+
+```javascript
+var CryptoJS = require("crypto-js");
+
+// Encrypt
+var ciphertext = CryptoJS.AES.encrypt('my message', 'secret key 123').toString();
+
+// Decrypt
+var bytes = CryptoJS.AES.decrypt(ciphertext, 'secret key 123');
+var originalText = bytes.toString(CryptoJS.enc.Utf8);
+
+console.log(originalText); // 'my message'
+```
+
+#### Object encryption
+
+```javascript
+var CryptoJS = require("crypto-js");
+
+var data = [{id: 1}, {id: 2}]
+
+// Encrypt
+var ciphertext = CryptoJS.AES.encrypt(JSON.stringify(data), 'secret key 123').toString();
+
+// Decrypt
+var bytes = CryptoJS.AES.decrypt(ciphertext, 'secret key 123');
+var decryptedData = JSON.parse(bytes.toString(CryptoJS.enc.Utf8));
+
+console.log(decryptedData); // [{id: 1}, {id: 2}]
+```
+
+### List of modules
+
+
+- ```crypto-js/core```
+- ```crypto-js/x64-core```
+- ```crypto-js/lib-typedarrays```
+
+---
+
+- ```crypto-js/md5```
+- ```crypto-js/sha1```
+- ```crypto-js/sha256```
+- ```crypto-js/sha224```
+- ```crypto-js/sha512```
+- ```crypto-js/sha384```
+- ```crypto-js/sha3```
+- ```crypto-js/ripemd160```
+
+---
+
+- ```crypto-js/hmac-md5```
+- ```crypto-js/hmac-sha1```
+- ```crypto-js/hmac-sha256```
+- ```crypto-js/hmac-sha224```
+- ```crypto-js/hmac-sha512```
+- ```crypto-js/hmac-sha384```
+- ```crypto-js/hmac-sha3```
+- ```crypto-js/hmac-ripemd160```
+
+---
+
+- ```crypto-js/pbkdf2```
+
+---
+
+- ```crypto-js/aes```
+- ```crypto-js/tripledes```
+- ```crypto-js/rc4```
+- ```crypto-js/rabbit```
+- ```crypto-js/rabbit-legacy```
+- ```crypto-js/evpkdf```
+
+---
+
+- ```crypto-js/format-openssl```
+- ```crypto-js/format-hex```
+
+---
+
+- ```crypto-js/enc-latin1```
+- ```crypto-js/enc-utf8```
+- ```crypto-js/enc-hex```
+- ```crypto-js/enc-utf16```
+- ```crypto-js/enc-base64```
+
+---
+
+- ```crypto-js/mode-cfb```
+- ```crypto-js/mode-ctr```
+- ```crypto-js/mode-ctr-gladman```
+- ```crypto-js/mode-ofb```
+- ```crypto-js/mode-ecb```
+
+---
+
+- ```crypto-js/pad-pkcs7```
+- ```crypto-js/pad-ansix923```
+- ```crypto-js/pad-iso10126```
+- ```crypto-js/pad-iso97971```
+- ```crypto-js/pad-zeropadding```
+- ```crypto-js/pad-nopadding```
+
+
+## Release notes
+
+### 4.1.1
+
+Fix module order in bundled release.
+
+Include the browser field in the released package.json.
+
+### 4.1.0
+
+Added URL-safe variant of base64 encoding. [357](https://github.com/brix/crypto-js/pull/357)
+
+Avoid webpack adding the crypto-browser package. [364](https://github.com/brix/crypto-js/pull/364)
+
+### 4.0.0
+
+This is an update including breaking changes for some environments.
+
+In this version `Math.random()` has been replaced by the random methods of the native crypto module.
+
+For this reason, CryptoJS might not run in some JavaScript environments that lack a native crypto module, such as IE 10 or earlier, or React Native.
+
+### 3.3.0
+
+Rollback: `3.3.0` is the same as `3.1.9-1`.
+
+The move to the native secure crypto module has been shifted to a new `4.x.x` version; as a breaking change, its impact is too big for a minor release.
+
+### 3.2.1
+
+The usage of the native crypto module has been fixed. The import and access of the native crypto module has been improved.
+
+### 3.2.0
+
+In this version `Math.random()` has been replaced by the random methods of the native crypto module.
+
+For this reason, CryptoJS might not run in JavaScript environments that lack a native crypto module, such as IE 10 or earlier.
+
+If it's absolutely required to run CryptoJS in such an environment, stay with a `3.1.x` version. Encryption and decryption remain compatible. But keep in mind that `3.1.x` versions still use `Math.random()`, which is not cryptographically secure.
+
+This version shipped with a CRITICAL bug.
+
+DO NOT USE THIS VERSION! Please go for a newer version!
+
+### 3.1.x
+
+The `3.1.x` releases are based on the original CryptoJS, wrapped in CommonJS modules.
+
+
diff --git a/node_modules/crypto-js/aes.js b/node_modules/crypto-js/aes.js
new file mode 100644
index 0000000..166e3ea
--- /dev/null
+++ b/node_modules/crypto-js/aes.js
@@ -0,0 +1,234 @@
+;(function (root, factory, undef) {
+    if (typeof exports === "object") {
+        // CommonJS
+        module.exports = exports = factory(require("./core"), require("./enc-base64"), require("./md5"), require("./evpkdf"), require("./cipher-core"));
+    }
+    else if (typeof define === "function" && define.amd) {
+        // AMD
+        define(["./core", "./enc-base64", "./md5", "./evpkdf", "./cipher-core"], factory);
+    }
+    else {
+        // Global (browser)
+        factory(root.CryptoJS);
+    }
+}(this, function (CryptoJS) {
+
+    (function () {
+        // Shortcuts
+        var C = CryptoJS;
+        var C_lib = C.lib;
+        var BlockCipher = C_lib.BlockCipher;
+        var C_algo = C.algo;
+
+        // Lookup tables
+        var SBOX = [];
+        var INV_SBOX = [];
+        var SUB_MIX_0 = [];
+        var SUB_MIX_1 = [];
+        var SUB_MIX_2 = [];
+        var SUB_MIX_3 = [];
+        var INV_SUB_MIX_0 = [];
+        var INV_SUB_MIX_1 = [];
+        var INV_SUB_MIX_2 = [];
+        var INV_SUB_MIX_3 = [];
+
+        // Compute lookup tables
+        (function () {
+            // Compute double table
+            var d = [];
+            for (var i = 0; i < 256; i++) {
+                if (i < 128) {
+                    d[i] = i << 1;
+                } else {
+                    d[i] = (i << 1) ^ 0x11b;
+                }
+            }
+
+            // Walk GF(2^8)
+            var x = 0;
+            var xi = 0;
+            for (var i = 0; i < 256; i++) {
+                // Compute sbox
+                var sx = xi ^ (xi << 1) ^ (xi << 2) ^ (xi << 3) ^ (xi << 4);
+                sx = (sx >>> 8) ^ (sx & 0xff) ^ 0x63;
+                SBOX[x] = sx;
+                INV_SBOX[sx] = x;
+
+                // Compute multiplication
+                var x2 = d[x];
+                var x4 = d[x2];
+                var x8 = d[x4];
+
+                // Compute sub bytes, mix columns tables
+                var t = (d[sx] * 0x101) ^ (sx * 0x1010100);
+                SUB_MIX_0[x] = (t << 24) | (t >>> 8);
+                SUB_MIX_1[x] = (t << 16) | (t >>> 16);
+                SUB_MIX_2[x] = (t << 8) | (t >>> 24);
+                SUB_MIX_3[x] = t;
+
+                // Compute inv sub bytes, inv mix columns tables
+                var t = (x8 * 0x1010101) ^ (x4 * 0x10001) ^ (x2 * 0x101) ^ (x * 0x1010100);
+                INV_SUB_MIX_0[sx] = (t << 24) | (t >>> 8);
+                INV_SUB_MIX_1[sx] = (t << 16) | (t >>> 16);
+                INV_SUB_MIX_2[sx] = (t << 8) | (t >>> 24);
+                INV_SUB_MIX_3[sx] = t;
+
+                // Compute next counter
+                if (!x) {
+                    x = xi = 1;
+                } else {
+                    x = x2 ^ d[d[d[x8 ^ x2]]];
+                    xi ^= d[d[xi]];
+                }
+            }
+        }());
+
+        // Precomputed Rcon lookup
+        var RCON = [0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36];
+
+        /**
+         * AES block cipher algorithm.
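+         *
+         * Built on the S-box and combined SubBytes/MixColumns ("T-table")
+         * lookups precomputed above, so each round reduces to table lookups
+         * and XORs rather than per-byte GF(2^8) arithmetic.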
+ */ + var AES = C_algo.AES = BlockCipher.extend({ + _doReset: function () { + var t; + + // Skip reset of nRounds has been set before and key did not change + if (this._nRounds && this._keyPriorReset === this._key) { + return; + } + + // Shortcuts + var key = this._keyPriorReset = this._key; + var keyWords = key.words; + var keySize = key.sigBytes / 4; + + // Compute number of rounds + var nRounds = this._nRounds = keySize + 6; + + // Compute number of key schedule rows + var ksRows = (nRounds + 1) * 4; + + // Compute key schedule + var keySchedule = this._keySchedule = []; + for (var ksRow = 0; ksRow < ksRows; ksRow++) { + if (ksRow < keySize) { + keySchedule[ksRow] = keyWords[ksRow]; + } else { + t = keySchedule[ksRow - 1]; + + if (!(ksRow % keySize)) { + // Rot word + t = (t << 8) | (t >>> 24); + + // Sub word + t = (SBOX[t >>> 24] << 24) | (SBOX[(t >>> 16) & 0xff] << 16) | (SBOX[(t >>> 8) & 0xff] << 8) | SBOX[t & 0xff]; + + // Mix Rcon + t ^= RCON[(ksRow / keySize) | 0] << 24; + } else if (keySize > 6 && ksRow % keySize == 4) { + // Sub word + t = (SBOX[t >>> 24] << 24) | (SBOX[(t >>> 16) & 0xff] << 16) | (SBOX[(t >>> 8) & 0xff] << 8) | SBOX[t & 0xff]; + } + + keySchedule[ksRow] = keySchedule[ksRow - keySize] ^ t; + } + } + + // Compute inv key schedule + var invKeySchedule = this._invKeySchedule = []; + for (var invKsRow = 0; invKsRow < ksRows; invKsRow++) { + var ksRow = ksRows - invKsRow; + + if (invKsRow % 4) { + var t = keySchedule[ksRow]; + } else { + var t = keySchedule[ksRow - 4]; + } + + if (invKsRow < 4 || ksRow <= 4) { + invKeySchedule[invKsRow] = t; + } else { + invKeySchedule[invKsRow] = INV_SUB_MIX_0[SBOX[t >>> 24]] ^ INV_SUB_MIX_1[SBOX[(t >>> 16) & 0xff]] ^ + INV_SUB_MIX_2[SBOX[(t >>> 8) & 0xff]] ^ INV_SUB_MIX_3[SBOX[t & 0xff]]; + } + } + }, + + encryptBlock: function (M, offset) { + this._doCryptBlock(M, offset, this._keySchedule, SUB_MIX_0, SUB_MIX_1, SUB_MIX_2, SUB_MIX_3, SBOX); + }, + + decryptBlock: function (M, offset) { + // Swap 2nd and 4th rows + var t = M[offset + 1]; + M[offset + 1] = M[offset + 3]; + M[offset + 3] = t; + + this._doCryptBlock(M, offset, this._invKeySchedule, INV_SUB_MIX_0, INV_SUB_MIX_1, INV_SUB_MIX_2, INV_SUB_MIX_3, INV_SBOX); + + // Inv swap 2nd and 4th rows + var t = M[offset + 1]; + M[offset + 1] = M[offset + 3]; + M[offset + 3] = t; + }, + + _doCryptBlock: function (M, offset, keySchedule, SUB_MIX_0, SUB_MIX_1, SUB_MIX_2, SUB_MIX_3, SBOX) { + // Shortcut + var nRounds = this._nRounds; + + // Get input, add round key + var s0 = M[offset] ^ keySchedule[0]; + var s1 = M[offset + 1] ^ keySchedule[1]; + var s2 = M[offset + 2] ^ keySchedule[2]; + var s3 = M[offset + 3] ^ keySchedule[3]; + + // Key schedule row counter + var ksRow = 4; + + // Rounds + for (var round = 1; round < nRounds; round++) { + // Shift rows, sub bytes, mix columns, add round key + var t0 = SUB_MIX_0[s0 >>> 24] ^ SUB_MIX_1[(s1 >>> 16) & 0xff] ^ SUB_MIX_2[(s2 >>> 8) & 0xff] ^ SUB_MIX_3[s3 & 0xff] ^ keySchedule[ksRow++]; + var t1 = SUB_MIX_0[s1 >>> 24] ^ SUB_MIX_1[(s2 >>> 16) & 0xff] ^ SUB_MIX_2[(s3 >>> 8) & 0xff] ^ SUB_MIX_3[s0 & 0xff] ^ keySchedule[ksRow++]; + var t2 = SUB_MIX_0[s2 >>> 24] ^ SUB_MIX_1[(s3 >>> 16) & 0xff] ^ SUB_MIX_2[(s0 >>> 8) & 0xff] ^ SUB_MIX_3[s1 & 0xff] ^ keySchedule[ksRow++]; + var t3 = SUB_MIX_0[s3 >>> 24] ^ SUB_MIX_1[(s0 >>> 16) & 0xff] ^ SUB_MIX_2[(s1 >>> 8) & 0xff] ^ SUB_MIX_3[s2 & 0xff] ^ keySchedule[ksRow++]; + + // Update state + s0 = t0; + s1 = t1; + s2 = t2; + s3 = t3; + } + + // Shift rows, sub bytes, add round key + var t0 = ((SBOX[s0 >>> 
24] << 24) | (SBOX[(s1 >>> 16) & 0xff] << 16) | (SBOX[(s2 >>> 8) & 0xff] << 8) | SBOX[s3 & 0xff]) ^ keySchedule[ksRow++]; + var t1 = ((SBOX[s1 >>> 24] << 24) | (SBOX[(s2 >>> 16) & 0xff] << 16) | (SBOX[(s3 >>> 8) & 0xff] << 8) | SBOX[s0 & 0xff]) ^ keySchedule[ksRow++]; + var t2 = ((SBOX[s2 >>> 24] << 24) | (SBOX[(s3 >>> 16) & 0xff] << 16) | (SBOX[(s0 >>> 8) & 0xff] << 8) | SBOX[s1 & 0xff]) ^ keySchedule[ksRow++]; + var t3 = ((SBOX[s3 >>> 24] << 24) | (SBOX[(s0 >>> 16) & 0xff] << 16) | (SBOX[(s1 >>> 8) & 0xff] << 8) | SBOX[s2 & 0xff]) ^ keySchedule[ksRow++]; + + // Set output + M[offset] = t0; + M[offset + 1] = t1; + M[offset + 2] = t2; + M[offset + 3] = t3; + }, + + keySize: 256/32 + }); + + /** + * Shortcut functions to the cipher's object interface. + * + * @example + * + * var ciphertext = CryptoJS.AES.encrypt(message, key, cfg); + * var plaintext = CryptoJS.AES.decrypt(ciphertext, key, cfg); + */ + C.AES = BlockCipher._createHelper(AES); + }()); + + + return CryptoJS.AES; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/bower.json b/node_modules/crypto-js/bower.json new file mode 100644 index 0000000..1e12fdc --- /dev/null +++ b/node_modules/crypto-js/bower.json @@ -0,0 +1,39 @@ +{ + "name": "crypto-js", + "version": "4.1.1", + "description": "JavaScript library of crypto standards.", + "license": "MIT", + "homepage": "http://github.com/brix/crypto-js", + "repository": { + "type": "git", + "url": "http://github.com/brix/crypto-js.git" + }, + "keywords": [ + "security", + "crypto", + "Hash", + "MD5", + "SHA1", + "SHA-1", + "SHA256", + "SHA-256", + "RC4", + "Rabbit", + "AES", + "DES", + "PBKDF2", + "HMAC", + "OFB", + "CFB", + "CTR", + "CBC", + "Base64", + "Base64url" + ], + "main": "index.js", + "dependencies": {}, + "browser": { + "crypto": false + }, + "ignore": [] +} diff --git a/node_modules/crypto-js/cipher-core.js b/node_modules/crypto-js/cipher-core.js new file mode 100644 index 0000000..c560c9e --- /dev/null +++ b/node_modules/crypto-js/cipher-core.js @@ -0,0 +1,890 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./evpkdf")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./evpkdf"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + /** + * Cipher core components. + */ + CryptoJS.lib.Cipher || (function (undefined) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var Base = C_lib.Base; + var WordArray = C_lib.WordArray; + var BufferedBlockAlgorithm = C_lib.BufferedBlockAlgorithm; + var C_enc = C.enc; + var Utf8 = C_enc.Utf8; + var Base64 = C_enc.Base64; + var C_algo = C.algo; + var EvpKDF = C_algo.EvpKDF; + + /** + * Abstract base cipher template. + * + * @property {number} keySize This cipher's key size. Default: 4 (128 bits) + * @property {number} ivSize This cipher's IV size. Default: 4 (128 bits) + * @property {number} _ENC_XFORM_MODE A constant representing encryption mode. + * @property {number} _DEC_XFORM_MODE A constant representing decryption mode. + */ + var Cipher = C_lib.Cipher = BufferedBlockAlgorithm.extend({ + /** + * Configuration options. + * + * @property {WordArray} iv The IV to use for this operation. + */ + cfg: Base.extend(), + + /** + * Creates this cipher in encryption mode. + * + * @param {WordArray} key The key. + * @param {Object} cfg (Optional) The configuration options to use for this operation. 
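+         *     For block ciphers this typically carries an `iv`, and may override `mode` and `padding`.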
+ * + * @return {Cipher} A cipher instance. + * + * @static + * + * @example + * + * var cipher = CryptoJS.algo.AES.createEncryptor(keyWordArray, { iv: ivWordArray }); + */ + createEncryptor: function (key, cfg) { + return this.create(this._ENC_XFORM_MODE, key, cfg); + }, + + /** + * Creates this cipher in decryption mode. + * + * @param {WordArray} key The key. + * @param {Object} cfg (Optional) The configuration options to use for this operation. + * + * @return {Cipher} A cipher instance. + * + * @static + * + * @example + * + * var cipher = CryptoJS.algo.AES.createDecryptor(keyWordArray, { iv: ivWordArray }); + */ + createDecryptor: function (key, cfg) { + return this.create(this._DEC_XFORM_MODE, key, cfg); + }, + + /** + * Initializes a newly created cipher. + * + * @param {number} xformMode Either the encryption or decryption transormation mode constant. + * @param {WordArray} key The key. + * @param {Object} cfg (Optional) The configuration options to use for this operation. + * + * @example + * + * var cipher = CryptoJS.algo.AES.create(CryptoJS.algo.AES._ENC_XFORM_MODE, keyWordArray, { iv: ivWordArray }); + */ + init: function (xformMode, key, cfg) { + // Apply config defaults + this.cfg = this.cfg.extend(cfg); + + // Store transform mode and key + this._xformMode = xformMode; + this._key = key; + + // Set initial values + this.reset(); + }, + + /** + * Resets this cipher to its initial state. + * + * @example + * + * cipher.reset(); + */ + reset: function () { + // Reset data buffer + BufferedBlockAlgorithm.reset.call(this); + + // Perform concrete-cipher logic + this._doReset(); + }, + + /** + * Adds data to be encrypted or decrypted. + * + * @param {WordArray|string} dataUpdate The data to encrypt or decrypt. + * + * @return {WordArray} The data after processing. + * + * @example + * + * var encrypted = cipher.process('data'); + * var encrypted = cipher.process(wordArray); + */ + process: function (dataUpdate) { + // Append + this._append(dataUpdate); + + // Process available blocks + return this._process(); + }, + + /** + * Finalizes the encryption or decryption process. + * Note that the finalize operation is effectively a destructive, read-once operation. + * + * @param {WordArray|string} dataUpdate The final data to encrypt or decrypt. + * + * @return {WordArray} The data after final processing. + * + * @example + * + * var encrypted = cipher.finalize(); + * var encrypted = cipher.finalize('data'); + * var encrypted = cipher.finalize(wordArray); + */ + finalize: function (dataUpdate) { + // Final data update + if (dataUpdate) { + this._append(dataUpdate); + } + + // Perform concrete-cipher logic + var finalProcessedData = this._doFinalize(); + + return finalProcessedData; + }, + + keySize: 128/32, + + ivSize: 128/32, + + _ENC_XFORM_MODE: 1, + + _DEC_XFORM_MODE: 2, + + /** + * Creates shortcut functions to a cipher's object interface. + * + * @param {Cipher} cipher The cipher to create a helper for. + * + * @return {Object} An object with encrypt and decrypt shortcut functions. 
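+         *     A string key selects the password-based strategy (PasswordBasedCipher); a WordArray key selects SerializableCipher.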
+ * + * @static + * + * @example + * + * var AES = CryptoJS.lib.Cipher._createHelper(CryptoJS.algo.AES); + */ + _createHelper: (function () { + function selectCipherStrategy(key) { + if (typeof key == 'string') { + return PasswordBasedCipher; + } else { + return SerializableCipher; + } + } + + return function (cipher) { + return { + encrypt: function (message, key, cfg) { + return selectCipherStrategy(key).encrypt(cipher, message, key, cfg); + }, + + decrypt: function (ciphertext, key, cfg) { + return selectCipherStrategy(key).decrypt(cipher, ciphertext, key, cfg); + } + }; + }; + }()) + }); + + /** + * Abstract base stream cipher template. + * + * @property {number} blockSize The number of 32-bit words this cipher operates on. Default: 1 (32 bits) + */ + var StreamCipher = C_lib.StreamCipher = Cipher.extend({ + _doFinalize: function () { + // Process partial blocks + var finalProcessedBlocks = this._process(!!'flush'); + + return finalProcessedBlocks; + }, + + blockSize: 1 + }); + + /** + * Mode namespace. + */ + var C_mode = C.mode = {}; + + /** + * Abstract base block cipher mode template. + */ + var BlockCipherMode = C_lib.BlockCipherMode = Base.extend({ + /** + * Creates this mode for encryption. + * + * @param {Cipher} cipher A block cipher instance. + * @param {Array} iv The IV words. + * + * @static + * + * @example + * + * var mode = CryptoJS.mode.CBC.createEncryptor(cipher, iv.words); + */ + createEncryptor: function (cipher, iv) { + return this.Encryptor.create(cipher, iv); + }, + + /** + * Creates this mode for decryption. + * + * @param {Cipher} cipher A block cipher instance. + * @param {Array} iv The IV words. + * + * @static + * + * @example + * + * var mode = CryptoJS.mode.CBC.createDecryptor(cipher, iv.words); + */ + createDecryptor: function (cipher, iv) { + return this.Decryptor.create(cipher, iv); + }, + + /** + * Initializes a newly created mode. + * + * @param {Cipher} cipher A block cipher instance. + * @param {Array} iv The IV words. + * + * @example + * + * var mode = CryptoJS.mode.CBC.Encryptor.create(cipher, iv.words); + */ + init: function (cipher, iv) { + this._cipher = cipher; + this._iv = iv; + } + }); + + /** + * Cipher Block Chaining mode. + */ + var CBC = C_mode.CBC = (function () { + /** + * Abstract base CBC mode. + */ + var CBC = BlockCipherMode.extend(); + + /** + * CBC encryptor. + */ + CBC.Encryptor = CBC.extend({ + /** + * Processes the data block at offset. + * + * @param {Array} words The data words to operate on. + * @param {number} offset The offset where the block starts. + * + * @example + * + * mode.processBlock(data.words, offset); + */ + processBlock: function (words, offset) { + // Shortcuts + var cipher = this._cipher; + var blockSize = cipher.blockSize; + + // XOR and encrypt + xorBlock.call(this, words, offset, blockSize); + cipher.encryptBlock(words, offset); + + // Remember this block to use with next block + this._prevBlock = words.slice(offset, offset + blockSize); + } + }); + + /** + * CBC decryptor. + */ + CBC.Decryptor = CBC.extend({ + /** + * Processes the data block at offset. + * + * @param {Array} words The data words to operate on. + * @param {number} offset The offset where the block starts. 
+ * + * @example + * + * mode.processBlock(data.words, offset); + */ + processBlock: function (words, offset) { + // Shortcuts + var cipher = this._cipher; + var blockSize = cipher.blockSize; + + // Remember this block to use with next block + var thisBlock = words.slice(offset, offset + blockSize); + + // Decrypt and XOR + cipher.decryptBlock(words, offset); + xorBlock.call(this, words, offset, blockSize); + + // This block becomes the previous block + this._prevBlock = thisBlock; + } + }); + + function xorBlock(words, offset, blockSize) { + var block; + + // Shortcut + var iv = this._iv; + + // Choose mixing block + if (iv) { + block = iv; + + // Remove IV for subsequent blocks + this._iv = undefined; + } else { + block = this._prevBlock; + } + + // XOR blocks + for (var i = 0; i < blockSize; i++) { + words[offset + i] ^= block[i]; + } + } + + return CBC; + }()); + + /** + * Padding namespace. + */ + var C_pad = C.pad = {}; + + /** + * PKCS #5/7 padding strategy. + */ + var Pkcs7 = C_pad.Pkcs7 = { + /** + * Pads data using the algorithm defined in PKCS #5/7. + * + * @param {WordArray} data The data to pad. + * @param {number} blockSize The multiple that the data should be padded to. + * + * @static + * + * @example + * + * CryptoJS.pad.Pkcs7.pad(wordArray, 4); + */ + pad: function (data, blockSize) { + // Shortcut + var blockSizeBytes = blockSize * 4; + + // Count padding bytes + var nPaddingBytes = blockSizeBytes - data.sigBytes % blockSizeBytes; + + // Create padding word + var paddingWord = (nPaddingBytes << 24) | (nPaddingBytes << 16) | (nPaddingBytes << 8) | nPaddingBytes; + + // Create padding + var paddingWords = []; + for (var i = 0; i < nPaddingBytes; i += 4) { + paddingWords.push(paddingWord); + } + var padding = WordArray.create(paddingWords, nPaddingBytes); + + // Add padding + data.concat(padding); + }, + + /** + * Unpads data that had been padded using the algorithm defined in PKCS #5/7. + * + * @param {WordArray} data The data to unpad. + * + * @static + * + * @example + * + * CryptoJS.pad.Pkcs7.unpad(wordArray); + */ + unpad: function (data) { + // Get number of padding bytes from last byte + var nPaddingBytes = data.words[(data.sigBytes - 1) >>> 2] & 0xff; + + // Remove padding + data.sigBytes -= nPaddingBytes; + } + }; + + /** + * Abstract base block cipher template. + * + * @property {number} blockSize The number of 32-bit words this cipher operates on. Default: 4 (128 bits) + */ + var BlockCipher = C_lib.BlockCipher = Cipher.extend({ + /** + * Configuration options. + * + * @property {Mode} mode The block mode to use. Default: CBC + * @property {Padding} padding The padding strategy to use. 
Default: Pkcs7 + */ + cfg: Cipher.cfg.extend({ + mode: CBC, + padding: Pkcs7 + }), + + reset: function () { + var modeCreator; + + // Reset cipher + Cipher.reset.call(this); + + // Shortcuts + var cfg = this.cfg; + var iv = cfg.iv; + var mode = cfg.mode; + + // Reset block mode + if (this._xformMode == this._ENC_XFORM_MODE) { + modeCreator = mode.createEncryptor; + } else /* if (this._xformMode == this._DEC_XFORM_MODE) */ { + modeCreator = mode.createDecryptor; + // Keep at least one block in the buffer for unpadding + this._minBufferSize = 1; + } + + if (this._mode && this._mode.__creator == modeCreator) { + this._mode.init(this, iv && iv.words); + } else { + this._mode = modeCreator.call(mode, this, iv && iv.words); + this._mode.__creator = modeCreator; + } + }, + + _doProcessBlock: function (words, offset) { + this._mode.processBlock(words, offset); + }, + + _doFinalize: function () { + var finalProcessedBlocks; + + // Shortcut + var padding = this.cfg.padding; + + // Finalize + if (this._xformMode == this._ENC_XFORM_MODE) { + // Pad data + padding.pad(this._data, this.blockSize); + + // Process final blocks + finalProcessedBlocks = this._process(!!'flush'); + } else /* if (this._xformMode == this._DEC_XFORM_MODE) */ { + // Process final blocks + finalProcessedBlocks = this._process(!!'flush'); + + // Unpad data + padding.unpad(finalProcessedBlocks); + } + + return finalProcessedBlocks; + }, + + blockSize: 128/32 + }); + + /** + * A collection of cipher parameters. + * + * @property {WordArray} ciphertext The raw ciphertext. + * @property {WordArray} key The key to this ciphertext. + * @property {WordArray} iv The IV used in the ciphering operation. + * @property {WordArray} salt The salt used with a key derivation function. + * @property {Cipher} algorithm The cipher algorithm. + * @property {Mode} mode The block mode used in the ciphering operation. + * @property {Padding} padding The padding scheme used in the ciphering operation. + * @property {number} blockSize The block size of the cipher. + * @property {Format} formatter The default formatting strategy to convert this cipher params object to a string. + */ + var CipherParams = C_lib.CipherParams = Base.extend({ + /** + * Initializes a newly created cipher params object. + * + * @param {Object} cipherParams An object with any of the possible cipher parameters. + * + * @example + * + * var cipherParams = CryptoJS.lib.CipherParams.create({ + * ciphertext: ciphertextWordArray, + * key: keyWordArray, + * iv: ivWordArray, + * salt: saltWordArray, + * algorithm: CryptoJS.algo.AES, + * mode: CryptoJS.mode.CBC, + * padding: CryptoJS.pad.PKCS7, + * blockSize: 4, + * formatter: CryptoJS.format.OpenSSL + * }); + */ + init: function (cipherParams) { + this.mixIn(cipherParams); + }, + + /** + * Converts this cipher params object to a string. + * + * @param {Format} formatter (Optional) The formatting strategy to use. + * + * @return {string} The stringified cipher params. + * + * @throws Error If neither the formatter nor the default formatter is set. + * + * @example + * + * var string = cipherParams + ''; + * var string = cipherParams.toString(); + * var string = cipherParams.toString(CryptoJS.format.OpenSSL); + */ + toString: function (formatter) { + return (formatter || this.formatter).stringify(this); + } + }); + + /** + * Format namespace. + */ + var C_format = C.format = {}; + + /** + * OpenSSL formatting strategy. 
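+         *
+         * Serializes to base64 of the ASCII bytes "Salted__", an 8-byte salt,
+         * and the raw ciphertext (the `openssl enc` layout); when no salt is
+         * present, the output is simply the base64 ciphertext.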
+ */ + var OpenSSLFormatter = C_format.OpenSSL = { + /** + * Converts a cipher params object to an OpenSSL-compatible string. + * + * @param {CipherParams} cipherParams The cipher params object. + * + * @return {string} The OpenSSL-compatible string. + * + * @static + * + * @example + * + * var openSSLString = CryptoJS.format.OpenSSL.stringify(cipherParams); + */ + stringify: function (cipherParams) { + var wordArray; + + // Shortcuts + var ciphertext = cipherParams.ciphertext; + var salt = cipherParams.salt; + + // Format + if (salt) { + wordArray = WordArray.create([0x53616c74, 0x65645f5f]).concat(salt).concat(ciphertext); + } else { + wordArray = ciphertext; + } + + return wordArray.toString(Base64); + }, + + /** + * Converts an OpenSSL-compatible string to a cipher params object. + * + * @param {string} openSSLStr The OpenSSL-compatible string. + * + * @return {CipherParams} The cipher params object. + * + * @static + * + * @example + * + * var cipherParams = CryptoJS.format.OpenSSL.parse(openSSLString); + */ + parse: function (openSSLStr) { + var salt; + + // Parse base64 + var ciphertext = Base64.parse(openSSLStr); + + // Shortcut + var ciphertextWords = ciphertext.words; + + // Test for salt + if (ciphertextWords[0] == 0x53616c74 && ciphertextWords[1] == 0x65645f5f) { + // Extract salt + salt = WordArray.create(ciphertextWords.slice(2, 4)); + + // Remove salt from ciphertext + ciphertextWords.splice(0, 4); + ciphertext.sigBytes -= 16; + } + + return CipherParams.create({ ciphertext: ciphertext, salt: salt }); + } + }; + + /** + * A cipher wrapper that returns ciphertext as a serializable cipher params object. + */ + var SerializableCipher = C_lib.SerializableCipher = Base.extend({ + /** + * Configuration options. + * + * @property {Formatter} format The formatting strategy to convert cipher param objects to and from a string. Default: OpenSSL + */ + cfg: Base.extend({ + format: OpenSSLFormatter + }), + + /** + * Encrypts a message. + * + * @param {Cipher} cipher The cipher algorithm to use. + * @param {WordArray|string} message The message to encrypt. + * @param {WordArray} key The key. + * @param {Object} cfg (Optional) The configuration options to use for this operation. + * + * @return {CipherParams} A cipher params object. + * + * @static + * + * @example + * + * var ciphertextParams = CryptoJS.lib.SerializableCipher.encrypt(CryptoJS.algo.AES, message, key); + * var ciphertextParams = CryptoJS.lib.SerializableCipher.encrypt(CryptoJS.algo.AES, message, key, { iv: iv }); + * var ciphertextParams = CryptoJS.lib.SerializableCipher.encrypt(CryptoJS.algo.AES, message, key, { iv: iv, format: CryptoJS.format.OpenSSL }); + */ + encrypt: function (cipher, message, key, cfg) { + // Apply config defaults + cfg = this.cfg.extend(cfg); + + // Encrypt + var encryptor = cipher.createEncryptor(key, cfg); + var ciphertext = encryptor.finalize(message); + + // Shortcut + var cipherCfg = encryptor.cfg; + + // Create and return serializable cipher params + return CipherParams.create({ + ciphertext: ciphertext, + key: key, + iv: cipherCfg.iv, + algorithm: cipher, + mode: cipherCfg.mode, + padding: cipherCfg.padding, + blockSize: cipher.blockSize, + formatter: cfg.format + }); + }, + + /** + * Decrypts serialized ciphertext. + * + * @param {Cipher} cipher The cipher algorithm to use. + * @param {CipherParams|string} ciphertext The ciphertext to decrypt. + * @param {WordArray} key The key. + * @param {Object} cfg (Optional) The configuration options to use for this operation. 
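+         *     Typically carries the same `iv` and `format` that were used during encryption.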
+ * + * @return {WordArray} The plaintext. + * + * @static + * + * @example + * + * var plaintext = CryptoJS.lib.SerializableCipher.decrypt(CryptoJS.algo.AES, formattedCiphertext, key, { iv: iv, format: CryptoJS.format.OpenSSL }); + * var plaintext = CryptoJS.lib.SerializableCipher.decrypt(CryptoJS.algo.AES, ciphertextParams, key, { iv: iv, format: CryptoJS.format.OpenSSL }); + */ + decrypt: function (cipher, ciphertext, key, cfg) { + // Apply config defaults + cfg = this.cfg.extend(cfg); + + // Convert string to CipherParams + ciphertext = this._parse(ciphertext, cfg.format); + + // Decrypt + var plaintext = cipher.createDecryptor(key, cfg).finalize(ciphertext.ciphertext); + + return plaintext; + }, + + /** + * Converts serialized ciphertext to CipherParams, + * else assumed CipherParams already and returns ciphertext unchanged. + * + * @param {CipherParams|string} ciphertext The ciphertext. + * @param {Formatter} format The formatting strategy to use to parse serialized ciphertext. + * + * @return {CipherParams} The unserialized ciphertext. + * + * @static + * + * @example + * + * var ciphertextParams = CryptoJS.lib.SerializableCipher._parse(ciphertextStringOrParams, format); + */ + _parse: function (ciphertext, format) { + if (typeof ciphertext == 'string') { + return format.parse(ciphertext, this); + } else { + return ciphertext; + } + } + }); + + /** + * Key derivation function namespace. + */ + var C_kdf = C.kdf = {}; + + /** + * OpenSSL key derivation function. + */ + var OpenSSLKdf = C_kdf.OpenSSL = { + /** + * Derives a key and IV from a password. + * + * @param {string} password The password to derive from. + * @param {number} keySize The size in words of the key to generate. + * @param {number} ivSize The size in words of the IV to generate. + * @param {WordArray|string} salt (Optional) A 64-bit salt to use. If omitted, a salt will be generated randomly. + * + * @return {CipherParams} A cipher params object with the key, IV, and salt. + * + * @static + * + * @example + * + * var derivedParams = CryptoJS.kdf.OpenSSL.execute('Password', 256/32, 128/32); + * var derivedParams = CryptoJS.kdf.OpenSSL.execute('Password', 256/32, 128/32, 'saltsalt'); + */ + execute: function (password, keySize, ivSize, salt) { + // Generate random salt + if (!salt) { + salt = WordArray.random(64/8); + } + + // Derive key and IV + var key = EvpKDF.create({ keySize: keySize + ivSize }).compute(password, salt); + + // Separate key and IV + var iv = WordArray.create(key.words.slice(keySize), ivSize * 4); + key.sigBytes = keySize * 4; + + // Return params + return CipherParams.create({ key: key, iv: iv, salt: salt }); + } + }; + + /** + * A serializable cipher wrapper that derives the key from a password, + * and returns ciphertext as a serializable cipher params object. + */ + var PasswordBasedCipher = C_lib.PasswordBasedCipher = SerializableCipher.extend({ + /** + * Configuration options. + * + * @property {KDF} kdf The key derivation function to use to generate a key and IV from a password. Default: OpenSSL + */ + cfg: SerializableCipher.cfg.extend({ + kdf: OpenSSLKdf + }), + + /** + * Encrypts a message using a password. + * + * @param {Cipher} cipher The cipher algorithm to use. + * @param {WordArray|string} message The message to encrypt. + * @param {string} password The password. + * @param {Object} cfg (Optional) The configuration options to use for this operation. + * + * @return {CipherParams} A cipher params object. 
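+         *     The derived key, IV, and salt are mixed into the returned params so they can be serialized alongside the ciphertext.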
+ * + * @static + * + * @example + * + * var ciphertextParams = CryptoJS.lib.PasswordBasedCipher.encrypt(CryptoJS.algo.AES, message, 'password'); + * var ciphertextParams = CryptoJS.lib.PasswordBasedCipher.encrypt(CryptoJS.algo.AES, message, 'password', { format: CryptoJS.format.OpenSSL }); + */ + encrypt: function (cipher, message, password, cfg) { + // Apply config defaults + cfg = this.cfg.extend(cfg); + + // Derive key and other params + var derivedParams = cfg.kdf.execute(password, cipher.keySize, cipher.ivSize); + + // Add IV to config + cfg.iv = derivedParams.iv; + + // Encrypt + var ciphertext = SerializableCipher.encrypt.call(this, cipher, message, derivedParams.key, cfg); + + // Mix in derived params + ciphertext.mixIn(derivedParams); + + return ciphertext; + }, + + /** + * Decrypts serialized ciphertext using a password. + * + * @param {Cipher} cipher The cipher algorithm to use. + * @param {CipherParams|string} ciphertext The ciphertext to decrypt. + * @param {string} password The password. + * @param {Object} cfg (Optional) The configuration options to use for this operation. + * + * @return {WordArray} The plaintext. + * + * @static + * + * @example + * + * var plaintext = CryptoJS.lib.PasswordBasedCipher.decrypt(CryptoJS.algo.AES, formattedCiphertext, 'password', { format: CryptoJS.format.OpenSSL }); + * var plaintext = CryptoJS.lib.PasswordBasedCipher.decrypt(CryptoJS.algo.AES, ciphertextParams, 'password', { format: CryptoJS.format.OpenSSL }); + */ + decrypt: function (cipher, ciphertext, password, cfg) { + // Apply config defaults + cfg = this.cfg.extend(cfg); + + // Convert string to CipherParams + ciphertext = this._parse(ciphertext, cfg.format); + + // Derive key and other params + var derivedParams = cfg.kdf.execute(password, cipher.keySize, cipher.ivSize, ciphertext.salt); + + // Add IV to config + cfg.iv = derivedParams.iv; + + // Decrypt + var plaintext = SerializableCipher.decrypt.call(this, cipher, ciphertext, derivedParams.key, cfg); + + return plaintext; + } + }); + }()); + + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/core.js b/node_modules/crypto-js/core.js new file mode 100644 index 0000000..e3a498b --- /dev/null +++ b/node_modules/crypto-js/core.js @@ -0,0 +1,807 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(); + } + else if (typeof define === "function" && define.amd) { + // AMD + define([], factory); + } + else { + // Global (browser) + root.CryptoJS = factory(); + } +}(this, function () { + + /*globals window, global, require*/ + + /** + * CryptoJS core components. 
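+     *
+     * Exposed via the UMD wrapper above as a CommonJS export, an AMD module,
+     * or the global `CryptoJS`, depending on the environment.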
+ */ + var CryptoJS = CryptoJS || (function (Math, undefined) { + + var crypto; + + // Native crypto from window (Browser) + if (typeof window !== 'undefined' && window.crypto) { + crypto = window.crypto; + } + + // Native crypto in web worker (Browser) + if (typeof self !== 'undefined' && self.crypto) { + crypto = self.crypto; + } + + // Native crypto from worker + if (typeof globalThis !== 'undefined' && globalThis.crypto) { + crypto = globalThis.crypto; + } + + // Native (experimental IE 11) crypto from window (Browser) + if (!crypto && typeof window !== 'undefined' && window.msCrypto) { + crypto = window.msCrypto; + } + + // Native crypto from global (NodeJS) + if (!crypto && typeof global !== 'undefined' && global.crypto) { + crypto = global.crypto; + } + + // Native crypto import via require (NodeJS) + if (!crypto && typeof require === 'function') { + try { + crypto = require('crypto'); + } catch (err) {} + } + + /* + * Cryptographically secure pseudorandom number generator + * + * As Math.random() is cryptographically not safe to use + */ + var cryptoSecureRandomInt = function () { + if (crypto) { + // Use getRandomValues method (Browser) + if (typeof crypto.getRandomValues === 'function') { + try { + return crypto.getRandomValues(new Uint32Array(1))[0]; + } catch (err) {} + } + + // Use randomBytes method (NodeJS) + if (typeof crypto.randomBytes === 'function') { + try { + return crypto.randomBytes(4).readInt32LE(); + } catch (err) {} + } + } + + throw new Error('Native crypto module could not be used to get secure random number.'); + }; + + /* + * Local polyfill of Object.create + + */ + var create = Object.create || (function () { + function F() {} + + return function (obj) { + var subtype; + + F.prototype = obj; + + subtype = new F(); + + F.prototype = null; + + return subtype; + }; + }()); + + /** + * CryptoJS namespace. + */ + var C = {}; + + /** + * Library namespace. + */ + var C_lib = C.lib = {}; + + /** + * Base object for prototypal inheritance. + */ + var Base = C_lib.Base = (function () { + + + return { + /** + * Creates a new object that inherits from this object. + * + * @param {Object} overrides Properties to copy into the new object. + * + * @return {Object} The new object. + * + * @static + * + * @example + * + * var MyType = CryptoJS.lib.Base.extend({ + * field: 'value', + * + * method: function () { + * } + * }); + */ + extend: function (overrides) { + // Spawn + var subtype = create(this); + + // Augment + if (overrides) { + subtype.mixIn(overrides); + } + + // Create default initializer + if (!subtype.hasOwnProperty('init') || this.init === subtype.init) { + subtype.init = function () { + subtype.$super.init.apply(this, arguments); + }; + } + + // Initializer's prototype is the subtype object + subtype.init.prototype = subtype; + + // Reference supertype + subtype.$super = this; + + return subtype; + }, + + /** + * Extends this object and runs the init method. + * Arguments to create() will be passed to init(). + * + * @return {Object} The new object. + * + * @static + * + * @example + * + * var instance = MyType.create(); + */ + create: function () { + var instance = this.extend(); + instance.init.apply(instance, arguments); + + return instance; + }, + + /** + * Initializes a newly created object. + * Override this method to add some logic when your objects are created. + * + * @example + * + * var MyType = CryptoJS.lib.Base.extend({ + * init: function () { + * // ... + * } + * }); + */ + init: function () { + }, + + /** + * Copies properties into this object. 
+ * + * @param {Object} properties The properties to mix in. + * + * @example + * + * MyType.mixIn({ + * field: 'value' + * }); + */ + mixIn: function (properties) { + for (var propertyName in properties) { + if (properties.hasOwnProperty(propertyName)) { + this[propertyName] = properties[propertyName]; + } + } + + // IE won't copy toString using the loop above + if (properties.hasOwnProperty('toString')) { + this.toString = properties.toString; + } + }, + + /** + * Creates a copy of this object. + * + * @return {Object} The clone. + * + * @example + * + * var clone = instance.clone(); + */ + clone: function () { + return this.init.prototype.extend(this); + } + }; + }()); + + /** + * An array of 32-bit words. + * + * @property {Array} words The array of 32-bit words. + * @property {number} sigBytes The number of significant bytes in this word array. + */ + var WordArray = C_lib.WordArray = Base.extend({ + /** + * Initializes a newly created word array. + * + * @param {Array} words (Optional) An array of 32-bit words. + * @param {number} sigBytes (Optional) The number of significant bytes in the words. + * + * @example + * + * var wordArray = CryptoJS.lib.WordArray.create(); + * var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607]); + * var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607], 6); + */ + init: function (words, sigBytes) { + words = this.words = words || []; + + if (sigBytes != undefined) { + this.sigBytes = sigBytes; + } else { + this.sigBytes = words.length * 4; + } + }, + + /** + * Converts this word array to a string. + * + * @param {Encoder} encoder (Optional) The encoding strategy to use. Default: CryptoJS.enc.Hex + * + * @return {string} The stringified word array. + * + * @example + * + * var string = wordArray + ''; + * var string = wordArray.toString(); + * var string = wordArray.toString(CryptoJS.enc.Utf8); + */ + toString: function (encoder) { + return (encoder || Hex).stringify(this); + }, + + /** + * Concatenates a word array to this word array. + * + * @param {WordArray} wordArray The word array to append. + * + * @return {WordArray} This word array. + * + * @example + * + * wordArray1.concat(wordArray2); + */ + concat: function (wordArray) { + // Shortcuts + var thisWords = this.words; + var thatWords = wordArray.words; + var thisSigBytes = this.sigBytes; + var thatSigBytes = wordArray.sigBytes; + + // Clamp excess bits + this.clamp(); + + // Concat + if (thisSigBytes % 4) { + // Copy one byte at a time + for (var i = 0; i < thatSigBytes; i++) { + var thatByte = (thatWords[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; + thisWords[(thisSigBytes + i) >>> 2] |= thatByte << (24 - ((thisSigBytes + i) % 4) * 8); + } + } else { + // Copy one word at a time + for (var j = 0; j < thatSigBytes; j += 4) { + thisWords[(thisSigBytes + j) >>> 2] = thatWords[j >>> 2]; + } + } + this.sigBytes += thatSigBytes; + + // Chainable + return this; + }, + + /** + * Removes insignificant bits. + * + * @example + * + * wordArray.clamp(); + */ + clamp: function () { + // Shortcuts + var words = this.words; + var sigBytes = this.sigBytes; + + // Clamp + words[sigBytes >>> 2] &= 0xffffffff << (32 - (sigBytes % 4) * 8); + words.length = Math.ceil(sigBytes / 4); + }, + + /** + * Creates a copy of this word array. + * + * @return {WordArray} The clone. 
+ * + * @example + * + * var clone = wordArray.clone(); + */ + clone: function () { + var clone = Base.clone.call(this); + clone.words = this.words.slice(0); + + return clone; + }, + + /** + * Creates a word array filled with random bytes. + * + * @param {number} nBytes The number of random bytes to generate. + * + * @return {WordArray} The random word array. + * + * @static + * + * @example + * + * var wordArray = CryptoJS.lib.WordArray.random(16); + */ + random: function (nBytes) { + var words = []; + + for (var i = 0; i < nBytes; i += 4) { + words.push(cryptoSecureRandomInt()); + } + + return new WordArray.init(words, nBytes); + } + }); + + /** + * Encoder namespace. + */ + var C_enc = C.enc = {}; + + /** + * Hex encoding strategy. + */ + var Hex = C_enc.Hex = { + /** + * Converts a word array to a hex string. + * + * @param {WordArray} wordArray The word array. + * + * @return {string} The hex string. + * + * @static + * + * @example + * + * var hexString = CryptoJS.enc.Hex.stringify(wordArray); + */ + stringify: function (wordArray) { + // Shortcuts + var words = wordArray.words; + var sigBytes = wordArray.sigBytes; + + // Convert + var hexChars = []; + for (var i = 0; i < sigBytes; i++) { + var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; + hexChars.push((bite >>> 4).toString(16)); + hexChars.push((bite & 0x0f).toString(16)); + } + + return hexChars.join(''); + }, + + /** + * Converts a hex string to a word array. + * + * @param {string} hexStr The hex string. + * + * @return {WordArray} The word array. + * + * @static + * + * @example + * + * var wordArray = CryptoJS.enc.Hex.parse(hexString); + */ + parse: function (hexStr) { + // Shortcut + var hexStrLength = hexStr.length; + + // Convert + var words = []; + for (var i = 0; i < hexStrLength; i += 2) { + words[i >>> 3] |= parseInt(hexStr.substr(i, 2), 16) << (24 - (i % 8) * 4); + } + + return new WordArray.init(words, hexStrLength / 2); + } + }; + + /** + * Latin1 encoding strategy. + */ + var Latin1 = C_enc.Latin1 = { + /** + * Converts a word array to a Latin1 string. + * + * @param {WordArray} wordArray The word array. + * + * @return {string} The Latin1 string. + * + * @static + * + * @example + * + * var latin1String = CryptoJS.enc.Latin1.stringify(wordArray); + */ + stringify: function (wordArray) { + // Shortcuts + var words = wordArray.words; + var sigBytes = wordArray.sigBytes; + + // Convert + var latin1Chars = []; + for (var i = 0; i < sigBytes; i++) { + var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; + latin1Chars.push(String.fromCharCode(bite)); + } + + return latin1Chars.join(''); + }, + + /** + * Converts a Latin1 string to a word array. + * + * @param {string} latin1Str The Latin1 string. + * + * @return {WordArray} The word array. + * + * @static + * + * @example + * + * var wordArray = CryptoJS.enc.Latin1.parse(latin1String); + */ + parse: function (latin1Str) { + // Shortcut + var latin1StrLength = latin1Str.length; + + // Convert + var words = []; + for (var i = 0; i < latin1StrLength; i++) { + words[i >>> 2] |= (latin1Str.charCodeAt(i) & 0xff) << (24 - (i % 4) * 8); + } + + return new WordArray.init(words, latin1StrLength); + } + }; + + /** + * UTF-8 encoding strategy. + */ + var Utf8 = C_enc.Utf8 = { + /** + * Converts a word array to a UTF-8 string. + * + * @param {WordArray} wordArray The word array. + * + * @return {string} The UTF-8 string. 
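+         *     Throws an Error ('Malformed UTF-8 data') when the underlying bytes are not valid UTF-8.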
+ * + * @static + * + * @example + * + * var utf8String = CryptoJS.enc.Utf8.stringify(wordArray); + */ + stringify: function (wordArray) { + try { + return decodeURIComponent(escape(Latin1.stringify(wordArray))); + } catch (e) { + throw new Error('Malformed UTF-8 data'); + } + }, + + /** + * Converts a UTF-8 string to a word array. + * + * @param {string} utf8Str The UTF-8 string. + * + * @return {WordArray} The word array. + * + * @static + * + * @example + * + * var wordArray = CryptoJS.enc.Utf8.parse(utf8String); + */ + parse: function (utf8Str) { + return Latin1.parse(unescape(encodeURIComponent(utf8Str))); + } + }; + + /** + * Abstract buffered block algorithm template. + * + * The property blockSize must be implemented in a concrete subtype. + * + * @property {number} _minBufferSize The number of blocks that should be kept unprocessed in the buffer. Default: 0 + */ + var BufferedBlockAlgorithm = C_lib.BufferedBlockAlgorithm = Base.extend({ + /** + * Resets this block algorithm's data buffer to its initial state. + * + * @example + * + * bufferedBlockAlgorithm.reset(); + */ + reset: function () { + // Initial values + this._data = new WordArray.init(); + this._nDataBytes = 0; + }, + + /** + * Adds new data to this block algorithm's buffer. + * + * @param {WordArray|string} data The data to append. Strings are converted to a WordArray using UTF-8. + * + * @example + * + * bufferedBlockAlgorithm._append('data'); + * bufferedBlockAlgorithm._append(wordArray); + */ + _append: function (data) { + // Convert string to WordArray, else assume WordArray already + if (typeof data == 'string') { + data = Utf8.parse(data); + } + + // Append + this._data.concat(data); + this._nDataBytes += data.sigBytes; + }, + + /** + * Processes available data blocks. + * + * This method invokes _doProcessBlock(offset), which must be implemented by a concrete subtype. + * + * @param {boolean} doFlush Whether all blocks and partial blocks should be processed. + * + * @return {WordArray} The processed data. + * + * @example + * + * var processedData = bufferedBlockAlgorithm._process(); + * var processedData = bufferedBlockAlgorithm._process(!!'flush'); + */ + _process: function (doFlush) { + var processedWords; + + // Shortcuts + var data = this._data; + var dataWords = data.words; + var dataSigBytes = data.sigBytes; + var blockSize = this.blockSize; + var blockSizeBytes = blockSize * 4; + + // Count blocks ready + var nBlocksReady = dataSigBytes / blockSizeBytes; + if (doFlush) { + // Round up to include partial blocks + nBlocksReady = Math.ceil(nBlocksReady); + } else { + // Round down to include only full blocks, + // less the number of blocks that must remain in the buffer + nBlocksReady = Math.max((nBlocksReady | 0) - this._minBufferSize, 0); + } + + // Count words ready + var nWordsReady = nBlocksReady * blockSize; + + // Count bytes ready + var nBytesReady = Math.min(nWordsReady * 4, dataSigBytes); + + // Process blocks + if (nWordsReady) { + for (var offset = 0; offset < nWordsReady; offset += blockSize) { + // Perform concrete-algorithm logic + this._doProcessBlock(dataWords, offset); + } + + // Remove processed words + processedWords = dataWords.splice(0, nWordsReady); + data.sigBytes -= nBytesReady; + } + + // Return processed words + return new WordArray.init(processedWords, nBytesReady); + }, + + /** + * Creates a copy of this object. + * + * @return {Object} The clone. 
+ * + * @example + * + * var clone = bufferedBlockAlgorithm.clone(); + */ + clone: function () { + var clone = Base.clone.call(this); + clone._data = this._data.clone(); + + return clone; + }, + + _minBufferSize: 0 + }); + + /** + * Abstract hasher template. + * + * @property {number} blockSize The number of 32-bit words this hasher operates on. Default: 16 (512 bits) + */ + var Hasher = C_lib.Hasher = BufferedBlockAlgorithm.extend({ + /** + * Configuration options. + */ + cfg: Base.extend(), + + /** + * Initializes a newly created hasher. + * + * @param {Object} cfg (Optional) The configuration options to use for this hash computation. + * + * @example + * + * var hasher = CryptoJS.algo.SHA256.create(); + */ + init: function (cfg) { + // Apply config defaults + this.cfg = this.cfg.extend(cfg); + + // Set initial values + this.reset(); + }, + + /** + * Resets this hasher to its initial state. + * + * @example + * + * hasher.reset(); + */ + reset: function () { + // Reset data buffer + BufferedBlockAlgorithm.reset.call(this); + + // Perform concrete-hasher logic + this._doReset(); + }, + + /** + * Updates this hasher with a message. + * + * @param {WordArray|string} messageUpdate The message to append. + * + * @return {Hasher} This hasher. + * + * @example + * + * hasher.update('message'); + * hasher.update(wordArray); + */ + update: function (messageUpdate) { + // Append + this._append(messageUpdate); + + // Update the hash + this._process(); + + // Chainable + return this; + }, + + /** + * Finalizes the hash computation. + * Note that the finalize operation is effectively a destructive, read-once operation. + * + * @param {WordArray|string} messageUpdate (Optional) A final message update. + * + * @return {WordArray} The hash. + * + * @example + * + * var hash = hasher.finalize(); + * var hash = hasher.finalize('message'); + * var hash = hasher.finalize(wordArray); + */ + finalize: function (messageUpdate) { + // Final message update + if (messageUpdate) { + this._append(messageUpdate); + } + + // Perform concrete-hasher logic + var hash = this._doFinalize(); + + return hash; + }, + + blockSize: 512/32, + + /** + * Creates a shortcut function to a hasher's object interface. + * + * @param {Hasher} hasher The hasher to create a helper for. + * + * @return {Function} The shortcut function. + * + * @static + * + * @example + * + * var SHA256 = CryptoJS.lib.Hasher._createHelper(CryptoJS.algo.SHA256); + */ + _createHelper: function (hasher) { + return function (message, cfg) { + return new hasher.init(cfg).finalize(message); + }; + }, + + /** + * Creates a shortcut function to the HMAC's object interface. + * + * @param {Hasher} hasher The hasher to use in this HMAC helper. + * + * @return {Function} The shortcut function. + * + * @static + * + * @example + * + * var HmacSHA256 = CryptoJS.lib.Hasher._createHmacHelper(CryptoJS.algo.SHA256); + */ + _createHmacHelper: function (hasher) { + return function (message, key) { + return new C_algo.HMAC.init(hasher, key).finalize(message); + }; + } + }); + + /** + * Algorithm namespace. 
+ */ + var C_algo = C.algo = {}; + + return C; + }(Math)); + + + return CryptoJS; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/crypto-js.js b/node_modules/crypto-js/crypto-js.js new file mode 100644 index 0000000..27f0a62 --- /dev/null +++ b/node_modules/crypto-js/crypto-js.js @@ -0,0 +1,6191 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(); + } + else if (typeof define === "function" && define.amd) { + // AMD + define([], factory); + } + else { + // Global (browser) + root.CryptoJS = factory(); + } +}(this, function () { + + /*globals window, global, require*/ + + /** + * CryptoJS core components. + */ + var CryptoJS = CryptoJS || (function (Math, undefined) { + + var crypto; + + // Native crypto from window (Browser) + if (typeof window !== 'undefined' && window.crypto) { + crypto = window.crypto; + } + + // Native crypto in web worker (Browser) + if (typeof self !== 'undefined' && self.crypto) { + crypto = self.crypto; + } + + // Native crypto from worker + if (typeof globalThis !== 'undefined' && globalThis.crypto) { + crypto = globalThis.crypto; + } + + // Native (experimental IE 11) crypto from window (Browser) + if (!crypto && typeof window !== 'undefined' && window.msCrypto) { + crypto = window.msCrypto; + } + + // Native crypto from global (NodeJS) + if (!crypto && typeof global !== 'undefined' && global.crypto) { + crypto = global.crypto; + } + + // Native crypto import via require (NodeJS) + if (!crypto && typeof require === 'function') { + try { + crypto = require('crypto'); + } catch (err) {} + } + + /* + * Cryptographically secure pseudorandom number generator + * + * As Math.random() is cryptographically not safe to use + */ + var cryptoSecureRandomInt = function () { + if (crypto) { + // Use getRandomValues method (Browser) + if (typeof crypto.getRandomValues === 'function') { + try { + return crypto.getRandomValues(new Uint32Array(1))[0]; + } catch (err) {} + } + + // Use randomBytes method (NodeJS) + if (typeof crypto.randomBytes === 'function') { + try { + return crypto.randomBytes(4).readInt32LE(); + } catch (err) {} + } + } + + throw new Error('Native crypto module could not be used to get secure random number.'); + }; + + /* + * Local polyfill of Object.create + + */ + var create = Object.create || (function () { + function F() {} + + return function (obj) { + var subtype; + + F.prototype = obj; + + subtype = new F(); + + F.prototype = null; + + return subtype; + }; + }()); + + /** + * CryptoJS namespace. + */ + var C = {}; + + /** + * Library namespace. + */ + var C_lib = C.lib = {}; + + /** + * Base object for prototypal inheritance. + */ + var Base = C_lib.Base = (function () { + + + return { + /** + * Creates a new object that inherits from this object. + * + * @param {Object} overrides Properties to copy into the new object. + * + * @return {Object} The new object. 
+ * + * @static + * + * @example + * + * var MyType = CryptoJS.lib.Base.extend({ + * field: 'value', + * + * method: function () { + * } + * }); + */ + extend: function (overrides) { + // Spawn + var subtype = create(this); + + // Augment + if (overrides) { + subtype.mixIn(overrides); + } + + // Create default initializer + if (!subtype.hasOwnProperty('init') || this.init === subtype.init) { + subtype.init = function () { + subtype.$super.init.apply(this, arguments); + }; + } + + // Initializer's prototype is the subtype object + subtype.init.prototype = subtype; + + // Reference supertype + subtype.$super = this; + + return subtype; + }, + + /** + * Extends this object and runs the init method. + * Arguments to create() will be passed to init(). + * + * @return {Object} The new object. + * + * @static + * + * @example + * + * var instance = MyType.create(); + */ + create: function () { + var instance = this.extend(); + instance.init.apply(instance, arguments); + + return instance; + }, + + /** + * Initializes a newly created object. + * Override this method to add some logic when your objects are created. + * + * @example + * + * var MyType = CryptoJS.lib.Base.extend({ + * init: function () { + * // ... + * } + * }); + */ + init: function () { + }, + + /** + * Copies properties into this object. + * + * @param {Object} properties The properties to mix in. + * + * @example + * + * MyType.mixIn({ + * field: 'value' + * }); + */ + mixIn: function (properties) { + for (var propertyName in properties) { + if (properties.hasOwnProperty(propertyName)) { + this[propertyName] = properties[propertyName]; + } + } + + // IE won't copy toString using the loop above + if (properties.hasOwnProperty('toString')) { + this.toString = properties.toString; + } + }, + + /** + * Creates a copy of this object. + * + * @return {Object} The clone. + * + * @example + * + * var clone = instance.clone(); + */ + clone: function () { + return this.init.prototype.extend(this); + } + }; + }()); + + /** + * An array of 32-bit words. + * + * @property {Array} words The array of 32-bit words. + * @property {number} sigBytes The number of significant bytes in this word array. + */ + var WordArray = C_lib.WordArray = Base.extend({ + /** + * Initializes a newly created word array. + * + * @param {Array} words (Optional) An array of 32-bit words. + * @param {number} sigBytes (Optional) The number of significant bytes in the words. + * + * @example + * + * var wordArray = CryptoJS.lib.WordArray.create(); + * var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607]); + * var wordArray = CryptoJS.lib.WordArray.create([0x00010203, 0x04050607], 6); + */ + init: function (words, sigBytes) { + words = this.words = words || []; + + if (sigBytes != undefined) { + this.sigBytes = sigBytes; + } else { + this.sigBytes = words.length * 4; + } + }, + + /** + * Converts this word array to a string. + * + * @param {Encoder} encoder (Optional) The encoding strategy to use. Default: CryptoJS.enc.Hex + * + * @return {string} The stringified word array. + * + * @example + * + * var string = wordArray + ''; + * var string = wordArray.toString(); + * var string = wordArray.toString(CryptoJS.enc.Utf8); + */ + toString: function (encoder) { + return (encoder || Hex).stringify(this); + }, + + /** + * Concatenates a word array to this word array. + * + * @param {WordArray} wordArray The word array to append. + * + * @return {WordArray} This word array. 
+ * + * @example + * + * wordArray1.concat(wordArray2); + */ + concat: function (wordArray) { + // Shortcuts + var thisWords = this.words; + var thatWords = wordArray.words; + var thisSigBytes = this.sigBytes; + var thatSigBytes = wordArray.sigBytes; + + // Clamp excess bits + this.clamp(); + + // Concat + if (thisSigBytes % 4) { + // Copy one byte at a time + for (var i = 0; i < thatSigBytes; i++) { + var thatByte = (thatWords[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; + thisWords[(thisSigBytes + i) >>> 2] |= thatByte << (24 - ((thisSigBytes + i) % 4) * 8); + } + } else { + // Copy one word at a time + for (var j = 0; j < thatSigBytes; j += 4) { + thisWords[(thisSigBytes + j) >>> 2] = thatWords[j >>> 2]; + } + } + this.sigBytes += thatSigBytes; + + // Chainable + return this; + }, + + /** + * Removes insignificant bits. + * + * @example + * + * wordArray.clamp(); + */ + clamp: function () { + // Shortcuts + var words = this.words; + var sigBytes = this.sigBytes; + + // Clamp + words[sigBytes >>> 2] &= 0xffffffff << (32 - (sigBytes % 4) * 8); + words.length = Math.ceil(sigBytes / 4); + }, + + /** + * Creates a copy of this word array. + * + * @return {WordArray} The clone. + * + * @example + * + * var clone = wordArray.clone(); + */ + clone: function () { + var clone = Base.clone.call(this); + clone.words = this.words.slice(0); + + return clone; + }, + + /** + * Creates a word array filled with random bytes. + * + * @param {number} nBytes The number of random bytes to generate. + * + * @return {WordArray} The random word array. + * + * @static + * + * @example + * + * var wordArray = CryptoJS.lib.WordArray.random(16); + */ + random: function (nBytes) { + var words = []; + + for (var i = 0; i < nBytes; i += 4) { + words.push(cryptoSecureRandomInt()); + } + + return new WordArray.init(words, nBytes); + } + }); + + /** + * Encoder namespace. + */ + var C_enc = C.enc = {}; + + /** + * Hex encoding strategy. + */ + var Hex = C_enc.Hex = { + /** + * Converts a word array to a hex string. + * + * @param {WordArray} wordArray The word array. + * + * @return {string} The hex string. + * + * @static + * + * @example + * + * var hexString = CryptoJS.enc.Hex.stringify(wordArray); + */ + stringify: function (wordArray) { + // Shortcuts + var words = wordArray.words; + var sigBytes = wordArray.sigBytes; + + // Convert + var hexChars = []; + for (var i = 0; i < sigBytes; i++) { + var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; + hexChars.push((bite >>> 4).toString(16)); + hexChars.push((bite & 0x0f).toString(16)); + } + + return hexChars.join(''); + }, + + /** + * Converts a hex string to a word array. + * + * @param {string} hexStr The hex string. + * + * @return {WordArray} The word array. + * + * @static + * + * @example + * + * var wordArray = CryptoJS.enc.Hex.parse(hexString); + */ + parse: function (hexStr) { + // Shortcut + var hexStrLength = hexStr.length; + + // Convert + var words = []; + for (var i = 0; i < hexStrLength; i += 2) { + words[i >>> 3] |= parseInt(hexStr.substr(i, 2), 16) << (24 - (i % 8) * 4); + } + + return new WordArray.init(words, hexStrLength / 2); + } + }; + + /** + * Latin1 encoding strategy. + */ + var Latin1 = C_enc.Latin1 = { + /** + * Converts a word array to a Latin1 string. + * + * @param {WordArray} wordArray The word array. + * + * @return {string} The Latin1 string. 
+ * + * @static + * + * @example + * + * var latin1String = CryptoJS.enc.Latin1.stringify(wordArray); + */ + stringify: function (wordArray) { + // Shortcuts + var words = wordArray.words; + var sigBytes = wordArray.sigBytes; + + // Convert + var latin1Chars = []; + for (var i = 0; i < sigBytes; i++) { + var bite = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; + latin1Chars.push(String.fromCharCode(bite)); + } + + return latin1Chars.join(''); + }, + + /** + * Converts a Latin1 string to a word array. + * + * @param {string} latin1Str The Latin1 string. + * + * @return {WordArray} The word array. + * + * @static + * + * @example + * + * var wordArray = CryptoJS.enc.Latin1.parse(latin1String); + */ + parse: function (latin1Str) { + // Shortcut + var latin1StrLength = latin1Str.length; + + // Convert + var words = []; + for (var i = 0; i < latin1StrLength; i++) { + words[i >>> 2] |= (latin1Str.charCodeAt(i) & 0xff) << (24 - (i % 4) * 8); + } + + return new WordArray.init(words, latin1StrLength); + } + }; + + /** + * UTF-8 encoding strategy. + */ + var Utf8 = C_enc.Utf8 = { + /** + * Converts a word array to a UTF-8 string. + * + * @param {WordArray} wordArray The word array. + * + * @return {string} The UTF-8 string. + * + * @static + * + * @example + * + * var utf8String = CryptoJS.enc.Utf8.stringify(wordArray); + */ + stringify: function (wordArray) { + try { + return decodeURIComponent(escape(Latin1.stringify(wordArray))); + } catch (e) { + throw new Error('Malformed UTF-8 data'); + } + }, + + /** + * Converts a UTF-8 string to a word array. + * + * @param {string} utf8Str The UTF-8 string. + * + * @return {WordArray} The word array. + * + * @static + * + * @example + * + * var wordArray = CryptoJS.enc.Utf8.parse(utf8String); + */ + parse: function (utf8Str) { + return Latin1.parse(unescape(encodeURIComponent(utf8Str))); + } + }; + + /** + * Abstract buffered block algorithm template. + * + * The property blockSize must be implemented in a concrete subtype. + * + * @property {number} _minBufferSize The number of blocks that should be kept unprocessed in the buffer. Default: 0 + */ + var BufferedBlockAlgorithm = C_lib.BufferedBlockAlgorithm = Base.extend({ + /** + * Resets this block algorithm's data buffer to its initial state. + * + * @example + * + * bufferedBlockAlgorithm.reset(); + */ + reset: function () { + // Initial values + this._data = new WordArray.init(); + this._nDataBytes = 0; + }, + + /** + * Adds new data to this block algorithm's buffer. + * + * @param {WordArray|string} data The data to append. Strings are converted to a WordArray using UTF-8. + * + * @example + * + * bufferedBlockAlgorithm._append('data'); + * bufferedBlockAlgorithm._append(wordArray); + */ + _append: function (data) { + // Convert string to WordArray, else assume WordArray already + if (typeof data == 'string') { + data = Utf8.parse(data); + } + + // Append + this._data.concat(data); + this._nDataBytes += data.sigBytes; + }, + + /** + * Processes available data blocks. + * + * This method invokes _doProcessBlock(offset), which must be implemented by a concrete subtype. + * + * @param {boolean} doFlush Whether all blocks and partial blocks should be processed. + * + * @return {WordArray} The processed data. 
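+         *
+         * Note: without doFlush only whole blocks are consumed, and _minBufferSize
+         * blocks are additionally held back in the buffer; with doFlush the final
+         * partial block is processed as well.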
+ * + * @example + * + * var processedData = bufferedBlockAlgorithm._process(); + * var processedData = bufferedBlockAlgorithm._process(!!'flush'); + */ + _process: function (doFlush) { + var processedWords; + + // Shortcuts + var data = this._data; + var dataWords = data.words; + var dataSigBytes = data.sigBytes; + var blockSize = this.blockSize; + var blockSizeBytes = blockSize * 4; + + // Count blocks ready + var nBlocksReady = dataSigBytes / blockSizeBytes; + if (doFlush) { + // Round up to include partial blocks + nBlocksReady = Math.ceil(nBlocksReady); + } else { + // Round down to include only full blocks, + // less the number of blocks that must remain in the buffer + nBlocksReady = Math.max((nBlocksReady | 0) - this._minBufferSize, 0); + } + + // Count words ready + var nWordsReady = nBlocksReady * blockSize; + + // Count bytes ready + var nBytesReady = Math.min(nWordsReady * 4, dataSigBytes); + + // Process blocks + if (nWordsReady) { + for (var offset = 0; offset < nWordsReady; offset += blockSize) { + // Perform concrete-algorithm logic + this._doProcessBlock(dataWords, offset); + } + + // Remove processed words + processedWords = dataWords.splice(0, nWordsReady); + data.sigBytes -= nBytesReady; + } + + // Return processed words + return new WordArray.init(processedWords, nBytesReady); + }, + + /** + * Creates a copy of this object. + * + * @return {Object} The clone. + * + * @example + * + * var clone = bufferedBlockAlgorithm.clone(); + */ + clone: function () { + var clone = Base.clone.call(this); + clone._data = this._data.clone(); + + return clone; + }, + + _minBufferSize: 0 + }); + + /** + * Abstract hasher template. + * + * @property {number} blockSize The number of 32-bit words this hasher operates on. Default: 16 (512 bits) + */ + var Hasher = C_lib.Hasher = BufferedBlockAlgorithm.extend({ + /** + * Configuration options. + */ + cfg: Base.extend(), + + /** + * Initializes a newly created hasher. + * + * @param {Object} cfg (Optional) The configuration options to use for this hash computation. + * + * @example + * + * var hasher = CryptoJS.algo.SHA256.create(); + */ + init: function (cfg) { + // Apply config defaults + this.cfg = this.cfg.extend(cfg); + + // Set initial values + this.reset(); + }, + + /** + * Resets this hasher to its initial state. + * + * @example + * + * hasher.reset(); + */ + reset: function () { + // Reset data buffer + BufferedBlockAlgorithm.reset.call(this); + + // Perform concrete-hasher logic + this._doReset(); + }, + + /** + * Updates this hasher with a message. + * + * @param {WordArray|string} messageUpdate The message to append. + * + * @return {Hasher} This hasher. + * + * @example + * + * hasher.update('message'); + * hasher.update(wordArray); + */ + update: function (messageUpdate) { + // Append + this._append(messageUpdate); + + // Update the hash + this._process(); + + // Chainable + return this; + }, + + /** + * Finalizes the hash computation. + * Note that the finalize operation is effectively a destructive, read-once operation. + * + * @param {WordArray|string} messageUpdate (Optional) A final message update. + * + * @return {WordArray} The hash. 
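+         *
+         * Note: because finalize() consumes the buffered data, reuse an instance
+         * only after calling reset(), or create a fresh hasher per message.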
+ * + * @example + * + * var hash = hasher.finalize(); + * var hash = hasher.finalize('message'); + * var hash = hasher.finalize(wordArray); + */ + finalize: function (messageUpdate) { + // Final message update + if (messageUpdate) { + this._append(messageUpdate); + } + + // Perform concrete-hasher logic + var hash = this._doFinalize(); + + return hash; + }, + + blockSize: 512/32, + + /** + * Creates a shortcut function to a hasher's object interface. + * + * @param {Hasher} hasher The hasher to create a helper for. + * + * @return {Function} The shortcut function. + * + * @static + * + * @example + * + * var SHA256 = CryptoJS.lib.Hasher._createHelper(CryptoJS.algo.SHA256); + */ + _createHelper: function (hasher) { + return function (message, cfg) { + return new hasher.init(cfg).finalize(message); + }; + }, + + /** + * Creates a shortcut function to the HMAC's object interface. + * + * @param {Hasher} hasher The hasher to use in this HMAC helper. + * + * @return {Function} The shortcut function. + * + * @static + * + * @example + * + * var HmacSHA256 = CryptoJS.lib.Hasher._createHmacHelper(CryptoJS.algo.SHA256); + */ + _createHmacHelper: function (hasher) { + return function (message, key) { + return new C_algo.HMAC.init(hasher, key).finalize(message); + }; + } + }); + + /** + * Algorithm namespace. + */ + var C_algo = C.algo = {}; + + return C; + }(Math)); + + + (function (undefined) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var Base = C_lib.Base; + var X32WordArray = C_lib.WordArray; + + /** + * x64 namespace. + */ + var C_x64 = C.x64 = {}; + + /** + * A 64-bit word. + */ + var X64Word = C_x64.Word = Base.extend({ + /** + * Initializes a newly created 64-bit word. + * + * @param {number} high The high 32 bits. + * @param {number} low The low 32 bits. + * + * @example + * + * var x64Word = CryptoJS.x64.Word.create(0x00010203, 0x04050607); + */ + init: function (high, low) { + this.high = high; + this.low = low; + } + + /** + * Bitwise NOTs this word. + * + * @return {X64Word} A new x64-Word object after negating. + * + * @example + * + * var negated = x64Word.not(); + */ + // not: function () { + // var high = ~this.high; + // var low = ~this.low; + + // return X64Word.create(high, low); + // }, + + /** + * Bitwise ANDs this word with the passed word. + * + * @param {X64Word} word The x64-Word to AND with this word. + * + * @return {X64Word} A new x64-Word object after ANDing. + * + * @example + * + * var anded = x64Word.and(anotherX64Word); + */ + // and: function (word) { + // var high = this.high & word.high; + // var low = this.low & word.low; + + // return X64Word.create(high, low); + // }, + + /** + * Bitwise ORs this word with the passed word. + * + * @param {X64Word} word The x64-Word to OR with this word. + * + * @return {X64Word} A new x64-Word object after ORing. + * + * @example + * + * var ored = x64Word.or(anotherX64Word); + */ + // or: function (word) { + // var high = this.high | word.high; + // var low = this.low | word.low; + + // return X64Word.create(high, low); + // }, + + /** + * Bitwise XORs this word with the passed word. + * + * @param {X64Word} word The x64-Word to XOR with this word. + * + * @return {X64Word} A new x64-Word object after XORing. + * + * @example + * + * var xored = x64Word.xor(anotherX64Word); + */ + // xor: function (word) { + // var high = this.high ^ word.high; + // var low = this.low ^ word.low; + + // return X64Word.create(high, low); + // }, + + /** + * Shifts this word n bits to the left. 
+ * + * @param {number} n The number of bits to shift. + * + * @return {X64Word} A new x64-Word object after shifting. + * + * @example + * + * var shifted = x64Word.shiftL(25); + */ + // shiftL: function (n) { + // if (n < 32) { + // var high = (this.high << n) | (this.low >>> (32 - n)); + // var low = this.low << n; + // } else { + // var high = this.low << (n - 32); + // var low = 0; + // } + + // return X64Word.create(high, low); + // }, + + /** + * Shifts this word n bits to the right. + * + * @param {number} n The number of bits to shift. + * + * @return {X64Word} A new x64-Word object after shifting. + * + * @example + * + * var shifted = x64Word.shiftR(7); + */ + // shiftR: function (n) { + // if (n < 32) { + // var low = (this.low >>> n) | (this.high << (32 - n)); + // var high = this.high >>> n; + // } else { + // var low = this.high >>> (n - 32); + // var high = 0; + // } + + // return X64Word.create(high, low); + // }, + + /** + * Rotates this word n bits to the left. + * + * @param {number} n The number of bits to rotate. + * + * @return {X64Word} A new x64-Word object after rotating. + * + * @example + * + * var rotated = x64Word.rotL(25); + */ + // rotL: function (n) { + // return this.shiftL(n).or(this.shiftR(64 - n)); + // }, + + /** + * Rotates this word n bits to the right. + * + * @param {number} n The number of bits to rotate. + * + * @return {X64Word} A new x64-Word object after rotating. + * + * @example + * + * var rotated = x64Word.rotR(7); + */ + // rotR: function (n) { + // return this.shiftR(n).or(this.shiftL(64 - n)); + // }, + + /** + * Adds this word with the passed word. + * + * @param {X64Word} word The x64-Word to add with this word. + * + * @return {X64Word} A new x64-Word object after adding. + * + * @example + * + * var added = x64Word.add(anotherX64Word); + */ + // add: function (word) { + // var low = (this.low + word.low) | 0; + // var carry = (low >>> 0) < (this.low >>> 0) ? 1 : 0; + // var high = (this.high + word.high + carry) | 0; + + // return X64Word.create(high, low); + // } + }); + + /** + * An array of 64-bit words. + * + * @property {Array} words The array of CryptoJS.x64.Word objects. + * @property {number} sigBytes The number of significant bytes in this word array. + */ + var X64WordArray = C_x64.WordArray = Base.extend({ + /** + * Initializes a newly created word array. + * + * @param {Array} words (Optional) An array of CryptoJS.x64.Word objects. + * @param {number} sigBytes (Optional) The number of significant bytes in the words. + * + * @example + * + * var wordArray = CryptoJS.x64.WordArray.create(); + * + * var wordArray = CryptoJS.x64.WordArray.create([ + * CryptoJS.x64.Word.create(0x00010203, 0x04050607), + * CryptoJS.x64.Word.create(0x18191a1b, 0x1c1d1e1f) + * ]); + * + * var wordArray = CryptoJS.x64.WordArray.create([ + * CryptoJS.x64.Word.create(0x00010203, 0x04050607), + * CryptoJS.x64.Word.create(0x18191a1b, 0x1c1d1e1f) + * ], 10); + */ + init: function (words, sigBytes) { + words = this.words = words || []; + + if (sigBytes != undefined) { + this.sigBytes = sigBytes; + } else { + this.sigBytes = words.length * 8; + } + }, + + /** + * Converts this 64-bit word array to a 32-bit word array. + * + * @return {CryptoJS.lib.WordArray} This word array's data as a 32-bit word array. 
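+         *
+         * Note: each 64-bit word contributes its high half followed by its low
+         * half, and sigBytes is carried over unchanged.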
+ * + * @example + * + * var x32WordArray = x64WordArray.toX32(); + */ + toX32: function () { + // Shortcuts + var x64Words = this.words; + var x64WordsLength = x64Words.length; + + // Convert + var x32Words = []; + for (var i = 0; i < x64WordsLength; i++) { + var x64Word = x64Words[i]; + x32Words.push(x64Word.high); + x32Words.push(x64Word.low); + } + + return X32WordArray.create(x32Words, this.sigBytes); + }, + + /** + * Creates a copy of this word array. + * + * @return {X64WordArray} The clone. + * + * @example + * + * var clone = x64WordArray.clone(); + */ + clone: function () { + var clone = Base.clone.call(this); + + // Clone "words" array + var words = clone.words = this.words.slice(0); + + // Clone each X64Word object + var wordsLength = words.length; + for (var i = 0; i < wordsLength; i++) { + words[i] = words[i].clone(); + } + + return clone; + } + }); + }()); + + + (function () { + // Check if typed arrays are supported + if (typeof ArrayBuffer != 'function') { + return; + } + + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + + // Reference original init + var superInit = WordArray.init; + + // Augment WordArray.init to handle typed arrays + var subInit = WordArray.init = function (typedArray) { + // Convert buffers to uint8 + if (typedArray instanceof ArrayBuffer) { + typedArray = new Uint8Array(typedArray); + } + + // Convert other array views to uint8 + if ( + typedArray instanceof Int8Array || + (typeof Uint8ClampedArray !== "undefined" && typedArray instanceof Uint8ClampedArray) || + typedArray instanceof Int16Array || + typedArray instanceof Uint16Array || + typedArray instanceof Int32Array || + typedArray instanceof Uint32Array || + typedArray instanceof Float32Array || + typedArray instanceof Float64Array + ) { + typedArray = new Uint8Array(typedArray.buffer, typedArray.byteOffset, typedArray.byteLength); + } + + // Handle Uint8Array + if (typedArray instanceof Uint8Array) { + // Shortcut + var typedArrayByteLength = typedArray.byteLength; + + // Extract bytes + var words = []; + for (var i = 0; i < typedArrayByteLength; i++) { + words[i >>> 2] |= typedArray[i] << (24 - (i % 4) * 8); + } + + // Initialize this word array + superInit.call(this, words, typedArrayByteLength); + } else { + // Else call normal init + superInit.apply(this, arguments); + } + }; + + subInit.prototype = WordArray; + }()); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var C_enc = C.enc; + + /** + * UTF-16 BE encoding strategy. + */ + var Utf16BE = C_enc.Utf16 = C_enc.Utf16BE = { + /** + * Converts a word array to a UTF-16 BE string. + * + * @param {WordArray} wordArray The word array. + * + * @return {string} The UTF-16 BE string. + * + * @static + * + * @example + * + * var utf16String = CryptoJS.enc.Utf16.stringify(wordArray); + */ + stringify: function (wordArray) { + // Shortcuts + var words = wordArray.words; + var sigBytes = wordArray.sigBytes; + + // Convert + var utf16Chars = []; + for (var i = 0; i < sigBytes; i += 2) { + var codePoint = (words[i >>> 2] >>> (16 - (i % 4) * 8)) & 0xffff; + utf16Chars.push(String.fromCharCode(codePoint)); + } + + return utf16Chars.join(''); + }, + + /** + * Converts a UTF-16 BE string to a word array. + * + * @param {string} utf16Str The UTF-16 BE string. + * + * @return {WordArray} The word array. 
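+         *
+         * Note: each input character code becomes one big-endian 16-bit unit, so
+         * the result always has utf16Str.length * 2 significant bytes.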
+ * + * @static + * + * @example + * + * var wordArray = CryptoJS.enc.Utf16.parse(utf16String); + */ + parse: function (utf16Str) { + // Shortcut + var utf16StrLength = utf16Str.length; + + // Convert + var words = []; + for (var i = 0; i < utf16StrLength; i++) { + words[i >>> 1] |= utf16Str.charCodeAt(i) << (16 - (i % 2) * 16); + } + + return WordArray.create(words, utf16StrLength * 2); + } + }; + + /** + * UTF-16 LE encoding strategy. + */ + C_enc.Utf16LE = { + /** + * Converts a word array to a UTF-16 LE string. + * + * @param {WordArray} wordArray The word array. + * + * @return {string} The UTF-16 LE string. + * + * @static + * + * @example + * + * var utf16Str = CryptoJS.enc.Utf16LE.stringify(wordArray); + */ + stringify: function (wordArray) { + // Shortcuts + var words = wordArray.words; + var sigBytes = wordArray.sigBytes; + + // Convert + var utf16Chars = []; + for (var i = 0; i < sigBytes; i += 2) { + var codePoint = swapEndian((words[i >>> 2] >>> (16 - (i % 4) * 8)) & 0xffff); + utf16Chars.push(String.fromCharCode(codePoint)); + } + + return utf16Chars.join(''); + }, + + /** + * Converts a UTF-16 LE string to a word array. + * + * @param {string} utf16Str The UTF-16 LE string. + * + * @return {WordArray} The word array. + * + * @static + * + * @example + * + * var wordArray = CryptoJS.enc.Utf16LE.parse(utf16Str); + */ + parse: function (utf16Str) { + // Shortcut + var utf16StrLength = utf16Str.length; + + // Convert + var words = []; + for (var i = 0; i < utf16StrLength; i++) { + words[i >>> 1] |= swapEndian(utf16Str.charCodeAt(i) << (16 - (i % 2) * 16)); + } + + return WordArray.create(words, utf16StrLength * 2); + } + }; + + function swapEndian(word) { + return ((word << 8) & 0xff00ff00) | ((word >>> 8) & 0x00ff00ff); + } + }()); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var C_enc = C.enc; + + /** + * Base64 encoding strategy. + */ + var Base64 = C_enc.Base64 = { + /** + * Converts a word array to a Base64 string. + * + * @param {WordArray} wordArray The word array. + * + * @return {string} The Base64 string. + * + * @static + * + * @example + * + * var base64String = CryptoJS.enc.Base64.stringify(wordArray); + */ + stringify: function (wordArray) { + // Shortcuts + var words = wordArray.words; + var sigBytes = wordArray.sigBytes; + var map = this._map; + + // Clamp excess bits + wordArray.clamp(); + + // Convert + var base64Chars = []; + for (var i = 0; i < sigBytes; i += 3) { + var byte1 = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; + var byte2 = (words[(i + 1) >>> 2] >>> (24 - ((i + 1) % 4) * 8)) & 0xff; + var byte3 = (words[(i + 2) >>> 2] >>> (24 - ((i + 2) % 4) * 8)) & 0xff; + + var triplet = (byte1 << 16) | (byte2 << 8) | byte3; + + for (var j = 0; (j < 4) && (i + j * 0.75 < sigBytes); j++) { + base64Chars.push(map.charAt((triplet >>> (6 * (3 - j))) & 0x3f)); + } + } + + // Add padding + var paddingChar = map.charAt(64); + if (paddingChar) { + while (base64Chars.length % 4) { + base64Chars.push(paddingChar); + } + } + + return base64Chars.join(''); + }, + + /** + * Converts a Base64 string to a word array. + * + * @param {string} base64Str The Base64 string. + * + * @return {WordArray} The word array. 
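+         *
+         * Note: everything from the first padding character onward is ignored, and
+         * a reverse lookup table is built lazily on the first call.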
+         *
+         * @static
+         *
+         * @example
+         *
+         *     var wordArray = CryptoJS.enc.Base64.parse(base64String);
+         */
+        parse: function (base64Str) {
+            // Shortcuts
+            var base64StrLength = base64Str.length;
+            var map = this._map;
+            var reverseMap = this._reverseMap;
+
+            if (!reverseMap) {
+                    reverseMap = this._reverseMap = [];
+                    for (var j = 0; j < map.length; j++) {
+                        reverseMap[map.charCodeAt(j)] = j;
+                    }
+            }
+
+            // Ignore padding
+            var paddingChar = map.charAt(64);
+            if (paddingChar) {
+                var paddingIndex = base64Str.indexOf(paddingChar);
+                if (paddingIndex !== -1) {
+                    base64StrLength = paddingIndex;
+                }
+            }
+
+            // Convert
+            return parseLoop(base64Str, base64StrLength, reverseMap);
+
+        },
+
+        _map: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/='
+    };
+
+    function parseLoop(base64Str, base64StrLength, reverseMap) {
+        var words = [];
+        var nBytes = 0;
+        for (var i = 0; i < base64StrLength; i++) {
+            if (i % 4) {
+                var bits1 = reverseMap[base64Str.charCodeAt(i - 1)] << ((i % 4) * 2);
+                var bits2 = reverseMap[base64Str.charCodeAt(i)] >>> (6 - (i % 4) * 2);
+                var bitsCombined = bits1 | bits2;
+                words[nBytes >>> 2] |= bitsCombined << (24 - (nBytes % 4) * 8);
+                nBytes++;
+            }
+        }
+        return WordArray.create(words, nBytes);
+    }
+}());
+
+
+(function () {
+    // Shortcuts
+    var C = CryptoJS;
+    var C_lib = C.lib;
+    var WordArray = C_lib.WordArray;
+    var C_enc = C.enc;
+
+    /**
+     * Base64url encoding strategy.
+     */
+    var Base64url = C_enc.Base64url = {
+        /**
+         * Converts a word array to a Base64url string.
+         *
+         * @param {WordArray} wordArray The word array.
+         *
+         * @param {boolean} urlSafe Whether to use the URL-safe alphabet. Default: true
+         *
+         * @return {string} The Base64url string.
+         *
+         * @static
+         *
+         * @example
+         *
+         *     var base64String = CryptoJS.enc.Base64url.stringify(wordArray);
+         */
+        stringify: function (wordArray, urlSafe=true) {
+            // Shortcuts
+            var words = wordArray.words;
+            var sigBytes = wordArray.sigBytes;
+            var map = urlSafe ? this._safe_map : this._map;
+
+            // Clamp excess bits
+            wordArray.clamp();
+
+            // Convert
+            var base64Chars = [];
+            for (var i = 0; i < sigBytes; i += 3) {
+                var byte1 = (words[i >>> 2]       >>> (24 - (i % 4) * 8))       & 0xff;
+                var byte2 = (words[(i + 1) >>> 2] >>> (24 - ((i + 1) % 4) * 8)) & 0xff;
+                var byte3 = (words[(i + 2) >>> 2] >>> (24 - ((i + 2) % 4) * 8)) & 0xff;
+
+                var triplet = (byte1 << 16) | (byte2 << 8) | byte3;
+
+                for (var j = 0; (j < 4) && (i + j * 0.75 < sigBytes); j++) {
+                    base64Chars.push(map.charAt((triplet >>> (6 * (3 - j))) & 0x3f));
+                }
+            }
+
+            // Add padding
+            var paddingChar = map.charAt(64);
+            if (paddingChar) {
+                while (base64Chars.length % 4) {
+                    base64Chars.push(paddingChar);
+                }
+            }
+
+            return base64Chars.join('');
+        },
+
+        /**
+         * Converts a Base64url string to a word array.
+         *
+         * @param {string} base64Str The Base64url string.
+         *
+         * @param {boolean} urlSafe Whether to expect the URL-safe alphabet. Default: true
+         *
+         * @return {WordArray} The word array.
+         *
+         * @static
+         *
+         * @example
+         *
+         *     var wordArray = CryptoJS.enc.Base64url.parse(base64String);
+         */
+        parse: function (base64Str, urlSafe=true) {
+            // Shortcuts
+            var base64StrLength = base64Str.length;
+            var map = urlSafe ?
this._safe_map : this._map;
+            var reverseMap = this._reverseMap;
+
+            if (!reverseMap) {
+                    reverseMap = this._reverseMap = [];
+                    for (var j = 0; j < map.length; j++) {
+                        reverseMap[map.charCodeAt(j)] = j;
+                    }
+            }
+
+            // Ignore padding
+            var paddingChar = map.charAt(64);
+            if (paddingChar) {
+                var paddingIndex = base64Str.indexOf(paddingChar);
+                if (paddingIndex !== -1) {
+                    base64StrLength = paddingIndex;
+                }
+            }
+
+            // Convert
+            return parseLoop(base64Str, base64StrLength, reverseMap);
+
+        },
+
+        _map: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=',
+        _safe_map: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_',
+    };
+
+    function parseLoop(base64Str, base64StrLength, reverseMap) {
+        var words = [];
+        var nBytes = 0;
+        for (var i = 0; i < base64StrLength; i++) {
+            if (i % 4) {
+                var bits1 = reverseMap[base64Str.charCodeAt(i - 1)] << ((i % 4) * 2);
+                var bits2 = reverseMap[base64Str.charCodeAt(i)] >>> (6 - (i % 4) * 2);
+                var bitsCombined = bits1 | bits2;
+                words[nBytes >>> 2] |= bitsCombined << (24 - (nBytes % 4) * 8);
+                nBytes++;
+            }
+        }
+        return WordArray.create(words, nBytes);
+    }
+}());
+
+(function (Math) {
+    // Shortcuts
+    var C = CryptoJS;
+    var C_lib = C.lib;
+    var WordArray = C_lib.WordArray;
+    var Hasher = C_lib.Hasher;
+    var C_algo = C.algo;
+
+    // Constants table
+    var T = [];
+
+    // Compute constants
+    (function () {
+        for (var i = 0; i < 64; i++) {
+            T[i] = (Math.abs(Math.sin(i + 1)) * 0x100000000) | 0;
+        }
+    }());
+
+    /**
+     * MD5 hash algorithm.
+     */
+    var MD5 = C_algo.MD5 = Hasher.extend({
+        _doReset: function () {
+            this._hash = new WordArray.init([
+                0x67452301, 0xefcdab89,
+                0x98badcfe, 0x10325476
+            ]);
+        },
+
+        _doProcessBlock: function (M, offset) {
+            // Swap endian
+            for (var i = 0; i < 16; i++) {
+                // Shortcuts
+                var offset_i = offset + i;
+                var M_offset_i = M[offset_i];
+
+                M[offset_i] = (
+                    (((M_offset_i << 8)  | (M_offset_i >>> 24)) & 0x00ff00ff) |
+                    (((M_offset_i << 24) | (M_offset_i >>> 8))  & 0xff00ff00)
+                );
+            }
+
+            // Shortcuts
+            var H = this._hash.words;
+
+            var M_offset_0  = M[offset + 0];
+            var M_offset_1  = M[offset + 1];
+            var M_offset_2  = M[offset + 2];
+            var M_offset_3  = M[offset + 3];
+            var M_offset_4  = M[offset + 4];
+            var M_offset_5  = M[offset + 5];
+            var M_offset_6  = M[offset + 6];
+            var M_offset_7  = M[offset + 7];
+            var M_offset_8  = M[offset + 8];
+            var M_offset_9  = M[offset + 9];
+            var M_offset_10 = M[offset + 10];
+            var M_offset_11 = M[offset + 11];
+            var M_offset_12 = M[offset + 12];
+            var M_offset_13 = M[offset + 13];
+            var M_offset_14 = M[offset + 14];
+            var M_offset_15 = M[offset + 15];
+
+            // Working variables
+            var a = H[0];
+            var b = H[1];
+            var c = H[2];
+            var d = H[3];
+
+            // Computation
+            a = FF(a, b, c, d, M_offset_0,  7,  T[0]);
+            d = FF(d, a, b, c, M_offset_1,  12, T[1]);
+            c = FF(c, d, a, b, M_offset_2,  17, T[2]);
+            b = FF(b, c, d, a, M_offset_3,  22, T[3]);
+            a = FF(a, b, c, d, M_offset_4,  7,  T[4]);
+            d = FF(d, a, b, c, M_offset_5,  12, T[5]);
+            c = FF(c, d, a, b, M_offset_6,  17, T[6]);
+            b = FF(b, c, d, a, M_offset_7,  22, T[7]);
+            a = FF(a, b, c, d, M_offset_8,  7,  T[8]);
+            d = FF(d, a, b, c, M_offset_9,  12, T[9]);
+            c = FF(c, d, a, b, M_offset_10, 17, T[10]);
+            b = FF(b, c, d, a, M_offset_11, 22, T[11]);
+            a = FF(a, b, c, d, M_offset_12, 7,  T[12]);
+            d = FF(d, a, b, c, M_offset_13, 12, T[13]);
+            c = FF(c, d, a, b, M_offset_14, 17, T[14]);
+            b = FF(b, c, d, a, M_offset_15, 22, T[15]);
+
+            a = GG(a, b, c, d, M_offset_1,  5,  T[16]);
+            d = GG(d, a, b, c, M_offset_6,  9,  T[17]);
+            c = GG(c, d, a, b,
M_offset_11, 14, T[18]); + b = GG(b, c, d, a, M_offset_0, 20, T[19]); + a = GG(a, b, c, d, M_offset_5, 5, T[20]); + d = GG(d, a, b, c, M_offset_10, 9, T[21]); + c = GG(c, d, a, b, M_offset_15, 14, T[22]); + b = GG(b, c, d, a, M_offset_4, 20, T[23]); + a = GG(a, b, c, d, M_offset_9, 5, T[24]); + d = GG(d, a, b, c, M_offset_14, 9, T[25]); + c = GG(c, d, a, b, M_offset_3, 14, T[26]); + b = GG(b, c, d, a, M_offset_8, 20, T[27]); + a = GG(a, b, c, d, M_offset_13, 5, T[28]); + d = GG(d, a, b, c, M_offset_2, 9, T[29]); + c = GG(c, d, a, b, M_offset_7, 14, T[30]); + b = GG(b, c, d, a, M_offset_12, 20, T[31]); + + a = HH(a, b, c, d, M_offset_5, 4, T[32]); + d = HH(d, a, b, c, M_offset_8, 11, T[33]); + c = HH(c, d, a, b, M_offset_11, 16, T[34]); + b = HH(b, c, d, a, M_offset_14, 23, T[35]); + a = HH(a, b, c, d, M_offset_1, 4, T[36]); + d = HH(d, a, b, c, M_offset_4, 11, T[37]); + c = HH(c, d, a, b, M_offset_7, 16, T[38]); + b = HH(b, c, d, a, M_offset_10, 23, T[39]); + a = HH(a, b, c, d, M_offset_13, 4, T[40]); + d = HH(d, a, b, c, M_offset_0, 11, T[41]); + c = HH(c, d, a, b, M_offset_3, 16, T[42]); + b = HH(b, c, d, a, M_offset_6, 23, T[43]); + a = HH(a, b, c, d, M_offset_9, 4, T[44]); + d = HH(d, a, b, c, M_offset_12, 11, T[45]); + c = HH(c, d, a, b, M_offset_15, 16, T[46]); + b = HH(b, c, d, a, M_offset_2, 23, T[47]); + + a = II(a, b, c, d, M_offset_0, 6, T[48]); + d = II(d, a, b, c, M_offset_7, 10, T[49]); + c = II(c, d, a, b, M_offset_14, 15, T[50]); + b = II(b, c, d, a, M_offset_5, 21, T[51]); + a = II(a, b, c, d, M_offset_12, 6, T[52]); + d = II(d, a, b, c, M_offset_3, 10, T[53]); + c = II(c, d, a, b, M_offset_10, 15, T[54]); + b = II(b, c, d, a, M_offset_1, 21, T[55]); + a = II(a, b, c, d, M_offset_8, 6, T[56]); + d = II(d, a, b, c, M_offset_15, 10, T[57]); + c = II(c, d, a, b, M_offset_6, 15, T[58]); + b = II(b, c, d, a, M_offset_13, 21, T[59]); + a = II(a, b, c, d, M_offset_4, 6, T[60]); + d = II(d, a, b, c, M_offset_11, 10, T[61]); + c = II(c, d, a, b, M_offset_2, 15, T[62]); + b = II(b, c, d, a, M_offset_9, 21, T[63]); + + // Intermediate hash value + H[0] = (H[0] + a) | 0; + H[1] = (H[1] + b) | 0; + H[2] = (H[2] + c) | 0; + H[3] = (H[3] + d) | 0; + }, + + _doFinalize: function () { + // Shortcuts + var data = this._data; + var dataWords = data.words; + + var nBitsTotal = this._nDataBytes * 8; + var nBitsLeft = data.sigBytes * 8; + + // Add padding + dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); + + var nBitsTotalH = Math.floor(nBitsTotal / 0x100000000); + var nBitsTotalL = nBitsTotal; + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = ( + (((nBitsTotalH << 8) | (nBitsTotalH >>> 24)) & 0x00ff00ff) | + (((nBitsTotalH << 24) | (nBitsTotalH >>> 8)) & 0xff00ff00) + ); + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = ( + (((nBitsTotalL << 8) | (nBitsTotalL >>> 24)) & 0x00ff00ff) | + (((nBitsTotalL << 24) | (nBitsTotalL >>> 8)) & 0xff00ff00) + ); + + data.sigBytes = (dataWords.length + 1) * 4; + + // Hash final blocks + this._process(); + + // Shortcuts + var hash = this._hash; + var H = hash.words; + + // Swap endian + for (var i = 0; i < 4; i++) { + // Shortcut + var H_i = H[i]; + + H[i] = (((H_i << 8) | (H_i >>> 24)) & 0x00ff00ff) | + (((H_i << 24) | (H_i >>> 8)) & 0xff00ff00); + } + + // Return final computed hash + return hash; + }, + + clone: function () { + var clone = Hasher.clone.call(this); + clone._hash = this._hash.clone(); + + return clone; + } + }); + + function FF(a, b, c, d, x, s, t) { + var n = a + ((b & c) | (~b & d)) + x + t; + return ((n << s) | (n >>> 
(32 - s))) + b; + } + + function GG(a, b, c, d, x, s, t) { + var n = a + ((b & d) | (c & ~d)) + x + t; + return ((n << s) | (n >>> (32 - s))) + b; + } + + function HH(a, b, c, d, x, s, t) { + var n = a + (b ^ c ^ d) + x + t; + return ((n << s) | (n >>> (32 - s))) + b; + } + + function II(a, b, c, d, x, s, t) { + var n = a + (c ^ (b | ~d)) + x + t; + return ((n << s) | (n >>> (32 - s))) + b; + } + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.MD5('message'); + * var hash = CryptoJS.MD5(wordArray); + */ + C.MD5 = Hasher._createHelper(MD5); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacMD5(message, key); + */ + C.HmacMD5 = Hasher._createHmacHelper(MD5); + }(Math)); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var Hasher = C_lib.Hasher; + var C_algo = C.algo; + + // Reusable object + var W = []; + + /** + * SHA-1 hash algorithm. + */ + var SHA1 = C_algo.SHA1 = Hasher.extend({ + _doReset: function () { + this._hash = new WordArray.init([ + 0x67452301, 0xefcdab89, + 0x98badcfe, 0x10325476, + 0xc3d2e1f0 + ]); + }, + + _doProcessBlock: function (M, offset) { + // Shortcut + var H = this._hash.words; + + // Working variables + var a = H[0]; + var b = H[1]; + var c = H[2]; + var d = H[3]; + var e = H[4]; + + // Computation + for (var i = 0; i < 80; i++) { + if (i < 16) { + W[i] = M[offset + i] | 0; + } else { + var n = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16]; + W[i] = (n << 1) | (n >>> 31); + } + + var t = ((a << 5) | (a >>> 27)) + e + W[i]; + if (i < 20) { + t += ((b & c) | (~b & d)) + 0x5a827999; + } else if (i < 40) { + t += (b ^ c ^ d) + 0x6ed9eba1; + } else if (i < 60) { + t += ((b & c) | (b & d) | (c & d)) - 0x70e44324; + } else /* if (i < 80) */ { + t += (b ^ c ^ d) - 0x359d3e2a; + } + + e = d; + d = c; + c = (b << 30) | (b >>> 2); + b = a; + a = t; + } + + // Intermediate hash value + H[0] = (H[0] + a) | 0; + H[1] = (H[1] + b) | 0; + H[2] = (H[2] + c) | 0; + H[3] = (H[3] + d) | 0; + H[4] = (H[4] + e) | 0; + }, + + _doFinalize: function () { + // Shortcuts + var data = this._data; + var dataWords = data.words; + + var nBitsTotal = this._nDataBytes * 8; + var nBitsLeft = data.sigBytes * 8; + + // Add padding + dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(nBitsTotal / 0x100000000); + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal; + data.sigBytes = dataWords.length * 4; + + // Hash final blocks + this._process(); + + // Return final computed hash + return this._hash; + }, + + clone: function () { + var clone = Hasher.clone.call(this); + clone._hash = this._hash.clone(); + + return clone; + } + }); + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.SHA1('message'); + * var hash = CryptoJS.SHA1(wordArray); + */ + C.SHA1 = Hasher._createHelper(SHA1); + + /** + * Shortcut function to the HMAC's object interface. 
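+     *
+     * Note: per _createHmacHelper above, this is roughly equivalent to
+     * CryptoJS.algo.HMAC.create(CryptoJS.algo.SHA1, key).finalize(message).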
+ * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacSHA1(message, key); + */ + C.HmacSHA1 = Hasher._createHmacHelper(SHA1); + }()); + + + (function (Math) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var Hasher = C_lib.Hasher; + var C_algo = C.algo; + + // Initialization and round constants tables + var H = []; + var K = []; + + // Compute constants + (function () { + function isPrime(n) { + var sqrtN = Math.sqrt(n); + for (var factor = 2; factor <= sqrtN; factor++) { + if (!(n % factor)) { + return false; + } + } + + return true; + } + + function getFractionalBits(n) { + return ((n - (n | 0)) * 0x100000000) | 0; + } + + var n = 2; + var nPrime = 0; + while (nPrime < 64) { + if (isPrime(n)) { + if (nPrime < 8) { + H[nPrime] = getFractionalBits(Math.pow(n, 1 / 2)); + } + K[nPrime] = getFractionalBits(Math.pow(n, 1 / 3)); + + nPrime++; + } + + n++; + } + }()); + + // Reusable object + var W = []; + + /** + * SHA-256 hash algorithm. + */ + var SHA256 = C_algo.SHA256 = Hasher.extend({ + _doReset: function () { + this._hash = new WordArray.init(H.slice(0)); + }, + + _doProcessBlock: function (M, offset) { + // Shortcut + var H = this._hash.words; + + // Working variables + var a = H[0]; + var b = H[1]; + var c = H[2]; + var d = H[3]; + var e = H[4]; + var f = H[5]; + var g = H[6]; + var h = H[7]; + + // Computation + for (var i = 0; i < 64; i++) { + if (i < 16) { + W[i] = M[offset + i] | 0; + } else { + var gamma0x = W[i - 15]; + var gamma0 = ((gamma0x << 25) | (gamma0x >>> 7)) ^ + ((gamma0x << 14) | (gamma0x >>> 18)) ^ + (gamma0x >>> 3); + + var gamma1x = W[i - 2]; + var gamma1 = ((gamma1x << 15) | (gamma1x >>> 17)) ^ + ((gamma1x << 13) | (gamma1x >>> 19)) ^ + (gamma1x >>> 10); + + W[i] = gamma0 + W[i - 7] + gamma1 + W[i - 16]; + } + + var ch = (e & f) ^ (~e & g); + var maj = (a & b) ^ (a & c) ^ (b & c); + + var sigma0 = ((a << 30) | (a >>> 2)) ^ ((a << 19) | (a >>> 13)) ^ ((a << 10) | (a >>> 22)); + var sigma1 = ((e << 26) | (e >>> 6)) ^ ((e << 21) | (e >>> 11)) ^ ((e << 7) | (e >>> 25)); + + var t1 = h + sigma1 + ch + K[i] + W[i]; + var t2 = sigma0 + maj; + + h = g; + g = f; + f = e; + e = (d + t1) | 0; + d = c; + c = b; + b = a; + a = (t1 + t2) | 0; + } + + // Intermediate hash value + H[0] = (H[0] + a) | 0; + H[1] = (H[1] + b) | 0; + H[2] = (H[2] + c) | 0; + H[3] = (H[3] + d) | 0; + H[4] = (H[4] + e) | 0; + H[5] = (H[5] + f) | 0; + H[6] = (H[6] + g) | 0; + H[7] = (H[7] + h) | 0; + }, + + _doFinalize: function () { + // Shortcuts + var data = this._data; + var dataWords = data.words; + + var nBitsTotal = this._nDataBytes * 8; + var nBitsLeft = data.sigBytes * 8; + + // Add padding + dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(nBitsTotal / 0x100000000); + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal; + data.sigBytes = dataWords.length * 4; + + // Hash final blocks + this._process(); + + // Return final computed hash + return this._hash; + }, + + clone: function () { + var clone = Hasher.clone.call(this); + clone._hash = this._hash.clone(); + + return clone; + } + }); + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. 
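+     *
+     * Note: the result is a WordArray; use toString() for the default hex form,
+     * or pass an encoder, e.g. CryptoJS.SHA256('message').toString(CryptoJS.enc.Base64).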
+ * + * @static + * + * @example + * + * var hash = CryptoJS.SHA256('message'); + * var hash = CryptoJS.SHA256(wordArray); + */ + C.SHA256 = Hasher._createHelper(SHA256); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacSHA256(message, key); + */ + C.HmacSHA256 = Hasher._createHmacHelper(SHA256); + }(Math)); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var C_algo = C.algo; + var SHA256 = C_algo.SHA256; + + /** + * SHA-224 hash algorithm. + */ + var SHA224 = C_algo.SHA224 = SHA256.extend({ + _doReset: function () { + this._hash = new WordArray.init([ + 0xc1059ed8, 0x367cd507, 0x3070dd17, 0xf70e5939, + 0xffc00b31, 0x68581511, 0x64f98fa7, 0xbefa4fa4 + ]); + }, + + _doFinalize: function () { + var hash = SHA256._doFinalize.call(this); + + hash.sigBytes -= 4; + + return hash; + } + }); + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.SHA224('message'); + * var hash = CryptoJS.SHA224(wordArray); + */ + C.SHA224 = SHA256._createHelper(SHA224); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacSHA224(message, key); + */ + C.HmacSHA224 = SHA256._createHmacHelper(SHA224); + }()); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var Hasher = C_lib.Hasher; + var C_x64 = C.x64; + var X64Word = C_x64.Word; + var X64WordArray = C_x64.WordArray; + var C_algo = C.algo; + + function X64Word_create() { + return X64Word.create.apply(X64Word, arguments); + } + + // Constants + var K = [ + X64Word_create(0x428a2f98, 0xd728ae22), X64Word_create(0x71374491, 0x23ef65cd), + X64Word_create(0xb5c0fbcf, 0xec4d3b2f), X64Word_create(0xe9b5dba5, 0x8189dbbc), + X64Word_create(0x3956c25b, 0xf348b538), X64Word_create(0x59f111f1, 0xb605d019), + X64Word_create(0x923f82a4, 0xaf194f9b), X64Word_create(0xab1c5ed5, 0xda6d8118), + X64Word_create(0xd807aa98, 0xa3030242), X64Word_create(0x12835b01, 0x45706fbe), + X64Word_create(0x243185be, 0x4ee4b28c), X64Word_create(0x550c7dc3, 0xd5ffb4e2), + X64Word_create(0x72be5d74, 0xf27b896f), X64Word_create(0x80deb1fe, 0x3b1696b1), + X64Word_create(0x9bdc06a7, 0x25c71235), X64Word_create(0xc19bf174, 0xcf692694), + X64Word_create(0xe49b69c1, 0x9ef14ad2), X64Word_create(0xefbe4786, 0x384f25e3), + X64Word_create(0x0fc19dc6, 0x8b8cd5b5), X64Word_create(0x240ca1cc, 0x77ac9c65), + X64Word_create(0x2de92c6f, 0x592b0275), X64Word_create(0x4a7484aa, 0x6ea6e483), + X64Word_create(0x5cb0a9dc, 0xbd41fbd4), X64Word_create(0x76f988da, 0x831153b5), + X64Word_create(0x983e5152, 0xee66dfab), X64Word_create(0xa831c66d, 0x2db43210), + X64Word_create(0xb00327c8, 0x98fb213f), X64Word_create(0xbf597fc7, 0xbeef0ee4), + X64Word_create(0xc6e00bf3, 0x3da88fc2), X64Word_create(0xd5a79147, 0x930aa725), + X64Word_create(0x06ca6351, 0xe003826f), X64Word_create(0x14292967, 0x0a0e6e70), + X64Word_create(0x27b70a85, 0x46d22ffc), X64Word_create(0x2e1b2138, 0x5c26c926), + X64Word_create(0x4d2c6dfc, 
0x5ac42aed), X64Word_create(0x53380d13, 0x9d95b3df), + X64Word_create(0x650a7354, 0x8baf63de), X64Word_create(0x766a0abb, 0x3c77b2a8), + X64Word_create(0x81c2c92e, 0x47edaee6), X64Word_create(0x92722c85, 0x1482353b), + X64Word_create(0xa2bfe8a1, 0x4cf10364), X64Word_create(0xa81a664b, 0xbc423001), + X64Word_create(0xc24b8b70, 0xd0f89791), X64Word_create(0xc76c51a3, 0x0654be30), + X64Word_create(0xd192e819, 0xd6ef5218), X64Word_create(0xd6990624, 0x5565a910), + X64Word_create(0xf40e3585, 0x5771202a), X64Word_create(0x106aa070, 0x32bbd1b8), + X64Word_create(0x19a4c116, 0xb8d2d0c8), X64Word_create(0x1e376c08, 0x5141ab53), + X64Word_create(0x2748774c, 0xdf8eeb99), X64Word_create(0x34b0bcb5, 0xe19b48a8), + X64Word_create(0x391c0cb3, 0xc5c95a63), X64Word_create(0x4ed8aa4a, 0xe3418acb), + X64Word_create(0x5b9cca4f, 0x7763e373), X64Word_create(0x682e6ff3, 0xd6b2b8a3), + X64Word_create(0x748f82ee, 0x5defb2fc), X64Word_create(0x78a5636f, 0x43172f60), + X64Word_create(0x84c87814, 0xa1f0ab72), X64Word_create(0x8cc70208, 0x1a6439ec), + X64Word_create(0x90befffa, 0x23631e28), X64Word_create(0xa4506ceb, 0xde82bde9), + X64Word_create(0xbef9a3f7, 0xb2c67915), X64Word_create(0xc67178f2, 0xe372532b), + X64Word_create(0xca273ece, 0xea26619c), X64Word_create(0xd186b8c7, 0x21c0c207), + X64Word_create(0xeada7dd6, 0xcde0eb1e), X64Word_create(0xf57d4f7f, 0xee6ed178), + X64Word_create(0x06f067aa, 0x72176fba), X64Word_create(0x0a637dc5, 0xa2c898a6), + X64Word_create(0x113f9804, 0xbef90dae), X64Word_create(0x1b710b35, 0x131c471b), + X64Word_create(0x28db77f5, 0x23047d84), X64Word_create(0x32caab7b, 0x40c72493), + X64Word_create(0x3c9ebe0a, 0x15c9bebc), X64Word_create(0x431d67c4, 0x9c100d4c), + X64Word_create(0x4cc5d4be, 0xcb3e42b6), X64Word_create(0x597f299c, 0xfc657e2a), + X64Word_create(0x5fcb6fab, 0x3ad6faec), X64Word_create(0x6c44198c, 0x4a475817) + ]; + + // Reusable objects + var W = []; + (function () { + for (var i = 0; i < 80; i++) { + W[i] = X64Word_create(); + } + }()); + + /** + * SHA-512 hash algorithm. 
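+     *
+     * Processes 1024-bit (32-word) blocks, with each 64-bit lane represented
+     * as an X64Word pair of 32-bit halves (high, low).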
+ */ + var SHA512 = C_algo.SHA512 = Hasher.extend({ + _doReset: function () { + this._hash = new X64WordArray.init([ + new X64Word.init(0x6a09e667, 0xf3bcc908), new X64Word.init(0xbb67ae85, 0x84caa73b), + new X64Word.init(0x3c6ef372, 0xfe94f82b), new X64Word.init(0xa54ff53a, 0x5f1d36f1), + new X64Word.init(0x510e527f, 0xade682d1), new X64Word.init(0x9b05688c, 0x2b3e6c1f), + new X64Word.init(0x1f83d9ab, 0xfb41bd6b), new X64Word.init(0x5be0cd19, 0x137e2179) + ]); + }, + + _doProcessBlock: function (M, offset) { + // Shortcuts + var H = this._hash.words; + + var H0 = H[0]; + var H1 = H[1]; + var H2 = H[2]; + var H3 = H[3]; + var H4 = H[4]; + var H5 = H[5]; + var H6 = H[6]; + var H7 = H[7]; + + var H0h = H0.high; + var H0l = H0.low; + var H1h = H1.high; + var H1l = H1.low; + var H2h = H2.high; + var H2l = H2.low; + var H3h = H3.high; + var H3l = H3.low; + var H4h = H4.high; + var H4l = H4.low; + var H5h = H5.high; + var H5l = H5.low; + var H6h = H6.high; + var H6l = H6.low; + var H7h = H7.high; + var H7l = H7.low; + + // Working variables + var ah = H0h; + var al = H0l; + var bh = H1h; + var bl = H1l; + var ch = H2h; + var cl = H2l; + var dh = H3h; + var dl = H3l; + var eh = H4h; + var el = H4l; + var fh = H5h; + var fl = H5l; + var gh = H6h; + var gl = H6l; + var hh = H7h; + var hl = H7l; + + // Rounds + for (var i = 0; i < 80; i++) { + var Wil; + var Wih; + + // Shortcut + var Wi = W[i]; + + // Extend message + if (i < 16) { + Wih = Wi.high = M[offset + i * 2] | 0; + Wil = Wi.low = M[offset + i * 2 + 1] | 0; + } else { + // Gamma0 + var gamma0x = W[i - 15]; + var gamma0xh = gamma0x.high; + var gamma0xl = gamma0x.low; + var gamma0h = ((gamma0xh >>> 1) | (gamma0xl << 31)) ^ ((gamma0xh >>> 8) | (gamma0xl << 24)) ^ (gamma0xh >>> 7); + var gamma0l = ((gamma0xl >>> 1) | (gamma0xh << 31)) ^ ((gamma0xl >>> 8) | (gamma0xh << 24)) ^ ((gamma0xl >>> 7) | (gamma0xh << 25)); + + // Gamma1 + var gamma1x = W[i - 2]; + var gamma1xh = gamma1x.high; + var gamma1xl = gamma1x.low; + var gamma1h = ((gamma1xh >>> 19) | (gamma1xl << 13)) ^ ((gamma1xh << 3) | (gamma1xl >>> 29)) ^ (gamma1xh >>> 6); + var gamma1l = ((gamma1xl >>> 19) | (gamma1xh << 13)) ^ ((gamma1xl << 3) | (gamma1xh >>> 29)) ^ ((gamma1xl >>> 6) | (gamma1xh << 26)); + + // W[i] = gamma0 + W[i - 7] + gamma1 + W[i - 16] + var Wi7 = W[i - 7]; + var Wi7h = Wi7.high; + var Wi7l = Wi7.low; + + var Wi16 = W[i - 16]; + var Wi16h = Wi16.high; + var Wi16l = Wi16.low; + + Wil = gamma0l + Wi7l; + Wih = gamma0h + Wi7h + ((Wil >>> 0) < (gamma0l >>> 0) ? 1 : 0); + Wil = Wil + gamma1l; + Wih = Wih + gamma1h + ((Wil >>> 0) < (gamma1l >>> 0) ? 1 : 0); + Wil = Wil + Wi16l; + Wih = Wih + Wi16h + ((Wil >>> 0) < (Wi16l >>> 0) ? 1 : 0); + + Wi.high = Wih; + Wi.low = Wil; + } + + var chh = (eh & fh) ^ (~eh & gh); + var chl = (el & fl) ^ (~el & gl); + var majh = (ah & bh) ^ (ah & ch) ^ (bh & ch); + var majl = (al & bl) ^ (al & cl) ^ (bl & cl); + + var sigma0h = ((ah >>> 28) | (al << 4)) ^ ((ah << 30) | (al >>> 2)) ^ ((ah << 25) | (al >>> 7)); + var sigma0l = ((al >>> 28) | (ah << 4)) ^ ((al << 30) | (ah >>> 2)) ^ ((al << 25) | (ah >>> 7)); + var sigma1h = ((eh >>> 14) | (el << 18)) ^ ((eh >>> 18) | (el << 14)) ^ ((eh << 23) | (el >>> 9)); + var sigma1l = ((el >>> 14) | (eh << 18)) ^ ((el >>> 18) | (eh << 14)) ^ ((el << 23) | (eh >>> 9)); + + // t1 = h + sigma1 + ch + K[i] + W[i] + var Ki = K[i]; + var Kih = Ki.high; + var Kil = Ki.low; + + var t1l = hl + sigma1l; + var t1h = hh + sigma1h + ((t1l >>> 0) < (hl >>> 0) ? 
1 : 0); + var t1l = t1l + chl; + var t1h = t1h + chh + ((t1l >>> 0) < (chl >>> 0) ? 1 : 0); + var t1l = t1l + Kil; + var t1h = t1h + Kih + ((t1l >>> 0) < (Kil >>> 0) ? 1 : 0); + var t1l = t1l + Wil; + var t1h = t1h + Wih + ((t1l >>> 0) < (Wil >>> 0) ? 1 : 0); + + // t2 = sigma0 + maj + var t2l = sigma0l + majl; + var t2h = sigma0h + majh + ((t2l >>> 0) < (sigma0l >>> 0) ? 1 : 0); + + // Update working variables + hh = gh; + hl = gl; + gh = fh; + gl = fl; + fh = eh; + fl = el; + el = (dl + t1l) | 0; + eh = (dh + t1h + ((el >>> 0) < (dl >>> 0) ? 1 : 0)) | 0; + dh = ch; + dl = cl; + ch = bh; + cl = bl; + bh = ah; + bl = al; + al = (t1l + t2l) | 0; + ah = (t1h + t2h + ((al >>> 0) < (t1l >>> 0) ? 1 : 0)) | 0; + } + + // Intermediate hash value + H0l = H0.low = (H0l + al); + H0.high = (H0h + ah + ((H0l >>> 0) < (al >>> 0) ? 1 : 0)); + H1l = H1.low = (H1l + bl); + H1.high = (H1h + bh + ((H1l >>> 0) < (bl >>> 0) ? 1 : 0)); + H2l = H2.low = (H2l + cl); + H2.high = (H2h + ch + ((H2l >>> 0) < (cl >>> 0) ? 1 : 0)); + H3l = H3.low = (H3l + dl); + H3.high = (H3h + dh + ((H3l >>> 0) < (dl >>> 0) ? 1 : 0)); + H4l = H4.low = (H4l + el); + H4.high = (H4h + eh + ((H4l >>> 0) < (el >>> 0) ? 1 : 0)); + H5l = H5.low = (H5l + fl); + H5.high = (H5h + fh + ((H5l >>> 0) < (fl >>> 0) ? 1 : 0)); + H6l = H6.low = (H6l + gl); + H6.high = (H6h + gh + ((H6l >>> 0) < (gl >>> 0) ? 1 : 0)); + H7l = H7.low = (H7l + hl); + H7.high = (H7h + hh + ((H7l >>> 0) < (hl >>> 0) ? 1 : 0)); + }, + + _doFinalize: function () { + // Shortcuts + var data = this._data; + var dataWords = data.words; + + var nBitsTotal = this._nDataBytes * 8; + var nBitsLeft = data.sigBytes * 8; + + // Add padding + dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); + dataWords[(((nBitsLeft + 128) >>> 10) << 5) + 30] = Math.floor(nBitsTotal / 0x100000000); + dataWords[(((nBitsLeft + 128) >>> 10) << 5) + 31] = nBitsTotal; + data.sigBytes = dataWords.length * 4; + + // Hash final blocks + this._process(); + + // Convert hash to 32-bit word array before returning + var hash = this._hash.toX32(); + + // Return final computed hash + return hash; + }, + + clone: function () { + var clone = Hasher.clone.call(this); + clone._hash = this._hash.clone(); + + return clone; + }, + + blockSize: 1024/32 + }); + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.SHA512('message'); + * var hash = CryptoJS.SHA512(wordArray); + */ + C.SHA512 = Hasher._createHelper(SHA512); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacSHA512(message, key); + */ + C.HmacSHA512 = Hasher._createHmacHelper(SHA512); + }()); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_x64 = C.x64; + var X64Word = C_x64.Word; + var X64WordArray = C_x64.WordArray; + var C_algo = C.algo; + var SHA512 = C_algo.SHA512; + + /** + * SHA-384 hash algorithm. 
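+     *
+     * Identical to SHA-512 apart from different initial hash values and the
+     * final truncation of the digest by 16 bytes (see _doFinalize below).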
+ */ + var SHA384 = C_algo.SHA384 = SHA512.extend({ + _doReset: function () { + this._hash = new X64WordArray.init([ + new X64Word.init(0xcbbb9d5d, 0xc1059ed8), new X64Word.init(0x629a292a, 0x367cd507), + new X64Word.init(0x9159015a, 0x3070dd17), new X64Word.init(0x152fecd8, 0xf70e5939), + new X64Word.init(0x67332667, 0xffc00b31), new X64Word.init(0x8eb44a87, 0x68581511), + new X64Word.init(0xdb0c2e0d, 0x64f98fa7), new X64Word.init(0x47b5481d, 0xbefa4fa4) + ]); + }, + + _doFinalize: function () { + var hash = SHA512._doFinalize.call(this); + + hash.sigBytes -= 16; + + return hash; + } + }); + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.SHA384('message'); + * var hash = CryptoJS.SHA384(wordArray); + */ + C.SHA384 = SHA512._createHelper(SHA384); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacSHA384(message, key); + */ + C.HmacSHA384 = SHA512._createHmacHelper(SHA384); + }()); + + + (function (Math) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var Hasher = C_lib.Hasher; + var C_x64 = C.x64; + var X64Word = C_x64.Word; + var C_algo = C.algo; + + // Constants tables + var RHO_OFFSETS = []; + var PI_INDEXES = []; + var ROUND_CONSTANTS = []; + + // Compute Constants + (function () { + // Compute rho offset constants + var x = 1, y = 0; + for (var t = 0; t < 24; t++) { + RHO_OFFSETS[x + 5 * y] = ((t + 1) * (t + 2) / 2) % 64; + + var newX = y % 5; + var newY = (2 * x + 3 * y) % 5; + x = newX; + y = newY; + } + + // Compute pi index constants + for (var x = 0; x < 5; x++) { + for (var y = 0; y < 5; y++) { + PI_INDEXES[x + 5 * y] = y + ((2 * x + 3 * y) % 5) * 5; + } + } + + // Compute round constants + var LFSR = 0x01; + for (var i = 0; i < 24; i++) { + var roundConstantMsw = 0; + var roundConstantLsw = 0; + + for (var j = 0; j < 7; j++) { + if (LFSR & 0x01) { + var bitPosition = (1 << j) - 1; + if (bitPosition < 32) { + roundConstantLsw ^= 1 << bitPosition; + } else /* if (bitPosition >= 32) */ { + roundConstantMsw ^= 1 << (bitPosition - 32); + } + } + + // Compute next LFSR + if (LFSR & 0x80) { + // Primitive polynomial over GF(2): x^8 + x^6 + x^5 + x^4 + 1 + LFSR = (LFSR << 1) ^ 0x71; + } else { + LFSR <<= 1; + } + } + + ROUND_CONSTANTS[i] = X64Word.create(roundConstantMsw, roundConstantLsw); + } + }()); + + // Reusable objects for temporary values + var T = []; + (function () { + for (var i = 0; i < 25; i++) { + T[i] = X64Word.create(); + } + }()); + + /** + * SHA-3 hash algorithm. + */ + var SHA3 = C_algo.SHA3 = Hasher.extend({ + /** + * Configuration options. + * + * @property {number} outputLength + * The desired number of bits in the output hash. + * Only values permitted are: 224, 256, 384, 512. 
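+     *   The sponge rate follows from this value: blockSize = (1600 - 2 * outputLength) / 32
+     *   words (see _doReset). Note that the 0x01 padding applied in _doFinalize matches the
+     *   original Keccak submission rather than the FIPS 202 SHA-3 padding.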
+ * Default: 512 + */ + cfg: Hasher.cfg.extend({ + outputLength: 512 + }), + + _doReset: function () { + var state = this._state = [] + for (var i = 0; i < 25; i++) { + state[i] = new X64Word.init(); + } + + this.blockSize = (1600 - 2 * this.cfg.outputLength) / 32; + }, + + _doProcessBlock: function (M, offset) { + // Shortcuts + var state = this._state; + var nBlockSizeLanes = this.blockSize / 2; + + // Absorb + for (var i = 0; i < nBlockSizeLanes; i++) { + // Shortcuts + var M2i = M[offset + 2 * i]; + var M2i1 = M[offset + 2 * i + 1]; + + // Swap endian + M2i = ( + (((M2i << 8) | (M2i >>> 24)) & 0x00ff00ff) | + (((M2i << 24) | (M2i >>> 8)) & 0xff00ff00) + ); + M2i1 = ( + (((M2i1 << 8) | (M2i1 >>> 24)) & 0x00ff00ff) | + (((M2i1 << 24) | (M2i1 >>> 8)) & 0xff00ff00) + ); + + // Absorb message into state + var lane = state[i]; + lane.high ^= M2i1; + lane.low ^= M2i; + } + + // Rounds + for (var round = 0; round < 24; round++) { + // Theta + for (var x = 0; x < 5; x++) { + // Mix column lanes + var tMsw = 0, tLsw = 0; + for (var y = 0; y < 5; y++) { + var lane = state[x + 5 * y]; + tMsw ^= lane.high; + tLsw ^= lane.low; + } + + // Temporary values + var Tx = T[x]; + Tx.high = tMsw; + Tx.low = tLsw; + } + for (var x = 0; x < 5; x++) { + // Shortcuts + var Tx4 = T[(x + 4) % 5]; + var Tx1 = T[(x + 1) % 5]; + var Tx1Msw = Tx1.high; + var Tx1Lsw = Tx1.low; + + // Mix surrounding columns + var tMsw = Tx4.high ^ ((Tx1Msw << 1) | (Tx1Lsw >>> 31)); + var tLsw = Tx4.low ^ ((Tx1Lsw << 1) | (Tx1Msw >>> 31)); + for (var y = 0; y < 5; y++) { + var lane = state[x + 5 * y]; + lane.high ^= tMsw; + lane.low ^= tLsw; + } + } + + // Rho Pi + for (var laneIndex = 1; laneIndex < 25; laneIndex++) { + var tMsw; + var tLsw; + + // Shortcuts + var lane = state[laneIndex]; + var laneMsw = lane.high; + var laneLsw = lane.low; + var rhoOffset = RHO_OFFSETS[laneIndex]; + + // Rotate lanes + if (rhoOffset < 32) { + tMsw = (laneMsw << rhoOffset) | (laneLsw >>> (32 - rhoOffset)); + tLsw = (laneLsw << rhoOffset) | (laneMsw >>> (32 - rhoOffset)); + } else /* if (rhoOffset >= 32) */ { + tMsw = (laneLsw << (rhoOffset - 32)) | (laneMsw >>> (64 - rhoOffset)); + tLsw = (laneMsw << (rhoOffset - 32)) | (laneLsw >>> (64 - rhoOffset)); + } + + // Transpose lanes + var TPiLane = T[PI_INDEXES[laneIndex]]; + TPiLane.high = tMsw; + TPiLane.low = tLsw; + } + + // Rho pi at x = y = 0 + var T0 = T[0]; + var state0 = state[0]; + T0.high = state0.high; + T0.low = state0.low; + + // Chi + for (var x = 0; x < 5; x++) { + for (var y = 0; y < 5; y++) { + // Shortcuts + var laneIndex = x + 5 * y; + var lane = state[laneIndex]; + var TLane = T[laneIndex]; + var Tx1Lane = T[((x + 1) % 5) + 5 * y]; + var Tx2Lane = T[((x + 2) % 5) + 5 * y]; + + // Mix rows + lane.high = TLane.high ^ (~Tx1Lane.high & Tx2Lane.high); + lane.low = TLane.low ^ (~Tx1Lane.low & Tx2Lane.low); + } + } + + // Iota + var lane = state[0]; + var roundConstant = ROUND_CONSTANTS[round]; + lane.high ^= roundConstant.high; + lane.low ^= roundConstant.low; + } + }, + + _doFinalize: function () { + // Shortcuts + var data = this._data; + var dataWords = data.words; + var nBitsTotal = this._nDataBytes * 8; + var nBitsLeft = data.sigBytes * 8; + var blockSizeBits = this.blockSize * 32; + + // Add padding + dataWords[nBitsLeft >>> 5] |= 0x1 << (24 - nBitsLeft % 32); + dataWords[((Math.ceil((nBitsLeft + 1) / blockSizeBits) * blockSizeBits) >>> 5) - 1] |= 0x80; + data.sigBytes = dataWords.length * 4; + + // Hash final blocks + this._process(); + + // Shortcuts + var state = this._state; + var 
outputLengthBytes = this.cfg.outputLength / 8; + var outputLengthLanes = outputLengthBytes / 8; + + // Squeeze + var hashWords = []; + for (var i = 0; i < outputLengthLanes; i++) { + // Shortcuts + var lane = state[i]; + var laneMsw = lane.high; + var laneLsw = lane.low; + + // Swap endian + laneMsw = ( + (((laneMsw << 8) | (laneMsw >>> 24)) & 0x00ff00ff) | + (((laneMsw << 24) | (laneMsw >>> 8)) & 0xff00ff00) + ); + laneLsw = ( + (((laneLsw << 8) | (laneLsw >>> 24)) & 0x00ff00ff) | + (((laneLsw << 24) | (laneLsw >>> 8)) & 0xff00ff00) + ); + + // Squeeze state to retrieve hash + hashWords.push(laneLsw); + hashWords.push(laneMsw); + } + + // Return final computed hash + return new WordArray.init(hashWords, outputLengthBytes); + }, + + clone: function () { + var clone = Hasher.clone.call(this); + + var state = clone._state = this._state.slice(0); + for (var i = 0; i < 25; i++) { + state[i] = state[i].clone(); + } + + return clone; + } + }); + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.SHA3('message'); + * var hash = CryptoJS.SHA3(wordArray); + */ + C.SHA3 = Hasher._createHelper(SHA3); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacSHA3(message, key); + */ + C.HmacSHA3 = Hasher._createHmacHelper(SHA3); + }(Math)); + + + /** @preserve + (c) 2012 by Cédric Mesnil. All rights reserved. + + Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + + - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + + (function (Math) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var Hasher = C_lib.Hasher; + var C_algo = C.algo; + + // Constants table + var _zl = WordArray.create([ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8, + 3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12, + 1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2, + 4, 0, 5, 9, 7, 12, 2, 10, 14, 1, 3, 8, 11, 6, 15, 13]); + var _zr = WordArray.create([ + 5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12, + 6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2, + 15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13, + 8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14, + 12, 15, 10, 4, 1, 5, 8, 7, 6, 2, 13, 14, 0, 3, 9, 11]); + var _sl = WordArray.create([ + 11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8, + 7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12, + 11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5, + 11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12, + 9, 15, 5, 11, 6, 8, 13, 12, 5, 12, 13, 14, 11, 8, 5, 6 ]); + var _sr = WordArray.create([ + 8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6, + 9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11, + 9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5, + 15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8, + 8, 5, 12, 9, 12, 5, 14, 6, 8, 13, 6, 5, 15, 13, 11, 11 ]); + + var _hl = WordArray.create([ 0x00000000, 0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xA953FD4E]); + var _hr = WordArray.create([ 0x50A28BE6, 0x5C4DD124, 0x6D703EF3, 0x7A6D76E9, 0x00000000]); + + /** + * RIPEMD160 hash algorithm. + */ + var RIPEMD160 = C_algo.RIPEMD160 = Hasher.extend({ + _doReset: function () { + this._hash = WordArray.create([0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0]); + }, + + _doProcessBlock: function (M, offset) { + + // Swap endian + for (var i = 0; i < 16; i++) { + // Shortcuts + var offset_i = offset + i; + var M_offset_i = M[offset_i]; + + // Swap + M[offset_i] = ( + (((M_offset_i << 8) | (M_offset_i >>> 24)) & 0x00ff00ff) | + (((M_offset_i << 24) | (M_offset_i >>> 8)) & 0xff00ff00) + ); + } + // Shortcut + var H = this._hash.words; + var hl = _hl.words; + var hr = _hr.words; + var zl = _zl.words; + var zr = _zr.words; + var sl = _sl.words; + var sr = _sr.words; + + // Working variables + var al, bl, cl, dl, el; + var ar, br, cr, dr, er; + + ar = al = H[0]; + br = bl = H[1]; + cr = cl = H[2]; + dr = dl = H[3]; + er = el = H[4]; + // Computation + var t; + for (var i = 0; i < 80; i += 1) { + t = (al + M[offset+zl[i]])|0; + if (i<16){ + t += f1(bl,cl,dl) + hl[0]; + } else if (i<32) { + t += f2(bl,cl,dl) + hl[1]; + } else if (i<48) { + t += f3(bl,cl,dl) + hl[2]; + } else if (i<64) { + t += f4(bl,cl,dl) + hl[3]; + } else {// if (i<80) { + t += f5(bl,cl,dl) + hl[4]; + } + t = t|0; + t = rotl(t,sl[i]); + t = (t+el)|0; + al = el; + el = dl; + dl = rotl(cl, 10); + cl = bl; + bl = t; + + t = (ar + M[offset+zr[i]])|0; + if (i<16){ + t += f5(br,cr,dr) + hr[0]; + } else if (i<32) { + t += f4(br,cr,dr) + hr[1]; + } else if (i<48) { + t += f3(br,cr,dr) + hr[2]; + } else if (i<64) { + t += f2(br,cr,dr) + hr[3]; + } else {// if (i<80) { + t += f1(br,cr,dr) + hr[4]; + } + t = t|0; + t = rotl(t,sr[i]) ; + t = (t+er)|0; + ar = er; + er = dr; + dr = rotl(cr, 10); + cr = br; + br = t; + } + // Intermediate hash value + t = (H[1] + cl + dr)|0; + H[1] = (H[2] + dl + er)|0; + H[2] = (H[3] + el + ar)|0; + H[3] = (H[4] + al + br)|0; + 
H[4] = (H[0] + bl + cr)|0; + H[0] = t; + }, + + _doFinalize: function () { + // Shortcuts + var data = this._data; + var dataWords = data.words; + + var nBitsTotal = this._nDataBytes * 8; + var nBitsLeft = data.sigBytes * 8; + + // Add padding + dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = ( + (((nBitsTotal << 8) | (nBitsTotal >>> 24)) & 0x00ff00ff) | + (((nBitsTotal << 24) | (nBitsTotal >>> 8)) & 0xff00ff00) + ); + data.sigBytes = (dataWords.length + 1) * 4; + + // Hash final blocks + this._process(); + + // Shortcuts + var hash = this._hash; + var H = hash.words; + + // Swap endian + for (var i = 0; i < 5; i++) { + // Shortcut + var H_i = H[i]; + + // Swap + H[i] = (((H_i << 8) | (H_i >>> 24)) & 0x00ff00ff) | + (((H_i << 24) | (H_i >>> 8)) & 0xff00ff00); + } + + // Return final computed hash + return hash; + }, + + clone: function () { + var clone = Hasher.clone.call(this); + clone._hash = this._hash.clone(); + + return clone; + } + }); + + + function f1(x, y, z) { + return ((x) ^ (y) ^ (z)); + } + + function f2(x, y, z) { + return (((x)&(y)) | ((~x)&(z))); + } + + function f3(x, y, z) { + return (((x) | (~(y))) ^ (z)); + } + + function f4(x, y, z) { + return (((x) & (z)) | ((y)&(~(z)))); + } + + function f5(x, y, z) { + return ((x) ^ ((y) | (~(z)))); + } + + // Rotate a 32-bit word left by n bits + function rotl(x, n) { + return (x << n) | (x >>> (32 - n)); + } + + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.RIPEMD160('message'); + * var hash = CryptoJS.RIPEMD160(wordArray); + */ + C.RIPEMD160 = Hasher._createHelper(RIPEMD160); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacRIPEMD160(message, key); + */ + C.HmacRIPEMD160 = Hasher._createHmacHelper(RIPEMD160); + }(Math)); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var Base = C_lib.Base; + var C_enc = C.enc; + var Utf8 = C_enc.Utf8; + var C_algo = C.algo; + + /** + * HMAC algorithm. + */ + var HMAC = C_algo.HMAC = Base.extend({ + /** + * Initializes a newly created HMAC. + * + * @param {Hasher} hasher The hash algorithm to use. + * @param {WordArray|string} key The secret key.
+ * + * @example + * + * var hmacHasher = CryptoJS.algo.HMAC.create(CryptoJS.algo.SHA256, key); + */ + init: function (hasher, key) { + // Init hasher + hasher = this._hasher = new hasher.init(); + + // Convert string to WordArray, else assume WordArray already + if (typeof key == 'string') { + key = Utf8.parse(key); + } + + // Shortcuts + var hasherBlockSize = hasher.blockSize; + var hasherBlockSizeBytes = hasherBlockSize * 4; + + // Allow arbitrary length keys + if (key.sigBytes > hasherBlockSizeBytes) { + key = hasher.finalize(key); + } + + // Clamp excess bits + key.clamp(); + + // Clone key for inner and outer pads + var oKey = this._oKey = key.clone(); + var iKey = this._iKey = key.clone(); + + // Shortcuts + var oKeyWords = oKey.words; + var iKeyWords = iKey.words; + + // XOR keys with pad constants + for (var i = 0; i < hasherBlockSize; i++) { + oKeyWords[i] ^= 0x5c5c5c5c; + iKeyWords[i] ^= 0x36363636; + } + oKey.sigBytes = iKey.sigBytes = hasherBlockSizeBytes; + + // Set initial values + this.reset(); + }, + + /** + * Resets this HMAC to its initial state. + * + * @example + * + * hmacHasher.reset(); + */ + reset: function () { + // Shortcut + var hasher = this._hasher; + + // Reset + hasher.reset(); + hasher.update(this._iKey); + }, + + /** + * Updates this HMAC with a message. + * + * @param {WordArray|string} messageUpdate The message to append. + * + * @return {HMAC} This HMAC instance. + * + * @example + * + * hmacHasher.update('message'); + * hmacHasher.update(wordArray); + */ + update: function (messageUpdate) { + this._hasher.update(messageUpdate); + + // Chainable + return this; + }, + + /** + * Finalizes the HMAC computation. + * Note that the finalize operation is effectively a destructive, read-once operation. + * + * @param {WordArray|string} messageUpdate (Optional) A final message update. + * + * @return {WordArray} The HMAC. + * + * @example + * + * var hmac = hmacHasher.finalize(); + * var hmac = hmacHasher.finalize('message'); + * var hmac = hmacHasher.finalize(wordArray); + */ + finalize: function (messageUpdate) { + // Shortcut + var hasher = this._hasher; + + // Compute HMAC + var innerHash = hasher.finalize(messageUpdate); + hasher.reset(); + var hmac = hasher.finalize(this._oKey.clone().concat(innerHash)); + + return hmac; + } + }); + }()); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var Base = C_lib.Base; + var WordArray = C_lib.WordArray; + var C_algo = C.algo; + var SHA1 = C_algo.SHA1; + var HMAC = C_algo.HMAC; + + /** + * Password-Based Key Derivation Function 2 algorithm. + */ + var PBKDF2 = C_algo.PBKDF2 = Base.extend({ + /** + * Configuration options. + * + * @property {number} keySize The key size in words to generate. Default: 4 (128 bits) + * @property {Hasher} hasher The hasher to use. Default: SHA1 + * @property {number} iterations The number of iterations to perform. Default: 1 + */ + cfg: Base.extend({ + keySize: 128/32, + hasher: SHA1, + iterations: 1 + }), + + /** + * Initializes a newly created key derivation function. + * + * @param {Object} cfg (Optional) The configuration options to use for the derivation. + * + * @example + * + * var kdf = CryptoJS.algo.PBKDF2.create(); + * var kdf = CryptoJS.algo.PBKDF2.create({ keySize: 8 }); + * var kdf = CryptoJS.algo.PBKDF2.create({ keySize: 8, iterations: 1000 }); + */ + init: function (cfg) { + this.cfg = this.cfg.extend(cfg); + }, + + /** + * Computes the Password-Based Key Derivation Function 2. + * + * @param {WordArray|string} password The password. 
+ * @param {WordArray|string} salt A salt. + * + * @return {WordArray} The derived key. + * + * @example + * + * var key = kdf.compute(password, salt); + */ + compute: function (password, salt) { + // Shortcut + var cfg = this.cfg; + + // Init HMAC + var hmac = HMAC.create(cfg.hasher, password); + + // Initial values + var derivedKey = WordArray.create(); + var blockIndex = WordArray.create([0x00000001]); + + // Shortcuts + var derivedKeyWords = derivedKey.words; + var blockIndexWords = blockIndex.words; + var keySize = cfg.keySize; + var iterations = cfg.iterations; + + // Generate key + while (derivedKeyWords.length < keySize) { + var block = hmac.update(salt).finalize(blockIndex); + hmac.reset(); + + // Shortcuts + var blockWords = block.words; + var blockWordsLength = blockWords.length; + + // Iterations + var intermediate = block; + for (var i = 1; i < iterations; i++) { + intermediate = hmac.finalize(intermediate); + hmac.reset(); + + // Shortcut + var intermediateWords = intermediate.words; + + // XOR intermediate with block + for (var j = 0; j < blockWordsLength; j++) { + blockWords[j] ^= intermediateWords[j]; + } + } + + derivedKey.concat(block); + blockIndexWords[0]++; + } + derivedKey.sigBytes = keySize * 4; + + return derivedKey; + } + }); + + /** + * Computes the Password-Based Key Derivation Function 2. + * + * @param {WordArray|string} password The password. + * @param {WordArray|string} salt A salt. + * @param {Object} cfg (Optional) The configuration options to use for this computation. + * + * @return {WordArray} The derived key. + * + * @static + * + * @example + * + * var key = CryptoJS.PBKDF2(password, salt); + * var key = CryptoJS.PBKDF2(password, salt, { keySize: 8 }); + * var key = CryptoJS.PBKDF2(password, salt, { keySize: 8, iterations: 1000 }); + */ + C.PBKDF2 = function (password, salt, cfg) { + return PBKDF2.create(cfg).compute(password, salt); + }; + }()); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var Base = C_lib.Base; + var WordArray = C_lib.WordArray; + var C_algo = C.algo; + var MD5 = C_algo.MD5; + + /** + * This key derivation function is meant to conform with EVP_BytesToKey. + * www.openssl.org/docs/crypto/EVP_BytesToKey.html + */ + var EvpKDF = C_algo.EvpKDF = Base.extend({ + /** + * Configuration options. + * + * @property {number} keySize The key size in words to generate. Default: 4 (128 bits) + * @property {Hasher} hasher The hash algorithm to use. Default: MD5 + * @property {number} iterations The number of iterations to perform. Default: 1 + */ + cfg: Base.extend({ + keySize: 128/32, + hasher: MD5, + iterations: 1 + }), + + /** + * Initializes a newly created key derivation function. + * + * @param {Object} cfg (Optional) The configuration options to use for the derivation. + * + * @example + * + * var kdf = CryptoJS.algo.EvpKDF.create(); + * var kdf = CryptoJS.algo.EvpKDF.create({ keySize: 8 }); + * var kdf = CryptoJS.algo.EvpKDF.create({ keySize: 8, iterations: 1000 }); + */ + init: function (cfg) { + this.cfg = this.cfg.extend(cfg); + }, + + /** + * Derives a key from a password. + * + * @param {WordArray|string} password The password. + * @param {WordArray|string} salt A salt. + * + * @return {WordArray} The derived key. 
+ * + * @example + * + * var key = kdf.compute(password, salt); + */ + compute: function (password, salt) { + var block; + + // Shortcut + var cfg = this.cfg; + + // Init hasher + var hasher = cfg.hasher.create(); + + // Initial values + var derivedKey = WordArray.create(); + + // Shortcuts + var derivedKeyWords = derivedKey.words; + var keySize = cfg.keySize; + var iterations = cfg.iterations; + + // Generate key + while (derivedKeyWords.length < keySize) { + if (block) { + hasher.update(block); + } + block = hasher.update(password).finalize(salt); + hasher.reset(); + + // Iterations + for (var i = 1; i < iterations; i++) { + block = hasher.finalize(block); + hasher.reset(); + } + + derivedKey.concat(block); + } + derivedKey.sigBytes = keySize * 4; + + return derivedKey; + } + }); + + /** + * Derives a key from a password. + * + * @param {WordArray|string} password The password. + * @param {WordArray|string} salt A salt. + * @param {Object} cfg (Optional) The configuration options to use for this computation. + * + * @return {WordArray} The derived key. + * + * @static + * + * @example + * + * var key = CryptoJS.EvpKDF(password, salt); + * var key = CryptoJS.EvpKDF(password, salt, { keySize: 8 }); + * var key = CryptoJS.EvpKDF(password, salt, { keySize: 8, iterations: 1000 }); + */ + C.EvpKDF = function (password, salt, cfg) { + return EvpKDF.create(cfg).compute(password, salt); + }; + }()); + + + /** + * Cipher core components. + */ + CryptoJS.lib.Cipher || (function (undefined) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var Base = C_lib.Base; + var WordArray = C_lib.WordArray; + var BufferedBlockAlgorithm = C_lib.BufferedBlockAlgorithm; + var C_enc = C.enc; + var Utf8 = C_enc.Utf8; + var Base64 = C_enc.Base64; + var C_algo = C.algo; + var EvpKDF = C_algo.EvpKDF; + + /** + * Abstract base cipher template. + * + * @property {number} keySize This cipher's key size. Default: 4 (128 bits) + * @property {number} ivSize This cipher's IV size. Default: 4 (128 bits) + * @property {number} _ENC_XFORM_MODE A constant representing encryption mode. + * @property {number} _DEC_XFORM_MODE A constant representing decryption mode. + */ + var Cipher = C_lib.Cipher = BufferedBlockAlgorithm.extend({ + /** + * Configuration options. + * + * @property {WordArray} iv The IV to use for this operation. + */ + cfg: Base.extend(), + + /** + * Creates this cipher in encryption mode. + * + * @param {WordArray} key The key. + * @param {Object} cfg (Optional) The configuration options to use for this operation. + * + * @return {Cipher} A cipher instance. + * + * @static + * + * @example + * + * var cipher = CryptoJS.algo.AES.createEncryptor(keyWordArray, { iv: ivWordArray }); + */ + createEncryptor: function (key, cfg) { + return this.create(this._ENC_XFORM_MODE, key, cfg); + }, + + /** + * Creates this cipher in decryption mode. + * + * @param {WordArray} key The key. + * @param {Object} cfg (Optional) The configuration options to use for this operation. + * + * @return {Cipher} A cipher instance. + * + * @static + * + * @example + * + * var cipher = CryptoJS.algo.AES.createDecryptor(keyWordArray, { iv: ivWordArray }); + */ + createDecryptor: function (key, cfg) { + return this.create(this._DEC_XFORM_MODE, key, cfg); + }, + + /** + * Initializes a newly created cipher. + * + * @param {number} xformMode Either the encryption or decryption transformation mode constant. + * @param {WordArray} key The key. + * @param {Object} cfg (Optional) The configuration options to use for this operation.
+ * + * @example + * + * var cipher = CryptoJS.algo.AES.create(CryptoJS.algo.AES._ENC_XFORM_MODE, keyWordArray, { iv: ivWordArray }); + */ + init: function (xformMode, key, cfg) { + // Apply config defaults + this.cfg = this.cfg.extend(cfg); + + // Store transform mode and key + this._xformMode = xformMode; + this._key = key; + + // Set initial values + this.reset(); + }, + + /** + * Resets this cipher to its initial state. + * + * @example + * + * cipher.reset(); + */ + reset: function () { + // Reset data buffer + BufferedBlockAlgorithm.reset.call(this); + + // Perform concrete-cipher logic + this._doReset(); + }, + + /** + * Adds data to be encrypted or decrypted. + * + * @param {WordArray|string} dataUpdate The data to encrypt or decrypt. + * + * @return {WordArray} The data after processing. + * + * @example + * + * var encrypted = cipher.process('data'); + * var encrypted = cipher.process(wordArray); + */ + process: function (dataUpdate) { + // Append + this._append(dataUpdate); + + // Process available blocks + return this._process(); + }, + + /** + * Finalizes the encryption or decryption process. + * Note that the finalize operation is effectively a destructive, read-once operation. + * + * @param {WordArray|string} dataUpdate The final data to encrypt or decrypt. + * + * @return {WordArray} The data after final processing. + * + * @example + * + * var encrypted = cipher.finalize(); + * var encrypted = cipher.finalize('data'); + * var encrypted = cipher.finalize(wordArray); + */ + finalize: function (dataUpdate) { + // Final data update + if (dataUpdate) { + this._append(dataUpdate); + } + + // Perform concrete-cipher logic + var finalProcessedData = this._doFinalize(); + + return finalProcessedData; + }, + + keySize: 128/32, + + ivSize: 128/32, + + _ENC_XFORM_MODE: 1, + + _DEC_XFORM_MODE: 2, + + /** + * Creates shortcut functions to a cipher's object interface. + * + * @param {Cipher} cipher The cipher to create a helper for. + * + * @return {Object} An object with encrypt and decrypt shortcut functions. + * + * @static + * + * @example + * + * var AES = CryptoJS.lib.Cipher._createHelper(CryptoJS.algo.AES); + */ + _createHelper: (function () { + function selectCipherStrategy(key) { + if (typeof key == 'string') { + return PasswordBasedCipher; + } else { + return SerializableCipher; + } + } + + return function (cipher) { + return { + encrypt: function (message, key, cfg) { + return selectCipherStrategy(key).encrypt(cipher, message, key, cfg); + }, + + decrypt: function (ciphertext, key, cfg) { + return selectCipherStrategy(key).decrypt(cipher, ciphertext, key, cfg); + } + }; + }; + }()) + }); + + /** + * Abstract base stream cipher template. + * + * @property {number} blockSize The number of 32-bit words this cipher operates on. Default: 1 (32 bits) + */ + var StreamCipher = C_lib.StreamCipher = Cipher.extend({ + _doFinalize: function () { + // Process partial blocks + var finalProcessedBlocks = this._process(!!'flush'); + + return finalProcessedBlocks; + }, + + blockSize: 1 + }); + + /** + * Mode namespace. + */ + var C_mode = C.mode = {}; + + /** + * Abstract base block cipher mode template. + */ + var BlockCipherMode = C_lib.BlockCipherMode = Base.extend({ + /** + * Creates this mode for encryption. + * + * @param {Cipher} cipher A block cipher instance. + * @param {Array} iv The IV words. 
+ * + * @static + * + * @example + * + * var mode = CryptoJS.mode.CBC.createEncryptor(cipher, iv.words); + */ + createEncryptor: function (cipher, iv) { + return this.Encryptor.create(cipher, iv); + }, + + /** + * Creates this mode for decryption. + * + * @param {Cipher} cipher A block cipher instance. + * @param {Array} iv The IV words. + * + * @static + * + * @example + * + * var mode = CryptoJS.mode.CBC.createDecryptor(cipher, iv.words); + */ + createDecryptor: function (cipher, iv) { + return this.Decryptor.create(cipher, iv); + }, + + /** + * Initializes a newly created mode. + * + * @param {Cipher} cipher A block cipher instance. + * @param {Array} iv The IV words. + * + * @example + * + * var mode = CryptoJS.mode.CBC.Encryptor.create(cipher, iv.words); + */ + init: function (cipher, iv) { + this._cipher = cipher; + this._iv = iv; + } + }); + + /** + * Cipher Block Chaining mode. + */ + var CBC = C_mode.CBC = (function () { + /** + * Abstract base CBC mode. + */ + var CBC = BlockCipherMode.extend(); + + /** + * CBC encryptor. + */ + CBC.Encryptor = CBC.extend({ + /** + * Processes the data block at offset. + * + * @param {Array} words The data words to operate on. + * @param {number} offset The offset where the block starts. + * + * @example + * + * mode.processBlock(data.words, offset); + */ + processBlock: function (words, offset) { + // Shortcuts + var cipher = this._cipher; + var blockSize = cipher.blockSize; + + // XOR and encrypt + xorBlock.call(this, words, offset, blockSize); + cipher.encryptBlock(words, offset); + + // Remember this block to use with next block + this._prevBlock = words.slice(offset, offset + blockSize); + } + }); + + /** + * CBC decryptor. + */ + CBC.Decryptor = CBC.extend({ + /** + * Processes the data block at offset. + * + * @param {Array} words The data words to operate on. + * @param {number} offset The offset where the block starts. + * + * @example + * + * mode.processBlock(data.words, offset); + */ + processBlock: function (words, offset) { + // Shortcuts + var cipher = this._cipher; + var blockSize = cipher.blockSize; + + // Remember this block to use with next block + var thisBlock = words.slice(offset, offset + blockSize); + + // Decrypt and XOR + cipher.decryptBlock(words, offset); + xorBlock.call(this, words, offset, blockSize); + + // This block becomes the previous block + this._prevBlock = thisBlock; + } + }); + + function xorBlock(words, offset, blockSize) { + var block; + + // Shortcut + var iv = this._iv; + + // Choose mixing block + if (iv) { + block = iv; + + // Remove IV for subsequent blocks + this._iv = undefined; + } else { + block = this._prevBlock; + } + + // XOR blocks + for (var i = 0; i < blockSize; i++) { + words[offset + i] ^= block[i]; + } + } + + return CBC; + }()); + + /** + * Padding namespace. + */ + var C_pad = C.pad = {}; + + /** + * PKCS #5/7 padding strategy. + */ + var Pkcs7 = C_pad.Pkcs7 = { + /** + * Pads data using the algorithm defined in PKCS #5/7. + * + * @param {WordArray} data The data to pad. + * @param {number} blockSize The multiple that the data should be padded to. 
+ * + * @static + * + * @example + * + * CryptoJS.pad.Pkcs7.pad(wordArray, 4); + */ + pad: function (data, blockSize) { + // Shortcut + var blockSizeBytes = blockSize * 4; + + // Count padding bytes + var nPaddingBytes = blockSizeBytes - data.sigBytes % blockSizeBytes; + + // Create padding word + var paddingWord = (nPaddingBytes << 24) | (nPaddingBytes << 16) | (nPaddingBytes << 8) | nPaddingBytes; + + // Create padding + var paddingWords = []; + for (var i = 0; i < nPaddingBytes; i += 4) { + paddingWords.push(paddingWord); + } + var padding = WordArray.create(paddingWords, nPaddingBytes); + + // Add padding + data.concat(padding); + }, + + /** + * Unpads data that had been padded using the algorithm defined in PKCS #5/7. + * + * @param {WordArray} data The data to unpad. + * + * @static + * + * @example + * + * CryptoJS.pad.Pkcs7.unpad(wordArray); + */ + unpad: function (data) { + // Get number of padding bytes from last byte + var nPaddingBytes = data.words[(data.sigBytes - 1) >>> 2] & 0xff; + + // Remove padding + data.sigBytes -= nPaddingBytes; + } + }; + + /** + * Abstract base block cipher template. + * + * @property {number} blockSize The number of 32-bit words this cipher operates on. Default: 4 (128 bits) + */ + var BlockCipher = C_lib.BlockCipher = Cipher.extend({ + /** + * Configuration options. + * + * @property {Mode} mode The block mode to use. Default: CBC + * @property {Padding} padding The padding strategy to use. Default: Pkcs7 + */ + cfg: Cipher.cfg.extend({ + mode: CBC, + padding: Pkcs7 + }), + + reset: function () { + var modeCreator; + + // Reset cipher + Cipher.reset.call(this); + + // Shortcuts + var cfg = this.cfg; + var iv = cfg.iv; + var mode = cfg.mode; + + // Reset block mode + if (this._xformMode == this._ENC_XFORM_MODE) { + modeCreator = mode.createEncryptor; + } else /* if (this._xformMode == this._DEC_XFORM_MODE) */ { + modeCreator = mode.createDecryptor; + // Keep at least one block in the buffer for unpadding + this._minBufferSize = 1; + } + + if (this._mode && this._mode.__creator == modeCreator) { + this._mode.init(this, iv && iv.words); + } else { + this._mode = modeCreator.call(mode, this, iv && iv.words); + this._mode.__creator = modeCreator; + } + }, + + _doProcessBlock: function (words, offset) { + this._mode.processBlock(words, offset); + }, + + _doFinalize: function () { + var finalProcessedBlocks; + + // Shortcut + var padding = this.cfg.padding; + + // Finalize + if (this._xformMode == this._ENC_XFORM_MODE) { + // Pad data + padding.pad(this._data, this.blockSize); + + // Process final blocks + finalProcessedBlocks = this._process(!!'flush'); + } else /* if (this._xformMode == this._DEC_XFORM_MODE) */ { + // Process final blocks + finalProcessedBlocks = this._process(!!'flush'); + + // Unpad data + padding.unpad(finalProcessedBlocks); + } + + return finalProcessedBlocks; + }, + + blockSize: 128/32 + }); + + /** + * A collection of cipher parameters. + * + * @property {WordArray} ciphertext The raw ciphertext. + * @property {WordArray} key The key to this ciphertext. + * @property {WordArray} iv The IV used in the ciphering operation. + * @property {WordArray} salt The salt used with a key derivation function. + * @property {Cipher} algorithm The cipher algorithm. + * @property {Mode} mode The block mode used in the ciphering operation. + * @property {Padding} padding The padding scheme used in the ciphering operation. + * @property {number} blockSize The block size of the cipher. 
+ * @property {Format} formatter The default formatting strategy to convert this cipher params object to a string. + */ + var CipherParams = C_lib.CipherParams = Base.extend({ + /** + * Initializes a newly created cipher params object. + * + * @param {Object} cipherParams An object with any of the possible cipher parameters. + * + * @example + * + * var cipherParams = CryptoJS.lib.CipherParams.create({ + * ciphertext: ciphertextWordArray, + * key: keyWordArray, + * iv: ivWordArray, + * salt: saltWordArray, + * algorithm: CryptoJS.algo.AES, + * mode: CryptoJS.mode.CBC, + * padding: CryptoJS.pad.Pkcs7, + * blockSize: 4, + * formatter: CryptoJS.format.OpenSSL + * }); + */ + init: function (cipherParams) { + this.mixIn(cipherParams); + }, + + /** + * Converts this cipher params object to a string. + * + * @param {Format} formatter (Optional) The formatting strategy to use. + * + * @return {string} The stringified cipher params. + * + * @throws Error If neither the formatter nor the default formatter is set. + * + * @example + * + * var string = cipherParams + ''; + * var string = cipherParams.toString(); + * var string = cipherParams.toString(CryptoJS.format.OpenSSL); + */ + toString: function (formatter) { + return (formatter || this.formatter).stringify(this); + } + }); + + /** + * Format namespace. + */ + var C_format = C.format = {}; + + /** + * OpenSSL formatting strategy. + */ + var OpenSSLFormatter = C_format.OpenSSL = { + /** + * Converts a cipher params object to an OpenSSL-compatible string. + * + * @param {CipherParams} cipherParams The cipher params object. + * + * @return {string} The OpenSSL-compatible string. + * + * @static + * + * @example + * + * var openSSLString = CryptoJS.format.OpenSSL.stringify(cipherParams); + */ + stringify: function (cipherParams) { + var wordArray; + + // Shortcuts + var ciphertext = cipherParams.ciphertext; + var salt = cipherParams.salt; + + // Format + if (salt) { + wordArray = WordArray.create([0x53616c74, 0x65645f5f]).concat(salt).concat(ciphertext); + } else { + wordArray = ciphertext; + } + + return wordArray.toString(Base64); + }, + + /** + * Converts an OpenSSL-compatible string to a cipher params object. + * + * @param {string} openSSLStr The OpenSSL-compatible string. + * + * @return {CipherParams} The cipher params object. + * + * @static + * + * @example + * + * var cipherParams = CryptoJS.format.OpenSSL.parse(openSSLString); + */ + parse: function (openSSLStr) { + var salt; + + // Parse base64 + var ciphertext = Base64.parse(openSSLStr); + + // Shortcut + var ciphertextWords = ciphertext.words; + + // Test for salt + if (ciphertextWords[0] == 0x53616c74 && ciphertextWords[1] == 0x65645f5f) { + // Extract salt + salt = WordArray.create(ciphertextWords.slice(2, 4)); + + // Remove salt from ciphertext + ciphertextWords.splice(0, 4); + ciphertext.sigBytes -= 16; + } + + return CipherParams.create({ ciphertext: ciphertext, salt: salt }); + } + }; + + /** + * A cipher wrapper that returns ciphertext as a serializable cipher params object. + */ + var SerializableCipher = C_lib.SerializableCipher = Base.extend({ + /** + * Configuration options. + * + * @property {Formatter} format The formatting strategy to convert cipher param objects to and from a string. Default: OpenSSL + */ + cfg: Base.extend({ + format: OpenSSLFormatter + }), + + /** + * Encrypts a message. + * + * @param {Cipher} cipher The cipher algorithm to use. + * @param {WordArray|string} message The message to encrypt. + * @param {WordArray} key The key.
+ * @param {Object} cfg (Optional) The configuration options to use for this operation. + * + * @return {CipherParams} A cipher params object. + * + * @static + * + * @example + * + * var ciphertextParams = CryptoJS.lib.SerializableCipher.encrypt(CryptoJS.algo.AES, message, key); + * var ciphertextParams = CryptoJS.lib.SerializableCipher.encrypt(CryptoJS.algo.AES, message, key, { iv: iv }); + * var ciphertextParams = CryptoJS.lib.SerializableCipher.encrypt(CryptoJS.algo.AES, message, key, { iv: iv, format: CryptoJS.format.OpenSSL }); + */ + encrypt: function (cipher, message, key, cfg) { + // Apply config defaults + cfg = this.cfg.extend(cfg); + + // Encrypt + var encryptor = cipher.createEncryptor(key, cfg); + var ciphertext = encryptor.finalize(message); + + // Shortcut + var cipherCfg = encryptor.cfg; + + // Create and return serializable cipher params + return CipherParams.create({ + ciphertext: ciphertext, + key: key, + iv: cipherCfg.iv, + algorithm: cipher, + mode: cipherCfg.mode, + padding: cipherCfg.padding, + blockSize: cipher.blockSize, + formatter: cfg.format + }); + }, + + /** + * Decrypts serialized ciphertext. + * + * @param {Cipher} cipher The cipher algorithm to use. + * @param {CipherParams|string} ciphertext The ciphertext to decrypt. + * @param {WordArray} key The key. + * @param {Object} cfg (Optional) The configuration options to use for this operation. + * + * @return {WordArray} The plaintext. + * + * @static + * + * @example + * + * var plaintext = CryptoJS.lib.SerializableCipher.decrypt(CryptoJS.algo.AES, formattedCiphertext, key, { iv: iv, format: CryptoJS.format.OpenSSL }); + * var plaintext = CryptoJS.lib.SerializableCipher.decrypt(CryptoJS.algo.AES, ciphertextParams, key, { iv: iv, format: CryptoJS.format.OpenSSL }); + */ + decrypt: function (cipher, ciphertext, key, cfg) { + // Apply config defaults + cfg = this.cfg.extend(cfg); + + // Convert string to CipherParams + ciphertext = this._parse(ciphertext, cfg.format); + + // Decrypt + var plaintext = cipher.createDecryptor(key, cfg).finalize(ciphertext.ciphertext); + + return plaintext; + }, + + /** + * Converts serialized ciphertext to CipherParams, + * or, if it is already a CipherParams object, returns it unchanged. + * + * @param {CipherParams|string} ciphertext The ciphertext. + * @param {Formatter} format The formatting strategy to use to parse serialized ciphertext. + * + * @return {CipherParams} The unserialized ciphertext. + * + * @static + * + * @example + * + * var ciphertextParams = CryptoJS.lib.SerializableCipher._parse(ciphertextStringOrParams, format); + */ + _parse: function (ciphertext, format) { + if (typeof ciphertext == 'string') { + return format.parse(ciphertext, this); + } else { + return ciphertext; + } + } + }); + + /** + * Key derivation function namespace. + */ + var C_kdf = C.kdf = {}; + + /** + * OpenSSL key derivation function. + */ + var OpenSSLKdf = C_kdf.OpenSSL = { + /** + * Derives a key and IV from a password. + * + * @param {string} password The password to derive from. + * @param {number} keySize The size in words of the key to generate. + * @param {number} ivSize The size in words of the IV to generate. + * @param {WordArray|string} salt (Optional) A 64-bit salt to use. If omitted, a salt will be generated randomly. + * + * @return {CipherParams} A cipher params object with the key, IV, and salt.
+ * + * @static + * + * @example + * + * var derivedParams = CryptoJS.kdf.OpenSSL.execute('Password', 256/32, 128/32); + * var derivedParams = CryptoJS.kdf.OpenSSL.execute('Password', 256/32, 128/32, 'saltsalt'); + */ + execute: function (password, keySize, ivSize, salt) { + // Generate random salt + if (!salt) { + salt = WordArray.random(64/8); + } + + // Derive key and IV + var key = EvpKDF.create({ keySize: keySize + ivSize }).compute(password, salt); + + // Separate key and IV + var iv = WordArray.create(key.words.slice(keySize), ivSize * 4); + key.sigBytes = keySize * 4; + + // Return params + return CipherParams.create({ key: key, iv: iv, salt: salt }); + } + }; + + /** + * A serializable cipher wrapper that derives the key from a password, + * and returns ciphertext as a serializable cipher params object. + */ + var PasswordBasedCipher = C_lib.PasswordBasedCipher = SerializableCipher.extend({ + /** + * Configuration options. + * + * @property {KDF} kdf The key derivation function to use to generate a key and IV from a password. Default: OpenSSL + */ + cfg: SerializableCipher.cfg.extend({ + kdf: OpenSSLKdf + }), + + /** + * Encrypts a message using a password. + * + * @param {Cipher} cipher The cipher algorithm to use. + * @param {WordArray|string} message The message to encrypt. + * @param {string} password The password. + * @param {Object} cfg (Optional) The configuration options to use for this operation. + * + * @return {CipherParams} A cipher params object. + * + * @static + * + * @example + * + * var ciphertextParams = CryptoJS.lib.PasswordBasedCipher.encrypt(CryptoJS.algo.AES, message, 'password'); + * var ciphertextParams = CryptoJS.lib.PasswordBasedCipher.encrypt(CryptoJS.algo.AES, message, 'password', { format: CryptoJS.format.OpenSSL }); + */ + encrypt: function (cipher, message, password, cfg) { + // Apply config defaults + cfg = this.cfg.extend(cfg); + + // Derive key and other params + var derivedParams = cfg.kdf.execute(password, cipher.keySize, cipher.ivSize); + + // Add IV to config + cfg.iv = derivedParams.iv; + + // Encrypt + var ciphertext = SerializableCipher.encrypt.call(this, cipher, message, derivedParams.key, cfg); + + // Mix in derived params + ciphertext.mixIn(derivedParams); + + return ciphertext; + }, + + /** + * Decrypts serialized ciphertext using a password. + * + * @param {Cipher} cipher The cipher algorithm to use. + * @param {CipherParams|string} ciphertext The ciphertext to decrypt. + * @param {string} password The password. + * @param {Object} cfg (Optional) The configuration options to use for this operation. + * + * @return {WordArray} The plaintext. 
+ * + * @static + * + * @example + * + * var plaintext = CryptoJS.lib.PasswordBasedCipher.decrypt(CryptoJS.algo.AES, formattedCiphertext, 'password', { format: CryptoJS.format.OpenSSL }); + * var plaintext = CryptoJS.lib.PasswordBasedCipher.decrypt(CryptoJS.algo.AES, ciphertextParams, 'password', { format: CryptoJS.format.OpenSSL }); + */ + decrypt: function (cipher, ciphertext, password, cfg) { + // Apply config defaults + cfg = this.cfg.extend(cfg); + + // Convert string to CipherParams + ciphertext = this._parse(ciphertext, cfg.format); + + // Derive key and other params + var derivedParams = cfg.kdf.execute(password, cipher.keySize, cipher.ivSize, ciphertext.salt); + + // Add IV to config + cfg.iv = derivedParams.iv; + + // Decrypt + var plaintext = SerializableCipher.decrypt.call(this, cipher, ciphertext, derivedParams.key, cfg); + + return plaintext; + } + }); + }()); + + + /** + * Cipher Feedback block mode. + */ + CryptoJS.mode.CFB = (function () { + var CFB = CryptoJS.lib.BlockCipherMode.extend(); + + CFB.Encryptor = CFB.extend({ + processBlock: function (words, offset) { + // Shortcuts + var cipher = this._cipher; + var blockSize = cipher.blockSize; + + generateKeystreamAndEncrypt.call(this, words, offset, blockSize, cipher); + + // Remember this block to use with next block + this._prevBlock = words.slice(offset, offset + blockSize); + } + }); + + CFB.Decryptor = CFB.extend({ + processBlock: function (words, offset) { + // Shortcuts + var cipher = this._cipher; + var blockSize = cipher.blockSize; + + // Remember this block to use with next block + var thisBlock = words.slice(offset, offset + blockSize); + + generateKeystreamAndEncrypt.call(this, words, offset, blockSize, cipher); + + // This block becomes the previous block + this._prevBlock = thisBlock; + } + }); + + function generateKeystreamAndEncrypt(words, offset, blockSize, cipher) { + var keystream; + + // Shortcut + var iv = this._iv; + + // Generate keystream + if (iv) { + keystream = iv.slice(0); + + // Remove IV for subsequent blocks + this._iv = undefined; + } else { + keystream = this._prevBlock; + } + cipher.encryptBlock(keystream, 0); + + // Encrypt + for (var i = 0; i < blockSize; i++) { + words[offset + i] ^= keystream[i]; + } + } + + return CFB; + }()); + + + /** + * Counter block mode. 
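+ * + * CTR turns the block cipher into a stream cipher: the keystream for each block is the encryption of a counter that is seeded from the IV and incremented once per block, and ciphertext is plaintext XOR keystream. A usage sketch, not part of the library (message, keyWordArray and ivWordArray are assumed to be supplied by the caller; NoPadding fits because CTR needs no block alignment): + * + * @example + * + * var encrypted = CryptoJS.AES.encrypt(message, keyWordArray, { mode: CryptoJS.mode.CTR, padding: CryptoJS.pad.NoPadding, iv: ivWordArray });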
+ */ + CryptoJS.mode.CTR = (function () { + var CTR = CryptoJS.lib.BlockCipherMode.extend(); + + var Encryptor = CTR.Encryptor = CTR.extend({ + processBlock: function (words, offset) { + // Shortcuts + var cipher = this._cipher + var blockSize = cipher.blockSize; + var iv = this._iv; + var counter = this._counter; + + // Generate keystream + if (iv) { + counter = this._counter = iv.slice(0); + + // Remove IV for subsequent blocks + this._iv = undefined; + } + var keystream = counter.slice(0); + cipher.encryptBlock(keystream, 0); + + // Increment counter + counter[blockSize - 1] = (counter[blockSize - 1] + 1) | 0 + + // Encrypt + for (var i = 0; i < blockSize; i++) { + words[offset + i] ^= keystream[i]; + } + } + }); + + CTR.Decryptor = Encryptor; + + return CTR; + }()); + + + /** @preserve + * Counter block mode compatible with Dr Brian Gladman fileenc.c + * derived from CryptoJS.mode.CTR + * Jan Hruby jhruby.web@gmail.com + */ + CryptoJS.mode.CTRGladman = (function () { + var CTRGladman = CryptoJS.lib.BlockCipherMode.extend(); + + function incWord(word) + { + if (((word >> 24) & 0xff) === 0xff) { //overflow + var b1 = (word >> 16)&0xff; + var b2 = (word >> 8)&0xff; + var b3 = word & 0xff; + + if (b1 === 0xff) // overflow b1 + { + b1 = 0; + if (b2 === 0xff) + { + b2 = 0; + if (b3 === 0xff) + { + b3 = 0; + } + else + { + ++b3; + } + } + else + { + ++b2; + } + } + else + { + ++b1; + } + + word = 0; + word += (b1 << 16); + word += (b2 << 8); + word += b3; + } + else + { + word += (0x01 << 24); + } + return word; + } + + function incCounter(counter) + { + if ((counter[0] = incWord(counter[0])) === 0) + { + // encr_data in fileenc.c from Dr Brian Gladman's counts only with DWORD j < 8 + counter[1] = incWord(counter[1]); + } + return counter; + } + + var Encryptor = CTRGladman.Encryptor = CTRGladman.extend({ + processBlock: function (words, offset) { + // Shortcuts + var cipher = this._cipher + var blockSize = cipher.blockSize; + var iv = this._iv; + var counter = this._counter; + + // Generate keystream + if (iv) { + counter = this._counter = iv.slice(0); + + // Remove IV for subsequent blocks + this._iv = undefined; + } + + incCounter(counter); + + var keystream = counter.slice(0); + cipher.encryptBlock(keystream, 0); + + // Encrypt + for (var i = 0; i < blockSize; i++) { + words[offset + i] ^= keystream[i]; + } + } + }); + + CTRGladman.Decryptor = Encryptor; + + return CTRGladman; + }()); + + + + + /** + * Output Feedback block mode. + */ + CryptoJS.mode.OFB = (function () { + var OFB = CryptoJS.lib.BlockCipherMode.extend(); + + var Encryptor = OFB.Encryptor = OFB.extend({ + processBlock: function (words, offset) { + // Shortcuts + var cipher = this._cipher + var blockSize = cipher.blockSize; + var iv = this._iv; + var keystream = this._keystream; + + // Generate keystream + if (iv) { + keystream = this._keystream = iv.slice(0); + + // Remove IV for subsequent blocks + this._iv = undefined; + } + cipher.encryptBlock(keystream, 0); + + // Encrypt + for (var i = 0; i < blockSize; i++) { + words[offset + i] ^= keystream[i]; + } + } + }); + + OFB.Decryptor = Encryptor; + + return OFB; + }()); + + + /** + * Electronic Codebook block mode. 
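+ * + * ECB encrypts every block independently and uses no IV, so identical plaintext blocks produce identical ciphertext blocks; it is provided for completeness rather than recommended. A usage sketch (keyWordArray is assumed to be supplied by the caller): + * + * @example + * + * var encrypted = CryptoJS.AES.encrypt(message, keyWordArray, { mode: CryptoJS.mode.ECB, padding: CryptoJS.pad.Pkcs7 });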
+ */ + CryptoJS.mode.ECB = (function () { + var ECB = CryptoJS.lib.BlockCipherMode.extend(); + + ECB.Encryptor = ECB.extend({ + processBlock: function (words, offset) { + this._cipher.encryptBlock(words, offset); + } + }); + + ECB.Decryptor = ECB.extend({ + processBlock: function (words, offset) { + this._cipher.decryptBlock(words, offset); + } + }); + + return ECB; + }()); + + + /** + * ANSI X.923 padding strategy. + */ + CryptoJS.pad.AnsiX923 = { + pad: function (data, blockSize) { + // Shortcuts + var dataSigBytes = data.sigBytes; + var blockSizeBytes = blockSize * 4; + + // Count padding bytes + var nPaddingBytes = blockSizeBytes - dataSigBytes % blockSizeBytes; + + // Compute last byte position + var lastBytePos = dataSigBytes + nPaddingBytes - 1; + + // Pad + data.clamp(); + data.words[lastBytePos >>> 2] |= nPaddingBytes << (24 - (lastBytePos % 4) * 8); + data.sigBytes += nPaddingBytes; + }, + + unpad: function (data) { + // Get number of padding bytes from last byte + var nPaddingBytes = data.words[(data.sigBytes - 1) >>> 2] & 0xff; + + // Remove padding + data.sigBytes -= nPaddingBytes; + } + }; + + + /** + * ISO 10126 padding strategy. + */ + CryptoJS.pad.Iso10126 = { + pad: function (data, blockSize) { + // Shortcut + var blockSizeBytes = blockSize * 4; + + // Count padding bytes + var nPaddingBytes = blockSizeBytes - data.sigBytes % blockSizeBytes; + + // Pad + data.concat(CryptoJS.lib.WordArray.random(nPaddingBytes - 1)). + concat(CryptoJS.lib.WordArray.create([nPaddingBytes << 24], 1)); + }, + + unpad: function (data) { + // Get number of padding bytes from last byte + var nPaddingBytes = data.words[(data.sigBytes - 1) >>> 2] & 0xff; + + // Remove padding + data.sigBytes -= nPaddingBytes; + } + }; + + + /** + * ISO/IEC 9797-1 Padding Method 2. + */ + CryptoJS.pad.Iso97971 = { + pad: function (data, blockSize) { + // Add 0x80 byte + data.concat(CryptoJS.lib.WordArray.create([0x80000000], 1)); + + // Zero pad the rest + CryptoJS.pad.ZeroPadding.pad(data, blockSize); + }, + + unpad: function (data) { + // Remove zero padding + CryptoJS.pad.ZeroPadding.unpad(data); + + // Remove one more byte -- the 0x80 byte + data.sigBytes--; + } + }; + + + /** + * Zero padding strategy. + */ + CryptoJS.pad.ZeroPadding = { + pad: function (data, blockSize) { + // Shortcut + var blockSizeBytes = blockSize * 4; + + // Pad + data.clamp(); + data.sigBytes += blockSizeBytes - ((data.sigBytes % blockSizeBytes) || blockSizeBytes); + }, + + unpad: function (data) { + // Shortcut + var dataWords = data.words; + + // Unpad: scan back to the last non-zero byte + for (var i = data.sigBytes - 1; i >= 0; i--) { + if (((dataWords[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff)) { + data.sigBytes = i + 1; + break; + } + } + } + }; + + + /** + * A noop padding strategy. + */ + CryptoJS.pad.NoPadding = { + pad: function () { + }, + + unpad: function () { + } + }; + + + (function (undefined) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var CipherParams = C_lib.CipherParams; + var C_enc = C.enc; + var Hex = C_enc.Hex; + var C_format = C.format; + + var HexFormatter = C_format.Hex = { + /** + * Converts the ciphertext of a cipher params object to a hexadecimally encoded string. + * + * @param {CipherParams} cipherParams The cipher params object. + * + * @return {string} The hexadecimally encoded string.
+ * + * @static + * + * @example + * + * var hexString = CryptoJS.format.Hex.stringify(cipherParams); + */ + stringify: function (cipherParams) { + return cipherParams.ciphertext.toString(Hex); + }, + + /** + * Converts a hexadecimally encoded ciphertext string to a cipher params object. + * + * @param {string} input The hexadecimally encoded string. + * + * @return {CipherParams} The cipher params object. + * + * @static + * + * @example + * + * var cipherParams = CryptoJS.format.Hex.parse(hexString); + */ + parse: function (input) { + var ciphertext = Hex.parse(input); + return CipherParams.create({ ciphertext: ciphertext }); + } + }; + }()); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var BlockCipher = C_lib.BlockCipher; + var C_algo = C.algo; + + // Lookup tables + var SBOX = []; + var INV_SBOX = []; + var SUB_MIX_0 = []; + var SUB_MIX_1 = []; + var SUB_MIX_2 = []; + var SUB_MIX_3 = []; + var INV_SUB_MIX_0 = []; + var INV_SUB_MIX_1 = []; + var INV_SUB_MIX_2 = []; + var INV_SUB_MIX_3 = []; + + // Compute lookup tables + (function () { + // Compute double table + var d = []; + for (var i = 0; i < 256; i++) { + if (i < 128) { + d[i] = i << 1; + } else { + d[i] = (i << 1) ^ 0x11b; + } + } + + // Walk GF(2^8) + var x = 0; + var xi = 0; + for (var i = 0; i < 256; i++) { + // Compute sbox + var sx = xi ^ (xi << 1) ^ (xi << 2) ^ (xi << 3) ^ (xi << 4); + sx = (sx >>> 8) ^ (sx & 0xff) ^ 0x63; + SBOX[x] = sx; + INV_SBOX[sx] = x; + + // Compute multiplication + var x2 = d[x]; + var x4 = d[x2]; + var x8 = d[x4]; + + // Compute sub bytes, mix columns tables + var t = (d[sx] * 0x101) ^ (sx * 0x1010100); + SUB_MIX_0[x] = (t << 24) | (t >>> 8); + SUB_MIX_1[x] = (t << 16) | (t >>> 16); + SUB_MIX_2[x] = (t << 8) | (t >>> 24); + SUB_MIX_3[x] = t; + + // Compute inv sub bytes, inv mix columns tables + var t = (x8 * 0x1010101) ^ (x4 * 0x10001) ^ (x2 * 0x101) ^ (x * 0x1010100); + INV_SUB_MIX_0[sx] = (t << 24) | (t >>> 8); + INV_SUB_MIX_1[sx] = (t << 16) | (t >>> 16); + INV_SUB_MIX_2[sx] = (t << 8) | (t >>> 24); + INV_SUB_MIX_3[sx] = t; + + // Compute next counter + if (!x) { + x = xi = 1; + } else { + x = x2 ^ d[d[d[x8 ^ x2]]]; + xi ^= d[d[xi]]; + } + } + }()); + + // Precomputed Rcon lookup + var RCON = [0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36]; + + /** + * AES block cipher algorithm. 
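+ * + * The round count follows the key length (keySize words + 6: 10, 12 or 14 rounds for 128-, 192- and 256-bit keys). A sketch of the low-level interface (the hex key and IV values are illustrative placeholders only): + * + * @example + * + * var aes = CryptoJS.algo.AES.createEncryptor(CryptoJS.enc.Hex.parse('000102030405060708090a0b0c0d0e0f'), { iv: CryptoJS.enc.Hex.parse('101112131415161718191a1b1c1d1e1f') }); + * var ciphertext = aes.finalize('message');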
+ */ + var AES = C_algo.AES = BlockCipher.extend({ + _doReset: function () { + var t; + + // Skip reset if nRounds has been set before and key did not change + if (this._nRounds && this._keyPriorReset === this._key) { + return; + } + + // Shortcuts + var key = this._keyPriorReset = this._key; + var keyWords = key.words; + var keySize = key.sigBytes / 4; + + // Compute number of rounds + var nRounds = this._nRounds = keySize + 6; + + // Compute number of key schedule rows + var ksRows = (nRounds + 1) * 4; + + // Compute key schedule + var keySchedule = this._keySchedule = []; + for (var ksRow = 0; ksRow < ksRows; ksRow++) { + if (ksRow < keySize) { + keySchedule[ksRow] = keyWords[ksRow]; + } else { + t = keySchedule[ksRow - 1]; + + if (!(ksRow % keySize)) { + // Rot word + t = (t << 8) | (t >>> 24); + + // Sub word + t = (SBOX[t >>> 24] << 24) | (SBOX[(t >>> 16) & 0xff] << 16) | (SBOX[(t >>> 8) & 0xff] << 8) | SBOX[t & 0xff]; + + // Mix Rcon + t ^= RCON[(ksRow / keySize) | 0] << 24; + } else if (keySize > 6 && ksRow % keySize == 4) { + // Sub word + t = (SBOX[t >>> 24] << 24) | (SBOX[(t >>> 16) & 0xff] << 16) | (SBOX[(t >>> 8) & 0xff] << 8) | SBOX[t & 0xff]; + } + + keySchedule[ksRow] = keySchedule[ksRow - keySize] ^ t; + } + } + + // Compute inv key schedule + var invKeySchedule = this._invKeySchedule = []; + for (var invKsRow = 0; invKsRow < ksRows; invKsRow++) { + var ksRow = ksRows - invKsRow; + + if (invKsRow % 4) { + t = keySchedule[ksRow]; + } else { + t = keySchedule[ksRow - 4]; + } + + if (invKsRow < 4 || ksRow <= 4) { + invKeySchedule[invKsRow] = t; + } else { + invKeySchedule[invKsRow] = INV_SUB_MIX_0[SBOX[t >>> 24]] ^ INV_SUB_MIX_1[SBOX[(t >>> 16) & 0xff]] ^ + INV_SUB_MIX_2[SBOX[(t >>> 8) & 0xff]] ^ INV_SUB_MIX_3[SBOX[t & 0xff]]; + } + } + }, + + encryptBlock: function (M, offset) { + this._doCryptBlock(M, offset, this._keySchedule, SUB_MIX_0, SUB_MIX_1, SUB_MIX_2, SUB_MIX_3, SBOX); + }, + + decryptBlock: function (M, offset) { + // Swap 2nd and 4th rows + var t = M[offset + 1]; + M[offset + 1] = M[offset + 3]; + M[offset + 3] = t; + + this._doCryptBlock(M, offset, this._invKeySchedule, INV_SUB_MIX_0, INV_SUB_MIX_1, INV_SUB_MIX_2, INV_SUB_MIX_3, INV_SBOX); + + // Inv swap 2nd and 4th rows + t = M[offset + 1]; + M[offset + 1] = M[offset + 3]; + M[offset + 3] = t; + }, + + _doCryptBlock: function (M, offset, keySchedule, SUB_MIX_0, SUB_MIX_1, SUB_MIX_2, SUB_MIX_3, SBOX) { + // Shortcut + var nRounds = this._nRounds; + + // Get input, add round key + var s0 = M[offset] ^ keySchedule[0]; + var s1 = M[offset + 1] ^ keySchedule[1]; + var s2 = M[offset + 2] ^ keySchedule[2]; + var s3 = M[offset + 3] ^ keySchedule[3]; + + // Key schedule row counter + var ksRow = 4; + + // Rounds + for (var round = 1; round < nRounds; round++) { + // Shift rows, sub bytes, mix columns, add round key + var t0 = SUB_MIX_0[s0 >>> 24] ^ SUB_MIX_1[(s1 >>> 16) & 0xff] ^ SUB_MIX_2[(s2 >>> 8) & 0xff] ^ SUB_MIX_3[s3 & 0xff] ^ keySchedule[ksRow++]; + var t1 = SUB_MIX_0[s1 >>> 24] ^ SUB_MIX_1[(s2 >>> 16) & 0xff] ^ SUB_MIX_2[(s3 >>> 8) & 0xff] ^ SUB_MIX_3[s0 & 0xff] ^ keySchedule[ksRow++]; + var t2 = SUB_MIX_0[s2 >>> 24] ^ SUB_MIX_1[(s3 >>> 16) & 0xff] ^ SUB_MIX_2[(s0 >>> 8) & 0xff] ^ SUB_MIX_3[s1 & 0xff] ^ keySchedule[ksRow++]; + var t3 = SUB_MIX_0[s3 >>> 24] ^ SUB_MIX_1[(s0 >>> 16) & 0xff] ^ SUB_MIX_2[(s1 >>> 8) & 0xff] ^ SUB_MIX_3[s2 & 0xff] ^ keySchedule[ksRow++]; + + // Update state + s0 = t0; + s1 = t1; + s2 = t2; + s3 = t3; + } + + // Shift rows, sub bytes, add round key + var t0 = ((SBOX[s0 >>> 
24] << 24) | (SBOX[(s1 >>> 16) & 0xff] << 16) | (SBOX[(s2 >>> 8) & 0xff] << 8) | SBOX[s3 & 0xff]) ^ keySchedule[ksRow++]; + var t1 = ((SBOX[s1 >>> 24] << 24) | (SBOX[(s2 >>> 16) & 0xff] << 16) | (SBOX[(s3 >>> 8) & 0xff] << 8) | SBOX[s0 & 0xff]) ^ keySchedule[ksRow++]; + var t2 = ((SBOX[s2 >>> 24] << 24) | (SBOX[(s3 >>> 16) & 0xff] << 16) | (SBOX[(s0 >>> 8) & 0xff] << 8) | SBOX[s1 & 0xff]) ^ keySchedule[ksRow++]; + var t3 = ((SBOX[s3 >>> 24] << 24) | (SBOX[(s0 >>> 16) & 0xff] << 16) | (SBOX[(s1 >>> 8) & 0xff] << 8) | SBOX[s2 & 0xff]) ^ keySchedule[ksRow++]; + + // Set output + M[offset] = t0; + M[offset + 1] = t1; + M[offset + 2] = t2; + M[offset + 3] = t3; + }, + + keySize: 256/32 + }); + + /** + * Shortcut functions to the cipher's object interface. + * + * @example + * + * var ciphertext = CryptoJS.AES.encrypt(message, key, cfg); + * var plaintext = CryptoJS.AES.decrypt(ciphertext, key, cfg); + */ + C.AES = BlockCipher._createHelper(AES); + }()); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var BlockCipher = C_lib.BlockCipher; + var C_algo = C.algo; + + // Permuted Choice 1 constants + var PC1 = [ + 57, 49, 41, 33, 25, 17, 9, 1, + 58, 50, 42, 34, 26, 18, 10, 2, + 59, 51, 43, 35, 27, 19, 11, 3, + 60, 52, 44, 36, 63, 55, 47, 39, + 31, 23, 15, 7, 62, 54, 46, 38, + 30, 22, 14, 6, 61, 53, 45, 37, + 29, 21, 13, 5, 28, 20, 12, 4 + ]; + + // Permuted Choice 2 constants + var PC2 = [ + 14, 17, 11, 24, 1, 5, + 3, 28, 15, 6, 21, 10, + 23, 19, 12, 4, 26, 8, + 16, 7, 27, 20, 13, 2, + 41, 52, 31, 37, 47, 55, + 30, 40, 51, 45, 33, 48, + 44, 49, 39, 56, 34, 53, + 46, 42, 50, 36, 29, 32 + ]; + + // Cumulative bit shift constants + var BIT_SHIFTS = [1, 2, 4, 6, 8, 10, 12, 14, 15, 17, 19, 21, 23, 25, 27, 28]; + + // SBOXes and round permutation constants + var SBOX_P = [ + { + 0x0: 0x808200, + 0x10000000: 0x8000, + 0x20000000: 0x808002, + 0x30000000: 0x2, + 0x40000000: 0x200, + 0x50000000: 0x808202, + 0x60000000: 0x800202, + 0x70000000: 0x800000, + 0x80000000: 0x202, + 0x90000000: 0x800200, + 0xa0000000: 0x8200, + 0xb0000000: 0x808000, + 0xc0000000: 0x8002, + 0xd0000000: 0x800002, + 0xe0000000: 0x0, + 0xf0000000: 0x8202, + 0x8000000: 0x0, + 0x18000000: 0x808202, + 0x28000000: 0x8202, + 0x38000000: 0x8000, + 0x48000000: 0x808200, + 0x58000000: 0x200, + 0x68000000: 0x808002, + 0x78000000: 0x2, + 0x88000000: 0x800200, + 0x98000000: 0x8200, + 0xa8000000: 0x808000, + 0xb8000000: 0x800202, + 0xc8000000: 0x800002, + 0xd8000000: 0x8002, + 0xe8000000: 0x202, + 0xf8000000: 0x800000, + 0x1: 0x8000, + 0x10000001: 0x2, + 0x20000001: 0x808200, + 0x30000001: 0x800000, + 0x40000001: 0x808002, + 0x50000001: 0x8200, + 0x60000001: 0x200, + 0x70000001: 0x800202, + 0x80000001: 0x808202, + 0x90000001: 0x808000, + 0xa0000001: 0x800002, + 0xb0000001: 0x8202, + 0xc0000001: 0x202, + 0xd0000001: 0x800200, + 0xe0000001: 0x8002, + 0xf0000001: 0x0, + 0x8000001: 0x808202, + 0x18000001: 0x808000, + 0x28000001: 0x800000, + 0x38000001: 0x200, + 0x48000001: 0x8000, + 0x58000001: 0x800002, + 0x68000001: 0x2, + 0x78000001: 0x8202, + 0x88000001: 0x8002, + 0x98000001: 0x800202, + 0xa8000001: 0x202, + 0xb8000001: 0x808200, + 0xc8000001: 0x800200, + 0xd8000001: 0x0, + 0xe8000001: 0x8200, + 0xf8000001: 0x808002 + }, + { + 0x0: 0x40084010, + 0x1000000: 0x4000, + 0x2000000: 0x80000, + 0x3000000: 0x40080010, + 0x4000000: 0x40000010, + 0x5000000: 0x40084000, + 0x6000000: 0x40004000, + 0x7000000: 0x10, + 0x8000000: 0x84000, + 0x9000000: 0x40004010, + 0xa000000: 0x40000000, + 
0xb000000: 0x84010, + 0xc000000: 0x80010, + 0xd000000: 0x0, + 0xe000000: 0x4010, + 0xf000000: 0x40080000, + 0x800000: 0x40004000, + 0x1800000: 0x84010, + 0x2800000: 0x10, + 0x3800000: 0x40004010, + 0x4800000: 0x40084010, + 0x5800000: 0x40000000, + 0x6800000: 0x80000, + 0x7800000: 0x40080010, + 0x8800000: 0x80010, + 0x9800000: 0x0, + 0xa800000: 0x4000, + 0xb800000: 0x40080000, + 0xc800000: 0x40000010, + 0xd800000: 0x84000, + 0xe800000: 0x40084000, + 0xf800000: 0x4010, + 0x10000000: 0x0, + 0x11000000: 0x40080010, + 0x12000000: 0x40004010, + 0x13000000: 0x40084000, + 0x14000000: 0x40080000, + 0x15000000: 0x10, + 0x16000000: 0x84010, + 0x17000000: 0x4000, + 0x18000000: 0x4010, + 0x19000000: 0x80000, + 0x1a000000: 0x80010, + 0x1b000000: 0x40000010, + 0x1c000000: 0x84000, + 0x1d000000: 0x40004000, + 0x1e000000: 0x40000000, + 0x1f000000: 0x40084010, + 0x10800000: 0x84010, + 0x11800000: 0x80000, + 0x12800000: 0x40080000, + 0x13800000: 0x4000, + 0x14800000: 0x40004000, + 0x15800000: 0x40084010, + 0x16800000: 0x10, + 0x17800000: 0x40000000, + 0x18800000: 0x40084000, + 0x19800000: 0x40000010, + 0x1a800000: 0x40004010, + 0x1b800000: 0x80010, + 0x1c800000: 0x0, + 0x1d800000: 0x4010, + 0x1e800000: 0x40080010, + 0x1f800000: 0x84000 + }, + { + 0x0: 0x104, + 0x100000: 0x0, + 0x200000: 0x4000100, + 0x300000: 0x10104, + 0x400000: 0x10004, + 0x500000: 0x4000004, + 0x600000: 0x4010104, + 0x700000: 0x4010000, + 0x800000: 0x4000000, + 0x900000: 0x4010100, + 0xa00000: 0x10100, + 0xb00000: 0x4010004, + 0xc00000: 0x4000104, + 0xd00000: 0x10000, + 0xe00000: 0x4, + 0xf00000: 0x100, + 0x80000: 0x4010100, + 0x180000: 0x4010004, + 0x280000: 0x0, + 0x380000: 0x4000100, + 0x480000: 0x4000004, + 0x580000: 0x10000, + 0x680000: 0x10004, + 0x780000: 0x104, + 0x880000: 0x4, + 0x980000: 0x100, + 0xa80000: 0x4010000, + 0xb80000: 0x10104, + 0xc80000: 0x10100, + 0xd80000: 0x4000104, + 0xe80000: 0x4010104, + 0xf80000: 0x4000000, + 0x1000000: 0x4010100, + 0x1100000: 0x10004, + 0x1200000: 0x10000, + 0x1300000: 0x4000100, + 0x1400000: 0x100, + 0x1500000: 0x4010104, + 0x1600000: 0x4000004, + 0x1700000: 0x0, + 0x1800000: 0x4000104, + 0x1900000: 0x4000000, + 0x1a00000: 0x4, + 0x1b00000: 0x10100, + 0x1c00000: 0x4010000, + 0x1d00000: 0x104, + 0x1e00000: 0x10104, + 0x1f00000: 0x4010004, + 0x1080000: 0x4000000, + 0x1180000: 0x104, + 0x1280000: 0x4010100, + 0x1380000: 0x0, + 0x1480000: 0x10004, + 0x1580000: 0x4000100, + 0x1680000: 0x100, + 0x1780000: 0x4010004, + 0x1880000: 0x10000, + 0x1980000: 0x4010104, + 0x1a80000: 0x10104, + 0x1b80000: 0x4000004, + 0x1c80000: 0x4000104, + 0x1d80000: 0x4010000, + 0x1e80000: 0x4, + 0x1f80000: 0x10100 + }, + { + 0x0: 0x80401000, + 0x10000: 0x80001040, + 0x20000: 0x401040, + 0x30000: 0x80400000, + 0x40000: 0x0, + 0x50000: 0x401000, + 0x60000: 0x80000040, + 0x70000: 0x400040, + 0x80000: 0x80000000, + 0x90000: 0x400000, + 0xa0000: 0x40, + 0xb0000: 0x80001000, + 0xc0000: 0x80400040, + 0xd0000: 0x1040, + 0xe0000: 0x1000, + 0xf0000: 0x80401040, + 0x8000: 0x80001040, + 0x18000: 0x40, + 0x28000: 0x80400040, + 0x38000: 0x80001000, + 0x48000: 0x401000, + 0x58000: 0x80401040, + 0x68000: 0x0, + 0x78000: 0x80400000, + 0x88000: 0x1000, + 0x98000: 0x80401000, + 0xa8000: 0x400000, + 0xb8000: 0x1040, + 0xc8000: 0x80000000, + 0xd8000: 0x400040, + 0xe8000: 0x401040, + 0xf8000: 0x80000040, + 0x100000: 0x400040, + 0x110000: 0x401000, + 0x120000: 0x80000040, + 0x130000: 0x0, + 0x140000: 0x1040, + 0x150000: 0x80400040, + 0x160000: 0x80401000, + 0x170000: 0x80001040, + 0x180000: 0x80401040, + 0x190000: 0x80000000, + 0x1a0000: 
0x80400000, + 0x1b0000: 0x401040, + 0x1c0000: 0x80001000, + 0x1d0000: 0x400000, + 0x1e0000: 0x40, + 0x1f0000: 0x1000, + 0x108000: 0x80400000, + 0x118000: 0x80401040, + 0x128000: 0x0, + 0x138000: 0x401000, + 0x148000: 0x400040, + 0x158000: 0x80000000, + 0x168000: 0x80001040, + 0x178000: 0x40, + 0x188000: 0x80000040, + 0x198000: 0x1000, + 0x1a8000: 0x80001000, + 0x1b8000: 0x80400040, + 0x1c8000: 0x1040, + 0x1d8000: 0x80401000, + 0x1e8000: 0x400000, + 0x1f8000: 0x401040 + }, + { + 0x0: 0x80, + 0x1000: 0x1040000, + 0x2000: 0x40000, + 0x3000: 0x20000000, + 0x4000: 0x20040080, + 0x5000: 0x1000080, + 0x6000: 0x21000080, + 0x7000: 0x40080, + 0x8000: 0x1000000, + 0x9000: 0x20040000, + 0xa000: 0x20000080, + 0xb000: 0x21040080, + 0xc000: 0x21040000, + 0xd000: 0x0, + 0xe000: 0x1040080, + 0xf000: 0x21000000, + 0x800: 0x1040080, + 0x1800: 0x21000080, + 0x2800: 0x80, + 0x3800: 0x1040000, + 0x4800: 0x40000, + 0x5800: 0x20040080, + 0x6800: 0x21040000, + 0x7800: 0x20000000, + 0x8800: 0x20040000, + 0x9800: 0x0, + 0xa800: 0x21040080, + 0xb800: 0x1000080, + 0xc800: 0x20000080, + 0xd800: 0x21000000, + 0xe800: 0x1000000, + 0xf800: 0x40080, + 0x10000: 0x40000, + 0x11000: 0x80, + 0x12000: 0x20000000, + 0x13000: 0x21000080, + 0x14000: 0x1000080, + 0x15000: 0x21040000, + 0x16000: 0x20040080, + 0x17000: 0x1000000, + 0x18000: 0x21040080, + 0x19000: 0x21000000, + 0x1a000: 0x1040000, + 0x1b000: 0x20040000, + 0x1c000: 0x40080, + 0x1d000: 0x20000080, + 0x1e000: 0x0, + 0x1f000: 0x1040080, + 0x10800: 0x21000080, + 0x11800: 0x1000000, + 0x12800: 0x1040000, + 0x13800: 0x20040080, + 0x14800: 0x20000000, + 0x15800: 0x1040080, + 0x16800: 0x80, + 0x17800: 0x21040000, + 0x18800: 0x40080, + 0x19800: 0x21040080, + 0x1a800: 0x0, + 0x1b800: 0x21000000, + 0x1c800: 0x1000080, + 0x1d800: 0x40000, + 0x1e800: 0x20040000, + 0x1f800: 0x20000080 + }, + { + 0x0: 0x10000008, + 0x100: 0x2000, + 0x200: 0x10200000, + 0x300: 0x10202008, + 0x400: 0x10002000, + 0x500: 0x200000, + 0x600: 0x200008, + 0x700: 0x10000000, + 0x800: 0x0, + 0x900: 0x10002008, + 0xa00: 0x202000, + 0xb00: 0x8, + 0xc00: 0x10200008, + 0xd00: 0x202008, + 0xe00: 0x2008, + 0xf00: 0x10202000, + 0x80: 0x10200000, + 0x180: 0x10202008, + 0x280: 0x8, + 0x380: 0x200000, + 0x480: 0x202008, + 0x580: 0x10000008, + 0x680: 0x10002000, + 0x780: 0x2008, + 0x880: 0x200008, + 0x980: 0x2000, + 0xa80: 0x10002008, + 0xb80: 0x10200008, + 0xc80: 0x0, + 0xd80: 0x10202000, + 0xe80: 0x202000, + 0xf80: 0x10000000, + 0x1000: 0x10002000, + 0x1100: 0x10200008, + 0x1200: 0x10202008, + 0x1300: 0x2008, + 0x1400: 0x200000, + 0x1500: 0x10000000, + 0x1600: 0x10000008, + 0x1700: 0x202000, + 0x1800: 0x202008, + 0x1900: 0x0, + 0x1a00: 0x8, + 0x1b00: 0x10200000, + 0x1c00: 0x2000, + 0x1d00: 0x10002008, + 0x1e00: 0x10202000, + 0x1f00: 0x200008, + 0x1080: 0x8, + 0x1180: 0x202000, + 0x1280: 0x200000, + 0x1380: 0x10000008, + 0x1480: 0x10002000, + 0x1580: 0x2008, + 0x1680: 0x10202008, + 0x1780: 0x10200000, + 0x1880: 0x10202000, + 0x1980: 0x10200008, + 0x1a80: 0x2000, + 0x1b80: 0x202008, + 0x1c80: 0x200008, + 0x1d80: 0x0, + 0x1e80: 0x10000000, + 0x1f80: 0x10002008 + }, + { + 0x0: 0x100000, + 0x10: 0x2000401, + 0x20: 0x400, + 0x30: 0x100401, + 0x40: 0x2100401, + 0x50: 0x0, + 0x60: 0x1, + 0x70: 0x2100001, + 0x80: 0x2000400, + 0x90: 0x100001, + 0xa0: 0x2000001, + 0xb0: 0x2100400, + 0xc0: 0x2100000, + 0xd0: 0x401, + 0xe0: 0x100400, + 0xf0: 0x2000000, + 0x8: 0x2100001, + 0x18: 0x0, + 0x28: 0x2000401, + 0x38: 0x2100400, + 0x48: 0x100000, + 0x58: 0x2000001, + 0x68: 0x2000000, + 0x78: 0x401, + 0x88: 0x100401, + 0x98: 0x2000400, + 
0xa8: 0x2100000, + 0xb8: 0x100001, + 0xc8: 0x400, + 0xd8: 0x2100401, + 0xe8: 0x1, + 0xf8: 0x100400, + 0x100: 0x2000000, + 0x110: 0x100000, + 0x120: 0x2000401, + 0x130: 0x2100001, + 0x140: 0x100001, + 0x150: 0x2000400, + 0x160: 0x2100400, + 0x170: 0x100401, + 0x180: 0x401, + 0x190: 0x2100401, + 0x1a0: 0x100400, + 0x1b0: 0x1, + 0x1c0: 0x0, + 0x1d0: 0x2100000, + 0x1e0: 0x2000001, + 0x1f0: 0x400, + 0x108: 0x100400, + 0x118: 0x2000401, + 0x128: 0x2100001, + 0x138: 0x1, + 0x148: 0x2000000, + 0x158: 0x100000, + 0x168: 0x401, + 0x178: 0x2100400, + 0x188: 0x2000001, + 0x198: 0x2100000, + 0x1a8: 0x0, + 0x1b8: 0x2100401, + 0x1c8: 0x100401, + 0x1d8: 0x400, + 0x1e8: 0x2000400, + 0x1f8: 0x100001 + }, + { + 0x0: 0x8000820, + 0x1: 0x20000, + 0x2: 0x8000000, + 0x3: 0x20, + 0x4: 0x20020, + 0x5: 0x8020820, + 0x6: 0x8020800, + 0x7: 0x800, + 0x8: 0x8020000, + 0x9: 0x8000800, + 0xa: 0x20800, + 0xb: 0x8020020, + 0xc: 0x820, + 0xd: 0x0, + 0xe: 0x8000020, + 0xf: 0x20820, + 0x80000000: 0x800, + 0x80000001: 0x8020820, + 0x80000002: 0x8000820, + 0x80000003: 0x8000000, + 0x80000004: 0x8020000, + 0x80000005: 0x20800, + 0x80000006: 0x20820, + 0x80000007: 0x20, + 0x80000008: 0x8000020, + 0x80000009: 0x820, + 0x8000000a: 0x20020, + 0x8000000b: 0x8020800, + 0x8000000c: 0x0, + 0x8000000d: 0x8020020, + 0x8000000e: 0x8000800, + 0x8000000f: 0x20000, + 0x10: 0x20820, + 0x11: 0x8020800, + 0x12: 0x20, + 0x13: 0x800, + 0x14: 0x8000800, + 0x15: 0x8000020, + 0x16: 0x8020020, + 0x17: 0x20000, + 0x18: 0x0, + 0x19: 0x20020, + 0x1a: 0x8020000, + 0x1b: 0x8000820, + 0x1c: 0x8020820, + 0x1d: 0x20800, + 0x1e: 0x820, + 0x1f: 0x8000000, + 0x80000010: 0x20000, + 0x80000011: 0x800, + 0x80000012: 0x8020020, + 0x80000013: 0x20820, + 0x80000014: 0x20, + 0x80000015: 0x8020000, + 0x80000016: 0x8000000, + 0x80000017: 0x8000820, + 0x80000018: 0x8020820, + 0x80000019: 0x8000020, + 0x8000001a: 0x8000800, + 0x8000001b: 0x0, + 0x8000001c: 0x20800, + 0x8000001d: 0x820, + 0x8000001e: 0x20020, + 0x8000001f: 0x8020800 + } + ]; + + // Masks that select the SBOX input + var SBOX_MASK = [ + 0xf8000001, 0x1f800000, 0x01f80000, 0x001f8000, + 0x0001f800, 0x00001f80, 0x000001f8, 0x8000001f + ]; + + /** + * DES block cipher algorithm. 
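+         * Operates on 64-bit blocks through a 16-round Feistel network; the 56-bit effective key is far too small for modern security requirements.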
+ */ + var DES = C_algo.DES = BlockCipher.extend({ + _doReset: function () { + // Shortcuts + var key = this._key; + var keyWords = key.words; + + // Select 56 bits according to PC1 + var keyBits = []; + for (var i = 0; i < 56; i++) { + var keyBitPos = PC1[i] - 1; + keyBits[i] = (keyWords[keyBitPos >>> 5] >>> (31 - keyBitPos % 32)) & 1; + } + + // Assemble 16 subkeys + var subKeys = this._subKeys = []; + for (var nSubKey = 0; nSubKey < 16; nSubKey++) { + // Create subkey + var subKey = subKeys[nSubKey] = []; + + // Shortcut + var bitShift = BIT_SHIFTS[nSubKey]; + + // Select 48 bits according to PC2 + for (var i = 0; i < 24; i++) { + // Select from the left 28 key bits + subKey[(i / 6) | 0] |= keyBits[((PC2[i] - 1) + bitShift) % 28] << (31 - i % 6); + + // Select from the right 28 key bits + subKey[4 + ((i / 6) | 0)] |= keyBits[28 + (((PC2[i + 24] - 1) + bitShift) % 28)] << (31 - i % 6); + } + + // Since each subkey is applied to an expanded 32-bit input, + // the subkey can be broken into 8 values scaled to 32-bits, + // which allows the key to be used without expansion + subKey[0] = (subKey[0] << 1) | (subKey[0] >>> 31); + for (var i = 1; i < 7; i++) { + subKey[i] = subKey[i] >>> ((i - 1) * 4 + 3); + } + subKey[7] = (subKey[7] << 5) | (subKey[7] >>> 27); + } + + // Compute inverse subkeys + var invSubKeys = this._invSubKeys = []; + for (var i = 0; i < 16; i++) { + invSubKeys[i] = subKeys[15 - i]; + } + }, + + encryptBlock: function (M, offset) { + this._doCryptBlock(M, offset, this._subKeys); + }, + + decryptBlock: function (M, offset) { + this._doCryptBlock(M, offset, this._invSubKeys); + }, + + _doCryptBlock: function (M, offset, subKeys) { + // Get input + this._lBlock = M[offset]; + this._rBlock = M[offset + 1]; + + // Initial permutation + exchangeLR.call(this, 4, 0x0f0f0f0f); + exchangeLR.call(this, 16, 0x0000ffff); + exchangeRL.call(this, 2, 0x33333333); + exchangeRL.call(this, 8, 0x00ff00ff); + exchangeLR.call(this, 1, 0x55555555); + + // Rounds + for (var round = 0; round < 16; round++) { + // Shortcuts + var subKey = subKeys[round]; + var lBlock = this._lBlock; + var rBlock = this._rBlock; + + // Feistel function + var f = 0; + for (var i = 0; i < 8; i++) { + f |= SBOX_P[i][((rBlock ^ subKey[i]) & SBOX_MASK[i]) >>> 0]; + } + this._lBlock = rBlock; + this._rBlock = lBlock ^ f; + } + + // Undo swap from last round + var t = this._lBlock; + this._lBlock = this._rBlock; + this._rBlock = t; + + // Final permutation + exchangeLR.call(this, 1, 0x55555555); + exchangeRL.call(this, 8, 0x00ff00ff); + exchangeRL.call(this, 2, 0x33333333); + exchangeLR.call(this, 16, 0x0000ffff); + exchangeLR.call(this, 4, 0x0f0f0f0f); + + // Set output + M[offset] = this._lBlock; + M[offset + 1] = this._rBlock; + }, + + keySize: 64/32, + + ivSize: 64/32, + + blockSize: 64/32 + }); + + // Swap bits across the left and right words + function exchangeLR(offset, mask) { + var t = ((this._lBlock >>> offset) ^ this._rBlock) & mask; + this._rBlock ^= t; + this._lBlock ^= t << offset; + } + + function exchangeRL(offset, mask) { + var t = ((this._rBlock >>> offset) ^ this._lBlock) & mask; + this._lBlock ^= t; + this._rBlock ^= t << offset; + } + + /** + * Shortcut functions to the cipher's object interface. + * + * @example + * + * var ciphertext = CryptoJS.DES.encrypt(message, key, cfg); + * var plaintext = CryptoJS.DES.decrypt(ciphertext, key, cfg); + */ + C.DES = BlockCipher._createHelper(DES); + + /** + * Triple-DES block cipher algorithm. 
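+         * Applies DES in encrypt-decrypt-encrypt (EDE) order with up to three independent 64-bit DES keys, as implemented by encryptBlock/decryptBlock below.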
+ */ + var TripleDES = C_algo.TripleDES = BlockCipher.extend({ + _doReset: function () { + // Shortcuts + var key = this._key; + var keyWords = key.words; + // Make sure the key length is valid (64, 128 or >= 192 bit) + if (keyWords.length !== 2 && keyWords.length !== 4 && keyWords.length < 6) { + throw new Error('Invalid key length - 3DES requires the key length to be 64, 128, 192 or >192.'); + } + + // Extend the key according to the keying options defined in 3DES standard + var key1 = keyWords.slice(0, 2); + var key2 = keyWords.length < 4 ? keyWords.slice(0, 2) : keyWords.slice(2, 4); + var key3 = keyWords.length < 6 ? keyWords.slice(0, 2) : keyWords.slice(4, 6); + + // Create DES instances + this._des1 = DES.createEncryptor(WordArray.create(key1)); + this._des2 = DES.createEncryptor(WordArray.create(key2)); + this._des3 = DES.createEncryptor(WordArray.create(key3)); + }, + + encryptBlock: function (M, offset) { + this._des1.encryptBlock(M, offset); + this._des2.decryptBlock(M, offset); + this._des3.encryptBlock(M, offset); + }, + + decryptBlock: function (M, offset) { + this._des3.decryptBlock(M, offset); + this._des2.encryptBlock(M, offset); + this._des1.decryptBlock(M, offset); + }, + + keySize: 192/32, + + ivSize: 64/32, + + blockSize: 64/32 + }); + + /** + * Shortcut functions to the cipher's object interface. + * + * @example + * + * var ciphertext = CryptoJS.TripleDES.encrypt(message, key, cfg); + * var plaintext = CryptoJS.TripleDES.decrypt(ciphertext, key, cfg); + */ + C.TripleDES = BlockCipher._createHelper(TripleDES); + }()); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var StreamCipher = C_lib.StreamCipher; + var C_algo = C.algo; + + /** + * RC4 stream cipher algorithm. + */ + var RC4 = C_algo.RC4 = StreamCipher.extend({ + _doReset: function () { + // Shortcuts + var key = this._key; + var keyWords = key.words; + var keySigBytes = key.sigBytes; + + // Init sbox + var S = this._S = []; + for (var i = 0; i < 256; i++) { + S[i] = i; + } + + // Key setup + for (var i = 0, j = 0; i < 256; i++) { + var keyByteIndex = i % keySigBytes; + var keyByte = (keyWords[keyByteIndex >>> 2] >>> (24 - (keyByteIndex % 4) * 8)) & 0xff; + + j = (j + S[i] + keyByte) % 256; + + // Swap + var t = S[i]; + S[i] = S[j]; + S[j] = t; + } + + // Counters + this._i = this._j = 0; + }, + + _doProcessBlock: function (M, offset) { + M[offset] ^= generateKeystreamWord.call(this); + }, + + keySize: 256/32, + + ivSize: 0 + }); + + function generateKeystreamWord() { + // Shortcuts + var S = this._S; + var i = this._i; + var j = this._j; + + // Generate keystream word + var keystreamWord = 0; + for (var n = 0; n < 4; n++) { + i = (i + 1) % 256; + j = (j + S[i]) % 256; + + // Swap + var t = S[i]; + S[i] = S[j]; + S[j] = t; + + keystreamWord |= S[(S[i] + S[j]) % 256] << (24 - n * 8); + } + + // Update counters + this._i = i; + this._j = j; + + return keystreamWord; + } + + /** + * Shortcut functions to the cipher's object interface. + * + * @example + * + * var ciphertext = CryptoJS.RC4.encrypt(message, key, cfg); + * var plaintext = CryptoJS.RC4.decrypt(ciphertext, key, cfg); + */ + C.RC4 = StreamCipher._createHelper(RC4); + + /** + * Modified RC4 stream cipher algorithm. + */ + var RC4Drop = C_algo.RC4Drop = RC4.extend({ + /** + * Configuration options. + * + * @property {number} drop The number of keystream words to drop. 
Default 192 + */ + cfg: RC4.cfg.extend({ + drop: 192 + }), + + _doReset: function () { + RC4._doReset.call(this); + + // Drop + for (var i = this.cfg.drop; i > 0; i--) { + generateKeystreamWord.call(this); + } + } + }); + + /** + * Shortcut functions to the cipher's object interface. + * + * @example + * + * var ciphertext = CryptoJS.RC4Drop.encrypt(message, key, cfg); + * var plaintext = CryptoJS.RC4Drop.decrypt(ciphertext, key, cfg); + */ + C.RC4Drop = StreamCipher._createHelper(RC4Drop); + }()); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var StreamCipher = C_lib.StreamCipher; + var C_algo = C.algo; + + // Reusable objects + var S = []; + var C_ = []; + var G = []; + + /** + * Rabbit stream cipher algorithm + */ + var Rabbit = C_algo.Rabbit = StreamCipher.extend({ + _doReset: function () { + // Shortcuts + var K = this._key.words; + var iv = this.cfg.iv; + + // Swap endian + for (var i = 0; i < 4; i++) { + K[i] = (((K[i] << 8) | (K[i] >>> 24)) & 0x00ff00ff) | + (((K[i] << 24) | (K[i] >>> 8)) & 0xff00ff00); + } + + // Generate initial state values + var X = this._X = [ + K[0], (K[3] << 16) | (K[2] >>> 16), + K[1], (K[0] << 16) | (K[3] >>> 16), + K[2], (K[1] << 16) | (K[0] >>> 16), + K[3], (K[2] << 16) | (K[1] >>> 16) + ]; + + // Generate initial counter values + var C = this._C = [ + (K[2] << 16) | (K[2] >>> 16), (K[0] & 0xffff0000) | (K[1] & 0x0000ffff), + (K[3] << 16) | (K[3] >>> 16), (K[1] & 0xffff0000) | (K[2] & 0x0000ffff), + (K[0] << 16) | (K[0] >>> 16), (K[2] & 0xffff0000) | (K[3] & 0x0000ffff), + (K[1] << 16) | (K[1] >>> 16), (K[3] & 0xffff0000) | (K[0] & 0x0000ffff) + ]; + + // Carry bit + this._b = 0; + + // Iterate the system four times + for (var i = 0; i < 4; i++) { + nextState.call(this); + } + + // Modify the counters + for (var i = 0; i < 8; i++) { + C[i] ^= X[(i + 4) & 7]; + } + + // IV setup + if (iv) { + // Shortcuts + var IV = iv.words; + var IV_0 = IV[0]; + var IV_1 = IV[1]; + + // Generate four subvectors + var i0 = (((IV_0 << 8) | (IV_0 >>> 24)) & 0x00ff00ff) | (((IV_0 << 24) | (IV_0 >>> 8)) & 0xff00ff00); + var i2 = (((IV_1 << 8) | (IV_1 >>> 24)) & 0x00ff00ff) | (((IV_1 << 24) | (IV_1 >>> 8)) & 0xff00ff00); + var i1 = (i0 >>> 16) | (i2 & 0xffff0000); + var i3 = (i2 << 16) | (i0 & 0x0000ffff); + + // Modify counter values + C[0] ^= i0; + C[1] ^= i1; + C[2] ^= i2; + C[3] ^= i3; + C[4] ^= i0; + C[5] ^= i1; + C[6] ^= i2; + C[7] ^= i3; + + // Iterate the system four times + for (var i = 0; i < 4; i++) { + nextState.call(this); + } + } + }, + + _doProcessBlock: function (M, offset) { + // Shortcut + var X = this._X; + + // Iterate the system + nextState.call(this); + + // Generate four keystream words + S[0] = X[0] ^ (X[5] >>> 16) ^ (X[3] << 16); + S[1] = X[2] ^ (X[7] >>> 16) ^ (X[5] << 16); + S[2] = X[4] ^ (X[1] >>> 16) ^ (X[7] << 16); + S[3] = X[6] ^ (X[3] >>> 16) ^ (X[1] << 16); + + for (var i = 0; i < 4; i++) { + // Swap endian + S[i] = (((S[i] << 8) | (S[i] >>> 24)) & 0x00ff00ff) | + (((S[i] << 24) | (S[i] >>> 8)) & 0xff00ff00); + + // Encrypt + M[offset + i] ^= S[i]; + } + }, + + blockSize: 128/32, + + ivSize: 64/32 + }); + + function nextState() { + // Shortcuts + var X = this._X; + var C = this._C; + + // Save old counter values + for (var i = 0; i < 8; i++) { + C_[i] = C[i]; + } + + // Calculate new counter values + C[0] = (C[0] + 0x4d34d34d + this._b) | 0; + C[1] = (C[1] + 0xd34d34d3 + ((C[0] >>> 0) < (C_[0] >>> 0) ? 1 : 0)) | 0; + C[2] = (C[2] + 0x34d34d34 + ((C[1] >>> 0) < (C_[1] >>> 0) ? 
1 : 0)) | 0; + C[3] = (C[3] + 0x4d34d34d + ((C[2] >>> 0) < (C_[2] >>> 0) ? 1 : 0)) | 0; + C[4] = (C[4] + 0xd34d34d3 + ((C[3] >>> 0) < (C_[3] >>> 0) ? 1 : 0)) | 0; + C[5] = (C[5] + 0x34d34d34 + ((C[4] >>> 0) < (C_[4] >>> 0) ? 1 : 0)) | 0; + C[6] = (C[6] + 0x4d34d34d + ((C[5] >>> 0) < (C_[5] >>> 0) ? 1 : 0)) | 0; + C[7] = (C[7] + 0xd34d34d3 + ((C[6] >>> 0) < (C_[6] >>> 0) ? 1 : 0)) | 0; + this._b = (C[7] >>> 0) < (C_[7] >>> 0) ? 1 : 0; + + // Calculate the g-values + for (var i = 0; i < 8; i++) { + var gx = X[i] + C[i]; + + // Construct high and low argument for squaring + var ga = gx & 0xffff; + var gb = gx >>> 16; + + // Calculate high and low result of squaring + var gh = ((((ga * ga) >>> 17) + ga * gb) >>> 15) + gb * gb; + var gl = (((gx & 0xffff0000) * gx) | 0) + (((gx & 0x0000ffff) * gx) | 0); + + // High XOR low + G[i] = gh ^ gl; + } + + // Calculate new state values + X[0] = (G[0] + ((G[7] << 16) | (G[7] >>> 16)) + ((G[6] << 16) | (G[6] >>> 16))) | 0; + X[1] = (G[1] + ((G[0] << 8) | (G[0] >>> 24)) + G[7]) | 0; + X[2] = (G[2] + ((G[1] << 16) | (G[1] >>> 16)) + ((G[0] << 16) | (G[0] >>> 16))) | 0; + X[3] = (G[3] + ((G[2] << 8) | (G[2] >>> 24)) + G[1]) | 0; + X[4] = (G[4] + ((G[3] << 16) | (G[3] >>> 16)) + ((G[2] << 16) | (G[2] >>> 16))) | 0; + X[5] = (G[5] + ((G[4] << 8) | (G[4] >>> 24)) + G[3]) | 0; + X[6] = (G[6] + ((G[5] << 16) | (G[5] >>> 16)) + ((G[4] << 16) | (G[4] >>> 16))) | 0; + X[7] = (G[7] + ((G[6] << 8) | (G[6] >>> 24)) + G[5]) | 0; + } + + /** + * Shortcut functions to the cipher's object interface. + * + * @example + * + * var ciphertext = CryptoJS.Rabbit.encrypt(message, key, cfg); + * var plaintext = CryptoJS.Rabbit.decrypt(ciphertext, key, cfg); + */ + C.Rabbit = StreamCipher._createHelper(Rabbit); + }()); + + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var StreamCipher = C_lib.StreamCipher; + var C_algo = C.algo; + + // Reusable objects + var S = []; + var C_ = []; + var G = []; + + /** + * Rabbit stream cipher algorithm. + * + * This is a legacy version that neglected to convert the key to little-endian. + * This error doesn't affect the cipher's security, + * but it does affect its compatibility with other implementations. 
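+         * Prefer CryptoJS.algo.Rabbit for new data; use this variant only to read ciphertext that was produced with it.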
+ */ + var RabbitLegacy = C_algo.RabbitLegacy = StreamCipher.extend({ + _doReset: function () { + // Shortcuts + var K = this._key.words; + var iv = this.cfg.iv; + + // Generate initial state values + var X = this._X = [ + K[0], (K[3] << 16) | (K[2] >>> 16), + K[1], (K[0] << 16) | (K[3] >>> 16), + K[2], (K[1] << 16) | (K[0] >>> 16), + K[3], (K[2] << 16) | (K[1] >>> 16) + ]; + + // Generate initial counter values + var C = this._C = [ + (K[2] << 16) | (K[2] >>> 16), (K[0] & 0xffff0000) | (K[1] & 0x0000ffff), + (K[3] << 16) | (K[3] >>> 16), (K[1] & 0xffff0000) | (K[2] & 0x0000ffff), + (K[0] << 16) | (K[0] >>> 16), (K[2] & 0xffff0000) | (K[3] & 0x0000ffff), + (K[1] << 16) | (K[1] >>> 16), (K[3] & 0xffff0000) | (K[0] & 0x0000ffff) + ]; + + // Carry bit + this._b = 0; + + // Iterate the system four times + for (var i = 0; i < 4; i++) { + nextState.call(this); + } + + // Modify the counters + for (var i = 0; i < 8; i++) { + C[i] ^= X[(i + 4) & 7]; + } + + // IV setup + if (iv) { + // Shortcuts + var IV = iv.words; + var IV_0 = IV[0]; + var IV_1 = IV[1]; + + // Generate four subvectors + var i0 = (((IV_0 << 8) | (IV_0 >>> 24)) & 0x00ff00ff) | (((IV_0 << 24) | (IV_0 >>> 8)) & 0xff00ff00); + var i2 = (((IV_1 << 8) | (IV_1 >>> 24)) & 0x00ff00ff) | (((IV_1 << 24) | (IV_1 >>> 8)) & 0xff00ff00); + var i1 = (i0 >>> 16) | (i2 & 0xffff0000); + var i3 = (i2 << 16) | (i0 & 0x0000ffff); + + // Modify counter values + C[0] ^= i0; + C[1] ^= i1; + C[2] ^= i2; + C[3] ^= i3; + C[4] ^= i0; + C[5] ^= i1; + C[6] ^= i2; + C[7] ^= i3; + + // Iterate the system four times + for (var i = 0; i < 4; i++) { + nextState.call(this); + } + } + }, + + _doProcessBlock: function (M, offset) { + // Shortcut + var X = this._X; + + // Iterate the system + nextState.call(this); + + // Generate four keystream words + S[0] = X[0] ^ (X[5] >>> 16) ^ (X[3] << 16); + S[1] = X[2] ^ (X[7] >>> 16) ^ (X[5] << 16); + S[2] = X[4] ^ (X[1] >>> 16) ^ (X[7] << 16); + S[3] = X[6] ^ (X[3] >>> 16) ^ (X[1] << 16); + + for (var i = 0; i < 4; i++) { + // Swap endian + S[i] = (((S[i] << 8) | (S[i] >>> 24)) & 0x00ff00ff) | + (((S[i] << 24) | (S[i] >>> 8)) & 0xff00ff00); + + // Encrypt + M[offset + i] ^= S[i]; + } + }, + + blockSize: 128/32, + + ivSize: 64/32 + }); + + function nextState() { + // Shortcuts + var X = this._X; + var C = this._C; + + // Save old counter values + for (var i = 0; i < 8; i++) { + C_[i] = C[i]; + } + + // Calculate new counter values + C[0] = (C[0] + 0x4d34d34d + this._b) | 0; + C[1] = (C[1] + 0xd34d34d3 + ((C[0] >>> 0) < (C_[0] >>> 0) ? 1 : 0)) | 0; + C[2] = (C[2] + 0x34d34d34 + ((C[1] >>> 0) < (C_[1] >>> 0) ? 1 : 0)) | 0; + C[3] = (C[3] + 0x4d34d34d + ((C[2] >>> 0) < (C_[2] >>> 0) ? 1 : 0)) | 0; + C[4] = (C[4] + 0xd34d34d3 + ((C[3] >>> 0) < (C_[3] >>> 0) ? 1 : 0)) | 0; + C[5] = (C[5] + 0x34d34d34 + ((C[4] >>> 0) < (C_[4] >>> 0) ? 1 : 0)) | 0; + C[6] = (C[6] + 0x4d34d34d + ((C[5] >>> 0) < (C_[5] >>> 0) ? 1 : 0)) | 0; + C[7] = (C[7] + 0xd34d34d3 + ((C[6] >>> 0) < (C_[6] >>> 0) ? 1 : 0)) | 0; + this._b = (C[7] >>> 0) < (C_[7] >>> 0) ? 
1 : 0;
+
+        // Calculate the g-values
+        for (var i = 0; i < 8; i++) {
+            var gx = X[i] + C[i];
+
+            // Construct high and low argument for squaring
+            var ga = gx & 0xffff;
+            var gb = gx >>> 16;
+
+            // Calculate high and low result of squaring
+            var gh = ((((ga * ga) >>> 17) + ga * gb) >>> 15) + gb * gb;
+            var gl = (((gx & 0xffff0000) * gx) | 0) + (((gx & 0x0000ffff) * gx) | 0);
+
+            // High XOR low
+            G[i] = gh ^ gl;
+        }
+
+        // Calculate new state values
+        X[0] = (G[0] + ((G[7] << 16) | (G[7] >>> 16)) + ((G[6] << 16) | (G[6] >>> 16))) | 0;
+        X[1] = (G[1] + ((G[0] << 8) | (G[0] >>> 24)) + G[7]) | 0;
+        X[2] = (G[2] + ((G[1] << 16) | (G[1] >>> 16)) + ((G[0] << 16) | (G[0] >>> 16))) | 0;
+        X[3] = (G[3] + ((G[2] << 8) | (G[2] >>> 24)) + G[1]) | 0;
+        X[4] = (G[4] + ((G[3] << 16) | (G[3] >>> 16)) + ((G[2] << 16) | (G[2] >>> 16))) | 0;
+        X[5] = (G[5] + ((G[4] << 8) | (G[4] >>> 24)) + G[3]) | 0;
+        X[6] = (G[6] + ((G[5] << 16) | (G[5] >>> 16)) + ((G[4] << 16) | (G[4] >>> 16))) | 0;
+        X[7] = (G[7] + ((G[6] << 8) | (G[6] >>> 24)) + G[5]) | 0;
+    }
+
+    /**
+     * Shortcut functions to the cipher's object interface.
+     *
+     * @example
+     *
+     *     var ciphertext = CryptoJS.RabbitLegacy.encrypt(message, key, cfg);
+     *     var plaintext = CryptoJS.RabbitLegacy.decrypt(ciphertext, key, cfg);
+     */
+    C.RabbitLegacy = StreamCipher._createHelper(RabbitLegacy);
+ }());
+
+
+ return CryptoJS;
+
+}));
\ No newline at end of file
diff --git a/node_modules/crypto-js/docs/QuickStartGuide.wiki b/node_modules/crypto-js/docs/QuickStartGuide.wiki
new file mode 100644
index 0000000..2bee35d
--- /dev/null
+++ b/node_modules/crypto-js/docs/QuickStartGuide.wiki
@@ -0,0 +1,470 @@
+
+
+----
+
+= Quick-start Guide =
+
+== Hashers ==
+
+=== The Hasher Algorithms ===
+
+==== MD5 ====
+
+MD5 is a widely used hash function. It's been used in a variety of security applications and is also commonly used to check the integrity of files. However, MD5 is not collision resistant, so it isn't suitable for applications like SSL certificates or digital signatures that rely on that property.
+
+{{{
+var hash = CryptoJS.MD5("Message");
+}}}
+
+==== SHA-1 ====
+
+The SHA hash functions were designed by the National Security Agency (NSA). SHA-1 is the most established of the existing SHA hash functions, and it's used in a variety of security applications and protocols. However, SHA-1's collision resistance has been weakening as new attacks are discovered or improved.
+
+{{{
+var hash = CryptoJS.SHA1("Message");
+}}}
+
+==== SHA-2 ====
+
+SHA-256 is one of the four variants in the SHA-2 set. It isn't as widely used as SHA-1, though it provides much better security.
+
+{{{
+var hash = CryptoJS.SHA256("Message");
+}}}
+
+SHA-512 is largely identical to SHA-256 but operates on 64-bit words rather than 32-bit words.
+
+{{{
+var hash = CryptoJS.SHA512("Message");
+}}}
+
+CryptoJS also supports SHA-224 and SHA-384, which are largely identical but truncated versions of SHA-256 and SHA-512 respectively.
+
+==== SHA-3 ====
+
+SHA-3 is the winner of a five-year competition to select a new cryptographic hash algorithm, in which 64 competing designs were evaluated.
+
+{{{
+var hash = CryptoJS.SHA3("Message");
+}}}
+
+SHA-3 can be configured to output a hash length of 224, 256, 384, or 512 bits. The default is 512 bits.
+
+{{{
+var hash = CryptoJS.SHA3("Message", { outputLength: 512 });
+}}}
+
+==== RIPEMD-160 ====
+
+{{{
+var hash = CryptoJS.RIPEMD160("Message");
+}}}
+
+=== The Hasher Input ===
+
+The hash algorithms accept either strings or instances of CryptoJS.lib.WordArray. A WordArray object represents an array of 32-bit words. When you pass a string, it's automatically converted to a WordArray encoded as UTF-8.
+
+=== The Hasher Output ===
+
+The hash you get back isn't a string yet. It's a WordArray object. When you use a WordArray object in a string context, it's automatically converted to a hex string.
+
+{{{
+var hash = CryptoJS.SHA256("Message");
+alert(hash);   // the default toString() is hex
+}}}
+
+You can convert a WordArray object to other formats by explicitly calling the toString method and passing an encoder.
+
+{{{
+var hash = CryptoJS.SHA256("Message");
+alert(hash.toString(CryptoJS.enc.Hex));
+alert(hash.toString(CryptoJS.enc.Base64));
+}}}
+
+=== Progressive Hashing ===
+
+{{{
+var sha256 = CryptoJS.algo.SHA256.create();
+sha256.update("Message Part 1");
+sha256.update("Message Part 2");
+sha256.update("Message Part 3");
+var hash = sha256.finalize();
+}}}
+
+== HMAC ==
+
+A keyed-hash message authentication code (HMAC) is a mechanism for message authentication using cryptographic hash functions.
+
+HMAC can be used in combination with any iterated cryptographic hash function.
+
+{{{
+var hash = CryptoJS.HmacMD5("Message", "Secret Passphrase");
+var hash = CryptoJS.HmacSHA1("Message", "Secret Passphrase");
+var hash = CryptoJS.HmacSHA256("Message", "Secret Passphrase");
+var hash = CryptoJS.HmacSHA512("Message", "Secret Passphrase");
+}}}
+
+=== Progressive HMAC Hashing ===
+
+{{{
+var hmac = CryptoJS.algo.HMAC.create(CryptoJS.algo.SHA256, "Secret Passphrase");
+hmac.update("Message Part 1");
+hmac.update("Message Part 2");
+hmac.update("Message Part 3");
+var hash = hmac.finalize();
+}}}
+
+== PBKDF2 ==
+
+PBKDF2 is a password-based key derivation function. In many applications of cryptography, user security ultimately depends on a password, and because a password usually can't be used directly as a cryptographic key, some processing is required.
+
+A salt provides a large set of keys for any given password, and an iteration count increases the cost of producing keys from a password, thereby also increasing the difficulty of attack.
+
+{{{
+var salt = CryptoJS.lib.WordArray.random(128/8);
+var key128Bits = CryptoJS.PBKDF2("Secret Passphrase", salt, { keySize: 128/32 });
+var key512Bits1000Iterations = CryptoJS.PBKDF2("Secret Passphrase", salt, { keySize: 512/32, iterations: 1000 });
+}}}
+
+== Ciphers ==
+
+=== The Cipher Algorithms ===
+
+==== AES ====
+
+The Advanced Encryption Standard (AES) is a U.S. Federal Information Processing Standard (FIPS). It was selected after a five-year process in which 15 competing designs were evaluated.
+
+{{{
+var encrypted = CryptoJS.AES.encrypt("Message", "Secret Passphrase");
+var decrypted = CryptoJS.AES.decrypt(encrypted, "Secret Passphrase");
+}}}
+
+CryptoJS supports AES-128, AES-192, and AES-256. It picks the variant based on the size of the key you pass in. If you use a passphrase, it generates a 256-bit key.
+
+==== DES, Triple DES ====
+
+DES was once the dominant encryption algorithm and was published as an official Federal Information Processing Standard (FIPS). DES is now considered insecure because of its small key size.
+
+{{{
+var encrypted = CryptoJS.DES.encrypt("Message", "Secret Passphrase");
+var decrypted = CryptoJS.DES.decrypt(encrypted, "Secret Passphrase");
+}}}
+
+Triple DES applies DES three times to each block to increase the effective key size. It is much stronger than single DES, though modern designs such as AES are generally preferred.
+
+{{{
+var encrypted = CryptoJS.TripleDES.encrypt("Message", "Secret Passphrase");
+var decrypted = CryptoJS.TripleDES.decrypt(encrypted, "Secret Passphrase");
+}}}
+
+==== Rabbit ====
+
+Rabbit is a high-performance stream cipher and part of the final eSTREAM portfolio. It is one of the four software designs selected after a three-and-a-half-year process in which 22 designs were evaluated.
+
+{{{
+var encrypted = CryptoJS.Rabbit.encrypt("Message", "Secret Passphrase");
+var decrypted = CryptoJS.Rabbit.decrypt(encrypted, "Secret Passphrase");
+}}}
+
+==== RC4, RC4Drop ====
+
+RC4 is a widely used stream cipher. It has been used in popular protocols such as SSL and WEP. Although remarkable for its simplicity and speed, the algorithm's history doesn't inspire confidence in its security.
+
+{{{
+var encrypted = CryptoJS.RC4.encrypt("Message", "Secret Passphrase");
+var decrypted = CryptoJS.RC4.decrypt(encrypted, "Secret Passphrase");
+}}}
+
+It was discovered that the first few bytes of keystream are strongly non-random and leak information about the key. We can defend against this attack by discarding the initial portion of the keystream. This modified algorithm is traditionally called RC4-drop.
+
+By default, 192 words (768 bytes) are dropped, but you can configure the algorithm to drop any number of words.
+
+{{{
+var encrypted = CryptoJS.RC4Drop.encrypt("Message", "Secret Passphrase", { drop: 3072/4 });
+var decrypted = CryptoJS.RC4Drop.decrypt(encrypted, "Secret Passphrase", { drop: 3072/4 });
+}}}
+
+=== Custom Key and IV ===
+
+{{{
+var key = CryptoJS.enc.Hex.parse('000102030405060708090a0b0c0d0e0f');
+var iv  = CryptoJS.enc.Hex.parse('101112131415161718191a1b1c1d1e1f');
+var encrypted = CryptoJS.AES.encrypt("Message", key, { iv: iv });
+}}}
+
+=== Block Modes and Padding ===
+
+{{{
+var encrypted = CryptoJS.AES.encrypt("Message", "Secret Passphrase", {
+    mode: CryptoJS.mode.CFB,
+    padding: CryptoJS.pad.AnsiX923
+});
+}}}
+
+CryptoJS supports the following modes:
+
+ * CBC (the default)
+ * CFB
+ * CTR
+ * OFB
+ * ECB
+
+And CryptoJS supports the following padding schemes:
+
+ * Pkcs7 (the default)
+ * Iso97971
+ * AnsiX923
+ * Iso10126
+ * ZeroPadding
+ * NoPadding
+
+=== The Cipher Input ===
+
+For the plaintext message, the cipher algorithms accept either strings or instances of CryptoJS.lib.WordArray.
+
+For the key, when you pass a string, it's treated as a passphrase and used to derive an actual key and IV. Or you can pass a WordArray that represents the actual key. If you pass the actual key, you must also pass the actual IV.
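+As a minimal sketch of that last rule (reusing the key, iv, and encrypted values from the Custom Key and IV section above), decryption with an explicit key must be handed the same IV:
+
+{{{
+var decrypted = CryptoJS.AES.decrypt(encrypted, key, { iv: iv });
+var plaintext = decrypted.toString(CryptoJS.enc.Utf8);
+}}}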
+
+For the ciphertext, the cipher algorithms accept either strings or instances of CryptoJS.lib.CipherParams. A CipherParams object represents a collection of parameters such as the IV, a salt, and the raw ciphertext itself. When you pass a string, it's automatically converted to a CipherParams object according to a configurable format strategy.
+
+=== The Cipher Output ===
+
+The plaintext you get back after decryption is a WordArray object. See The Hasher Output above for more detail.
+
+The ciphertext you get back after encryption isn't a string yet. It's a CipherParams object. A CipherParams object gives you access to all the parameters used during encryption. When you use a CipherParams object in a string context, it's automatically converted to a string according to a format strategy. The default is an OpenSSL-compatible format.
+
+{{{
+var encrypted = CryptoJS.AES.encrypt("Message", "Secret Passphrase");
+alert(encrypted.key);         // the derived key as a WordArray
+alert(encrypted.iv);          // the derived IV as a WordArray
+alert(encrypted.salt);        // the random salt as a WordArray
+alert(encrypted.ciphertext);  // the raw ciphertext as a WordArray
+alert(encrypted);             // the OpenSSL-formatted string, "U2FsdGVkX1..."
+}}}
+
+You can define your own formats in order to be compatible with other crypto implementations. A format is an object with two methods, stringify and parse, that convert between CipherParams objects and ciphertext strings.
+
+Here's how you might write a JSON formatter:
+
+{{{
+var JsonFormatter = {
+    stringify: function (cipherParams) {
+        // Create a JSON object with the ciphertext
+        var jsonObj = { ct: cipherParams.ciphertext.toString(CryptoJS.enc.Base64) };
+
+        // Optionally add the IV and salt
+        if (cipherParams.iv) {
+            jsonObj.iv = cipherParams.iv.toString();
+        }
+        if (cipherParams.salt) {
+            jsonObj.s = cipherParams.salt.toString();
+        }
+
+        return JSON.stringify(jsonObj);
+    },
+
+    parse: function (jsonStr) {
+        var jsonObj = JSON.parse(jsonStr);
+
+        // Extract the ciphertext and create a CipherParams object
+        var cipherParams = CryptoJS.lib.CipherParams.create({
+            ciphertext: CryptoJS.enc.Base64.parse(jsonObj.ct)
+        });
+
+        // Optionally extract the IV and salt
+        if (jsonObj.iv) {
+            cipherParams.iv = CryptoJS.enc.Hex.parse(jsonObj.iv);
+        }
+        if (jsonObj.s) {
+            cipherParams.salt = CryptoJS.enc.Hex.parse(jsonObj.s);
+        }
+
+        return cipherParams;
+    }
+};
+}}}
+
+=== Progressive Ciphering ===
+
+{{{
+var key = CryptoJS.enc.Hex.parse('000102030405060708090a0b0c0d0e0f');
+var iv  = CryptoJS.enc.Hex.parse('101112131415161718191a1b1c1d1e1f');
+
+var aesEncryptor = CryptoJS.algo.AES.createEncryptor(key, { iv: iv });
+var ciphertextPart1 = aesEncryptor.process("Message Part 1");
+var ciphertextPart2 = aesEncryptor.process("Message Part 2");
+var ciphertextPart3 = aesEncryptor.process("Message Part 3");
+var ciphertextPart4 = aesEncryptor.finalize();
+}}}
+
+=== Interoperability ===
+
+==== With OpenSSL ====
+
+Encrypt with OpenSSL:
+
+{{{
+openssl enc -aes-256-cbc -in infile -out outfile -pass pass:"Secret Passphrase" -e -base64
+}}}
+
+Decrypt with CryptoJS:
+
+{{{
+// openSSLEncrypted holds the base64 string produced by the command above
+var decrypted = CryptoJS.AES.decrypt(openSSLEncrypted, "Secret Passphrase");
+}}}
+
+== Encoders ==
+
+CryptoJS can convert from encoding formats such as Base64, Latin1 or Hex to WordArray objects and vice versa.
+
+{{{
+var words  = CryptoJS.enc.Base64.parse('SGVsbG8sIFdvcmxkIQ==');
+var base64 = CryptoJS.enc.Base64.stringify(words);
+
+var words  = CryptoJS.enc.Latin1.parse('Hello, World!');
+var latin1 = CryptoJS.enc.Latin1.stringify(words);
+}}}
\ No newline at end of file
diff --git a/node_modules/crypto-js/enc-base64.js b/node_modules/crypto-js/enc-base64.js
new file mode 100644
index 0000000..0ffcd53
--- /dev/null
+++ b/node_modules/crypto-js/enc-base64.js
@@ -0,0 +1,136 @@
+;(function (root, factory) {
+    if (typeof exports === "object") {
+        // CommonJS
+        module.exports = exports = factory(require("./core"));
+    }
+    else if (typeof define === "function" && define.amd) {
+        // AMD
+        define(["./core"], factory);
+    }
+    else {
+        // Global (browser)
+        factory(root.CryptoJS);
+    }
+}(this, function (CryptoJS) {
+
+    (function () {
+        // Shortcuts
+        var C = CryptoJS;
+        var C_lib = C.lib;
+        var WordArray = C_lib.WordArray;
+        var C_enc = C.enc;
+
+        /**
+         * Base64 encoding strategy.
+         */
+        var Base64 = C_enc.Base64 = {
+            /**
+             * Converts a word array to a Base64 string.
+             *
+             * @param {WordArray} wordArray The word array.
+             *
+             * @return {string} The Base64 string.
+             *
+             * @static
+             *
+             * @example
+             *
+             *     var base64String = CryptoJS.enc.Base64.stringify(wordArray);
+             */
+            stringify: function (wordArray) {
+                // Shortcuts
+                var words = wordArray.words;
+                var sigBytes = wordArray.sigBytes;
+                var map = this._map;
+
+                // Clamp excess bits
+                wordArray.clamp();
+
+                // Convert
+                var base64Chars = [];
+                for (var i = 0; i < sigBytes; i += 3) {
+                    var byte1 = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff;
+                    var byte2 = (words[(i + 1) >>> 2] >>> (24 - ((i + 1) % 4) * 8)) & 0xff;
+                    var byte3 = (words[(i + 2) >>> 2] >>> (24 - ((i + 2) % 4) * 8)) & 0xff;
+
+                    var triplet = (byte1 << 16) | (byte2 << 8) | byte3;
+
+                    for (var j = 0; (j < 4) && (i + j * 0.75 < sigBytes); j++) {
+                        base64Chars.push(map.charAt((triplet >>> (6 * (3 - j))) & 0x3f));
+                    }
+                }
+
+                // Add padding
+                var paddingChar = map.charAt(64);
+                if (paddingChar) {
+                    while (base64Chars.length % 4) {
+                        base64Chars.push(paddingChar);
+                    }
+                }
+
+                return base64Chars.join('');
+            },
+
+            /**
+             * Converts a Base64 string to a word array.
+ * + * @param {string} base64Str The Base64 string. + * + * @return {WordArray} The word array. + * + * @static + * + * @example + * + * var wordArray = CryptoJS.enc.Base64.parse(base64String); + */ + parse: function (base64Str) { + // Shortcuts + var base64StrLength = base64Str.length; + var map = this._map; + var reverseMap = this._reverseMap; + + if (!reverseMap) { + reverseMap = this._reverseMap = []; + for (var j = 0; j < map.length; j++) { + reverseMap[map.charCodeAt(j)] = j; + } + } + + // Ignore padding + var paddingChar = map.charAt(64); + if (paddingChar) { + var paddingIndex = base64Str.indexOf(paddingChar); + if (paddingIndex !== -1) { + base64StrLength = paddingIndex; + } + } + + // Convert + return parseLoop(base64Str, base64StrLength, reverseMap); + + }, + + _map: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=' + }; + + function parseLoop(base64Str, base64StrLength, reverseMap) { + var words = []; + var nBytes = 0; + for (var i = 0; i < base64StrLength; i++) { + if (i % 4) { + var bits1 = reverseMap[base64Str.charCodeAt(i - 1)] << ((i % 4) * 2); + var bits2 = reverseMap[base64Str.charCodeAt(i)] >>> (6 - (i % 4) * 2); + var bitsCombined = bits1 | bits2; + words[nBytes >>> 2] |= bitsCombined << (24 - (nBytes % 4) * 8); + nBytes++; + } + } + return WordArray.create(words, nBytes); + } + }()); + + + return CryptoJS.enc.Base64; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/enc-base64url.js b/node_modules/crypto-js/enc-base64url.js new file mode 100644 index 0000000..2468912 --- /dev/null +++ b/node_modules/crypto-js/enc-base64url.js @@ -0,0 +1,140 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var C_enc = C.enc; + + /** + * Base64url encoding strategy. + */ + var Base64url = C_enc.Base64url = { + /** + * Converts a word array to a Base64url string. + * + * @param {WordArray} wordArray The word array. + * + * @param {boolean} urlSafe Whether to use url safe + * + * @return {string} The Base64url string. + * + * @static + * + * @example + * + * var base64String = CryptoJS.enc.Base64url.stringify(wordArray); + */ + stringify: function (wordArray, urlSafe=true) { + // Shortcuts + var words = wordArray.words; + var sigBytes = wordArray.sigBytes; + var map = urlSafe ? this._safe_map : this._map; + + // Clamp excess bits + wordArray.clamp(); + + // Convert + var base64Chars = []; + for (var i = 0; i < sigBytes; i += 3) { + var byte1 = (words[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff; + var byte2 = (words[(i + 1) >>> 2] >>> (24 - ((i + 1) % 4) * 8)) & 0xff; + var byte3 = (words[(i + 2) >>> 2] >>> (24 - ((i + 2) % 4) * 8)) & 0xff; + + var triplet = (byte1 << 16) | (byte2 << 8) | byte3; + + for (var j = 0; (j < 4) && (i + j * 0.75 < sigBytes); j++) { + base64Chars.push(map.charAt((triplet >>> (6 * (3 - j))) & 0x3f)); + } + } + + // Add padding + var paddingChar = map.charAt(64); + if (paddingChar) { + while (base64Chars.length % 4) { + base64Chars.push(paddingChar); + } + } + + return base64Chars.join(''); + }, + + /** + * Converts a Base64url string to a word array. + * + * @param {string} base64Str The Base64url string. 
+ * + * @param {boolean} urlSafe Whether to use url safe + * + * @return {WordArray} The word array. + * + * @static + * + * @example + * + * var wordArray = CryptoJS.enc.Base64url.parse(base64String); + */ + parse: function (base64Str, urlSafe=true) { + // Shortcuts + var base64StrLength = base64Str.length; + var map = urlSafe ? this._safe_map : this._map; + var reverseMap = this._reverseMap; + + if (!reverseMap) { + reverseMap = this._reverseMap = []; + for (var j = 0; j < map.length; j++) { + reverseMap[map.charCodeAt(j)] = j; + } + } + + // Ignore padding + var paddingChar = map.charAt(64); + if (paddingChar) { + var paddingIndex = base64Str.indexOf(paddingChar); + if (paddingIndex !== -1) { + base64StrLength = paddingIndex; + } + } + + // Convert + return parseLoop(base64Str, base64StrLength, reverseMap); + + }, + + _map: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=', + _safe_map: 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_', + }; + + function parseLoop(base64Str, base64StrLength, reverseMap) { + var words = []; + var nBytes = 0; + for (var i = 0; i < base64StrLength; i++) { + if (i % 4) { + var bits1 = reverseMap[base64Str.charCodeAt(i - 1)] << ((i % 4) * 2); + var bits2 = reverseMap[base64Str.charCodeAt(i)] >>> (6 - (i % 4) * 2); + var bitsCombined = bits1 | bits2; + words[nBytes >>> 2] |= bitsCombined << (24 - (nBytes % 4) * 8); + nBytes++; + } + } + return WordArray.create(words, nBytes); + } + }()); + + return CryptoJS.enc.Base64url; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/enc-hex.js b/node_modules/crypto-js/enc-hex.js new file mode 100644 index 0000000..88161ff --- /dev/null +++ b/node_modules/crypto-js/enc-hex.js @@ -0,0 +1,18 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.enc.Hex; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/enc-latin1.js b/node_modules/crypto-js/enc-latin1.js new file mode 100644 index 0000000..ade56dc --- /dev/null +++ b/node_modules/crypto-js/enc-latin1.js @@ -0,0 +1,18 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.enc.Latin1; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/enc-utf16.js b/node_modules/crypto-js/enc-utf16.js new file mode 100644 index 0000000..7de6245 --- /dev/null +++ b/node_modules/crypto-js/enc-utf16.js @@ -0,0 +1,149 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var C_enc = C.enc; + + /** + * UTF-16 BE encoding strategy. 
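+         * Each character becomes one 16-bit code unit stored big-endian (most significant byte first).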
+ */ + var Utf16BE = C_enc.Utf16 = C_enc.Utf16BE = { + /** + * Converts a word array to a UTF-16 BE string. + * + * @param {WordArray} wordArray The word array. + * + * @return {string} The UTF-16 BE string. + * + * @static + * + * @example + * + * var utf16String = CryptoJS.enc.Utf16.stringify(wordArray); + */ + stringify: function (wordArray) { + // Shortcuts + var words = wordArray.words; + var sigBytes = wordArray.sigBytes; + + // Convert + var utf16Chars = []; + for (var i = 0; i < sigBytes; i += 2) { + var codePoint = (words[i >>> 2] >>> (16 - (i % 4) * 8)) & 0xffff; + utf16Chars.push(String.fromCharCode(codePoint)); + } + + return utf16Chars.join(''); + }, + + /** + * Converts a UTF-16 BE string to a word array. + * + * @param {string} utf16Str The UTF-16 BE string. + * + * @return {WordArray} The word array. + * + * @static + * + * @example + * + * var wordArray = CryptoJS.enc.Utf16.parse(utf16String); + */ + parse: function (utf16Str) { + // Shortcut + var utf16StrLength = utf16Str.length; + + // Convert + var words = []; + for (var i = 0; i < utf16StrLength; i++) { + words[i >>> 1] |= utf16Str.charCodeAt(i) << (16 - (i % 2) * 16); + } + + return WordArray.create(words, utf16StrLength * 2); + } + }; + + /** + * UTF-16 LE encoding strategy. + */ + C_enc.Utf16LE = { + /** + * Converts a word array to a UTF-16 LE string. + * + * @param {WordArray} wordArray The word array. + * + * @return {string} The UTF-16 LE string. + * + * @static + * + * @example + * + * var utf16Str = CryptoJS.enc.Utf16LE.stringify(wordArray); + */ + stringify: function (wordArray) { + // Shortcuts + var words = wordArray.words; + var sigBytes = wordArray.sigBytes; + + // Convert + var utf16Chars = []; + for (var i = 0; i < sigBytes; i += 2) { + var codePoint = swapEndian((words[i >>> 2] >>> (16 - (i % 4) * 8)) & 0xffff); + utf16Chars.push(String.fromCharCode(codePoint)); + } + + return utf16Chars.join(''); + }, + + /** + * Converts a UTF-16 LE string to a word array. + * + * @param {string} utf16Str The UTF-16 LE string. + * + * @return {WordArray} The word array. 
+ * + * @static + * + * @example + * + * var wordArray = CryptoJS.enc.Utf16LE.parse(utf16Str); + */ + parse: function (utf16Str) { + // Shortcut + var utf16StrLength = utf16Str.length; + + // Convert + var words = []; + for (var i = 0; i < utf16StrLength; i++) { + words[i >>> 1] |= swapEndian(utf16Str.charCodeAt(i) << (16 - (i % 2) * 16)); + } + + return WordArray.create(words, utf16StrLength * 2); + } + }; + + function swapEndian(word) { + return ((word << 8) & 0xff00ff00) | ((word >>> 8) & 0x00ff00ff); + } + }()); + + + return CryptoJS.enc.Utf16; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/enc-utf8.js b/node_modules/crypto-js/enc-utf8.js new file mode 100644 index 0000000..e7a251d --- /dev/null +++ b/node_modules/crypto-js/enc-utf8.js @@ -0,0 +1,18 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.enc.Utf8; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/evpkdf.js b/node_modules/crypto-js/evpkdf.js new file mode 100644 index 0000000..578974a --- /dev/null +++ b/node_modules/crypto-js/evpkdf.js @@ -0,0 +1,134 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./sha1"), require("./hmac")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./sha1", "./hmac"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var Base = C_lib.Base; + var WordArray = C_lib.WordArray; + var C_algo = C.algo; + var MD5 = C_algo.MD5; + + /** + * This key derivation function is meant to conform with EVP_BytesToKey. + * www.openssl.org/docs/crypto/EVP_BytesToKey.html + */ + var EvpKDF = C_algo.EvpKDF = Base.extend({ + /** + * Configuration options. + * + * @property {number} keySize The key size in words to generate. Default: 4 (128 bits) + * @property {Hasher} hasher The hash algorithm to use. Default: MD5 + * @property {number} iterations The number of iterations to perform. Default: 1 + */ + cfg: Base.extend({ + keySize: 128/32, + hasher: MD5, + iterations: 1 + }), + + /** + * Initializes a newly created key derivation function. + * + * @param {Object} cfg (Optional) The configuration options to use for the derivation. + * + * @example + * + * var kdf = CryptoJS.algo.EvpKDF.create(); + * var kdf = CryptoJS.algo.EvpKDF.create({ keySize: 8 }); + * var kdf = CryptoJS.algo.EvpKDF.create({ keySize: 8, iterations: 1000 }); + */ + init: function (cfg) { + this.cfg = this.cfg.extend(cfg); + }, + + /** + * Derives a key from a password. + * + * @param {WordArray|string} password The password. + * @param {WordArray|string} salt A salt. + * + * @return {WordArray} The derived key. 
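+             * Note: with the default MD5 hasher and a single iteration, this matches OpenSSL's legacy EVP_BytesToKey derivation.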
+ * + * @example + * + * var key = kdf.compute(password, salt); + */ + compute: function (password, salt) { + var block; + + // Shortcut + var cfg = this.cfg; + + // Init hasher + var hasher = cfg.hasher.create(); + + // Initial values + var derivedKey = WordArray.create(); + + // Shortcuts + var derivedKeyWords = derivedKey.words; + var keySize = cfg.keySize; + var iterations = cfg.iterations; + + // Generate key + while (derivedKeyWords.length < keySize) { + if (block) { + hasher.update(block); + } + block = hasher.update(password).finalize(salt); + hasher.reset(); + + // Iterations + for (var i = 1; i < iterations; i++) { + block = hasher.finalize(block); + hasher.reset(); + } + + derivedKey.concat(block); + } + derivedKey.sigBytes = keySize * 4; + + return derivedKey; + } + }); + + /** + * Derives a key from a password. + * + * @param {WordArray|string} password The password. + * @param {WordArray|string} salt A salt. + * @param {Object} cfg (Optional) The configuration options to use for this computation. + * + * @return {WordArray} The derived key. + * + * @static + * + * @example + * + * var key = CryptoJS.EvpKDF(password, salt); + * var key = CryptoJS.EvpKDF(password, salt, { keySize: 8 }); + * var key = CryptoJS.EvpKDF(password, salt, { keySize: 8, iterations: 1000 }); + */ + C.EvpKDF = function (password, salt, cfg) { + return EvpKDF.create(cfg).compute(password, salt); + }; + }()); + + + return CryptoJS.EvpKDF; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/format-hex.js b/node_modules/crypto-js/format-hex.js new file mode 100644 index 0000000..2e9a861 --- /dev/null +++ b/node_modules/crypto-js/format-hex.js @@ -0,0 +1,66 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function (undefined) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var CipherParams = C_lib.CipherParams; + var C_enc = C.enc; + var Hex = C_enc.Hex; + var C_format = C.format; + + var HexFormatter = C_format.Hex = { + /** + * Converts the ciphertext of a cipher params object to a hexadecimally encoded string. + * + * @param {CipherParams} cipherParams The cipher params object. + * + * @return {string} The hexadecimally encoded string. + * + * @static + * + * @example + * + * var hexString = CryptoJS.format.Hex.stringify(cipherParams); + */ + stringify: function (cipherParams) { + return cipherParams.ciphertext.toString(Hex); + }, + + /** + * Converts a hexadecimally encoded ciphertext string to a cipher params object. + * + * @param {string} input The hexadecimally encoded string. + * + * @return {CipherParams} The cipher params object. 
+ * + * @static + * + * @example + * + * var cipherParams = CryptoJS.format.Hex.parse(hexString); + */ + parse: function (input) { + var ciphertext = Hex.parse(input); + return CipherParams.create({ ciphertext: ciphertext }); + } + }; + }()); + + + return CryptoJS.format.Hex; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/format-openssl.js b/node_modules/crypto-js/format-openssl.js new file mode 100644 index 0000000..3373edc --- /dev/null +++ b/node_modules/crypto-js/format-openssl.js @@ -0,0 +1,18 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.format.OpenSSL; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/hmac-md5.js b/node_modules/crypto-js/hmac-md5.js new file mode 100644 index 0000000..ad7a90a --- /dev/null +++ b/node_modules/crypto-js/hmac-md5.js @@ -0,0 +1,18 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./md5"), require("./hmac")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./md5", "./hmac"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.HmacMD5; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/hmac-ripemd160.js b/node_modules/crypto-js/hmac-ripemd160.js new file mode 100644 index 0000000..73d55a7 --- /dev/null +++ b/node_modules/crypto-js/hmac-ripemd160.js @@ -0,0 +1,18 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./ripemd160"), require("./hmac")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./ripemd160", "./hmac"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.HmacRIPEMD160; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/hmac-sha1.js b/node_modules/crypto-js/hmac-sha1.js new file mode 100644 index 0000000..0b570cb --- /dev/null +++ b/node_modules/crypto-js/hmac-sha1.js @@ -0,0 +1,18 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./sha1"), require("./hmac")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./sha1", "./hmac"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.HmacSHA1; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/hmac-sha224.js b/node_modules/crypto-js/hmac-sha224.js new file mode 100644 index 0000000..3778863 --- /dev/null +++ b/node_modules/crypto-js/hmac-sha224.js @@ -0,0 +1,18 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./sha256"), require("./sha224"), require("./hmac")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./sha256", "./sha224", 
"./hmac"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.HmacSHA224; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/hmac-sha256.js b/node_modules/crypto-js/hmac-sha256.js new file mode 100644 index 0000000..33b0c9f --- /dev/null +++ b/node_modules/crypto-js/hmac-sha256.js @@ -0,0 +1,18 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./sha256"), require("./hmac")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./sha256", "./hmac"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.HmacSHA256; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/hmac-sha3.js b/node_modules/crypto-js/hmac-sha3.js new file mode 100644 index 0000000..1248804 --- /dev/null +++ b/node_modules/crypto-js/hmac-sha3.js @@ -0,0 +1,18 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./x64-core"), require("./sha3"), require("./hmac")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./x64-core", "./sha3", "./hmac"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.HmacSHA3; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/hmac-sha384.js b/node_modules/crypto-js/hmac-sha384.js new file mode 100644 index 0000000..0036e2b --- /dev/null +++ b/node_modules/crypto-js/hmac-sha384.js @@ -0,0 +1,18 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./x64-core"), require("./sha512"), require("./sha384"), require("./hmac")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./x64-core", "./sha512", "./sha384", "./hmac"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.HmacSHA384; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/hmac-sha512.js b/node_modules/crypto-js/hmac-sha512.js new file mode 100644 index 0000000..c1005b6 --- /dev/null +++ b/node_modules/crypto-js/hmac-sha512.js @@ -0,0 +1,18 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./x64-core"), require("./sha512"), require("./hmac")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./x64-core", "./sha512", "./hmac"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.HmacSHA512; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/hmac.js b/node_modules/crypto-js/hmac.js new file mode 100644 index 0000000..8c09851 --- /dev/null +++ b/node_modules/crypto-js/hmac.js @@ -0,0 +1,143 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, 
function (CryptoJS) { + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var Base = C_lib.Base; + var C_enc = C.enc; + var Utf8 = C_enc.Utf8; + var C_algo = C.algo; + + /** + * HMAC algorithm. + */ + var HMAC = C_algo.HMAC = Base.extend({ + /** + * Initializes a newly created HMAC. + * + * @param {Hasher} hasher The hash algorithm to use. + * @param {WordArray|string} key The secret key. + * + * @example + * + * var hmacHasher = CryptoJS.algo.HMAC.create(CryptoJS.algo.SHA256, key); + */ + init: function (hasher, key) { + // Init hasher + hasher = this._hasher = new hasher.init(); + + // Convert string to WordArray, else assume WordArray already + if (typeof key == 'string') { + key = Utf8.parse(key); + } + + // Shortcuts + var hasherBlockSize = hasher.blockSize; + var hasherBlockSizeBytes = hasherBlockSize * 4; + + // Allow arbitrary length keys + if (key.sigBytes > hasherBlockSizeBytes) { + key = hasher.finalize(key); + } + + // Clamp excess bits + key.clamp(); + + // Clone key for inner and outer pads + var oKey = this._oKey = key.clone(); + var iKey = this._iKey = key.clone(); + + // Shortcuts + var oKeyWords = oKey.words; + var iKeyWords = iKey.words; + + // XOR keys with pad constants + for (var i = 0; i < hasherBlockSize; i++) { + oKeyWords[i] ^= 0x5c5c5c5c; + iKeyWords[i] ^= 0x36363636; + } + oKey.sigBytes = iKey.sigBytes = hasherBlockSizeBytes; + + // Set initial values + this.reset(); + }, + + /** + * Resets this HMAC to its initial state. + * + * @example + * + * hmacHasher.reset(); + */ + reset: function () { + // Shortcut + var hasher = this._hasher; + + // Reset + hasher.reset(); + hasher.update(this._iKey); + }, + + /** + * Updates this HMAC with a message. + * + * @param {WordArray|string} messageUpdate The message to append. + * + * @return {HMAC} This HMAC instance. + * + * @example + * + * hmacHasher.update('message'); + * hmacHasher.update(wordArray); + */ + update: function (messageUpdate) { + this._hasher.update(messageUpdate); + + // Chainable + return this; + }, + + /** + * Finalizes the HMAC computation. + * Note that the finalize operation is effectively a destructive, read-once operation. + * + * @param {WordArray|string} messageUpdate (Optional) A final message update. + * + * @return {WordArray} The HMAC. 
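+             * Internally this computes hash(oKey || hash(iKey || message)), the standard RFC 2104 construction.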
+ * + * @example + * + * var hmac = hmacHasher.finalize(); + * var hmac = hmacHasher.finalize('message'); + * var hmac = hmacHasher.finalize(wordArray); + */ + finalize: function (messageUpdate) { + // Shortcut + var hasher = this._hasher; + + // Compute HMAC + var innerHash = hasher.finalize(messageUpdate); + hasher.reset(); + var hmac = hasher.finalize(this._oKey.clone().concat(innerHash)); + + return hmac; + } + }); + }()); + + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/index.js b/node_modules/crypto-js/index.js new file mode 100644 index 0000000..7235391 --- /dev/null +++ b/node_modules/crypto-js/index.js @@ -0,0 +1,18 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./x64-core"), require("./lib-typedarrays"), require("./enc-utf16"), require("./enc-base64"), require("./enc-base64url"), require("./md5"), require("./sha1"), require("./sha256"), require("./sha224"), require("./sha512"), require("./sha384"), require("./sha3"), require("./ripemd160"), require("./hmac"), require("./pbkdf2"), require("./evpkdf"), require("./cipher-core"), require("./mode-cfb"), require("./mode-ctr"), require("./mode-ctr-gladman"), require("./mode-ofb"), require("./mode-ecb"), require("./pad-ansix923"), require("./pad-iso10126"), require("./pad-iso97971"), require("./pad-zeropadding"), require("./pad-nopadding"), require("./format-hex"), require("./aes"), require("./tripledes"), require("./rc4"), require("./rabbit"), require("./rabbit-legacy")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./x64-core", "./lib-typedarrays", "./enc-utf16", "./enc-base64", "./enc-base64url", "./md5", "./sha1", "./sha256", "./sha224", "./sha512", "./sha384", "./sha3", "./ripemd160", "./hmac", "./pbkdf2", "./evpkdf", "./cipher-core", "./mode-cfb", "./mode-ctr", "./mode-ctr-gladman", "./mode-ofb", "./mode-ecb", "./pad-ansix923", "./pad-iso10126", "./pad-iso97971", "./pad-zeropadding", "./pad-nopadding", "./format-hex", "./aes", "./tripledes", "./rc4", "./rabbit", "./rabbit-legacy"], factory); + } + else { + // Global (browser) + root.CryptoJS = factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/lib-typedarrays.js b/node_modules/crypto-js/lib-typedarrays.js new file mode 100644 index 0000000..264b210 --- /dev/null +++ b/node_modules/crypto-js/lib-typedarrays.js @@ -0,0 +1,76 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function () { + // Check if typed arrays are supported + if (typeof ArrayBuffer != 'function') { + return; + } + + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + + // Reference original init + var superInit = WordArray.init; + + // Augment WordArray.init to handle typed arrays + var subInit = WordArray.init = function (typedArray) { + // Convert buffers to uint8 + if (typedArray instanceof ArrayBuffer) { + typedArray = new Uint8Array(typedArray); + } + + // Convert other array views to uint8 + if ( + typedArray instanceof Int8Array || + (typeof Uint8ClampedArray !== "undefined" && typedArray instanceof 
Uint8ClampedArray) || + typedArray instanceof Int16Array || + typedArray instanceof Uint16Array || + typedArray instanceof Int32Array || + typedArray instanceof Uint32Array || + typedArray instanceof Float32Array || + typedArray instanceof Float64Array + ) { + typedArray = new Uint8Array(typedArray.buffer, typedArray.byteOffset, typedArray.byteLength); + } + + // Handle Uint8Array + if (typedArray instanceof Uint8Array) { + // Shortcut + var typedArrayByteLength = typedArray.byteLength; + + // Extract bytes + var words = []; + for (var i = 0; i < typedArrayByteLength; i++) { + words[i >>> 2] |= typedArray[i] << (24 - (i % 4) * 8); + } + + // Initialize this word array + superInit.call(this, words, typedArrayByteLength); + } else { + // Else call normal init + superInit.apply(this, arguments); + } + }; + + subInit.prototype = WordArray; + }()); + + + return CryptoJS.lib.WordArray; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/md5.js b/node_modules/crypto-js/md5.js new file mode 100644 index 0000000..12b0fdd --- /dev/null +++ b/node_modules/crypto-js/md5.js @@ -0,0 +1,268 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function (Math) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var Hasher = C_lib.Hasher; + var C_algo = C.algo; + + // Constants table + var T = []; + + // Compute constants + (function () { + for (var i = 0; i < 64; i++) { + T[i] = (Math.abs(Math.sin(i + 1)) * 0x100000000) | 0; + } + }()); + + /** + * MD5 hash algorithm. 
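+ *
+ * Known test vector (RFC 1321): CryptoJS.MD5('abc').toString() should
+ * yield "900150983cd24fb0d6963f7d28e17f72".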
+ */
+ var MD5 = C_algo.MD5 = Hasher.extend({
+ _doReset: function () {
+ this._hash = new WordArray.init([
+ 0x67452301, 0xefcdab89,
+ 0x98badcfe, 0x10325476
+ ]);
+ },
+
+ _doProcessBlock: function (M, offset) {
+ // Swap endian
+ for (var i = 0; i < 16; i++) {
+ // Shortcuts
+ var offset_i = offset + i;
+ var M_offset_i = M[offset_i];
+
+ M[offset_i] = (
+ (((M_offset_i << 8) | (M_offset_i >>> 24)) & 0x00ff00ff) |
+ (((M_offset_i << 24) | (M_offset_i >>> 8)) & 0xff00ff00)
+ );
+ }
+
+ // Shortcuts
+ var H = this._hash.words;
+
+ var M_offset_0 = M[offset + 0];
+ var M_offset_1 = M[offset + 1];
+ var M_offset_2 = M[offset + 2];
+ var M_offset_3 = M[offset + 3];
+ var M_offset_4 = M[offset + 4];
+ var M_offset_5 = M[offset + 5];
+ var M_offset_6 = M[offset + 6];
+ var M_offset_7 = M[offset + 7];
+ var M_offset_8 = M[offset + 8];
+ var M_offset_9 = M[offset + 9];
+ var M_offset_10 = M[offset + 10];
+ var M_offset_11 = M[offset + 11];
+ var M_offset_12 = M[offset + 12];
+ var M_offset_13 = M[offset + 13];
+ var M_offset_14 = M[offset + 14];
+ var M_offset_15 = M[offset + 15];
+
+ // Working variables
+ var a = H[0];
+ var b = H[1];
+ var c = H[2];
+ var d = H[3];
+
+ // Computation
+ a = FF(a, b, c, d, M_offset_0, 7, T[0]);
+ d = FF(d, a, b, c, M_offset_1, 12, T[1]);
+ c = FF(c, d, a, b, M_offset_2, 17, T[2]);
+ b = FF(b, c, d, a, M_offset_3, 22, T[3]);
+ a = FF(a, b, c, d, M_offset_4, 7, T[4]);
+ d = FF(d, a, b, c, M_offset_5, 12, T[5]);
+ c = FF(c, d, a, b, M_offset_6, 17, T[6]);
+ b = FF(b, c, d, a, M_offset_7, 22, T[7]);
+ a = FF(a, b, c, d, M_offset_8, 7, T[8]);
+ d = FF(d, a, b, c, M_offset_9, 12, T[9]);
+ c = FF(c, d, a, b, M_offset_10, 17, T[10]);
+ b = FF(b, c, d, a, M_offset_11, 22, T[11]);
+ a = FF(a, b, c, d, M_offset_12, 7, T[12]);
+ d = FF(d, a, b, c, M_offset_13, 12, T[13]);
+ c = FF(c, d, a, b, M_offset_14, 17, T[14]);
+ b = FF(b, c, d, a, M_offset_15, 22, T[15]);
+
+ a = GG(a, b, c, d, M_offset_1, 5, T[16]);
+ d = GG(d, a, b, c, M_offset_6, 9, T[17]);
+ c = GG(c, d, a, b, M_offset_11, 14, T[18]);
+ b = GG(b, c, d, a, M_offset_0, 20, T[19]);
+ a = GG(a, b, c, d, M_offset_5, 5, T[20]);
+ d = GG(d, a, b, c, M_offset_10, 9, T[21]);
+ c = GG(c, d, a, b, M_offset_15, 14, T[22]);
+ b = GG(b, c, d, a, M_offset_4, 20, T[23]);
+ a = GG(a, b, c, d, M_offset_9, 5, T[24]);
+ d = GG(d, a, b, c, M_offset_14, 9, T[25]);
+ c = GG(c, d, a, b, M_offset_3, 14, T[26]);
+ b = GG(b, c, d, a, M_offset_8, 20, T[27]);
+ a = GG(a, b, c, d, M_offset_13, 5, T[28]);
+ d = GG(d, a, b, c, M_offset_2, 9, T[29]);
+ c = GG(c, d, a, b, M_offset_7, 14, T[30]);
+ b = GG(b, c, d, a, M_offset_12, 20, T[31]);
+
+ a = HH(a, b, c, d, M_offset_5, 4, T[32]);
+ d = HH(d, a, b, c, M_offset_8, 11, T[33]);
+ c = HH(c, d, a, b, M_offset_11, 16, T[34]);
+ b = HH(b, c, d, a, M_offset_14, 23, T[35]);
+ a = HH(a, b, c, d, M_offset_1, 4, T[36]);
+ d = HH(d, a, b, c, M_offset_4, 11, T[37]);
+ c = HH(c, d, a, b, M_offset_7, 16, T[38]);
+ b = HH(b, c, d, a, M_offset_10, 23, T[39]);
+ a = HH(a, b, c, d, M_offset_13, 4, T[40]);
+ d = HH(d, a, b, c, M_offset_0, 11, T[41]);
+ c = HH(c, d, a, b, M_offset_3, 16, T[42]);
+ b = HH(b, c, d, a, M_offset_6, 23, T[43]);
+ a = HH(a, b, c, d, M_offset_9, 4, T[44]);
+ d = HH(d, a, b, c, M_offset_12, 11, T[45]);
+ c = HH(c, d, a, b, M_offset_15, 16, T[46]);
+ b = HH(b, c, d, a, M_offset_2, 23, T[47]);
+
+ a = II(a, b, c, d, M_offset_0, 6, T[48]);
+ d = II(d, a, b, c, M_offset_7, 10, T[49]);
+ c = II(c, d, a, b, M_offset_14, 15, T[50]);
+ b = II(b, c, d, a, M_offset_5, 21, T[51]);
+ a = II(a, b,
c, d, M_offset_12, 6, T[52]); + d = II(d, a, b, c, M_offset_3, 10, T[53]); + c = II(c, d, a, b, M_offset_10, 15, T[54]); + b = II(b, c, d, a, M_offset_1, 21, T[55]); + a = II(a, b, c, d, M_offset_8, 6, T[56]); + d = II(d, a, b, c, M_offset_15, 10, T[57]); + c = II(c, d, a, b, M_offset_6, 15, T[58]); + b = II(b, c, d, a, M_offset_13, 21, T[59]); + a = II(a, b, c, d, M_offset_4, 6, T[60]); + d = II(d, a, b, c, M_offset_11, 10, T[61]); + c = II(c, d, a, b, M_offset_2, 15, T[62]); + b = II(b, c, d, a, M_offset_9, 21, T[63]); + + // Intermediate hash value + H[0] = (H[0] + a) | 0; + H[1] = (H[1] + b) | 0; + H[2] = (H[2] + c) | 0; + H[3] = (H[3] + d) | 0; + }, + + _doFinalize: function () { + // Shortcuts + var data = this._data; + var dataWords = data.words; + + var nBitsTotal = this._nDataBytes * 8; + var nBitsLeft = data.sigBytes * 8; + + // Add padding + dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); + + var nBitsTotalH = Math.floor(nBitsTotal / 0x100000000); + var nBitsTotalL = nBitsTotal; + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = ( + (((nBitsTotalH << 8) | (nBitsTotalH >>> 24)) & 0x00ff00ff) | + (((nBitsTotalH << 24) | (nBitsTotalH >>> 8)) & 0xff00ff00) + ); + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = ( + (((nBitsTotalL << 8) | (nBitsTotalL >>> 24)) & 0x00ff00ff) | + (((nBitsTotalL << 24) | (nBitsTotalL >>> 8)) & 0xff00ff00) + ); + + data.sigBytes = (dataWords.length + 1) * 4; + + // Hash final blocks + this._process(); + + // Shortcuts + var hash = this._hash; + var H = hash.words; + + // Swap endian + for (var i = 0; i < 4; i++) { + // Shortcut + var H_i = H[i]; + + H[i] = (((H_i << 8) | (H_i >>> 24)) & 0x00ff00ff) | + (((H_i << 24) | (H_i >>> 8)) & 0xff00ff00); + } + + // Return final computed hash + return hash; + }, + + clone: function () { + var clone = Hasher.clone.call(this); + clone._hash = this._hash.clone(); + + return clone; + } + }); + + function FF(a, b, c, d, x, s, t) { + var n = a + ((b & c) | (~b & d)) + x + t; + return ((n << s) | (n >>> (32 - s))) + b; + } + + function GG(a, b, c, d, x, s, t) { + var n = a + ((b & d) | (c & ~d)) + x + t; + return ((n << s) | (n >>> (32 - s))) + b; + } + + function HH(a, b, c, d, x, s, t) { + var n = a + (b ^ c ^ d) + x + t; + return ((n << s) | (n >>> (32 - s))) + b; + } + + function II(a, b, c, d, x, s, t) { + var n = a + (c ^ (b | ~d)) + x + t; + return ((n << s) | (n >>> (32 - s))) + b; + } + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.MD5('message'); + * var hash = CryptoJS.MD5(wordArray); + */ + C.MD5 = Hasher._createHelper(MD5); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. 
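+ *
+ * For incremental input, the equivalent streaming form is
+ * CryptoJS.algo.HMAC.create(CryptoJS.algo.MD5, key) followed by
+ * update() calls and a final finalize().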
+ * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacMD5(message, key); + */ + C.HmacMD5 = Hasher._createHmacHelper(MD5); + }(Math)); + + + return CryptoJS.MD5; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/mode-cfb.js b/node_modules/crypto-js/mode-cfb.js new file mode 100644 index 0000000..444c9cb --- /dev/null +++ b/node_modules/crypto-js/mode-cfb.js @@ -0,0 +1,80 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + /** + * Cipher Feedback block mode. + */ + CryptoJS.mode.CFB = (function () { + var CFB = CryptoJS.lib.BlockCipherMode.extend(); + + CFB.Encryptor = CFB.extend({ + processBlock: function (words, offset) { + // Shortcuts + var cipher = this._cipher; + var blockSize = cipher.blockSize; + + generateKeystreamAndEncrypt.call(this, words, offset, blockSize, cipher); + + // Remember this block to use with next block + this._prevBlock = words.slice(offset, offset + blockSize); + } + }); + + CFB.Decryptor = CFB.extend({ + processBlock: function (words, offset) { + // Shortcuts + var cipher = this._cipher; + var blockSize = cipher.blockSize; + + // Remember this block to use with next block + var thisBlock = words.slice(offset, offset + blockSize); + + generateKeystreamAndEncrypt.call(this, words, offset, blockSize, cipher); + + // This block becomes the previous block + this._prevBlock = thisBlock; + } + }); + + function generateKeystreamAndEncrypt(words, offset, blockSize, cipher) { + var keystream; + + // Shortcut + var iv = this._iv; + + // Generate keystream + if (iv) { + keystream = iv.slice(0); + + // Remove IV for subsequent blocks + this._iv = undefined; + } else { + keystream = this._prevBlock; + } + cipher.encryptBlock(keystream, 0); + + // Encrypt + for (var i = 0; i < blockSize; i++) { + words[offset + i] ^= keystream[i]; + } + } + + return CFB; + }()); + + + return CryptoJS.mode.CFB; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/mode-ctr-gladman.js b/node_modules/crypto-js/mode-ctr-gladman.js new file mode 100644 index 0000000..bbc5687 --- /dev/null +++ b/node_modules/crypto-js/mode-ctr-gladman.js @@ -0,0 +1,116 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + /** @preserve + * Counter block mode compatible with Dr Brian Gladman fileenc.c + * derived from CryptoJS.mode.CTR + * Jan Hruby jhruby.web@gmail.com + */ + CryptoJS.mode.CTRGladman = (function () { + var CTRGladman = CryptoJS.lib.BlockCipherMode.extend(); + + function incWord(word) + { + if (((word >> 24) & 0xff) === 0xff) { //overflow + var b1 = (word >> 16)&0xff; + var b2 = (word >> 8)&0xff; + var b3 = word & 0xff; + + if (b1 === 0xff) // overflow b1 + { + b1 = 0; + if (b2 === 0xff) + { + b2 = 0; + if (b3 === 0xff) + { + b3 = 0; + } + else + { + ++b3; + } + } + else + { + ++b2; + } + } + else + { + ++b1; + } + + word = 0; + word += (b1 << 16); + word += (b2 << 8); + word += b3; + } 
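+ // (The branch above propagates the carry byte-by-byte once the
+ // word's high byte wraps; otherwise a single increment of the
+ // high byte, below, suffices.)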
+ else
+ {
+ word += (0x01 << 24);
+ }
+ return word;
+ }
+
+ function incCounter(counter)
+ {
+ if ((counter[0] = incWord(counter[0])) === 0)
+ {
+ // encr_data in Dr Brian Gladman's fileenc.c increments only the first two counter words (DWORD j < 8)
+ counter[1] = incWord(counter[1]);
+ }
+ return counter;
+ }
+
+ var Encryptor = CTRGladman.Encryptor = CTRGladman.extend({
+ processBlock: function (words, offset) {
+ // Shortcuts
+ var cipher = this._cipher;
+ var blockSize = cipher.blockSize;
+ var iv = this._iv;
+ var counter = this._counter;
+
+ // Generate keystream
+ if (iv) {
+ counter = this._counter = iv.slice(0);
+
+ // Remove IV for subsequent blocks
+ this._iv = undefined;
+ }
+
+ incCounter(counter);
+
+ var keystream = counter.slice(0);
+ cipher.encryptBlock(keystream, 0);
+
+ // Encrypt
+ for (var i = 0; i < blockSize; i++) {
+ words[offset + i] ^= keystream[i];
+ }
+ }
+ });
+
+ CTRGladman.Decryptor = Encryptor;
+
+ return CTRGladman;
+ }());
+
+
+ return CryptoJS.mode.CTRGladman;
+
+}));
\ No newline at end of file
diff --git a/node_modules/crypto-js/mode-ctr.js b/node_modules/crypto-js/mode-ctr.js
new file mode 100644
index 0000000..c3d470a
--- /dev/null
+++ b/node_modules/crypto-js/mode-ctr.js
@@ -0,0 +1,58 @@
+;(function (root, factory, undef) {
+ if (typeof exports === "object") {
+ // CommonJS
+ module.exports = exports = factory(require("./core"), require("./cipher-core"));
+ }
+ else if (typeof define === "function" && define.amd) {
+ // AMD
+ define(["./core", "./cipher-core"], factory);
+ }
+ else {
+ // Global (browser)
+ factory(root.CryptoJS);
+ }
+}(this, function (CryptoJS) {
+
+ /**
+ * Counter block mode.
+ */
+ CryptoJS.mode.CTR = (function () {
+ var CTR = CryptoJS.lib.BlockCipherMode.extend();
+
+ var Encryptor = CTR.Encryptor = CTR.extend({
+ processBlock: function (words, offset) {
+ // Shortcuts
+ var cipher = this._cipher;
+ var blockSize = cipher.blockSize;
+ var iv = this._iv;
+ var counter = this._counter;
+
+ // Generate keystream
+ if (iv) {
+ counter = this._counter = iv.slice(0);
+
+ // Remove IV for subsequent blocks
+ this._iv = undefined;
+ }
+ var keystream = counter.slice(0);
+ cipher.encryptBlock(keystream, 0);
+
+ // Increment counter
+ counter[blockSize - 1] = (counter[blockSize - 1] + 1) | 0;
+
+ // Encrypt
+ for (var i = 0; i < blockSize; i++) {
+ words[offset + i] ^= keystream[i];
+ }
+ }
+ });
+
+ CTR.Decryptor = Encryptor;
+
+ return CTR;
+ }());
+
+
+ return CryptoJS.mode.CTR;
+
+}));
\ No newline at end of file
diff --git a/node_modules/crypto-js/mode-ecb.js b/node_modules/crypto-js/mode-ecb.js
new file mode 100644
index 0000000..ff06921
--- /dev/null
+++ b/node_modules/crypto-js/mode-ecb.js
@@ -0,0 +1,40 @@
+;(function (root, factory, undef) {
+ if (typeof exports === "object") {
+ // CommonJS
+ module.exports = exports = factory(require("./core"), require("./cipher-core"));
+ }
+ else if (typeof define === "function" && define.amd) {
+ // AMD
+ define(["./core", "./cipher-core"], factory);
+ }
+ else {
+ // Global (browser)
+ factory(root.CryptoJS);
+ }
+}(this, function (CryptoJS) {
+
+ /**
+ * Electronic Codebook block mode.
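+ *
+ * Note: ECB encrypts every block independently, so identical plaintext
+ * blocks always yield identical ciphertext blocks; it leaks patterns
+ * and is generally unsuitable for structured data.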
+ */ + CryptoJS.mode.ECB = (function () { + var ECB = CryptoJS.lib.BlockCipherMode.extend(); + + ECB.Encryptor = ECB.extend({ + processBlock: function (words, offset) { + this._cipher.encryptBlock(words, offset); + } + }); + + ECB.Decryptor = ECB.extend({ + processBlock: function (words, offset) { + this._cipher.decryptBlock(words, offset); + } + }); + + return ECB; + }()); + + + return CryptoJS.mode.ECB; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/mode-ofb.js b/node_modules/crypto-js/mode-ofb.js new file mode 100644 index 0000000..c01314c --- /dev/null +++ b/node_modules/crypto-js/mode-ofb.js @@ -0,0 +1,54 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + /** + * Output Feedback block mode. + */ + CryptoJS.mode.OFB = (function () { + var OFB = CryptoJS.lib.BlockCipherMode.extend(); + + var Encryptor = OFB.Encryptor = OFB.extend({ + processBlock: function (words, offset) { + // Shortcuts + var cipher = this._cipher + var blockSize = cipher.blockSize; + var iv = this._iv; + var keystream = this._keystream; + + // Generate keystream + if (iv) { + keystream = this._keystream = iv.slice(0); + + // Remove IV for subsequent blocks + this._iv = undefined; + } + cipher.encryptBlock(keystream, 0); + + // Encrypt + for (var i = 0; i < blockSize; i++) { + words[offset + i] ^= keystream[i]; + } + } + }); + + OFB.Decryptor = Encryptor; + + return OFB; + }()); + + + return CryptoJS.mode.OFB; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/package.json b/node_modules/crypto-js/package.json new file mode 100644 index 0000000..76bb388 --- /dev/null +++ b/node_modules/crypto-js/package.json @@ -0,0 +1,42 @@ +{ + "name": "crypto-js", + "version": "4.1.1", + "description": "JavaScript library of crypto standards.", + "license": "MIT", + "author": { + "name": "Evan Vosberg", + "url": "http://github.com/evanvosberg" + }, + "homepage": "http://github.com/brix/crypto-js", + "repository": { + "type": "git", + "url": "http://github.com/brix/crypto-js.git" + }, + "keywords": [ + "security", + "crypto", + "Hash", + "MD5", + "SHA1", + "SHA-1", + "SHA256", + "SHA-256", + "RC4", + "Rabbit", + "AES", + "DES", + "PBKDF2", + "HMAC", + "OFB", + "CFB", + "CTR", + "CBC", + "Base64", + "Base64url" + ], + "main": "index.js", + "dependencies": {}, + "browser": { + "crypto": false + } +} diff --git a/node_modules/crypto-js/pad-ansix923.js b/node_modules/crypto-js/pad-ansix923.js new file mode 100644 index 0000000..f01f21e --- /dev/null +++ b/node_modules/crypto-js/pad-ansix923.js @@ -0,0 +1,49 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + /** + * ANSI X.923 padding strategy. 
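+ *
+ * Pads with zero bytes followed by a final byte whose value is the
+ * total number of padding bytes added, e.g. ... DD DD 00 00 00 04
+ * for four bytes of padding.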
+ */
+ CryptoJS.pad.AnsiX923 = {
+ pad: function (data, blockSize) {
+ // Shortcuts
+ var dataSigBytes = data.sigBytes;
+ var blockSizeBytes = blockSize * 4;
+
+ // Count padding bytes
+ var nPaddingBytes = blockSizeBytes - dataSigBytes % blockSizeBytes;
+
+ // Compute last byte position
+ var lastBytePos = dataSigBytes + nPaddingBytes - 1;
+
+ // Pad
+ data.clamp();
+ data.words[lastBytePos >>> 2] |= nPaddingBytes << (24 - (lastBytePos % 4) * 8);
+ data.sigBytes += nPaddingBytes;
+ },
+
+ unpad: function (data) {
+ // Get number of padding bytes from last byte
+ var nPaddingBytes = data.words[(data.sigBytes - 1) >>> 2] & 0xff;
+
+ // Remove padding
+ data.sigBytes -= nPaddingBytes;
+ }
+ };
+
+
+ return CryptoJS.pad.AnsiX923;
+
+}));
\ No newline at end of file
diff --git a/node_modules/crypto-js/pad-iso10126.js b/node_modules/crypto-js/pad-iso10126.js
new file mode 100644
index 0000000..6e2aefd
--- /dev/null
+++ b/node_modules/crypto-js/pad-iso10126.js
@@ -0,0 +1,44 @@
+;(function (root, factory, undef) {
+ if (typeof exports === "object") {
+ // CommonJS
+ module.exports = exports = factory(require("./core"), require("./cipher-core"));
+ }
+ else if (typeof define === "function" && define.amd) {
+ // AMD
+ define(["./core", "./cipher-core"], factory);
+ }
+ else {
+ // Global (browser)
+ factory(root.CryptoJS);
+ }
+}(this, function (CryptoJS) {
+
+ /**
+ * ISO 10126 padding strategy.
+ */
+ CryptoJS.pad.Iso10126 = {
+ pad: function (data, blockSize) {
+ // Shortcut
+ var blockSizeBytes = blockSize * 4;
+
+ // Count padding bytes
+ var nPaddingBytes = blockSizeBytes - data.sigBytes % blockSizeBytes;
+
+ // Pad
+ data.concat(CryptoJS.lib.WordArray.random(nPaddingBytes - 1)).
+ concat(CryptoJS.lib.WordArray.create([nPaddingBytes << 24], 1));
+ },
+
+ unpad: function (data) {
+ // Get number of padding bytes from last byte
+ var nPaddingBytes = data.words[(data.sigBytes - 1) >>> 2] & 0xff;
+
+ // Remove padding
+ data.sigBytes -= nPaddingBytes;
+ }
+ };
+
+
+ return CryptoJS.pad.Iso10126;
+
+}));
\ No newline at end of file
diff --git a/node_modules/crypto-js/pad-iso97971.js b/node_modules/crypto-js/pad-iso97971.js
new file mode 100644
index 0000000..41049b4
--- /dev/null
+++ b/node_modules/crypto-js/pad-iso97971.js
@@ -0,0 +1,40 @@
+;(function (root, factory, undef) {
+ if (typeof exports === "object") {
+ // CommonJS
+ module.exports = exports = factory(require("./core"), require("./cipher-core"));
+ }
+ else if (typeof define === "function" && define.amd) {
+ // AMD
+ define(["./core", "./cipher-core"], factory);
+ }
+ else {
+ // Global (browser)
+ factory(root.CryptoJS);
+ }
+}(this, function (CryptoJS) {
+
+ /**
+ * ISO/IEC 9797-1 Padding Method 2.
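+ *
+ * Appends a single 0x80 marker byte and then zero-fills to the block
+ * boundary (implemented below in terms of ZeroPadding).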
+ */ + CryptoJS.pad.Iso97971 = { + pad: function (data, blockSize) { + // Add 0x80 byte + data.concat(CryptoJS.lib.WordArray.create([0x80000000], 1)); + + // Zero pad the rest + CryptoJS.pad.ZeroPadding.pad(data, blockSize); + }, + + unpad: function (data) { + // Remove zero padding + CryptoJS.pad.ZeroPadding.unpad(data); + + // Remove one more byte -- the 0x80 byte + data.sigBytes--; + } + }; + + + return CryptoJS.pad.Iso97971; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/pad-nopadding.js b/node_modules/crypto-js/pad-nopadding.js new file mode 100644 index 0000000..c7787c9 --- /dev/null +++ b/node_modules/crypto-js/pad-nopadding.js @@ -0,0 +1,30 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + /** + * A noop padding strategy. + */ + CryptoJS.pad.NoPadding = { + pad: function () { + }, + + unpad: function () { + } + }; + + + return CryptoJS.pad.NoPadding; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/pad-pkcs7.js b/node_modules/crypto-js/pad-pkcs7.js new file mode 100644 index 0000000..3555168 --- /dev/null +++ b/node_modules/crypto-js/pad-pkcs7.js @@ -0,0 +1,18 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + return CryptoJS.pad.Pkcs7; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/pad-zeropadding.js b/node_modules/crypto-js/pad-zeropadding.js new file mode 100644 index 0000000..a1a459e --- /dev/null +++ b/node_modules/crypto-js/pad-zeropadding.js @@ -0,0 +1,47 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + /** + * Zero padding strategy. 
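+ *
+ * Caution: zero padding is ambiguous for messages that legitimately
+ * end in zero bytes, since unpad() strips every trailing zero byte.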
+ */
+ CryptoJS.pad.ZeroPadding = {
+ pad: function (data, blockSize) {
+ // Shortcut
+ var blockSizeBytes = blockSize * 4;
+
+ // Pad
+ data.clamp();
+ data.sigBytes += blockSizeBytes - ((data.sigBytes % blockSizeBytes) || blockSizeBytes);
+ },
+
+ unpad: function (data) {
+ // Shortcut
+ var dataWords = data.words;
+
+ // Unpad
+ for (var i = data.sigBytes - 1; i >= 0; i--) {
+ if (((dataWords[i >>> 2] >>> (24 - (i % 4) * 8)) & 0xff)) {
+ data.sigBytes = i + 1;
+ break;
+ }
+ }
+ }
+ };
+
+
+ return CryptoJS.pad.ZeroPadding;
+
+}));
\ No newline at end of file
diff --git a/node_modules/crypto-js/pbkdf2.js b/node_modules/crypto-js/pbkdf2.js
new file mode 100644
index 0000000..1258251
--- /dev/null
+++ b/node_modules/crypto-js/pbkdf2.js
@@ -0,0 +1,145 @@
+;(function (root, factory, undef) {
+ if (typeof exports === "object") {
+ // CommonJS
+ module.exports = exports = factory(require("./core"), require("./sha1"), require("./hmac"));
+ }
+ else if (typeof define === "function" && define.amd) {
+ // AMD
+ define(["./core", "./sha1", "./hmac"], factory);
+ }
+ else {
+ // Global (browser)
+ factory(root.CryptoJS);
+ }
+}(this, function (CryptoJS) {
+
+ (function () {
+ // Shortcuts
+ var C = CryptoJS;
+ var C_lib = C.lib;
+ var Base = C_lib.Base;
+ var WordArray = C_lib.WordArray;
+ var C_algo = C.algo;
+ var SHA1 = C_algo.SHA1;
+ var HMAC = C_algo.HMAC;
+
+ /**
+ * Password-Based Key Derivation Function 2 algorithm.
+ */
+ var PBKDF2 = C_algo.PBKDF2 = Base.extend({
+ /**
+ * Configuration options.
+ *
+ * @property {number} keySize The key size in words to generate. Default: 4 (128 bits)
+ * @property {Hasher} hasher The hasher to use. Default: SHA1
+ * @property {number} iterations The number of iterations to perform. Default: 1
+ */
+ cfg: Base.extend({
+ keySize: 128/32,
+ hasher: SHA1,
+ iterations: 1
+ }),
+
+ /**
+ * Initializes a newly created key derivation function.
+ *
+ * @param {Object} cfg (Optional) The configuration options to use for the derivation.
+ *
+ * @example
+ *
+ * var kdf = CryptoJS.algo.PBKDF2.create();
+ * var kdf = CryptoJS.algo.PBKDF2.create({ keySize: 8 });
+ * var kdf = CryptoJS.algo.PBKDF2.create({ keySize: 8, iterations: 1000 });
+ */
+ init: function (cfg) {
+ this.cfg = this.cfg.extend(cfg);
+ },
+
+ /**
+ * Computes the Password-Based Key Derivation Function 2.
+ *
+ * @param {WordArray|string} password The password.
+ * @param {WordArray|string} salt A salt.
+ *
+ * @return {WordArray} The derived key.
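+ *
+ * Security note: the default of a single iteration is far below
+ * current guidance for password storage; pass a much larger
+ * iterations value in cfg when deriving keys from passwords.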
+ * + * @example + * + * var key = kdf.compute(password, salt); + */ + compute: function (password, salt) { + // Shortcut + var cfg = this.cfg; + + // Init HMAC + var hmac = HMAC.create(cfg.hasher, password); + + // Initial values + var derivedKey = WordArray.create(); + var blockIndex = WordArray.create([0x00000001]); + + // Shortcuts + var derivedKeyWords = derivedKey.words; + var blockIndexWords = blockIndex.words; + var keySize = cfg.keySize; + var iterations = cfg.iterations; + + // Generate key + while (derivedKeyWords.length < keySize) { + var block = hmac.update(salt).finalize(blockIndex); + hmac.reset(); + + // Shortcuts + var blockWords = block.words; + var blockWordsLength = blockWords.length; + + // Iterations + var intermediate = block; + for (var i = 1; i < iterations; i++) { + intermediate = hmac.finalize(intermediate); + hmac.reset(); + + // Shortcut + var intermediateWords = intermediate.words; + + // XOR intermediate with block + for (var j = 0; j < blockWordsLength; j++) { + blockWords[j] ^= intermediateWords[j]; + } + } + + derivedKey.concat(block); + blockIndexWords[0]++; + } + derivedKey.sigBytes = keySize * 4; + + return derivedKey; + } + }); + + /** + * Computes the Password-Based Key Derivation Function 2. + * + * @param {WordArray|string} password The password. + * @param {WordArray|string} salt A salt. + * @param {Object} cfg (Optional) The configuration options to use for this computation. + * + * @return {WordArray} The derived key. + * + * @static + * + * @example + * + * var key = CryptoJS.PBKDF2(password, salt); + * var key = CryptoJS.PBKDF2(password, salt, { keySize: 8 }); + * var key = CryptoJS.PBKDF2(password, salt, { keySize: 8, iterations: 1000 }); + */ + C.PBKDF2 = function (password, salt, cfg) { + return PBKDF2.create(cfg).compute(password, salt); + }; + }()); + + + return CryptoJS.PBKDF2; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/rabbit-legacy.js b/node_modules/crypto-js/rabbit-legacy.js new file mode 100644 index 0000000..e118b6b --- /dev/null +++ b/node_modules/crypto-js/rabbit-legacy.js @@ -0,0 +1,190 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./enc-base64"), require("./md5"), require("./evpkdf"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./enc-base64", "./md5", "./evpkdf", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var StreamCipher = C_lib.StreamCipher; + var C_algo = C.algo; + + // Reusable objects + var S = []; + var C_ = []; + var G = []; + + /** + * Rabbit stream cipher algorithm. + * + * This is a legacy version that neglected to convert the key to little-endian. + * This error doesn't affect the cipher's security, + * but it does affect its compatibility with other implementations. 
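+ *
+ * For keystreams that interoperate with other Rabbit implementations,
+ * use CryptoJS.Rabbit (rabbit.js), which performs the key endian
+ * conversion.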
+ */ + var RabbitLegacy = C_algo.RabbitLegacy = StreamCipher.extend({ + _doReset: function () { + // Shortcuts + var K = this._key.words; + var iv = this.cfg.iv; + + // Generate initial state values + var X = this._X = [ + K[0], (K[3] << 16) | (K[2] >>> 16), + K[1], (K[0] << 16) | (K[3] >>> 16), + K[2], (K[1] << 16) | (K[0] >>> 16), + K[3], (K[2] << 16) | (K[1] >>> 16) + ]; + + // Generate initial counter values + var C = this._C = [ + (K[2] << 16) | (K[2] >>> 16), (K[0] & 0xffff0000) | (K[1] & 0x0000ffff), + (K[3] << 16) | (K[3] >>> 16), (K[1] & 0xffff0000) | (K[2] & 0x0000ffff), + (K[0] << 16) | (K[0] >>> 16), (K[2] & 0xffff0000) | (K[3] & 0x0000ffff), + (K[1] << 16) | (K[1] >>> 16), (K[3] & 0xffff0000) | (K[0] & 0x0000ffff) + ]; + + // Carry bit + this._b = 0; + + // Iterate the system four times + for (var i = 0; i < 4; i++) { + nextState.call(this); + } + + // Modify the counters + for (var i = 0; i < 8; i++) { + C[i] ^= X[(i + 4) & 7]; + } + + // IV setup + if (iv) { + // Shortcuts + var IV = iv.words; + var IV_0 = IV[0]; + var IV_1 = IV[1]; + + // Generate four subvectors + var i0 = (((IV_0 << 8) | (IV_0 >>> 24)) & 0x00ff00ff) | (((IV_0 << 24) | (IV_0 >>> 8)) & 0xff00ff00); + var i2 = (((IV_1 << 8) | (IV_1 >>> 24)) & 0x00ff00ff) | (((IV_1 << 24) | (IV_1 >>> 8)) & 0xff00ff00); + var i1 = (i0 >>> 16) | (i2 & 0xffff0000); + var i3 = (i2 << 16) | (i0 & 0x0000ffff); + + // Modify counter values + C[0] ^= i0; + C[1] ^= i1; + C[2] ^= i2; + C[3] ^= i3; + C[4] ^= i0; + C[5] ^= i1; + C[6] ^= i2; + C[7] ^= i3; + + // Iterate the system four times + for (var i = 0; i < 4; i++) { + nextState.call(this); + } + } + }, + + _doProcessBlock: function (M, offset) { + // Shortcut + var X = this._X; + + // Iterate the system + nextState.call(this); + + // Generate four keystream words + S[0] = X[0] ^ (X[5] >>> 16) ^ (X[3] << 16); + S[1] = X[2] ^ (X[7] >>> 16) ^ (X[5] << 16); + S[2] = X[4] ^ (X[1] >>> 16) ^ (X[7] << 16); + S[3] = X[6] ^ (X[3] >>> 16) ^ (X[1] << 16); + + for (var i = 0; i < 4; i++) { + // Swap endian + S[i] = (((S[i] << 8) | (S[i] >>> 24)) & 0x00ff00ff) | + (((S[i] << 24) | (S[i] >>> 8)) & 0xff00ff00); + + // Encrypt + M[offset + i] ^= S[i]; + } + }, + + blockSize: 128/32, + + ivSize: 64/32 + }); + + function nextState() { + // Shortcuts + var X = this._X; + var C = this._C; + + // Save old counter values + for (var i = 0; i < 8; i++) { + C_[i] = C[i]; + } + + // Calculate new counter values + C[0] = (C[0] + 0x4d34d34d + this._b) | 0; + C[1] = (C[1] + 0xd34d34d3 + ((C[0] >>> 0) < (C_[0] >>> 0) ? 1 : 0)) | 0; + C[2] = (C[2] + 0x34d34d34 + ((C[1] >>> 0) < (C_[1] >>> 0) ? 1 : 0)) | 0; + C[3] = (C[3] + 0x4d34d34d + ((C[2] >>> 0) < (C_[2] >>> 0) ? 1 : 0)) | 0; + C[4] = (C[4] + 0xd34d34d3 + ((C[3] >>> 0) < (C_[3] >>> 0) ? 1 : 0)) | 0; + C[5] = (C[5] + 0x34d34d34 + ((C[4] >>> 0) < (C_[4] >>> 0) ? 1 : 0)) | 0; + C[6] = (C[6] + 0x4d34d34d + ((C[5] >>> 0) < (C_[5] >>> 0) ? 1 : 0)) | 0; + C[7] = (C[7] + 0xd34d34d3 + ((C[6] >>> 0) < (C_[6] >>> 0) ? 1 : 0)) | 0; + this._b = (C[7] >>> 0) < (C_[7] >>> 0) ? 
1 : 0; + + // Calculate the g-values + for (var i = 0; i < 8; i++) { + var gx = X[i] + C[i]; + + // Construct high and low argument for squaring + var ga = gx & 0xffff; + var gb = gx >>> 16; + + // Calculate high and low result of squaring + var gh = ((((ga * ga) >>> 17) + ga * gb) >>> 15) + gb * gb; + var gl = (((gx & 0xffff0000) * gx) | 0) + (((gx & 0x0000ffff) * gx) | 0); + + // High XOR low + G[i] = gh ^ gl; + } + + // Calculate new state values + X[0] = (G[0] + ((G[7] << 16) | (G[7] >>> 16)) + ((G[6] << 16) | (G[6] >>> 16))) | 0; + X[1] = (G[1] + ((G[0] << 8) | (G[0] >>> 24)) + G[7]) | 0; + X[2] = (G[2] + ((G[1] << 16) | (G[1] >>> 16)) + ((G[0] << 16) | (G[0] >>> 16))) | 0; + X[3] = (G[3] + ((G[2] << 8) | (G[2] >>> 24)) + G[1]) | 0; + X[4] = (G[4] + ((G[3] << 16) | (G[3] >>> 16)) + ((G[2] << 16) | (G[2] >>> 16))) | 0; + X[5] = (G[5] + ((G[4] << 8) | (G[4] >>> 24)) + G[3]) | 0; + X[6] = (G[6] + ((G[5] << 16) | (G[5] >>> 16)) + ((G[4] << 16) | (G[4] >>> 16))) | 0; + X[7] = (G[7] + ((G[6] << 8) | (G[6] >>> 24)) + G[5]) | 0; + } + + /** + * Shortcut functions to the cipher's object interface. + * + * @example + * + * var ciphertext = CryptoJS.RabbitLegacy.encrypt(message, key, cfg); + * var plaintext = CryptoJS.RabbitLegacy.decrypt(ciphertext, key, cfg); + */ + C.RabbitLegacy = StreamCipher._createHelper(RabbitLegacy); + }()); + + + return CryptoJS.RabbitLegacy; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/rabbit.js b/node_modules/crypto-js/rabbit.js new file mode 100644 index 0000000..1b06833 --- /dev/null +++ b/node_modules/crypto-js/rabbit.js @@ -0,0 +1,192 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./enc-base64"), require("./md5"), require("./evpkdf"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./enc-base64", "./md5", "./evpkdf", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var StreamCipher = C_lib.StreamCipher; + var C_algo = C.algo; + + // Reusable objects + var S = []; + var C_ = []; + var G = []; + + /** + * Rabbit stream cipher algorithm + */ + var Rabbit = C_algo.Rabbit = StreamCipher.extend({ + _doReset: function () { + // Shortcuts + var K = this._key.words; + var iv = this.cfg.iv; + + // Swap endian + for (var i = 0; i < 4; i++) { + K[i] = (((K[i] << 8) | (K[i] >>> 24)) & 0x00ff00ff) | + (((K[i] << 24) | (K[i] >>> 8)) & 0xff00ff00); + } + + // Generate initial state values + var X = this._X = [ + K[0], (K[3] << 16) | (K[2] >>> 16), + K[1], (K[0] << 16) | (K[3] >>> 16), + K[2], (K[1] << 16) | (K[0] >>> 16), + K[3], (K[2] << 16) | (K[1] >>> 16) + ]; + + // Generate initial counter values + var C = this._C = [ + (K[2] << 16) | (K[2] >>> 16), (K[0] & 0xffff0000) | (K[1] & 0x0000ffff), + (K[3] << 16) | (K[3] >>> 16), (K[1] & 0xffff0000) | (K[2] & 0x0000ffff), + (K[0] << 16) | (K[0] >>> 16), (K[2] & 0xffff0000) | (K[3] & 0x0000ffff), + (K[1] << 16) | (K[1] >>> 16), (K[3] & 0xffff0000) | (K[0] & 0x0000ffff) + ]; + + // Carry bit + this._b = 0; + + // Iterate the system four times + for (var i = 0; i < 4; i++) { + nextState.call(this); + } + + // Modify the counters + for (var i = 0; i < 8; i++) { + C[i] ^= X[(i + 4) & 7]; + } + + // IV setup + if (iv) { + // Shortcuts + var IV = iv.words; + var IV_0 = IV[0]; + var IV_1 
= IV[1]; + + // Generate four subvectors + var i0 = (((IV_0 << 8) | (IV_0 >>> 24)) & 0x00ff00ff) | (((IV_0 << 24) | (IV_0 >>> 8)) & 0xff00ff00); + var i2 = (((IV_1 << 8) | (IV_1 >>> 24)) & 0x00ff00ff) | (((IV_1 << 24) | (IV_1 >>> 8)) & 0xff00ff00); + var i1 = (i0 >>> 16) | (i2 & 0xffff0000); + var i3 = (i2 << 16) | (i0 & 0x0000ffff); + + // Modify counter values + C[0] ^= i0; + C[1] ^= i1; + C[2] ^= i2; + C[3] ^= i3; + C[4] ^= i0; + C[5] ^= i1; + C[6] ^= i2; + C[7] ^= i3; + + // Iterate the system four times + for (var i = 0; i < 4; i++) { + nextState.call(this); + } + } + }, + + _doProcessBlock: function (M, offset) { + // Shortcut + var X = this._X; + + // Iterate the system + nextState.call(this); + + // Generate four keystream words + S[0] = X[0] ^ (X[5] >>> 16) ^ (X[3] << 16); + S[1] = X[2] ^ (X[7] >>> 16) ^ (X[5] << 16); + S[2] = X[4] ^ (X[1] >>> 16) ^ (X[7] << 16); + S[3] = X[6] ^ (X[3] >>> 16) ^ (X[1] << 16); + + for (var i = 0; i < 4; i++) { + // Swap endian + S[i] = (((S[i] << 8) | (S[i] >>> 24)) & 0x00ff00ff) | + (((S[i] << 24) | (S[i] >>> 8)) & 0xff00ff00); + + // Encrypt + M[offset + i] ^= S[i]; + } + }, + + blockSize: 128/32, + + ivSize: 64/32 + }); + + function nextState() { + // Shortcuts + var X = this._X; + var C = this._C; + + // Save old counter values + for (var i = 0; i < 8; i++) { + C_[i] = C[i]; + } + + // Calculate new counter values + C[0] = (C[0] + 0x4d34d34d + this._b) | 0; + C[1] = (C[1] + 0xd34d34d3 + ((C[0] >>> 0) < (C_[0] >>> 0) ? 1 : 0)) | 0; + C[2] = (C[2] + 0x34d34d34 + ((C[1] >>> 0) < (C_[1] >>> 0) ? 1 : 0)) | 0; + C[3] = (C[3] + 0x4d34d34d + ((C[2] >>> 0) < (C_[2] >>> 0) ? 1 : 0)) | 0; + C[4] = (C[4] + 0xd34d34d3 + ((C[3] >>> 0) < (C_[3] >>> 0) ? 1 : 0)) | 0; + C[5] = (C[5] + 0x34d34d34 + ((C[4] >>> 0) < (C_[4] >>> 0) ? 1 : 0)) | 0; + C[6] = (C[6] + 0x4d34d34d + ((C[5] >>> 0) < (C_[5] >>> 0) ? 1 : 0)) | 0; + C[7] = (C[7] + 0xd34d34d3 + ((C[6] >>> 0) < (C_[6] >>> 0) ? 1 : 0)) | 0; + this._b = (C[7] >>> 0) < (C_[7] >>> 0) ? 1 : 0; + + // Calculate the g-values + for (var i = 0; i < 8; i++) { + var gx = X[i] + C[i]; + + // Construct high and low argument for squaring + var ga = gx & 0xffff; + var gb = gx >>> 16; + + // Calculate high and low result of squaring + var gh = ((((ga * ga) >>> 17) + ga * gb) >>> 15) + gb * gb; + var gl = (((gx & 0xffff0000) * gx) | 0) + (((gx & 0x0000ffff) * gx) | 0); + + // High XOR low + G[i] = gh ^ gl; + } + + // Calculate new state values + X[0] = (G[0] + ((G[7] << 16) | (G[7] >>> 16)) + ((G[6] << 16) | (G[6] >>> 16))) | 0; + X[1] = (G[1] + ((G[0] << 8) | (G[0] >>> 24)) + G[7]) | 0; + X[2] = (G[2] + ((G[1] << 16) | (G[1] >>> 16)) + ((G[0] << 16) | (G[0] >>> 16))) | 0; + X[3] = (G[3] + ((G[2] << 8) | (G[2] >>> 24)) + G[1]) | 0; + X[4] = (G[4] + ((G[3] << 16) | (G[3] >>> 16)) + ((G[2] << 16) | (G[2] >>> 16))) | 0; + X[5] = (G[5] + ((G[4] << 8) | (G[4] >>> 24)) + G[3]) | 0; + X[6] = (G[6] + ((G[5] << 16) | (G[5] >>> 16)) + ((G[4] << 16) | (G[4] >>> 16))) | 0; + X[7] = (G[7] + ((G[6] << 8) | (G[6] >>> 24)) + G[5]) | 0; + } + + /** + * Shortcut functions to the cipher's object interface. 
+ * + * @example + * + * var ciphertext = CryptoJS.Rabbit.encrypt(message, key, cfg); + * var plaintext = CryptoJS.Rabbit.decrypt(ciphertext, key, cfg); + */ + C.Rabbit = StreamCipher._createHelper(Rabbit); + }()); + + + return CryptoJS.Rabbit; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/rc4.js b/node_modules/crypto-js/rc4.js new file mode 100644 index 0000000..0e4bdff --- /dev/null +++ b/node_modules/crypto-js/rc4.js @@ -0,0 +1,139 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./enc-base64"), require("./md5"), require("./evpkdf"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./enc-base64", "./md5", "./evpkdf", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var StreamCipher = C_lib.StreamCipher; + var C_algo = C.algo; + + /** + * RC4 stream cipher algorithm. + */ + var RC4 = C_algo.RC4 = StreamCipher.extend({ + _doReset: function () { + // Shortcuts + var key = this._key; + var keyWords = key.words; + var keySigBytes = key.sigBytes; + + // Init sbox + var S = this._S = []; + for (var i = 0; i < 256; i++) { + S[i] = i; + } + + // Key setup + for (var i = 0, j = 0; i < 256; i++) { + var keyByteIndex = i % keySigBytes; + var keyByte = (keyWords[keyByteIndex >>> 2] >>> (24 - (keyByteIndex % 4) * 8)) & 0xff; + + j = (j + S[i] + keyByte) % 256; + + // Swap + var t = S[i]; + S[i] = S[j]; + S[j] = t; + } + + // Counters + this._i = this._j = 0; + }, + + _doProcessBlock: function (M, offset) { + M[offset] ^= generateKeystreamWord.call(this); + }, + + keySize: 256/32, + + ivSize: 0 + }); + + function generateKeystreamWord() { + // Shortcuts + var S = this._S; + var i = this._i; + var j = this._j; + + // Generate keystream word + var keystreamWord = 0; + for (var n = 0; n < 4; n++) { + i = (i + 1) % 256; + j = (j + S[i]) % 256; + + // Swap + var t = S[i]; + S[i] = S[j]; + S[j] = t; + + keystreamWord |= S[(S[i] + S[j]) % 256] << (24 - n * 8); + } + + // Update counters + this._i = i; + this._j = j; + + return keystreamWord; + } + + /** + * Shortcut functions to the cipher's object interface. + * + * @example + * + * var ciphertext = CryptoJS.RC4.encrypt(message, key, cfg); + * var plaintext = CryptoJS.RC4.decrypt(ciphertext, key, cfg); + */ + C.RC4 = StreamCipher._createHelper(RC4); + + /** + * Modified RC4 stream cipher algorithm. + */ + var RC4Drop = C_algo.RC4Drop = RC4.extend({ + /** + * Configuration options. + * + * @property {number} drop The number of keystream words to drop. Default 192 + */ + cfg: RC4.cfg.extend({ + drop: 192 + }), + + _doReset: function () { + RC4._doReset.call(this); + + // Drop + for (var i = this.cfg.drop; i > 0; i--) { + generateKeystreamWord.call(this); + } + } + }); + + /** + * Shortcut functions to the cipher's object interface. 
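+ *
+ * RC4 is considered legacy: its keystream has known statistical
+ * biases, and dropping the initial keystream words (192 by default)
+ * only mitigates the strongest early-output biases.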
+ * + * @example + * + * var ciphertext = CryptoJS.RC4Drop.encrypt(message, key, cfg); + * var plaintext = CryptoJS.RC4Drop.decrypt(ciphertext, key, cfg); + */ + C.RC4Drop = StreamCipher._createHelper(RC4Drop); + }()); + + + return CryptoJS.RC4; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/ripemd160.js b/node_modules/crypto-js/ripemd160.js new file mode 100644 index 0000000..24feb47 --- /dev/null +++ b/node_modules/crypto-js/ripemd160.js @@ -0,0 +1,267 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + /** @preserve + (c) 2012 by Cédric Mesnil. All rights reserved. + + Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + + - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + + (function (Math) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var Hasher = C_lib.Hasher; + var C_algo = C.algo; + + // Constants table + var _zl = WordArray.create([ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, + 7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8, + 3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12, + 1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2, + 4, 0, 5, 9, 7, 12, 2, 10, 14, 1, 3, 8, 11, 6, 15, 13]); + var _zr = WordArray.create([ + 5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12, + 6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2, + 15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13, + 8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14, + 12, 15, 10, 4, 1, 5, 8, 7, 6, 2, 13, 14, 0, 3, 9, 11]); + var _sl = WordArray.create([ + 11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8, + 7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12, + 11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5, + 11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12, + 9, 15, 5, 11, 6, 8, 13, 12, 5, 12, 13, 14, 11, 8, 5, 6 ]); + var _sr = WordArray.create([ + 8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6, + 9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11, + 9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5, + 15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8, + 8, 5, 12, 9, 12, 5, 14, 6, 8, 13, 6, 5, 15, 13, 11, 11 ]); + + var _hl = WordArray.create([ 0x00000000, 0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xA953FD4E]); + var _hr = WordArray.create([ 0x50A28BE6, 0x5C4DD124, 0x6D703EF3, 0x7A6D76E9, 0x00000000]); + + /** + * RIPEMD160 hash algorithm. + */ + var RIPEMD160 = C_algo.RIPEMD160 = Hasher.extend({ + _doReset: function () { + this._hash = WordArray.create([0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 0xC3D2E1F0]); + }, + + _doProcessBlock: function (M, offset) { + + // Swap endian + for (var i = 0; i < 16; i++) { + // Shortcuts + var offset_i = offset + i; + var M_offset_i = M[offset_i]; + + // Swap + M[offset_i] = ( + (((M_offset_i << 8) | (M_offset_i >>> 24)) & 0x00ff00ff) | + (((M_offset_i << 24) | (M_offset_i >>> 8)) & 0xff00ff00) + ); + } + // Shortcut + var H = this._hash.words; + var hl = _hl.words; + var hr = _hr.words; + var zl = _zl.words; + var zr = _zr.words; + var sl = _sl.words; + var sr = _sr.words; + + // Working variables + var al, bl, cl, dl, el; + var ar, br, cr, dr, er; + + ar = al = H[0]; + br = bl = H[1]; + cr = cl = H[2]; + dr = dl = H[3]; + er = el = H[4]; + // Computation + var t; + for (var i = 0; i < 80; i += 1) { + t = (al + M[offset+zl[i]])|0; + if (i<16){ + t += f1(bl,cl,dl) + hl[0]; + } else if (i<32) { + t += f2(bl,cl,dl) + hl[1]; + } else if (i<48) { + t += f3(bl,cl,dl) + hl[2]; + } else if (i<64) { + t += f4(bl,cl,dl) + hl[3]; + } else {// if (i<80) { + t += f5(bl,cl,dl) + hl[4]; + } + t = t|0; + t = rotl(t,sl[i]); + t = (t+el)|0; + al = el; + el = dl; + dl = rotl(cl, 10); + cl = bl; + bl = t; + + t = (ar + M[offset+zr[i]])|0; + if (i<16){ + t += f5(br,cr,dr) + hr[0]; + } else if (i<32) { + t += f4(br,cr,dr) + hr[1]; + } else if (i<48) { + t += f3(br,cr,dr) + hr[2]; + } else if (i<64) { + t += f2(br,cr,dr) + hr[3]; + } else {// if (i<80) { + t += f1(br,cr,dr) + hr[4]; + } + t = t|0; + t = rotl(t,sr[i]) ; + t = (t+er)|0; + ar = er; + er = dr; + dr = rotl(cr, 10); + cr = br; + br = t; + } + // Intermediate hash value + t = (H[1] + cl + dr)|0; + H[1] = (H[2] + dl + er)|0; + H[2] = (H[3] + el + ar)|0; + H[3] = (H[4] + al + br)|0; + 
H[4] = (H[0] + bl + cr)|0;
+ H[0] = t;
+ },
+
+ _doFinalize: function () {
+ // Shortcuts
+ var data = this._data;
+ var dataWords = data.words;
+
+ var nBitsTotal = this._nDataBytes * 8;
+ var nBitsLeft = data.sigBytes * 8;
+
+ // Add padding
+ dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32);
+ dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = (
+ (((nBitsTotal << 8) | (nBitsTotal >>> 24)) & 0x00ff00ff) |
+ (((nBitsTotal << 24) | (nBitsTotal >>> 8)) & 0xff00ff00)
+ );
+ data.sigBytes = (dataWords.length + 1) * 4;
+
+ // Hash final blocks
+ this._process();
+
+ // Shortcuts
+ var hash = this._hash;
+ var H = hash.words;
+
+ // Swap endian
+ for (var i = 0; i < 5; i++) {
+ // Shortcut
+ var H_i = H[i];
+
+ // Swap
+ H[i] = (((H_i << 8) | (H_i >>> 24)) & 0x00ff00ff) |
+ (((H_i << 24) | (H_i >>> 8)) & 0xff00ff00);
+ }
+
+ // Return final computed hash
+ return hash;
+ },
+
+ clone: function () {
+ var clone = Hasher.clone.call(this);
+ clone._hash = this._hash.clone();
+
+ return clone;
+ }
+ });
+
+
+ function f1(x, y, z) {
+ return ((x) ^ (y) ^ (z));
+ }
+
+ function f2(x, y, z) {
+ return (((x)&(y)) | ((~x)&(z)));
+ }
+
+ function f3(x, y, z) {
+ return (((x) | (~(y))) ^ (z));
+ }
+
+ function f4(x, y, z) {
+ return (((x) & (z)) | ((y)&(~(z))));
+ }
+
+ function f5(x, y, z) {
+ return ((x) ^ ((y) |(~(z))));
+ }
+
+ function rotl(x,n) {
+ return (x<<n) | (x>>>(32-n));
+ }
+
+
+ /**
+ * Shortcut function to the hasher's object interface.
+ *
+ * @param {WordArray|string} message The message to hash.
+ *
+ * @return {WordArray} The hash.
+ *
+ * @static
+ *
+ * @example
+ *
+ * var hash = CryptoJS.RIPEMD160('message');
+ * var hash = CryptoJS.RIPEMD160(wordArray);
+ */
+ C.RIPEMD160 = Hasher._createHelper(RIPEMD160);
+
+ /**
+ * Shortcut function to the HMAC's object interface.
+ *
+ * @param {WordArray|string} message The message to hash.
+ * @param {WordArray|string} key The secret key.
+ *
+ * @return {WordArray} The HMAC.
+ *
+ * @static
+ *
+ * @example
+ *
+ * var hmac = CryptoJS.HmacRIPEMD160(message, key);
+ */
+ C.HmacRIPEMD160 = Hasher._createHmacHelper(RIPEMD160);
+ }(Math));
+
+
+ return CryptoJS.RIPEMD160;
+
+}));
\ No newline at end of file
diff --git a/node_modules/crypto-js/sha1.js b/node_modules/crypto-js/sha1.js
new file mode 100644
index 0000000..6691149
--- /dev/null
+++ b/node_modules/crypto-js/sha1.js
@@ -0,0 +1,150 @@
+;(function (root, factory) {
+ if (typeof exports === "object") {
+ // CommonJS
+ module.exports = exports = factory(require("./core"));
+ }
+ else if (typeof define === "function" && define.amd) {
+ // AMD
+ define(["./core"], factory);
+ }
+ else {
+ // Global (browser)
+ factory(root.CryptoJS);
+ }
+}(this, function (CryptoJS) {
+
+ (function () {
+ // Shortcuts
+ var C = CryptoJS;
+ var C_lib = C.lib;
+ var WordArray = C_lib.WordArray;
+ var Hasher = C_lib.Hasher;
+ var C_algo = C.algo;
+
+ // Reusable object
+ var W = [];
+
+ /**
+ * SHA-1 hash algorithm.
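+ *
+ * Note: practical SHA-1 collisions were demonstrated in 2017; prefer
+ * SHA-256 or SHA-3 wherever collision resistance matters.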
+ */ + var SHA1 = C_algo.SHA1 = Hasher.extend({ + _doReset: function () { + this._hash = new WordArray.init([ + 0x67452301, 0xefcdab89, + 0x98badcfe, 0x10325476, + 0xc3d2e1f0 + ]); + }, + + _doProcessBlock: function (M, offset) { + // Shortcut + var H = this._hash.words; + + // Working variables + var a = H[0]; + var b = H[1]; + var c = H[2]; + var d = H[3]; + var e = H[4]; + + // Computation + for (var i = 0; i < 80; i++) { + if (i < 16) { + W[i] = M[offset + i] | 0; + } else { + var n = W[i - 3] ^ W[i - 8] ^ W[i - 14] ^ W[i - 16]; + W[i] = (n << 1) | (n >>> 31); + } + + var t = ((a << 5) | (a >>> 27)) + e + W[i]; + if (i < 20) { + t += ((b & c) | (~b & d)) + 0x5a827999; + } else if (i < 40) { + t += (b ^ c ^ d) + 0x6ed9eba1; + } else if (i < 60) { + t += ((b & c) | (b & d) | (c & d)) - 0x70e44324; + } else /* if (i < 80) */ { + t += (b ^ c ^ d) - 0x359d3e2a; + } + + e = d; + d = c; + c = (b << 30) | (b >>> 2); + b = a; + a = t; + } + + // Intermediate hash value + H[0] = (H[0] + a) | 0; + H[1] = (H[1] + b) | 0; + H[2] = (H[2] + c) | 0; + H[3] = (H[3] + d) | 0; + H[4] = (H[4] + e) | 0; + }, + + _doFinalize: function () { + // Shortcuts + var data = this._data; + var dataWords = data.words; + + var nBitsTotal = this._nDataBytes * 8; + var nBitsLeft = data.sigBytes * 8; + + // Add padding + dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(nBitsTotal / 0x100000000); + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal; + data.sigBytes = dataWords.length * 4; + + // Hash final blocks + this._process(); + + // Return final computed hash + return this._hash; + }, + + clone: function () { + var clone = Hasher.clone.call(this); + clone._hash = this._hash.clone(); + + return clone; + } + }); + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.SHA1('message'); + * var hash = CryptoJS.SHA1(wordArray); + */ + C.SHA1 = Hasher._createHelper(SHA1); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacSHA1(message, key); + */ + C.HmacSHA1 = Hasher._createHmacHelper(SHA1); + }()); + + + return CryptoJS.SHA1; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/sha224.js b/node_modules/crypto-js/sha224.js new file mode 100644 index 0000000..d8ce988 --- /dev/null +++ b/node_modules/crypto-js/sha224.js @@ -0,0 +1,80 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./sha256")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./sha256"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var C_algo = C.algo; + var SHA256 = C_algo.SHA256; + + /** + * SHA-224 hash algorithm. 
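+ *
+ * SHA-224 is SHA-256 with a different initial hash value and the
+ * output truncated to 224 bits (the sigBytes -= 4 below).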
+ */ + var SHA224 = C_algo.SHA224 = SHA256.extend({ + _doReset: function () { + this._hash = new WordArray.init([ + 0xc1059ed8, 0x367cd507, 0x3070dd17, 0xf70e5939, + 0xffc00b31, 0x68581511, 0x64f98fa7, 0xbefa4fa4 + ]); + }, + + _doFinalize: function () { + var hash = SHA256._doFinalize.call(this); + + hash.sigBytes -= 4; + + return hash; + } + }); + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.SHA224('message'); + * var hash = CryptoJS.SHA224(wordArray); + */ + C.SHA224 = SHA256._createHelper(SHA224); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacSHA224(message, key); + */ + C.HmacSHA224 = SHA256._createHmacHelper(SHA224); + }()); + + + return CryptoJS.SHA224; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/sha256.js b/node_modules/crypto-js/sha256.js new file mode 100644 index 0000000..de2d7fc --- /dev/null +++ b/node_modules/crypto-js/sha256.js @@ -0,0 +1,199 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function (Math) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var Hasher = C_lib.Hasher; + var C_algo = C.algo; + + // Initialization and round constants tables + var H = []; + var K = []; + + // Compute constants + (function () { + function isPrime(n) { + var sqrtN = Math.sqrt(n); + for (var factor = 2; factor <= sqrtN; factor++) { + if (!(n % factor)) { + return false; + } + } + + return true; + } + + function getFractionalBits(n) { + return ((n - (n | 0)) * 0x100000000) | 0; + } + + var n = 2; + var nPrime = 0; + while (nPrime < 64) { + if (isPrime(n)) { + if (nPrime < 8) { + H[nPrime] = getFractionalBits(Math.pow(n, 1 / 2)); + } + K[nPrime] = getFractionalBits(Math.pow(n, 1 / 3)); + + nPrime++; + } + + n++; + } + }()); + + // Reusable object + var W = []; + + /** + * SHA-256 hash algorithm. 
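+ *
+ * Known test vector (FIPS 180-4): CryptoJS.SHA256('abc').toString()
+ * should yield
+ * "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad".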
+ */ + var SHA256 = C_algo.SHA256 = Hasher.extend({ + _doReset: function () { + this._hash = new WordArray.init(H.slice(0)); + }, + + _doProcessBlock: function (M, offset) { + // Shortcut + var H = this._hash.words; + + // Working variables + var a = H[0]; + var b = H[1]; + var c = H[2]; + var d = H[3]; + var e = H[4]; + var f = H[5]; + var g = H[6]; + var h = H[7]; + + // Computation + for (var i = 0; i < 64; i++) { + if (i < 16) { + W[i] = M[offset + i] | 0; + } else { + var gamma0x = W[i - 15]; + var gamma0 = ((gamma0x << 25) | (gamma0x >>> 7)) ^ + ((gamma0x << 14) | (gamma0x >>> 18)) ^ + (gamma0x >>> 3); + + var gamma1x = W[i - 2]; + var gamma1 = ((gamma1x << 15) | (gamma1x >>> 17)) ^ + ((gamma1x << 13) | (gamma1x >>> 19)) ^ + (gamma1x >>> 10); + + W[i] = gamma0 + W[i - 7] + gamma1 + W[i - 16]; + } + + var ch = (e & f) ^ (~e & g); + var maj = (a & b) ^ (a & c) ^ (b & c); + + var sigma0 = ((a << 30) | (a >>> 2)) ^ ((a << 19) | (a >>> 13)) ^ ((a << 10) | (a >>> 22)); + var sigma1 = ((e << 26) | (e >>> 6)) ^ ((e << 21) | (e >>> 11)) ^ ((e << 7) | (e >>> 25)); + + var t1 = h + sigma1 + ch + K[i] + W[i]; + var t2 = sigma0 + maj; + + h = g; + g = f; + f = e; + e = (d + t1) | 0; + d = c; + c = b; + b = a; + a = (t1 + t2) | 0; + } + + // Intermediate hash value + H[0] = (H[0] + a) | 0; + H[1] = (H[1] + b) | 0; + H[2] = (H[2] + c) | 0; + H[3] = (H[3] + d) | 0; + H[4] = (H[4] + e) | 0; + H[5] = (H[5] + f) | 0; + H[6] = (H[6] + g) | 0; + H[7] = (H[7] + h) | 0; + }, + + _doFinalize: function () { + // Shortcuts + var data = this._data; + var dataWords = data.words; + + var nBitsTotal = this._nDataBytes * 8; + var nBitsLeft = data.sigBytes * 8; + + // Add padding + dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 14] = Math.floor(nBitsTotal / 0x100000000); + dataWords[(((nBitsLeft + 64) >>> 9) << 4) + 15] = nBitsTotal; + data.sigBytes = dataWords.length * 4; + + // Hash final blocks + this._process(); + + // Return final computed hash + return this._hash; + }, + + clone: function () { + var clone = Hasher.clone.call(this); + clone._hash = this._hash.clone(); + + return clone; + } + }); + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.SHA256('message'); + * var hash = CryptoJS.SHA256(wordArray); + */ + C.SHA256 = Hasher._createHelper(SHA256); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. 
+ * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacSHA256(message, key); + */ + C.HmacSHA256 = Hasher._createHmacHelper(SHA256); + }(Math)); + + + return CryptoJS.SHA256; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/sha3.js b/node_modules/crypto-js/sha3.js new file mode 100644 index 0000000..34ad86c --- /dev/null +++ b/node_modules/crypto-js/sha3.js @@ -0,0 +1,326 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./x64-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./x64-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function (Math) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var Hasher = C_lib.Hasher; + var C_x64 = C.x64; + var X64Word = C_x64.Word; + var C_algo = C.algo; + + // Constants tables + var RHO_OFFSETS = []; + var PI_INDEXES = []; + var ROUND_CONSTANTS = []; + + // Compute Constants + (function () { + // Compute rho offset constants + var x = 1, y = 0; + for (var t = 0; t < 24; t++) { + RHO_OFFSETS[x + 5 * y] = ((t + 1) * (t + 2) / 2) % 64; + + var newX = y % 5; + var newY = (2 * x + 3 * y) % 5; + x = newX; + y = newY; + } + + // Compute pi index constants + for (var x = 0; x < 5; x++) { + for (var y = 0; y < 5; y++) { + PI_INDEXES[x + 5 * y] = y + ((2 * x + 3 * y) % 5) * 5; + } + } + + // Compute round constants + var LFSR = 0x01; + for (var i = 0; i < 24; i++) { + var roundConstantMsw = 0; + var roundConstantLsw = 0; + + for (var j = 0; j < 7; j++) { + if (LFSR & 0x01) { + var bitPosition = (1 << j) - 1; + if (bitPosition < 32) { + roundConstantLsw ^= 1 << bitPosition; + } else /* if (bitPosition >= 32) */ { + roundConstantMsw ^= 1 << (bitPosition - 32); + } + } + + // Compute next LFSR + if (LFSR & 0x80) { + // Primitive polynomial over GF(2): x^8 + x^6 + x^5 + x^4 + 1 + LFSR = (LFSR << 1) ^ 0x71; + } else { + LFSR <<= 1; + } + } + + ROUND_CONSTANTS[i] = X64Word.create(roundConstantMsw, roundConstantLsw); + } + }()); + + // Reusable objects for temporary values + var T = []; + (function () { + for (var i = 0; i < 25; i++) { + T[i] = X64Word.create(); + } + }()); + + /** + * SHA-3 hash algorithm. + */ + var SHA3 = C_algo.SHA3 = Hasher.extend({ + /** + * Configuration options. + * + * @property {number} outputLength + * The desired number of bits in the output hash. + * Only values permitted are: 224, 256, 384, 512. 
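+ * (The sponge capacity is twice the output length, so a larger
+ * output shrinks the rate and with it the hasher's blockSize.)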
+ * Default: 512 + */ + cfg: Hasher.cfg.extend({ + outputLength: 512 + }), + + _doReset: function () { + var state = this._state = [] + for (var i = 0; i < 25; i++) { + state[i] = new X64Word.init(); + } + + this.blockSize = (1600 - 2 * this.cfg.outputLength) / 32; + }, + + _doProcessBlock: function (M, offset) { + // Shortcuts + var state = this._state; + var nBlockSizeLanes = this.blockSize / 2; + + // Absorb + for (var i = 0; i < nBlockSizeLanes; i++) { + // Shortcuts + var M2i = M[offset + 2 * i]; + var M2i1 = M[offset + 2 * i + 1]; + + // Swap endian + M2i = ( + (((M2i << 8) | (M2i >>> 24)) & 0x00ff00ff) | + (((M2i << 24) | (M2i >>> 8)) & 0xff00ff00) + ); + M2i1 = ( + (((M2i1 << 8) | (M2i1 >>> 24)) & 0x00ff00ff) | + (((M2i1 << 24) | (M2i1 >>> 8)) & 0xff00ff00) + ); + + // Absorb message into state + var lane = state[i]; + lane.high ^= M2i1; + lane.low ^= M2i; + } + + // Rounds + for (var round = 0; round < 24; round++) { + // Theta + for (var x = 0; x < 5; x++) { + // Mix column lanes + var tMsw = 0, tLsw = 0; + for (var y = 0; y < 5; y++) { + var lane = state[x + 5 * y]; + tMsw ^= lane.high; + tLsw ^= lane.low; + } + + // Temporary values + var Tx = T[x]; + Tx.high = tMsw; + Tx.low = tLsw; + } + for (var x = 0; x < 5; x++) { + // Shortcuts + var Tx4 = T[(x + 4) % 5]; + var Tx1 = T[(x + 1) % 5]; + var Tx1Msw = Tx1.high; + var Tx1Lsw = Tx1.low; + + // Mix surrounding columns + var tMsw = Tx4.high ^ ((Tx1Msw << 1) | (Tx1Lsw >>> 31)); + var tLsw = Tx4.low ^ ((Tx1Lsw << 1) | (Tx1Msw >>> 31)); + for (var y = 0; y < 5; y++) { + var lane = state[x + 5 * y]; + lane.high ^= tMsw; + lane.low ^= tLsw; + } + } + + // Rho Pi + for (var laneIndex = 1; laneIndex < 25; laneIndex++) { + var tMsw; + var tLsw; + + // Shortcuts + var lane = state[laneIndex]; + var laneMsw = lane.high; + var laneLsw = lane.low; + var rhoOffset = RHO_OFFSETS[laneIndex]; + + // Rotate lanes + if (rhoOffset < 32) { + tMsw = (laneMsw << rhoOffset) | (laneLsw >>> (32 - rhoOffset)); + tLsw = (laneLsw << rhoOffset) | (laneMsw >>> (32 - rhoOffset)); + } else /* if (rhoOffset >= 32) */ { + tMsw = (laneLsw << (rhoOffset - 32)) | (laneMsw >>> (64 - rhoOffset)); + tLsw = (laneMsw << (rhoOffset - 32)) | (laneLsw >>> (64 - rhoOffset)); + } + + // Transpose lanes + var TPiLane = T[PI_INDEXES[laneIndex]]; + TPiLane.high = tMsw; + TPiLane.low = tLsw; + } + + // Rho pi at x = y = 0 + var T0 = T[0]; + var state0 = state[0]; + T0.high = state0.high; + T0.low = state0.low; + + // Chi + for (var x = 0; x < 5; x++) { + for (var y = 0; y < 5; y++) { + // Shortcuts + var laneIndex = x + 5 * y; + var lane = state[laneIndex]; + var TLane = T[laneIndex]; + var Tx1Lane = T[((x + 1) % 5) + 5 * y]; + var Tx2Lane = T[((x + 2) % 5) + 5 * y]; + + // Mix rows + lane.high = TLane.high ^ (~Tx1Lane.high & Tx2Lane.high); + lane.low = TLane.low ^ (~Tx1Lane.low & Tx2Lane.low); + } + } + + // Iota + var lane = state[0]; + var roundConstant = ROUND_CONSTANTS[round]; + lane.high ^= roundConstant.high; + lane.low ^= roundConstant.low; + } + }, + + _doFinalize: function () { + // Shortcuts + var data = this._data; + var dataWords = data.words; + var nBitsTotal = this._nDataBytes * 8; + var nBitsLeft = data.sigBytes * 8; + var blockSizeBits = this.blockSize * 32; + + // Add padding + dataWords[nBitsLeft >>> 5] |= 0x1 << (24 - nBitsLeft % 32); + dataWords[((Math.ceil((nBitsLeft + 1) / blockSizeBits) * blockSizeBits) >>> 5) - 1] |= 0x80; + data.sigBytes = dataWords.length * 4; + + // Hash final blocks + this._process(); + + // Shortcuts + var state = this._state; + var 
outputLengthBytes = this.cfg.outputLength / 8; + var outputLengthLanes = outputLengthBytes / 8; + + // Squeeze + var hashWords = []; + for (var i = 0; i < outputLengthLanes; i++) { + // Shortcuts + var lane = state[i]; + var laneMsw = lane.high; + var laneLsw = lane.low; + + // Swap endian + laneMsw = ( + (((laneMsw << 8) | (laneMsw >>> 24)) & 0x00ff00ff) | + (((laneMsw << 24) | (laneMsw >>> 8)) & 0xff00ff00) + ); + laneLsw = ( + (((laneLsw << 8) | (laneLsw >>> 24)) & 0x00ff00ff) | + (((laneLsw << 24) | (laneLsw >>> 8)) & 0xff00ff00) + ); + + // Squeeze state to retrieve hash + hashWords.push(laneLsw); + hashWords.push(laneMsw); + } + + // Return final computed hash + return new WordArray.init(hashWords, outputLengthBytes); + }, + + clone: function () { + var clone = Hasher.clone.call(this); + + var state = clone._state = this._state.slice(0); + for (var i = 0; i < 25; i++) { + state[i] = state[i].clone(); + } + + return clone; + } + }); + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.SHA3('message'); + * var hash = CryptoJS.SHA3(wordArray); + */ + C.SHA3 = Hasher._createHelper(SHA3); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacSHA3(message, key); + */ + C.HmacSHA3 = Hasher._createHmacHelper(SHA3); + }(Math)); + + + return CryptoJS.SHA3; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/sha384.js b/node_modules/crypto-js/sha384.js new file mode 100644 index 0000000..a0b95bf --- /dev/null +++ b/node_modules/crypto-js/sha384.js @@ -0,0 +1,83 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./x64-core"), require("./sha512")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./x64-core", "./sha512"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function () { + // Shortcuts + var C = CryptoJS; + var C_x64 = C.x64; + var X64Word = C_x64.Word; + var X64WordArray = C_x64.WordArray; + var C_algo = C.algo; + var SHA512 = C_algo.SHA512; + + /** + * SHA-384 hash algorithm. + */ + var SHA384 = C_algo.SHA384 = SHA512.extend({ + _doReset: function () { + this._hash = new X64WordArray.init([ + new X64Word.init(0xcbbb9d5d, 0xc1059ed8), new X64Word.init(0x629a292a, 0x367cd507), + new X64Word.init(0x9159015a, 0x3070dd17), new X64Word.init(0x152fecd8, 0xf70e5939), + new X64Word.init(0x67332667, 0xffc00b31), new X64Word.init(0x8eb44a87, 0x68581511), + new X64Word.init(0xdb0c2e0d, 0x64f98fa7), new X64Word.init(0x47b5481d, 0xbefa4fa4) + ]); + }, + + _doFinalize: function () { + var hash = SHA512._doFinalize.call(this); + + hash.sigBytes -= 16; + + return hash; + } + }); + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. + * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.SHA384('message'); + * var hash = CryptoJS.SHA384(wordArray); + */ + C.SHA384 = SHA512._createHelper(SHA384); + + /** + * Shortcut function to the HMAC's object interface. 
+ * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacSHA384(message, key); + */ + C.HmacSHA384 = SHA512._createHmacHelper(SHA384); + }()); + + + return CryptoJS.SHA384; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/sha512.js b/node_modules/crypto-js/sha512.js new file mode 100644 index 0000000..d274ab0 --- /dev/null +++ b/node_modules/crypto-js/sha512.js @@ -0,0 +1,326 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./x64-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./x64-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var Hasher = C_lib.Hasher; + var C_x64 = C.x64; + var X64Word = C_x64.Word; + var X64WordArray = C_x64.WordArray; + var C_algo = C.algo; + + function X64Word_create() { + return X64Word.create.apply(X64Word, arguments); + } + + // Constants + var K = [ + X64Word_create(0x428a2f98, 0xd728ae22), X64Word_create(0x71374491, 0x23ef65cd), + X64Word_create(0xb5c0fbcf, 0xec4d3b2f), X64Word_create(0xe9b5dba5, 0x8189dbbc), + X64Word_create(0x3956c25b, 0xf348b538), X64Word_create(0x59f111f1, 0xb605d019), + X64Word_create(0x923f82a4, 0xaf194f9b), X64Word_create(0xab1c5ed5, 0xda6d8118), + X64Word_create(0xd807aa98, 0xa3030242), X64Word_create(0x12835b01, 0x45706fbe), + X64Word_create(0x243185be, 0x4ee4b28c), X64Word_create(0x550c7dc3, 0xd5ffb4e2), + X64Word_create(0x72be5d74, 0xf27b896f), X64Word_create(0x80deb1fe, 0x3b1696b1), + X64Word_create(0x9bdc06a7, 0x25c71235), X64Word_create(0xc19bf174, 0xcf692694), + X64Word_create(0xe49b69c1, 0x9ef14ad2), X64Word_create(0xefbe4786, 0x384f25e3), + X64Word_create(0x0fc19dc6, 0x8b8cd5b5), X64Word_create(0x240ca1cc, 0x77ac9c65), + X64Word_create(0x2de92c6f, 0x592b0275), X64Word_create(0x4a7484aa, 0x6ea6e483), + X64Word_create(0x5cb0a9dc, 0xbd41fbd4), X64Word_create(0x76f988da, 0x831153b5), + X64Word_create(0x983e5152, 0xee66dfab), X64Word_create(0xa831c66d, 0x2db43210), + X64Word_create(0xb00327c8, 0x98fb213f), X64Word_create(0xbf597fc7, 0xbeef0ee4), + X64Word_create(0xc6e00bf3, 0x3da88fc2), X64Word_create(0xd5a79147, 0x930aa725), + X64Word_create(0x06ca6351, 0xe003826f), X64Word_create(0x14292967, 0x0a0e6e70), + X64Word_create(0x27b70a85, 0x46d22ffc), X64Word_create(0x2e1b2138, 0x5c26c926), + X64Word_create(0x4d2c6dfc, 0x5ac42aed), X64Word_create(0x53380d13, 0x9d95b3df), + X64Word_create(0x650a7354, 0x8baf63de), X64Word_create(0x766a0abb, 0x3c77b2a8), + X64Word_create(0x81c2c92e, 0x47edaee6), X64Word_create(0x92722c85, 0x1482353b), + X64Word_create(0xa2bfe8a1, 0x4cf10364), X64Word_create(0xa81a664b, 0xbc423001), + X64Word_create(0xc24b8b70, 0xd0f89791), X64Word_create(0xc76c51a3, 0x0654be30), + X64Word_create(0xd192e819, 0xd6ef5218), X64Word_create(0xd6990624, 0x5565a910), + X64Word_create(0xf40e3585, 0x5771202a), X64Word_create(0x106aa070, 0x32bbd1b8), + X64Word_create(0x19a4c116, 0xb8d2d0c8), X64Word_create(0x1e376c08, 0x5141ab53), + X64Word_create(0x2748774c, 0xdf8eeb99), X64Word_create(0x34b0bcb5, 0xe19b48a8), + X64Word_create(0x391c0cb3, 0xc5c95a63), X64Word_create(0x4ed8aa4a, 0xe3418acb), + X64Word_create(0x5b9cca4f, 0x7763e373), 
X64Word_create(0x682e6ff3, 0xd6b2b8a3), + X64Word_create(0x748f82ee, 0x5defb2fc), X64Word_create(0x78a5636f, 0x43172f60), + X64Word_create(0x84c87814, 0xa1f0ab72), X64Word_create(0x8cc70208, 0x1a6439ec), + X64Word_create(0x90befffa, 0x23631e28), X64Word_create(0xa4506ceb, 0xde82bde9), + X64Word_create(0xbef9a3f7, 0xb2c67915), X64Word_create(0xc67178f2, 0xe372532b), + X64Word_create(0xca273ece, 0xea26619c), X64Word_create(0xd186b8c7, 0x21c0c207), + X64Word_create(0xeada7dd6, 0xcde0eb1e), X64Word_create(0xf57d4f7f, 0xee6ed178), + X64Word_create(0x06f067aa, 0x72176fba), X64Word_create(0x0a637dc5, 0xa2c898a6), + X64Word_create(0x113f9804, 0xbef90dae), X64Word_create(0x1b710b35, 0x131c471b), + X64Word_create(0x28db77f5, 0x23047d84), X64Word_create(0x32caab7b, 0x40c72493), + X64Word_create(0x3c9ebe0a, 0x15c9bebc), X64Word_create(0x431d67c4, 0x9c100d4c), + X64Word_create(0x4cc5d4be, 0xcb3e42b6), X64Word_create(0x597f299c, 0xfc657e2a), + X64Word_create(0x5fcb6fab, 0x3ad6faec), X64Word_create(0x6c44198c, 0x4a475817) + ]; + + // Reusable objects + var W = []; + (function () { + for (var i = 0; i < 80; i++) { + W[i] = X64Word_create(); + } + }()); + + /** + * SHA-512 hash algorithm. + */ + var SHA512 = C_algo.SHA512 = Hasher.extend({ + _doReset: function () { + this._hash = new X64WordArray.init([ + new X64Word.init(0x6a09e667, 0xf3bcc908), new X64Word.init(0xbb67ae85, 0x84caa73b), + new X64Word.init(0x3c6ef372, 0xfe94f82b), new X64Word.init(0xa54ff53a, 0x5f1d36f1), + new X64Word.init(0x510e527f, 0xade682d1), new X64Word.init(0x9b05688c, 0x2b3e6c1f), + new X64Word.init(0x1f83d9ab, 0xfb41bd6b), new X64Word.init(0x5be0cd19, 0x137e2179) + ]); + }, + + _doProcessBlock: function (M, offset) { + // Shortcuts + var H = this._hash.words; + + var H0 = H[0]; + var H1 = H[1]; + var H2 = H[2]; + var H3 = H[3]; + var H4 = H[4]; + var H5 = H[5]; + var H6 = H[6]; + var H7 = H[7]; + + var H0h = H0.high; + var H0l = H0.low; + var H1h = H1.high; + var H1l = H1.low; + var H2h = H2.high; + var H2l = H2.low; + var H3h = H3.high; + var H3l = H3.low; + var H4h = H4.high; + var H4l = H4.low; + var H5h = H5.high; + var H5l = H5.low; + var H6h = H6.high; + var H6l = H6.low; + var H7h = H7.high; + var H7l = H7.low; + + // Working variables + var ah = H0h; + var al = H0l; + var bh = H1h; + var bl = H1l; + var ch = H2h; + var cl = H2l; + var dh = H3h; + var dl = H3l; + var eh = H4h; + var el = H4l; + var fh = H5h; + var fl = H5l; + var gh = H6h; + var gl = H6l; + var hh = H7h; + var hl = H7l; + + // Rounds + for (var i = 0; i < 80; i++) { + var Wil; + var Wih; + + // Shortcut + var Wi = W[i]; + + // Extend message + if (i < 16) { + Wih = Wi.high = M[offset + i * 2] | 0; + Wil = Wi.low = M[offset + i * 2 + 1] | 0; + } else { + // Gamma0 + var gamma0x = W[i - 15]; + var gamma0xh = gamma0x.high; + var gamma0xl = gamma0x.low; + var gamma0h = ((gamma0xh >>> 1) | (gamma0xl << 31)) ^ ((gamma0xh >>> 8) | (gamma0xl << 24)) ^ (gamma0xh >>> 7); + var gamma0l = ((gamma0xl >>> 1) | (gamma0xh << 31)) ^ ((gamma0xl >>> 8) | (gamma0xh << 24)) ^ ((gamma0xl >>> 7) | (gamma0xh << 25)); + + // Gamma1 + var gamma1x = W[i - 2]; + var gamma1xh = gamma1x.high; + var gamma1xl = gamma1x.low; + var gamma1h = ((gamma1xh >>> 19) | (gamma1xl << 13)) ^ ((gamma1xh << 3) | (gamma1xl >>> 29)) ^ (gamma1xh >>> 6); + var gamma1l = ((gamma1xl >>> 19) | (gamma1xh << 13)) ^ ((gamma1xl << 3) | (gamma1xh >>> 29)) ^ ((gamma1xl >>> 6) | (gamma1xh << 26)); + + // W[i] = gamma0 + W[i - 7] + gamma1 + W[i - 16] + var Wi7 = W[i - 7]; + var Wi7h = Wi7.high; + var Wi7l = Wi7.low; 
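+
+ // 64-bit values are emulated as (high, low) pairs of 32-bit words:
+ // the low halves are added first, and a carry is propagated into
+ // the high half whenever the unsigned low sum wraps below an addend.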
+ + var Wi16 = W[i - 16]; + var Wi16h = Wi16.high; + var Wi16l = Wi16.low; + + Wil = gamma0l + Wi7l; + Wih = gamma0h + Wi7h + ((Wil >>> 0) < (gamma0l >>> 0) ? 1 : 0); + Wil = Wil + gamma1l; + Wih = Wih + gamma1h + ((Wil >>> 0) < (gamma1l >>> 0) ? 1 : 0); + Wil = Wil + Wi16l; + Wih = Wih + Wi16h + ((Wil >>> 0) < (Wi16l >>> 0) ? 1 : 0); + + Wi.high = Wih; + Wi.low = Wil; + } + + var chh = (eh & fh) ^ (~eh & gh); + var chl = (el & fl) ^ (~el & gl); + var majh = (ah & bh) ^ (ah & ch) ^ (bh & ch); + var majl = (al & bl) ^ (al & cl) ^ (bl & cl); + + var sigma0h = ((ah >>> 28) | (al << 4)) ^ ((ah << 30) | (al >>> 2)) ^ ((ah << 25) | (al >>> 7)); + var sigma0l = ((al >>> 28) | (ah << 4)) ^ ((al << 30) | (ah >>> 2)) ^ ((al << 25) | (ah >>> 7)); + var sigma1h = ((eh >>> 14) | (el << 18)) ^ ((eh >>> 18) | (el << 14)) ^ ((eh << 23) | (el >>> 9)); + var sigma1l = ((el >>> 14) | (eh << 18)) ^ ((el >>> 18) | (eh << 14)) ^ ((el << 23) | (eh >>> 9)); + + // t1 = h + sigma1 + ch + K[i] + W[i] + var Ki = K[i]; + var Kih = Ki.high; + var Kil = Ki.low; + + var t1l = hl + sigma1l; + var t1h = hh + sigma1h + ((t1l >>> 0) < (hl >>> 0) ? 1 : 0); + var t1l = t1l + chl; + var t1h = t1h + chh + ((t1l >>> 0) < (chl >>> 0) ? 1 : 0); + var t1l = t1l + Kil; + var t1h = t1h + Kih + ((t1l >>> 0) < (Kil >>> 0) ? 1 : 0); + var t1l = t1l + Wil; + var t1h = t1h + Wih + ((t1l >>> 0) < (Wil >>> 0) ? 1 : 0); + + // t2 = sigma0 + maj + var t2l = sigma0l + majl; + var t2h = sigma0h + majh + ((t2l >>> 0) < (sigma0l >>> 0) ? 1 : 0); + + // Update working variables + hh = gh; + hl = gl; + gh = fh; + gl = fl; + fh = eh; + fl = el; + el = (dl + t1l) | 0; + eh = (dh + t1h + ((el >>> 0) < (dl >>> 0) ? 1 : 0)) | 0; + dh = ch; + dl = cl; + ch = bh; + cl = bl; + bh = ah; + bl = al; + al = (t1l + t2l) | 0; + ah = (t1h + t2h + ((al >>> 0) < (t1l >>> 0) ? 1 : 0)) | 0; + } + + // Intermediate hash value + H0l = H0.low = (H0l + al); + H0.high = (H0h + ah + ((H0l >>> 0) < (al >>> 0) ? 1 : 0)); + H1l = H1.low = (H1l + bl); + H1.high = (H1h + bh + ((H1l >>> 0) < (bl >>> 0) ? 1 : 0)); + H2l = H2.low = (H2l + cl); + H2.high = (H2h + ch + ((H2l >>> 0) < (cl >>> 0) ? 1 : 0)); + H3l = H3.low = (H3l + dl); + H3.high = (H3h + dh + ((H3l >>> 0) < (dl >>> 0) ? 1 : 0)); + H4l = H4.low = (H4l + el); + H4.high = (H4h + eh + ((H4l >>> 0) < (el >>> 0) ? 1 : 0)); + H5l = H5.low = (H5l + fl); + H5.high = (H5h + fh + ((H5l >>> 0) < (fl >>> 0) ? 1 : 0)); + H6l = H6.low = (H6l + gl); + H6.high = (H6h + gh + ((H6l >>> 0) < (gl >>> 0) ? 1 : 0)); + H7l = H7.low = (H7l + hl); + H7.high = (H7h + hh + ((H7l >>> 0) < (hl >>> 0) ? 1 : 0)); + }, + + _doFinalize: function () { + // Shortcuts + var data = this._data; + var dataWords = data.words; + + var nBitsTotal = this._nDataBytes * 8; + var nBitsLeft = data.sigBytes * 8; + + // Add padding + dataWords[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32); + dataWords[(((nBitsLeft + 128) >>> 10) << 5) + 30] = Math.floor(nBitsTotal / 0x100000000); + dataWords[(((nBitsLeft + 128) >>> 10) << 5) + 31] = nBitsTotal; + data.sigBytes = dataWords.length * 4; + + // Hash final blocks + this._process(); + + // Convert hash to 32-bit word array before returning + var hash = this._hash.toX32(); + + // Return final computed hash + return hash; + }, + + clone: function () { + var clone = Hasher.clone.call(this); + clone._hash = this._hash.clone(); + + return clone; + }, + + blockSize: 1024/32 + }); + + /** + * Shortcut function to the hasher's object interface. + * + * @param {WordArray|string} message The message to hash. 
+ * + * @return {WordArray} The hash. + * + * @static + * + * @example + * + * var hash = CryptoJS.SHA512('message'); + * var hash = CryptoJS.SHA512(wordArray); + */ + C.SHA512 = Hasher._createHelper(SHA512); + + /** + * Shortcut function to the HMAC's object interface. + * + * @param {WordArray|string} message The message to hash. + * @param {WordArray|string} key The secret key. + * + * @return {WordArray} The HMAC. + * + * @static + * + * @example + * + * var hmac = CryptoJS.HmacSHA512(message, key); + */ + C.HmacSHA512 = Hasher._createHmacHelper(SHA512); + }()); + + + return CryptoJS.SHA512; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/tripledes.js b/node_modules/crypto-js/tripledes.js new file mode 100644 index 0000000..1a92477 --- /dev/null +++ b/node_modules/crypto-js/tripledes.js @@ -0,0 +1,779 @@ +;(function (root, factory, undef) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core"), require("./enc-base64"), require("./md5"), require("./evpkdf"), require("./cipher-core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core", "./enc-base64", "./md5", "./evpkdf", "./cipher-core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function () { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var WordArray = C_lib.WordArray; + var BlockCipher = C_lib.BlockCipher; + var C_algo = C.algo; + + // Permuted Choice 1 constants + var PC1 = [ + 57, 49, 41, 33, 25, 17, 9, 1, + 58, 50, 42, 34, 26, 18, 10, 2, + 59, 51, 43, 35, 27, 19, 11, 3, + 60, 52, 44, 36, 63, 55, 47, 39, + 31, 23, 15, 7, 62, 54, 46, 38, + 30, 22, 14, 6, 61, 53, 45, 37, + 29, 21, 13, 5, 28, 20, 12, 4 + ]; + + // Permuted Choice 2 constants + var PC2 = [ + 14, 17, 11, 24, 1, 5, + 3, 28, 15, 6, 21, 10, + 23, 19, 12, 4, 26, 8, + 16, 7, 27, 20, 13, 2, + 41, 52, 31, 37, 47, 55, + 30, 40, 51, 45, 33, 48, + 44, 49, 39, 56, 34, 53, + 46, 42, 50, 36, 29, 32 + ]; + + // Cumulative bit shift constants + var BIT_SHIFTS = [1, 2, 4, 6, 8, 10, 12, 14, 15, 17, 19, 21, 23, 25, 27, 28]; + + // SBOXes and round permutation constants + var SBOX_P = [ + { + 0x0: 0x808200, + 0x10000000: 0x8000, + 0x20000000: 0x808002, + 0x30000000: 0x2, + 0x40000000: 0x200, + 0x50000000: 0x808202, + 0x60000000: 0x800202, + 0x70000000: 0x800000, + 0x80000000: 0x202, + 0x90000000: 0x800200, + 0xa0000000: 0x8200, + 0xb0000000: 0x808000, + 0xc0000000: 0x8002, + 0xd0000000: 0x800002, + 0xe0000000: 0x0, + 0xf0000000: 0x8202, + 0x8000000: 0x0, + 0x18000000: 0x808202, + 0x28000000: 0x8202, + 0x38000000: 0x8000, + 0x48000000: 0x808200, + 0x58000000: 0x200, + 0x68000000: 0x808002, + 0x78000000: 0x2, + 0x88000000: 0x800200, + 0x98000000: 0x8200, + 0xa8000000: 0x808000, + 0xb8000000: 0x800202, + 0xc8000000: 0x800002, + 0xd8000000: 0x8002, + 0xe8000000: 0x202, + 0xf8000000: 0x800000, + 0x1: 0x8000, + 0x10000001: 0x2, + 0x20000001: 0x808200, + 0x30000001: 0x800000, + 0x40000001: 0x808002, + 0x50000001: 0x8200, + 0x60000001: 0x200, + 0x70000001: 0x800202, + 0x80000001: 0x808202, + 0x90000001: 0x808000, + 0xa0000001: 0x800002, + 0xb0000001: 0x8202, + 0xc0000001: 0x202, + 0xd0000001: 0x800200, + 0xe0000001: 0x8002, + 0xf0000001: 0x0, + 0x8000001: 0x808202, + 0x18000001: 0x808000, + 0x28000001: 0x800000, + 0x38000001: 0x200, + 0x48000001: 0x8000, + 0x58000001: 0x800002, + 0x68000001: 0x2, + 0x78000001: 0x8202, + 0x88000001: 0x8002, + 0x98000001: 0x800202, + 0xa8000001: 0x202, + 0xb8000001: 
0x808200, + 0xc8000001: 0x800200, + 0xd8000001: 0x0, + 0xe8000001: 0x8200, + 0xf8000001: 0x808002 + }, + { + 0x0: 0x40084010, + 0x1000000: 0x4000, + 0x2000000: 0x80000, + 0x3000000: 0x40080010, + 0x4000000: 0x40000010, + 0x5000000: 0x40084000, + 0x6000000: 0x40004000, + 0x7000000: 0x10, + 0x8000000: 0x84000, + 0x9000000: 0x40004010, + 0xa000000: 0x40000000, + 0xb000000: 0x84010, + 0xc000000: 0x80010, + 0xd000000: 0x0, + 0xe000000: 0x4010, + 0xf000000: 0x40080000, + 0x800000: 0x40004000, + 0x1800000: 0x84010, + 0x2800000: 0x10, + 0x3800000: 0x40004010, + 0x4800000: 0x40084010, + 0x5800000: 0x40000000, + 0x6800000: 0x80000, + 0x7800000: 0x40080010, + 0x8800000: 0x80010, + 0x9800000: 0x0, + 0xa800000: 0x4000, + 0xb800000: 0x40080000, + 0xc800000: 0x40000010, + 0xd800000: 0x84000, + 0xe800000: 0x40084000, + 0xf800000: 0x4010, + 0x10000000: 0x0, + 0x11000000: 0x40080010, + 0x12000000: 0x40004010, + 0x13000000: 0x40084000, + 0x14000000: 0x40080000, + 0x15000000: 0x10, + 0x16000000: 0x84010, + 0x17000000: 0x4000, + 0x18000000: 0x4010, + 0x19000000: 0x80000, + 0x1a000000: 0x80010, + 0x1b000000: 0x40000010, + 0x1c000000: 0x84000, + 0x1d000000: 0x40004000, + 0x1e000000: 0x40000000, + 0x1f000000: 0x40084010, + 0x10800000: 0x84010, + 0x11800000: 0x80000, + 0x12800000: 0x40080000, + 0x13800000: 0x4000, + 0x14800000: 0x40004000, + 0x15800000: 0x40084010, + 0x16800000: 0x10, + 0x17800000: 0x40000000, + 0x18800000: 0x40084000, + 0x19800000: 0x40000010, + 0x1a800000: 0x40004010, + 0x1b800000: 0x80010, + 0x1c800000: 0x0, + 0x1d800000: 0x4010, + 0x1e800000: 0x40080010, + 0x1f800000: 0x84000 + }, + { + 0x0: 0x104, + 0x100000: 0x0, + 0x200000: 0x4000100, + 0x300000: 0x10104, + 0x400000: 0x10004, + 0x500000: 0x4000004, + 0x600000: 0x4010104, + 0x700000: 0x4010000, + 0x800000: 0x4000000, + 0x900000: 0x4010100, + 0xa00000: 0x10100, + 0xb00000: 0x4010004, + 0xc00000: 0x4000104, + 0xd00000: 0x10000, + 0xe00000: 0x4, + 0xf00000: 0x100, + 0x80000: 0x4010100, + 0x180000: 0x4010004, + 0x280000: 0x0, + 0x380000: 0x4000100, + 0x480000: 0x4000004, + 0x580000: 0x10000, + 0x680000: 0x10004, + 0x780000: 0x104, + 0x880000: 0x4, + 0x980000: 0x100, + 0xa80000: 0x4010000, + 0xb80000: 0x10104, + 0xc80000: 0x10100, + 0xd80000: 0x4000104, + 0xe80000: 0x4010104, + 0xf80000: 0x4000000, + 0x1000000: 0x4010100, + 0x1100000: 0x10004, + 0x1200000: 0x10000, + 0x1300000: 0x4000100, + 0x1400000: 0x100, + 0x1500000: 0x4010104, + 0x1600000: 0x4000004, + 0x1700000: 0x0, + 0x1800000: 0x4000104, + 0x1900000: 0x4000000, + 0x1a00000: 0x4, + 0x1b00000: 0x10100, + 0x1c00000: 0x4010000, + 0x1d00000: 0x104, + 0x1e00000: 0x10104, + 0x1f00000: 0x4010004, + 0x1080000: 0x4000000, + 0x1180000: 0x104, + 0x1280000: 0x4010100, + 0x1380000: 0x0, + 0x1480000: 0x10004, + 0x1580000: 0x4000100, + 0x1680000: 0x100, + 0x1780000: 0x4010004, + 0x1880000: 0x10000, + 0x1980000: 0x4010104, + 0x1a80000: 0x10104, + 0x1b80000: 0x4000004, + 0x1c80000: 0x4000104, + 0x1d80000: 0x4010000, + 0x1e80000: 0x4, + 0x1f80000: 0x10100 + }, + { + 0x0: 0x80401000, + 0x10000: 0x80001040, + 0x20000: 0x401040, + 0x30000: 0x80400000, + 0x40000: 0x0, + 0x50000: 0x401000, + 0x60000: 0x80000040, + 0x70000: 0x400040, + 0x80000: 0x80000000, + 0x90000: 0x400000, + 0xa0000: 0x40, + 0xb0000: 0x80001000, + 0xc0000: 0x80400040, + 0xd0000: 0x1040, + 0xe0000: 0x1000, + 0xf0000: 0x80401040, + 0x8000: 0x80001040, + 0x18000: 0x40, + 0x28000: 0x80400040, + 0x38000: 0x80001000, + 0x48000: 0x401000, + 0x58000: 0x80401040, + 0x68000: 0x0, + 0x78000: 0x80400000, + 0x88000: 0x1000, + 0x98000: 0x80401000, + 
0xa8000: 0x400000, + 0xb8000: 0x1040, + 0xc8000: 0x80000000, + 0xd8000: 0x400040, + 0xe8000: 0x401040, + 0xf8000: 0x80000040, + 0x100000: 0x400040, + 0x110000: 0x401000, + 0x120000: 0x80000040, + 0x130000: 0x0, + 0x140000: 0x1040, + 0x150000: 0x80400040, + 0x160000: 0x80401000, + 0x170000: 0x80001040, + 0x180000: 0x80401040, + 0x190000: 0x80000000, + 0x1a0000: 0x80400000, + 0x1b0000: 0x401040, + 0x1c0000: 0x80001000, + 0x1d0000: 0x400000, + 0x1e0000: 0x40, + 0x1f0000: 0x1000, + 0x108000: 0x80400000, + 0x118000: 0x80401040, + 0x128000: 0x0, + 0x138000: 0x401000, + 0x148000: 0x400040, + 0x158000: 0x80000000, + 0x168000: 0x80001040, + 0x178000: 0x40, + 0x188000: 0x80000040, + 0x198000: 0x1000, + 0x1a8000: 0x80001000, + 0x1b8000: 0x80400040, + 0x1c8000: 0x1040, + 0x1d8000: 0x80401000, + 0x1e8000: 0x400000, + 0x1f8000: 0x401040 + }, + { + 0x0: 0x80, + 0x1000: 0x1040000, + 0x2000: 0x40000, + 0x3000: 0x20000000, + 0x4000: 0x20040080, + 0x5000: 0x1000080, + 0x6000: 0x21000080, + 0x7000: 0x40080, + 0x8000: 0x1000000, + 0x9000: 0x20040000, + 0xa000: 0x20000080, + 0xb000: 0x21040080, + 0xc000: 0x21040000, + 0xd000: 0x0, + 0xe000: 0x1040080, + 0xf000: 0x21000000, + 0x800: 0x1040080, + 0x1800: 0x21000080, + 0x2800: 0x80, + 0x3800: 0x1040000, + 0x4800: 0x40000, + 0x5800: 0x20040080, + 0x6800: 0x21040000, + 0x7800: 0x20000000, + 0x8800: 0x20040000, + 0x9800: 0x0, + 0xa800: 0x21040080, + 0xb800: 0x1000080, + 0xc800: 0x20000080, + 0xd800: 0x21000000, + 0xe800: 0x1000000, + 0xf800: 0x40080, + 0x10000: 0x40000, + 0x11000: 0x80, + 0x12000: 0x20000000, + 0x13000: 0x21000080, + 0x14000: 0x1000080, + 0x15000: 0x21040000, + 0x16000: 0x20040080, + 0x17000: 0x1000000, + 0x18000: 0x21040080, + 0x19000: 0x21000000, + 0x1a000: 0x1040000, + 0x1b000: 0x20040000, + 0x1c000: 0x40080, + 0x1d000: 0x20000080, + 0x1e000: 0x0, + 0x1f000: 0x1040080, + 0x10800: 0x21000080, + 0x11800: 0x1000000, + 0x12800: 0x1040000, + 0x13800: 0x20040080, + 0x14800: 0x20000000, + 0x15800: 0x1040080, + 0x16800: 0x80, + 0x17800: 0x21040000, + 0x18800: 0x40080, + 0x19800: 0x21040080, + 0x1a800: 0x0, + 0x1b800: 0x21000000, + 0x1c800: 0x1000080, + 0x1d800: 0x40000, + 0x1e800: 0x20040000, + 0x1f800: 0x20000080 + }, + { + 0x0: 0x10000008, + 0x100: 0x2000, + 0x200: 0x10200000, + 0x300: 0x10202008, + 0x400: 0x10002000, + 0x500: 0x200000, + 0x600: 0x200008, + 0x700: 0x10000000, + 0x800: 0x0, + 0x900: 0x10002008, + 0xa00: 0x202000, + 0xb00: 0x8, + 0xc00: 0x10200008, + 0xd00: 0x202008, + 0xe00: 0x2008, + 0xf00: 0x10202000, + 0x80: 0x10200000, + 0x180: 0x10202008, + 0x280: 0x8, + 0x380: 0x200000, + 0x480: 0x202008, + 0x580: 0x10000008, + 0x680: 0x10002000, + 0x780: 0x2008, + 0x880: 0x200008, + 0x980: 0x2000, + 0xa80: 0x10002008, + 0xb80: 0x10200008, + 0xc80: 0x0, + 0xd80: 0x10202000, + 0xe80: 0x202000, + 0xf80: 0x10000000, + 0x1000: 0x10002000, + 0x1100: 0x10200008, + 0x1200: 0x10202008, + 0x1300: 0x2008, + 0x1400: 0x200000, + 0x1500: 0x10000000, + 0x1600: 0x10000008, + 0x1700: 0x202000, + 0x1800: 0x202008, + 0x1900: 0x0, + 0x1a00: 0x8, + 0x1b00: 0x10200000, + 0x1c00: 0x2000, + 0x1d00: 0x10002008, + 0x1e00: 0x10202000, + 0x1f00: 0x200008, + 0x1080: 0x8, + 0x1180: 0x202000, + 0x1280: 0x200000, + 0x1380: 0x10000008, + 0x1480: 0x10002000, + 0x1580: 0x2008, + 0x1680: 0x10202008, + 0x1780: 0x10200000, + 0x1880: 0x10202000, + 0x1980: 0x10200008, + 0x1a80: 0x2000, + 0x1b80: 0x202008, + 0x1c80: 0x200008, + 0x1d80: 0x0, + 0x1e80: 0x10000000, + 0x1f80: 0x10002008 + }, + { + 0x0: 0x100000, + 0x10: 0x2000401, + 0x20: 0x400, + 0x30: 0x100401, + 0x40: 0x2100401, + 0x50: 
0x0, + 0x60: 0x1, + 0x70: 0x2100001, + 0x80: 0x2000400, + 0x90: 0x100001, + 0xa0: 0x2000001, + 0xb0: 0x2100400, + 0xc0: 0x2100000, + 0xd0: 0x401, + 0xe0: 0x100400, + 0xf0: 0x2000000, + 0x8: 0x2100001, + 0x18: 0x0, + 0x28: 0x2000401, + 0x38: 0x2100400, + 0x48: 0x100000, + 0x58: 0x2000001, + 0x68: 0x2000000, + 0x78: 0x401, + 0x88: 0x100401, + 0x98: 0x2000400, + 0xa8: 0x2100000, + 0xb8: 0x100001, + 0xc8: 0x400, + 0xd8: 0x2100401, + 0xe8: 0x1, + 0xf8: 0x100400, + 0x100: 0x2000000, + 0x110: 0x100000, + 0x120: 0x2000401, + 0x130: 0x2100001, + 0x140: 0x100001, + 0x150: 0x2000400, + 0x160: 0x2100400, + 0x170: 0x100401, + 0x180: 0x401, + 0x190: 0x2100401, + 0x1a0: 0x100400, + 0x1b0: 0x1, + 0x1c0: 0x0, + 0x1d0: 0x2100000, + 0x1e0: 0x2000001, + 0x1f0: 0x400, + 0x108: 0x100400, + 0x118: 0x2000401, + 0x128: 0x2100001, + 0x138: 0x1, + 0x148: 0x2000000, + 0x158: 0x100000, + 0x168: 0x401, + 0x178: 0x2100400, + 0x188: 0x2000001, + 0x198: 0x2100000, + 0x1a8: 0x0, + 0x1b8: 0x2100401, + 0x1c8: 0x100401, + 0x1d8: 0x400, + 0x1e8: 0x2000400, + 0x1f8: 0x100001 + }, + { + 0x0: 0x8000820, + 0x1: 0x20000, + 0x2: 0x8000000, + 0x3: 0x20, + 0x4: 0x20020, + 0x5: 0x8020820, + 0x6: 0x8020800, + 0x7: 0x800, + 0x8: 0x8020000, + 0x9: 0x8000800, + 0xa: 0x20800, + 0xb: 0x8020020, + 0xc: 0x820, + 0xd: 0x0, + 0xe: 0x8000020, + 0xf: 0x20820, + 0x80000000: 0x800, + 0x80000001: 0x8020820, + 0x80000002: 0x8000820, + 0x80000003: 0x8000000, + 0x80000004: 0x8020000, + 0x80000005: 0x20800, + 0x80000006: 0x20820, + 0x80000007: 0x20, + 0x80000008: 0x8000020, + 0x80000009: 0x820, + 0x8000000a: 0x20020, + 0x8000000b: 0x8020800, + 0x8000000c: 0x0, + 0x8000000d: 0x8020020, + 0x8000000e: 0x8000800, + 0x8000000f: 0x20000, + 0x10: 0x20820, + 0x11: 0x8020800, + 0x12: 0x20, + 0x13: 0x800, + 0x14: 0x8000800, + 0x15: 0x8000020, + 0x16: 0x8020020, + 0x17: 0x20000, + 0x18: 0x0, + 0x19: 0x20020, + 0x1a: 0x8020000, + 0x1b: 0x8000820, + 0x1c: 0x8020820, + 0x1d: 0x20800, + 0x1e: 0x820, + 0x1f: 0x8000000, + 0x80000010: 0x20000, + 0x80000011: 0x800, + 0x80000012: 0x8020020, + 0x80000013: 0x20820, + 0x80000014: 0x20, + 0x80000015: 0x8020000, + 0x80000016: 0x8000000, + 0x80000017: 0x8000820, + 0x80000018: 0x8020820, + 0x80000019: 0x8000020, + 0x8000001a: 0x8000800, + 0x8000001b: 0x0, + 0x8000001c: 0x20800, + 0x8000001d: 0x820, + 0x8000001e: 0x20020, + 0x8000001f: 0x8020800 + } + ]; + + // Masks that select the SBOX input + var SBOX_MASK = [ + 0xf8000001, 0x1f800000, 0x01f80000, 0x001f8000, + 0x0001f800, 0x00001f80, 0x000001f8, 0x8000001f + ]; + + /** + * DES block cipher algorithm. 
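+ *
+ * The SBOX_P tables above fold the round permutation P into each
+ * S-box, and SBOX_MASK selects the six input bits each S-box reads,
+ * so the Feistel function needs no separate expansion or permutation
+ * step at runtime.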
+ */ + var DES = C_algo.DES = BlockCipher.extend({ + _doReset: function () { + // Shortcuts + var key = this._key; + var keyWords = key.words; + + // Select 56 bits according to PC1 + var keyBits = []; + for (var i = 0; i < 56; i++) { + var keyBitPos = PC1[i] - 1; + keyBits[i] = (keyWords[keyBitPos >>> 5] >>> (31 - keyBitPos % 32)) & 1; + } + + // Assemble 16 subkeys + var subKeys = this._subKeys = []; + for (var nSubKey = 0; nSubKey < 16; nSubKey++) { + // Create subkey + var subKey = subKeys[nSubKey] = []; + + // Shortcut + var bitShift = BIT_SHIFTS[nSubKey]; + + // Select 48 bits according to PC2 + for (var i = 0; i < 24; i++) { + // Select from the left 28 key bits + subKey[(i / 6) | 0] |= keyBits[((PC2[i] - 1) + bitShift) % 28] << (31 - i % 6); + + // Select from the right 28 key bits + subKey[4 + ((i / 6) | 0)] |= keyBits[28 + (((PC2[i + 24] - 1) + bitShift) % 28)] << (31 - i % 6); + } + + // Since each subkey is applied to an expanded 32-bit input, + // the subkey can be broken into 8 values scaled to 32-bits, + // which allows the key to be used without expansion + subKey[0] = (subKey[0] << 1) | (subKey[0] >>> 31); + for (var i = 1; i < 7; i++) { + subKey[i] = subKey[i] >>> ((i - 1) * 4 + 3); + } + subKey[7] = (subKey[7] << 5) | (subKey[7] >>> 27); + } + + // Compute inverse subkeys + var invSubKeys = this._invSubKeys = []; + for (var i = 0; i < 16; i++) { + invSubKeys[i] = subKeys[15 - i]; + } + }, + + encryptBlock: function (M, offset) { + this._doCryptBlock(M, offset, this._subKeys); + }, + + decryptBlock: function (M, offset) { + this._doCryptBlock(M, offset, this._invSubKeys); + }, + + _doCryptBlock: function (M, offset, subKeys) { + // Get input + this._lBlock = M[offset]; + this._rBlock = M[offset + 1]; + + // Initial permutation + exchangeLR.call(this, 4, 0x0f0f0f0f); + exchangeLR.call(this, 16, 0x0000ffff); + exchangeRL.call(this, 2, 0x33333333); + exchangeRL.call(this, 8, 0x00ff00ff); + exchangeLR.call(this, 1, 0x55555555); + + // Rounds + for (var round = 0; round < 16; round++) { + // Shortcuts + var subKey = subKeys[round]; + var lBlock = this._lBlock; + var rBlock = this._rBlock; + + // Feistel function + var f = 0; + for (var i = 0; i < 8; i++) { + f |= SBOX_P[i][((rBlock ^ subKey[i]) & SBOX_MASK[i]) >>> 0]; + } + this._lBlock = rBlock; + this._rBlock = lBlock ^ f; + } + + // Undo swap from last round + var t = this._lBlock; + this._lBlock = this._rBlock; + this._rBlock = t; + + // Final permutation + exchangeLR.call(this, 1, 0x55555555); + exchangeRL.call(this, 8, 0x00ff00ff); + exchangeRL.call(this, 2, 0x33333333); + exchangeLR.call(this, 16, 0x0000ffff); + exchangeLR.call(this, 4, 0x0f0f0f0f); + + // Set output + M[offset] = this._lBlock; + M[offset + 1] = this._rBlock; + }, + + keySize: 64/32, + + ivSize: 64/32, + + blockSize: 64/32 + }); + + // Swap bits across the left and right words + function exchangeLR(offset, mask) { + var t = ((this._lBlock >>> offset) ^ this._rBlock) & mask; + this._rBlock ^= t; + this._lBlock ^= t << offset; + } + + function exchangeRL(offset, mask) { + var t = ((this._rBlock >>> offset) ^ this._lBlock) & mask; + this._lBlock ^= t; + this._rBlock ^= t << offset; + } + + /** + * Shortcut functions to the cipher's object interface. + * + * @example + * + * var ciphertext = CryptoJS.DES.encrypt(message, key, cfg); + * var plaintext = CryptoJS.DES.decrypt(ciphertext, key, cfg); + */ + C.DES = BlockCipher._createHelper(DES); + + /** + * Triple-DES block cipher algorithm. 
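+ *
+ * Each block is processed in EDE order: encrypt with key 1, decrypt
+ * with key 2, encrypt with key 3. Shorter keys are extended per the
+ * standard keying options, so a single 64-bit key degenerates to
+ * plain DES.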
+ */ + var TripleDES = C_algo.TripleDES = BlockCipher.extend({ + _doReset: function () { + // Shortcuts + var key = this._key; + var keyWords = key.words; + // Make sure the key length is valid (64, 128 or >= 192 bit) + if (keyWords.length !== 2 && keyWords.length !== 4 && keyWords.length < 6) { + throw new Error('Invalid key length - 3DES requires the key length to be 64, 128, 192 or >192.'); + } + + // Extend the key according to the keying options defined in 3DES standard + var key1 = keyWords.slice(0, 2); + var key2 = keyWords.length < 4 ? keyWords.slice(0, 2) : keyWords.slice(2, 4); + var key3 = keyWords.length < 6 ? keyWords.slice(0, 2) : keyWords.slice(4, 6); + + // Create DES instances + this._des1 = DES.createEncryptor(WordArray.create(key1)); + this._des2 = DES.createEncryptor(WordArray.create(key2)); + this._des3 = DES.createEncryptor(WordArray.create(key3)); + }, + + encryptBlock: function (M, offset) { + this._des1.encryptBlock(M, offset); + this._des2.decryptBlock(M, offset); + this._des3.encryptBlock(M, offset); + }, + + decryptBlock: function (M, offset) { + this._des3.decryptBlock(M, offset); + this._des2.encryptBlock(M, offset); + this._des1.decryptBlock(M, offset); + }, + + keySize: 192/32, + + ivSize: 64/32, + + blockSize: 64/32 + }); + + /** + * Shortcut functions to the cipher's object interface. + * + * @example + * + * var ciphertext = CryptoJS.TripleDES.encrypt(message, key, cfg); + * var plaintext = CryptoJS.TripleDES.decrypt(ciphertext, key, cfg); + */ + C.TripleDES = BlockCipher._createHelper(TripleDES); + }()); + + + return CryptoJS.TripleDES; + +})); \ No newline at end of file diff --git a/node_modules/crypto-js/x64-core.js b/node_modules/crypto-js/x64-core.js new file mode 100644 index 0000000..57dcc14 --- /dev/null +++ b/node_modules/crypto-js/x64-core.js @@ -0,0 +1,304 @@ +;(function (root, factory) { + if (typeof exports === "object") { + // CommonJS + module.exports = exports = factory(require("./core")); + } + else if (typeof define === "function" && define.amd) { + // AMD + define(["./core"], factory); + } + else { + // Global (browser) + factory(root.CryptoJS); + } +}(this, function (CryptoJS) { + + (function (undefined) { + // Shortcuts + var C = CryptoJS; + var C_lib = C.lib; + var Base = C_lib.Base; + var X32WordArray = C_lib.WordArray; + + /** + * x64 namespace. + */ + var C_x64 = C.x64 = {}; + + /** + * A 64-bit word. + */ + var X64Word = C_x64.Word = Base.extend({ + /** + * Initializes a newly created 64-bit word. + * + * @param {number} high The high 32 bits. + * @param {number} low The low 32 bits. + * + * @example + * + * var x64Word = CryptoJS.x64.Word.create(0x00010203, 0x04050607); + */ + init: function (high, low) { + this.high = high; + this.low = low; + } + + /** + * Bitwise NOTs this word. + * + * @return {X64Word} A new x64-Word object after negating. + * + * @example + * + * var negated = x64Word.not(); + */ + // not: function () { + // var high = ~this.high; + // var low = ~this.low; + + // return X64Word.create(high, low); + // }, + + /** + * Bitwise ANDs this word with the passed word. + * + * @param {X64Word} word The x64-Word to AND with this word. + * + * @return {X64Word} A new x64-Word object after ANDing. + * + * @example + * + * var anded = x64Word.and(anotherX64Word); + */ + // and: function (word) { + // var high = this.high & word.high; + // var low = this.low & word.low; + + // return X64Word.create(high, low); + // }, + + /** + * Bitwise ORs this word with the passed word. 
+ * + * @param {X64Word} word The x64-Word to OR with this word. + * + * @return {X64Word} A new x64-Word object after ORing. + * + * @example + * + * var ored = x64Word.or(anotherX64Word); + */ + // or: function (word) { + // var high = this.high | word.high; + // var low = this.low | word.low; + + // return X64Word.create(high, low); + // }, + + /** + * Bitwise XORs this word with the passed word. + * + * @param {X64Word} word The x64-Word to XOR with this word. + * + * @return {X64Word} A new x64-Word object after XORing. + * + * @example + * + * var xored = x64Word.xor(anotherX64Word); + */ + // xor: function (word) { + // var high = this.high ^ word.high; + // var low = this.low ^ word.low; + + // return X64Word.create(high, low); + // }, + + /** + * Shifts this word n bits to the left. + * + * @param {number} n The number of bits to shift. + * + * @return {X64Word} A new x64-Word object after shifting. + * + * @example + * + * var shifted = x64Word.shiftL(25); + */ + // shiftL: function (n) { + // if (n < 32) { + // var high = (this.high << n) | (this.low >>> (32 - n)); + // var low = this.low << n; + // } else { + // var high = this.low << (n - 32); + // var low = 0; + // } + + // return X64Word.create(high, low); + // }, + + /** + * Shifts this word n bits to the right. + * + * @param {number} n The number of bits to shift. + * + * @return {X64Word} A new x64-Word object after shifting. + * + * @example + * + * var shifted = x64Word.shiftR(7); + */ + // shiftR: function (n) { + // if (n < 32) { + // var low = (this.low >>> n) | (this.high << (32 - n)); + // var high = this.high >>> n; + // } else { + // var low = this.high >>> (n - 32); + // var high = 0; + // } + + // return X64Word.create(high, low); + // }, + + /** + * Rotates this word n bits to the left. + * + * @param {number} n The number of bits to rotate. + * + * @return {X64Word} A new x64-Word object after rotating. + * + * @example + * + * var rotated = x64Word.rotL(25); + */ + // rotL: function (n) { + // return this.shiftL(n).or(this.shiftR(64 - n)); + // }, + + /** + * Rotates this word n bits to the right. + * + * @param {number} n The number of bits to rotate. + * + * @return {X64Word} A new x64-Word object after rotating. + * + * @example + * + * var rotated = x64Word.rotR(7); + */ + // rotR: function (n) { + // return this.shiftR(n).or(this.shiftL(64 - n)); + // }, + + /** + * Adds this word with the passed word. + * + * @param {X64Word} word The x64-Word to add with this word. + * + * @return {X64Word} A new x64-Word object after adding. + * + * @example + * + * var added = x64Word.add(anotherX64Word); + */ + // add: function (word) { + // var low = (this.low + word.low) | 0; + // var carry = (low >>> 0) < (this.low >>> 0) ? 1 : 0; + // var high = (this.high + word.high + carry) | 0; + + // return X64Word.create(high, low); + // } + }); + + /** + * An array of 64-bit words. + * + * @property {Array} words The array of CryptoJS.x64.Word objects. + * @property {number} sigBytes The number of significant bytes in this word array. + */ + var X64WordArray = C_x64.WordArray = Base.extend({ + /** + * Initializes a newly created word array. + * + * @param {Array} words (Optional) An array of CryptoJS.x64.Word objects. + * @param {number} sigBytes (Optional) The number of significant bytes in the words. 
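+ * Defaults to eight bytes per 64-bit word when omitted.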
+ * + * @example + * + * var wordArray = CryptoJS.x64.WordArray.create(); + * + * var wordArray = CryptoJS.x64.WordArray.create([ + * CryptoJS.x64.Word.create(0x00010203, 0x04050607), + * CryptoJS.x64.Word.create(0x18191a1b, 0x1c1d1e1f) + * ]); + * + * var wordArray = CryptoJS.x64.WordArray.create([ + * CryptoJS.x64.Word.create(0x00010203, 0x04050607), + * CryptoJS.x64.Word.create(0x18191a1b, 0x1c1d1e1f) + * ], 10); + */ + init: function (words, sigBytes) { + words = this.words = words || []; + + if (sigBytes != undefined) { + this.sigBytes = sigBytes; + } else { + this.sigBytes = words.length * 8; + } + }, + + /** + * Converts this 64-bit word array to a 32-bit word array. + * + * @return {CryptoJS.lib.WordArray} This word array's data as a 32-bit word array. + * + * @example + * + * var x32WordArray = x64WordArray.toX32(); + */ + toX32: function () { + // Shortcuts + var x64Words = this.words; + var x64WordsLength = x64Words.length; + + // Convert + var x32Words = []; + for (var i = 0; i < x64WordsLength; i++) { + var x64Word = x64Words[i]; + x32Words.push(x64Word.high); + x32Words.push(x64Word.low); + } + + return X32WordArray.create(x32Words, this.sigBytes); + }, + + /** + * Creates a copy of this word array. + * + * @return {X64WordArray} The clone. + * + * @example + * + * var clone = x64WordArray.clone(); + */ + clone: function () { + var clone = Base.clone.call(this); + + // Clone "words" array + var words = clone.words = this.words.slice(0); + + // Clone each X64Word object + var wordsLength = words.length; + for (var i = 0; i < wordsLength; i++) { + words[i] = words[i].clone(); + } + + return clone; + } + }); + }()); + + + return CryptoJS; + +})); \ No newline at end of file diff --git a/node_modules/debug/LICENSE b/node_modules/debug/LICENSE new file mode 100644 index 0000000..1a9820e --- /dev/null +++ b/node_modules/debug/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/node_modules/debug/README.md b/node_modules/debug/README.md new file mode 100644 index 0000000..e9c3e04 --- /dev/null +++ b/node_modules/debug/README.md @@ -0,0 +1,481 @@ +# debug +[![Build Status](https://travis-ci.org/debug-js/debug.svg?branch=master)](https://travis-ci.org/debug-js/debug) [![Coverage Status](https://coveralls.io/repos/github/debug-js/debug/badge.svg?branch=master)](https://coveralls.io/github/debug-js/debug?branch=master) [![Slack](https://visionmedia-community-slackin.now.sh/badge.svg)](https://visionmedia-community-slackin.now.sh/) [![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny JavaScript debugging utility modelled after Node.js core's debugging +technique. Works in Node.js and web browsers. + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. + +Example [_app.js_](./examples/node/app.js): + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %o', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example [_worker.js_](./examples/node/worker.js): + +```js +var a = require('debug')('worker:a') + , b = require('debug')('worker:b'); + +function work() { + a('doing lots of uninteresting work'); + setTimeout(work, Math.random() * 1000); +} + +work(); + +function workb() { + b('doing some work'); + setTimeout(workb, Math.random() * 2000); +} + +workb(); +``` + +The `DEBUG` environment variable is then used to enable these based on space or +comma-delimited names. + +Here are some examples: + +screen shot 2017-08-08 at 12 53 04 pm +screen shot 2017-08-08 at 12 53 38 pm +screen shot 2017-08-08 at 12 53 25 pm + +#### Windows command prompt notes + +##### CMD + +On Windows the environment variable is set using the `set` command. + +```cmd +set DEBUG=*,-not_this +``` + +Example: + +```cmd +set DEBUG=* & node app.js +``` + +##### PowerShell (VS Code default) + +PowerShell uses different syntax to set environment variables. + +```cmd +$env:DEBUG = "*,-not_this" +``` + +Example: + +```cmd +$env:DEBUG='app';node app.js +``` + +Then, run the program to be debugged as usual. + +npm script example: +```js + "windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js", +``` + +## Namespace Colors + +Every debug instance has a color generated for it based on its namespace name. +This helps when visually parsing the debug output to identify which debug instance +a debug line belongs to. + +#### Node.js + +In Node.js, colors are enabled when stderr is a TTY. You also _should_ install +the [`supports-color`](https://npmjs.org/supports-color) module alongside debug, +otherwise debug will only use a small handful of basic colors. + + + +#### Web Browser + +Colors are also enabled on "Web Inspectors" that understand the `%c` formatting +option. 
These are WebKit web inspectors, Firefox ([since version +31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) +and the Firebug plugin for Firefox (any version). + + + + +## Millisecond diff + +When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. + + + +When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below: + + + + +## Conventions + +If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output. + +## Wildcards + +The `*` character may be used as a wildcard. Suppose for example your library has +debuggers named "connect:bodyParser", "connect:compress", "connect:session", +instead of listing all three with +`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do +`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + +You can also exclude specific debuggers by prefixing them with a "-" character. +For example, `DEBUG=*,-connect:*` would include all debuggers except those +starting with "connect:". + +## Environment Variables + +When running through Node.js, you can set a few environment variables that will +change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + +__Note:__ The environment variables beginning with `DEBUG_` end up being +converted into an Options object that gets used with `%o`/`%O` formatters. +See the Node.js documentation for +[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) +for the complete list. + +## Formatters + +Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. +Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + + +### Custom formatters + +You can add custom formatters by extending the `debug.formatters` object. 
+For example, if you wanted to add support for rendering a Buffer as hex with +`%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + + +## Browser Support + +You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), +or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), +if you don't want to build it yourself. + +Debug's enable state is currently persisted by `localStorage`. +Consider the situation shown below where you have `worker:a` and `worker:b`, +and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. + +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + +In Chromium-based web browsers (e.g. Brave, Chrome, and Electron), the JavaScript console will—by default—only show messages logged by `debug` if the "Verbose" log level is _enabled_. + + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example [_stdout.js_](./examples/node/stdout.js): + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! +log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + +## Extend +You can simply extend debugger +```js +const log = require('debug')('auth'); + +//creates new debug instance with extended namespace +const logSign = log.extend('sign'); +const logLogin = log.extend('login'); + +log('hello'); // auth hello +logSign('hello'); //auth:sign hello +logLogin('hello'); //auth:login hello +``` + +## Set dynamically + +You can also enable debug dynamically by calling the `enable()` method : + +```js +let debug = require('debug'); + +console.log(1, debug.enabled('test')); + +debug.enable('test'); +console.log(2, debug.enabled('test')); + +debug.disable(); +console.log(3, debug.enabled('test')); + +``` + +print : +``` +1 false +2 true +3 false +``` + +Usage : +`enable(namespaces)` +`namespaces` can include modes separated by a colon and wildcards. + +Note that calling `enable()` completely overrides previously set DEBUG variable : + +``` +$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))' +=> false +``` + +`disable()` + +Will disable all namespaces. The functions returns the namespaces currently +enabled (and skipped). This can be useful if you want to disable debugging +temporarily without knowing what was enabled to begin with. 
+
+## Set dynamically
+
+You can also enable debug dynamically by calling the `enable()` method:
+
+```js
+let debug = require('debug');
+
+console.log(1, debug.enabled('test'));
+
+debug.enable('test');
+console.log(2, debug.enabled('test'));
+
+debug.disable();
+console.log(3, debug.enabled('test'));
+```
+
+This prints:
+
+```
+1 false
+2 true
+3 false
+```
+
+Usage:
+`enable(namespaces)`
+`namespaces` can include modes separated by a colon and wildcards.
+
+Note that calling `enable()` completely overrides the previously set DEBUG variable:
+
+```
+$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))'
+=> false
+```
+
+`disable()`
+
+Will disable all namespaces. The function returns the namespaces currently
+enabled (and skipped). This can be useful if you want to disable debugging
+temporarily without knowing what was enabled to begin with.
+
+For example:
+
+```js
+let debug = require('debug');
+debug.enable('foo:*,-foo:bar');
+let namespaces = debug.disable();
+debug.enable(namespaces);
+```
+
+Note: There is no guarantee that the string will be identical to the initial
+enable string, but semantically they will be identical.
+
+## Checking whether a debug target is enabled
+
+After you've created a debug instance, you can determine whether or not it is
+enabled by checking the `enabled` property:
+
+```javascript
+const debug = require('debug')('http');
+
+if (debug.enabled) {
+  // do stuff...
+}
+```
+
+You can also manually toggle this property to force the debug instance to be
+enabled or disabled.
+
+## Usage in child processes
+
+Due to the way `debug` detects if the output is a TTY or not, colors are not shown in child processes when `stderr` is piped. A solution is to pass the `DEBUG_COLORS=1` environment variable to the child process.
+For example:
+
+```javascript
+worker = fork(WORKER_WRAP_PATH, [workerPath], {
+  stdio: [
+    /* stdin: */ 0,
+    /* stdout: */ 'pipe',
+    /* stderr: */ 'pipe',
+    'ipc',
+  ],
+  env: Object.assign({}, process.env, {
+    DEBUG_COLORS: 1 // without this setting, colors won't be shown
+  }),
+});
+
+worker.stderr.pipe(process.stderr, { end: false });
+```
+
+## Authors
+
+ - TJ Holowaychuk
+ - Nathan Rajlich
+ - Andrew Rhyne
+ - Josh Junon
+
+## Backers
+
+Support us with a monthly donation and help us continue our activities. [[Become a backer](https://opencollective.com/debug#backer)]
+
+## Sponsors
+
+Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)]
+
+## License
+
+(The MIT License)
+
+Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca>
+Copyright (c) 2018-2021 Josh Junon
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/debug/package.json b/node_modules/debug/package.json new file mode 100644 index 0000000..3bcdc24 --- /dev/null +++ b/node_modules/debug/package.json @@ -0,0 +1,59 @@ +{ + "name": "debug", + "version": "4.3.4", + "repository": { + "type": "git", + "url": "git://github.com/debug-js/debug.git" + }, + "description": "Lightweight debugging utility for Node.js and the browser", + "keywords": [ + "debug", + "log", + "debugger" + ], + "files": [ + "src", + "LICENSE", + "README.md" + ], + "author": "Josh Junon ", + "contributors": [ + "TJ Holowaychuk ", + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "scripts": { + "lint": "xo", + "test": "npm run test:node && npm run test:browser && npm run lint", + "test:node": "istanbul cover _mocha -- test.js", + "test:browser": "karma start --single-run", + "test:coverage": "cat ./coverage/lcov.info | coveralls" + }, + "dependencies": { + "ms": "2.1.2" + }, + "devDependencies": { + "brfs": "^2.0.1", + "browserify": "^16.2.3", + "coveralls": "^3.0.2", + "istanbul": "^0.4.5", + "karma": "^3.1.4", + "karma-browserify": "^6.0.0", + "karma-chrome-launcher": "^2.2.0", + "karma-mocha": "^1.3.0", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "xo": "^0.23.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "engines": { + "node": ">=6.0" + } +} diff --git a/node_modules/debug/src/browser.js b/node_modules/debug/src/browser.js new file mode 100644 index 0000000..cd0fc35 --- /dev/null +++ b/node_modules/debug/src/browser.js @@ -0,0 +1,269 @@ +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. 
+ * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? '%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? 
+	}
+
+	// If debug isn't set in LS, and we're in Electron, try to load $DEBUG
+	if (!r && typeof process !== 'undefined' && 'env' in process) {
+		r = process.env.DEBUG;
+	}
+
+	return r;
+}
+
+/**
+ * `localstorage()` attempts to return the global `localStorage` object.
+ *
+ * This is necessary because safari throws
+ * when a user disables cookies/localstorage
+ * and you attempt to access it.
+ *
+ * @return {LocalStorage}
+ * @api private
+ */
+
+function localstorage() {
+	try {
+		// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context
+		// The Browser also has localStorage in the global context.
+		return localStorage;
+	} catch (error) {
+		// Swallow
+		// XXX (@Qix-) should we be logging these?
+	}
+}
+
+module.exports = require('./common')(exports);
+
+const {formatters} = module.exports;
+
+/**
+ * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
+ */
+
+formatters.j = function (v) {
+	try {
+		return JSON.stringify(v);
+	} catch (error) {
+		return '[UnexpectedJSONParseError]: ' + error.message;
+	}
+};
diff --git a/node_modules/debug/src/common.js b/node_modules/debug/src/common.js
new file mode 100644
index 0000000..e3291b2
--- /dev/null
+++ b/node_modules/debug/src/common.js
@@ -0,0 +1,274 @@
+
+/**
+ * This is the common logic for both the Node.js and web browser
+ * implementations of `debug()`.
+ */
+
+function setup(env) {
+	createDebug.debug = createDebug;
+	createDebug.default = createDebug;
+	createDebug.coerce = coerce;
+	createDebug.disable = disable;
+	createDebug.enable = enable;
+	createDebug.enabled = enabled;
+	createDebug.humanize = require('ms');
+	createDebug.destroy = destroy;
+
+	Object.keys(env).forEach(key => {
+		createDebug[key] = env[key];
+	});
+
+	/**
+	 * The currently active debug mode names, and names to skip.
+	 */
+
+	createDebug.names = [];
+	createDebug.skips = [];
+
+	/**
+	 * Map of special "%n" handling functions, for the debug "format" argument.
+	 *
+	 * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
+	 */
+	createDebug.formatters = {};
+
+	/**
+	 * Selects a color for a debug namespace
+	 * @param {String} namespace The namespace string for the debug instance to be colored
+	 * @return {Number|String} An ANSI color code for the given namespace
+	 * @api private
+	 */
+	function selectColor(namespace) {
+		let hash = 0;
+
+		for (let i = 0; i < namespace.length; i++) {
+			hash = ((hash << 5) - hash) + namespace.charCodeAt(i);
+			hash |= 0; // Convert to 32bit integer
+		}
+
+		return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
+	}
+	createDebug.selectColor = selectColor;
+
+	/**
+	 * Create a debugger with the given `namespace`.
+	 *
+	 * @param {String} namespace
+	 * @return {Function}
+	 * @api public
+	 */
+	function createDebug(namespace) {
+		let prevTime;
+		let enableOverride = null;
+		let namespacesCache;
+		let enabledCache;
+
+		function debug(...args) {
+			// Disabled?
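+			// (`enabled` is a getter defined below: it honors manual overrides and
+			// re-checks the namespace match whenever `enable()`/`disable()` changes
+			// `createDebug.namespaces`.)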
+ if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) + createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. 
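+	 * For example, after `enable('connect:*')`, `enabled('connect:session')`
+	 * returns true while `enabled('express:router')` returns false.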
+	 *
+	 * @param {String} name
+	 * @return {Boolean}
+	 * @api public
+	 */
+	function enabled(name) {
+		if (name[name.length - 1] === '*') {
+			return true;
+		}
+
+		let i;
+		let len;
+
+		for (i = 0, len = createDebug.skips.length; i < len; i++) {
+			if (createDebug.skips[i].test(name)) {
+				return false;
+			}
+		}
+
+		for (i = 0, len = createDebug.names.length; i < len; i++) {
+			if (createDebug.names[i].test(name)) {
+				return true;
+			}
+		}
+
+		return false;
+	}
+
+	/**
+	 * Convert regexp to namespace
+	 *
+	 * @param {RegExp} regexp
+	 * @return {String} namespace
+	 * @api private
+	 */
+	function toNamespace(regexp) {
+		return regexp.toString()
+			.substring(2, regexp.toString().length - 2)
+			.replace(/\.\*\?$/, '*');
+	}
+
+	/**
+	 * Coerce `val`.
+	 *
+	 * @param {Mixed} val
+	 * @return {Mixed}
+	 * @api private
+	 */
+	function coerce(val) {
+		if (val instanceof Error) {
+			return val.stack || val.message;
+		}
+		return val;
+	}
+
+	/**
+	 * XXX DO NOT USE. This is a temporary stub function.
+	 * XXX It WILL be removed in the next major release.
+	 */
+	function destroy() {
+		console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.');
+	}
+
+	createDebug.enable(createDebug.load());
+
+	return createDebug;
+}
+
+module.exports = setup;
diff --git a/node_modules/debug/src/index.js b/node_modules/debug/src/index.js
new file mode 100644
index 0000000..bf4c57f
--- /dev/null
+++ b/node_modules/debug/src/index.js
@@ -0,0 +1,10 @@
+/**
+ * Detect Electron renderer / nwjs process, which is node, but we should
+ * treat as a browser.
+ */
+
+if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) {
+	module.exports = require('./browser.js');
+} else {
+	module.exports = require('./node.js');
+}
diff --git a/node_modules/debug/src/node.js b/node_modules/debug/src/node.js
new file mode 100644
index 0000000..79bc085
--- /dev/null
+++ b/node_modules/debug/src/node.js
@@ -0,0 +1,263 @@
+/**
+ * Module dependencies.
+ */
+
+const tty = require('tty');
+const util = require('util');
+
+/**
+ * This is the Node.js implementation of `debug()`.
+ */
+
+exports.init = init;
+exports.log = log;
+exports.formatArgs = formatArgs;
+exports.save = save;
+exports.load = load;
+exports.useColors = useColors;
+exports.destroy = util.deprecate(
+	() => {},
+	'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'
+);
+
+/**
+ * Colors.
+ */
+
+exports.colors = [6, 2, 3, 4, 5, 1];
+
+try {
+	// Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json)
+	// eslint-disable-next-line import/no-extraneous-dependencies
+	const supportsColor = require('supports-color');
+
+	if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) {
+		exports.colors = [
+			20,
+			21,
+			26,
+			27,
+			32,
+			33,
+			38,
+			39,
+			40,
+			41,
+			42,
+			43,
+			44,
+			45,
+			56,
+			57,
+			62,
+			63,
+			68,
+			69,
+			74,
+			75,
+			76,
+			77,
+			78,
+			79,
+			80,
+			81,
+			92,
+			93,
+			98,
+			99,
+			112,
+			113,
+			128,
+			129,
+			134,
+			135,
+			148,
+			149,
+			160,
+			161,
+			162,
+			163,
+			164,
+			165,
+			166,
+			167,
+			168,
+			169,
+			170,
+			171,
+			172,
+			173,
+			178,
+			179,
+			184,
+			185,
+			196,
+			197,
+			198,
+			199,
+			200,
+			201,
+			202,
+			203,
+			204,
+			205,
+			206,
+			207,
+			208,
+			209,
+			214,
+			215,
+			220,
+			221
+		];
+	}
+} catch (error) {
+	// Swallow - we only care if `supports-color` is available; it doesn't have to be.
+}
+
+/**
+ * Build up the default `inspectOpts` object from the environment variables.
+ *
+ *   $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js
+ */
+
+exports.inspectOpts = Object.keys(process.env).filter(key => {
+	return /^debug_/i.test(key);
+}).reduce((obj, key) => {
+	// Camel-case
+	const prop = key
+		.substring(6)
+		.toLowerCase()
+		.replace(/_([a-z])/g, (_, k) => {
+			return k.toUpperCase();
+		});
+
+	// Coerce string value into JS value
+	let val = process.env[key];
+	if (/^(yes|on|true|enabled)$/i.test(val)) {
+		val = true;
+	} else if (/^(no|off|false|disabled)$/i.test(val)) {
+		val = false;
+	} else if (val === 'null') {
+		val = null;
+	} else {
+		val = Number(val);
+	}
+
+	obj[prop] = val;
+	return obj;
+}, {});
+
+/**
+ * Is stderr a TTY? Colored output is enabled when `true`.
+ */
+
+function useColors() {
+	return 'colors' in exports.inspectOpts ?
+		Boolean(exports.inspectOpts.colors) :
+		tty.isatty(process.stderr.fd);
+}
+
+/**
+ * Adds ANSI color escape codes if enabled.
+ *
+ * @api public
+ */
+
+function formatArgs(args) {
+	const {namespace: name, useColors} = this;
+
+	if (useColors) {
+		const c = this.color;
+		const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c);
+		const prefix = `  ${colorCode};1m${name} \u001B[0m`;
+
+		args[0] = prefix + args[0].split('\n').join('\n' + prefix);
+		args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m');
+	} else {
+		args[0] = getDate() + name + ' ' + args[0];
+	}
+}
+
+function getDate() {
+	if (exports.inspectOpts.hideDate) {
+		return '';
+	}
+	return new Date().toISOString() + ' ';
+}
+
+/**
+ * Invokes `util.format()` with the specified arguments and writes to stderr.
+ */
+
+function log(...args) {
+	return process.stderr.write(util.format(...args) + '\n');
+}
+
+/**
+ * Save `namespaces`.
+ *
+ * @param {String} namespaces
+ * @api private
+ */
+function save(namespaces) {
+	if (namespaces) {
+		process.env.DEBUG = namespaces;
+	} else {
+		// If you set a process.env field to null or undefined, it gets cast to the
+		// string 'null' or 'undefined'. Just delete instead.
+		delete process.env.DEBUG;
+	}
+}
+
+/**
+ * Load `namespaces`.
+ *
+ * @return {String} returns the previously persisted debug modes
+ * @api private
+ */
+
+function load() {
+	return process.env.DEBUG;
+}
+
+/**
+ * Init logic for `debug` instances.
+ *
+ * Create a new `inspectOpts` object in case `useColors` is set
+ * differently for a particular `debug` instance.
+ */
+
+function init(debug) {
+	debug.inspectOpts = {};
+
+	const keys = Object.keys(exports.inspectOpts);
+	for (let i = 0; i < keys.length; i++) {
+		debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]];
+	}
+}
+
+module.exports = require('./common')(exports);
+
+const {formatters} = module.exports;
+
+/**
+ * Map %o to `util.inspect()`, all on a single line.
+ */
+
+formatters.o = function (v) {
+	this.inspectOpts.colors = this.useColors;
+	return util.inspect(v, this.inspectOpts)
+		.split('\n')
+		.map(str => str.trim())
+		.join(' ');
+};
+
+/**
+ * Map %O to `util.inspect()`, allowing multiple lines if needed.
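+ * For example, `debug('config: %O', {a: {b: 1}})` pretty-prints the object
+ * across multiple lines when the inspected output is long.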
+ */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; diff --git a/node_modules/decamelize-keys/index.js b/node_modules/decamelize-keys/index.js new file mode 100644 index 0000000..f731ca0 --- /dev/null +++ b/node_modules/decamelize-keys/index.js @@ -0,0 +1,19 @@ +'use strict'; +var mapObj = require('map-obj'); +var decamelize = require('decamelize'); + +module.exports = function (input, separator, options) { + if (typeof separator !== 'string') { + options = separator; + separator = null; + } + + options = options || {}; + separator = separator || options.separator; + var exclude = options.exclude || []; + + return mapObj(input, function (key, val) { + key = exclude.indexOf(key) === -1 ? decamelize(key, separator) : key; + return [key, val]; + }); +}; diff --git a/node_modules/decamelize-keys/license b/node_modules/decamelize-keys/license new file mode 100644 index 0000000..878ece4 --- /dev/null +++ b/node_modules/decamelize-keys/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com), Dmirty Sobolev + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/decamelize-keys/package.json b/node_modules/decamelize-keys/package.json new file mode 100644 index 0000000..f863f9d --- /dev/null +++ b/node_modules/decamelize-keys/package.json @@ -0,0 +1,63 @@ +{ + "name": "decamelize-keys", + "version": "1.1.1", + "description": "Convert object keys from camelCase to lowercase with a custom separator", + "license": "MIT", + "repository": "sindresorhus/decamelize-keys", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "http://sindresorhus.com" + }, + "contributors": [ + { + "name": "Dmirty Sobolev", + "email": "disobolev@icloud.com", + "url": "https://github.com/dsblv" + } + ], + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "map", + "obj", + "object", + "key", + "keys", + "value", + "values", + "val", + "iterate", + "decamelize", + "decamelcase", + "lowercase", + "camelcase", + "camel-case", + "camel", + "case", + "dash", + "hyphen", + "dot", + "underscore", + "separator", + "string", + "text", + "convert" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + }, + "dependencies": { + "decamelize": "^1.1.0", + "map-obj": "^1.0.0" + } +} diff --git a/node_modules/decamelize-keys/readme.md b/node_modules/decamelize-keys/readme.md new file mode 100644 index 0000000..7602a87 --- /dev/null +++ b/node_modules/decamelize-keys/readme.md @@ -0,0 +1,69 @@ +# decamelize-keys [![Build Status](https://travis-ci.org/dsblv/decamelize-keys.svg?branch=master)](https://travis-ci.org/dsblv/decamelize-keys) + +> Convert object keys from camelCase to lowercase with a custom separator using [`decamelize`](https://github.com/sindresorhus/decamelize) + +*This project was forked from [`camelcase-keys`](https://github.com/sindresorhus/camelcase-keys) and converted to do the opposite* + + +## Install + +```sh +$ npm install --save decamelize-keys +``` + + +## Usage + +```js +const decamelizeKeys = require('decamelize-keys'); + +decamelizeKeys({fooBar: true}, '-'); +//=> {'foo-bar': true} +``` + + +## API + +### decamelizeKeys(input, [separator], [options]) + +### input + +Type: `object` +*Required* + +Object to decamelize. + +### separator + +Type: `string` +Default: `_` + +A string to insert between words. + +### options + +Type: `object` + +#### separator + +Type: `string` +Default: `_` + +Alternative way to specify [separator](#separator). + +#### exclude + +Type: `array` +Default: `[]` + +Exclude keys from being decamelized. + + +## Related + +See [`camelcase-keys`](https://github.com/sindresorhus/camelcase-keys) for the inverse. + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com), [Dmirty Sobolev](https://github.com/dsblv) diff --git a/node_modules/decamelize/index.js b/node_modules/decamelize/index.js new file mode 100644 index 0000000..8d5bab7 --- /dev/null +++ b/node_modules/decamelize/index.js @@ -0,0 +1,13 @@ +'use strict'; +module.exports = function (str, sep) { + if (typeof str !== 'string') { + throw new TypeError('Expected a string'); + } + + sep = typeof sep === 'undefined' ? 
'_' : sep;
+
+	return str
+		.replace(/([a-z\d])([A-Z])/g, '$1' + sep + '$2')
+		.replace(/([A-Z]+)([A-Z][a-z\d]+)/g, '$1' + sep + '$2')
+		.toLowerCase();
+};
diff --git a/node_modules/decamelize/license b/node_modules/decamelize/license
new file mode 100644
index 0000000..654d0bf
--- /dev/null
+++ b/node_modules/decamelize/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/decamelize/package.json b/node_modules/decamelize/package.json
new file mode 100644
index 0000000..ca35790
--- /dev/null
+++ b/node_modules/decamelize/package.json
@@ -0,0 +1,38 @@
+{
+  "name": "decamelize",
+  "version": "1.2.0",
+  "description": "Convert a camelized string into a lowercased one with a custom separator: unicornRainbow → unicorn_rainbow",
+  "license": "MIT",
+  "repository": "sindresorhus/decamelize",
+  "author": {
+    "name": "Sindre Sorhus",
+    "email": "sindresorhus@gmail.com",
+    "url": "sindresorhus.com"
+  },
+  "engines": {
+    "node": ">=0.10.0"
+  },
+  "scripts": {
+    "test": "xo && ava"
+  },
+  "files": [
+    "index.js"
+  ],
+  "keywords": [
+    "decamelize",
+    "decamelcase",
+    "camelcase",
+    "lowercase",
+    "case",
+    "dash",
+    "hyphen",
+    "string",
+    "str",
+    "text",
+    "convert"
+  ],
+  "devDependencies": {
+    "ava": "*",
+    "xo": "*"
+  }
+}
diff --git a/node_modules/decamelize/readme.md b/node_modules/decamelize/readme.md
new file mode 100644
index 0000000..624c7ee
--- /dev/null
+++ b/node_modules/decamelize/readme.md
@@ -0,0 +1,48 @@
+# decamelize [![Build Status](https://travis-ci.org/sindresorhus/decamelize.svg?branch=master)](https://travis-ci.org/sindresorhus/decamelize)
+
+> Convert a camelized string into a lowercased one with a custom separator
+> Example: `unicornRainbow` → `unicorn_rainbow`
+
+
+## Install
+
+```
+$ npm install --save decamelize
+```
+
+
+## Usage
+
+```js
+const decamelize = require('decamelize');
+
+decamelize('unicornRainbow');
+//=> 'unicorn_rainbow'
+
+decamelize('unicornRainbow', '-');
+//=> 'unicorn-rainbow'
+```
+
+
+## API
+
+### decamelize(input, [separator])
+
+#### input
+
+Type: `string`
+
+#### separator
+
+Type: `string`
+Default: `_`
+
+
+## Related
+
+See [`camelcase`](https://github.com/sindresorhus/camelcase) for the inverse.
+
+
+## License
+
+MIT © [Sindre Sorhus](https://sindresorhus.com)
diff --git a/node_modules/delegates/.npmignore b/node_modules/delegates/.npmignore
new file mode 100644
index 0000000..c2658d7
--- /dev/null
+++ b/node_modules/delegates/.npmignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/node_modules/delegates/History.md b/node_modules/delegates/History.md
new file mode 100644
index 0000000..25959ea
--- /dev/null
+++ b/node_modules/delegates/History.md
@@ -0,0 +1,22 @@
+
+1.0.0 / 2015-12-14
+==================
+
+  * Merge pull request #12 from kasicka/master
+  * Add license text
+
+0.1.0 / 2014-10-17
+==================
+
+ * adds `.fluent()` to api
+
+0.0.3 / 2014-01-13
+==================
+
+ * fix receiver for .method()
+
+0.0.2 / 2014-01-13
+==================
+
+ * Object.defineProperty() sucks
+ * Initial commit
diff --git a/node_modules/delegates/License b/node_modules/delegates/License
new file mode 100644
index 0000000..60de60a
--- /dev/null
+++ b/node_modules/delegates/License
@@ -0,0 +1,20 @@
+Copyright (c) 2015 TJ Holowaychuk
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/delegates/Makefile b/node_modules/delegates/Makefile
new file mode 100644
index 0000000..a9dcfd5
--- /dev/null
+++ b/node_modules/delegates/Makefile
@@ -0,0 +1,8 @@
+
+test:
+	@./node_modules/.bin/mocha \
+		--require should \
+		--reporter spec \
+		--bail
+
+.PHONY: test
\ No newline at end of file
diff --git a/node_modules/delegates/Readme.md b/node_modules/delegates/Readme.md
new file mode 100644
index 0000000..ab8cf4a
--- /dev/null
+++ b/node_modules/delegates/Readme.md
@@ -0,0 +1,94 @@
+
+# delegates
+
+  Node method and accessor delegation utility.
+
+## Installation
+
+```
+$ npm install delegates
+```
+
+## Example
+
+```js
+var delegate = require('delegates');
+
+...
+
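+// (`proto` here is assumed to be an object, usually a prototype, with a
+// `request` property to delegate to, e.g. a framework context prototype.)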
+ +delegate(proto, 'request') + .method('acceptsLanguages') + .method('acceptsEncodings') + .method('acceptsCharsets') + .method('accepts') + .method('is') + .access('querystring') + .access('idempotent') + .access('socket') + .access('length') + .access('query') + .access('search') + .access('status') + .access('method') + .access('path') + .access('body') + .access('host') + .access('url') + .getter('subdomains') + .getter('protocol') + .getter('header') + .getter('stale') + .getter('fresh') + .getter('secure') + .getter('ips') + .getter('ip') +``` + +# API + +## Delegate(proto, prop) + +Creates a delegator instance used to configure using the `prop` on the given +`proto` object. (which is usually a prototype) + +## Delegate#method(name) + +Allows the given method `name` to be accessed on the host. + +## Delegate#getter(name) + +Creates a "getter" for the property with the given `name` on the delegated +object. + +## Delegate#setter(name) + +Creates a "setter" for the property with the given `name` on the delegated +object. + +## Delegate#access(name) + +Creates an "accessor" (ie: both getter *and* setter) for the property with the +given `name` on the delegated object. + +## Delegate#fluent(name) + +A unique type of "accessor" that works for a "fluent" API. When called as a +getter, the method returns the expected value. However, if the method is called +with a value, it will return itself so it can be chained. For example: + +```js +delegate(proto, 'request') + .fluent('query') + +// getter +var q = request.query(); + +// setter (chainable) +request + .query({ a: 1 }) + .query({ b: 2 }); +``` + +# License + + MIT diff --git a/node_modules/delegates/index.js b/node_modules/delegates/index.js new file mode 100644 index 0000000..17c222d --- /dev/null +++ b/node_modules/delegates/index.js @@ -0,0 +1,121 @@ + +/** + * Expose `Delegator`. + */ + +module.exports = Delegator; + +/** + * Initialize a delegator. + * + * @param {Object} proto + * @param {String} target + * @api public + */ + +function Delegator(proto, target) { + if (!(this instanceof Delegator)) return new Delegator(proto, target); + this.proto = proto; + this.target = target; + this.methods = []; + this.getters = []; + this.setters = []; + this.fluents = []; +} + +/** + * Delegate method `name`. + * + * @param {String} name + * @return {Delegator} self + * @api public + */ + +Delegator.prototype.method = function(name){ + var proto = this.proto; + var target = this.target; + this.methods.push(name); + + proto[name] = function(){ + return this[target][name].apply(this[target], arguments); + }; + + return this; +}; + +/** + * Delegator accessor `name`. + * + * @param {String} name + * @return {Delegator} self + * @api public + */ + +Delegator.prototype.access = function(name){ + return this.getter(name).setter(name); +}; + +/** + * Delegator getter `name`. + * + * @param {String} name + * @return {Delegator} self + * @api public + */ + +Delegator.prototype.getter = function(name){ + var proto = this.proto; + var target = this.target; + this.getters.push(name); + + proto.__defineGetter__(name, function(){ + return this[target][name]; + }); + + return this; +}; + +/** + * Delegator setter `name`. 
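+ * For example, after `delegate(obj, 'request').setter('type')`,
+ * assigning `obj.type = val` writes to `obj.request.type`.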
+ * + * @param {String} name + * @return {Delegator} self + * @api public + */ + +Delegator.prototype.setter = function(name){ + var proto = this.proto; + var target = this.target; + this.setters.push(name); + + proto.__defineSetter__(name, function(val){ + return this[target][name] = val; + }); + + return this; +}; + +/** + * Delegator fluent accessor + * + * @param {String} name + * @return {Delegator} self + * @api public + */ + +Delegator.prototype.fluent = function (name) { + var proto = this.proto; + var target = this.target; + this.fluents.push(name); + + proto[name] = function(val){ + if ('undefined' != typeof val) { + this[target][name] = val; + return this; + } else { + return this[target][name]; + } + }; + + return this; +}; diff --git a/node_modules/delegates/package.json b/node_modules/delegates/package.json new file mode 100644 index 0000000..1724038 --- /dev/null +++ b/node_modules/delegates/package.json @@ -0,0 +1,13 @@ +{ + "name": "delegates", + "version": "1.0.0", + "repository": "visionmedia/node-delegates", + "description": "delegate methods and accessors to another property", + "keywords": ["delegate", "delegation"], + "dependencies": {}, + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "license": "MIT" +} diff --git a/node_modules/delegates/test/index.js b/node_modules/delegates/test/index.js new file mode 100644 index 0000000..7b6e3d4 --- /dev/null +++ b/node_modules/delegates/test/index.js @@ -0,0 +1,94 @@ + +var assert = require('assert'); +var delegate = require('..'); + +describe('.method(name)', function(){ + it('should delegate methods', function(){ + var obj = {}; + + obj.request = { + foo: function(bar){ + assert(this == obj.request); + return bar; + } + }; + + delegate(obj, 'request').method('foo'); + + obj.foo('something').should.equal('something'); + }) +}) + +describe('.getter(name)', function(){ + it('should delegate getters', function(){ + var obj = {}; + + obj.request = { + get type() { + return 'text/html'; + } + } + + delegate(obj, 'request').getter('type'); + + obj.type.should.equal('text/html'); + }) +}) + +describe('.setter(name)', function(){ + it('should delegate setters', function(){ + var obj = {}; + + obj.request = { + get type() { + return this._type.toUpperCase(); + }, + + set type(val) { + this._type = val; + } + } + + delegate(obj, 'request').setter('type'); + + obj.type = 'hey'; + obj.request.type.should.equal('HEY'); + }) +}) + +describe('.access(name)', function(){ + it('should delegate getters and setters', function(){ + var obj = {}; + + obj.request = { + get type() { + return this._type.toUpperCase(); + }, + + set type(val) { + this._type = val; + } + } + + delegate(obj, 'request').access('type'); + + obj.type = 'hey'; + obj.type.should.equal('HEY'); + }) +}) + +describe('.fluent(name)', function () { + it('should delegate in a fluent fashion', function () { + var obj = { + settings: { + env: 'development' + } + }; + + delegate(obj, 'settings').fluent('env'); + + obj.env().should.equal('development'); + obj.env('production').should.equal(obj); + obj.settings.env.should.equal('production'); + }) +}) diff --git a/node_modules/emoji-regex/LICENSE-MIT.txt b/node_modules/emoji-regex/LICENSE-MIT.txt new file mode 100644 index 0000000..a41e0a7 --- /dev/null +++ b/node_modules/emoji-regex/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without 
restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/emoji-regex/README.md b/node_modules/emoji-regex/README.md new file mode 100644 index 0000000..f10e173 --- /dev/null +++ b/node_modules/emoji-regex/README.md @@ -0,0 +1,73 @@ +# emoji-regex [![Build status](https://travis-ci.org/mathiasbynens/emoji-regex.svg?branch=master)](https://travis-ci.org/mathiasbynens/emoji-regex) + +_emoji-regex_ offers a regular expression to match all emoji symbols (including textual representations of emoji) as per the Unicode Standard. + +This repository contains a script that generates this regular expression based on [the data from Unicode v12](https://github.com/mathiasbynens/unicode-12.0.0). Because of this, the regular expression can easily be updated whenever new emoji are added to the Unicode standard. + +## Installation + +Via [npm](https://www.npmjs.com/): + +```bash +npm install emoji-regex +``` + +In [Node.js](https://nodejs.org/): + +```js +const emojiRegex = require('emoji-regex'); +// Note: because the regular expression has the global flag set, this module +// exports a function that returns the regex rather than exporting the regular +// expression itself, to make it impossible to (accidentally) mutate the +// original regular expression. + +const text = ` +\u{231A}: ⌚ default emoji presentation character (Emoji_Presentation) +\u{2194}\u{FE0F}: ↔️ default text presentation character rendered as emoji +\u{1F469}: 👩 emoji modifier base (Emoji_Modifier_Base) +\u{1F469}\u{1F3FF}: 👩🏿 emoji modifier base followed by a modifier +`; + +const regex = emojiRegex(); +let match; +while (match = regex.exec(text)) { + const emoji = match[0]; + console.log(`Matched sequence ${ emoji } — code points: ${ [...emoji].length }`); +} +``` + +Console output: + +``` +Matched sequence ⌚ — code points: 1 +Matched sequence ⌚ — code points: 1 +Matched sequence ↔️ — code points: 2 +Matched sequence ↔️ — code points: 2 +Matched sequence 👩 — code points: 1 +Matched sequence 👩 — code points: 1 +Matched sequence 👩🏿 — code points: 2 +Matched sequence 👩🏿 — code points: 2 +``` + +To match emoji in their textual representation as well (i.e. 
emoji that are not `Emoji_Presentation` symbols and that aren’t forced to render as emoji by a variation selector), `require` the other regex: + +```js +const emojiRegex = require('emoji-regex/text.js'); +``` + +Additionally, in environments which support ES2015 Unicode escapes, you may `require` ES2015-style versions of the regexes: + +```js +const emojiRegex = require('emoji-regex/es2015/index.js'); +const emojiRegexText = require('emoji-regex/es2015/text.js'); +``` + +## Author + +| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | +|---| +| [Mathias Bynens](https://mathiasbynens.be/) | + +## License + +_emoji-regex_ is available under the [MIT](https://mths.be/mit) license. diff --git a/node_modules/emoji-regex/es2015/index.js b/node_modules/emoji-regex/es2015/index.js new file mode 100644 index 0000000..b4cf3dc --- /dev/null +++ b/node_modules/emoji-regex/es2015/index.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = () => { + // https://mths.be/emoji + return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0065}\u{E006E}\u{E0067}|\u{E0073}\u{E0063}\u{E0074}|\u{E0077}\u{E006C}\u{E0073})\u{E007F}|\u{1F468}(?:\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}\u{1F3FB}|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708])\uFE0F|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|[\u{1F3FB}-\u{1F3FF}])|(?:\u{1F9D1}\u{1F3FB}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F469})\u{1F3FB}|\u{1F9D1}(?:\u{1F3FF}\u200D\u{1F91D}\u200D\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u200D\u{1F91D}\u200D\u{1F9D1})|(?:\u{1F9D1}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}\u{1F3FC}]|\u{1F469}(?:\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u
{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FB}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|(?:\u{1F9D1}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}-\u{1F3FD}]|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1F5E8}|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}]\uFE0F|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}](?:[\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\u{1F3F4}\u200D\u2620)\uFE0F|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F415}\u200D\u{1F9BA}|\u{1F469}\u200D\u{1F466}|\u{1F469}\u200D\u{1F467}|\u{1F1FD}\u{1F1F0}|\u{1F1F4}\u{1F1F2}|\u{1F1F6}\u{1F1E6}|[#\*0-9]\uFE0F\u20E3|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F469}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]
|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270A-\u270D\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F470}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F4AA}\u{1F574}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F936}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}-\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F393}\u{1F3A0}-\u{1F3CA}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F4}\u{1F3F8}-\u{1F43E}\u{1F440}\u{1F442}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F57A}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5FB}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CC}\u{1F6D0}-\u{1F6D2}\u{1F6D5}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F
19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]\uFE0F|[\u261D\u26F9\u270A-\u270D\u{1F385}\u{1F3C2}-\u{1F3C4}\u{1F3C7}\u{1F3CA}-\u{1F3CC}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}-\u{1F478}\u{1F47C}\u{1F481}-\u{1F483}\u{1F485}-\u{1F487}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F574}\u{1F575}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F645}-\u{1F647}\u{1F64B}-\u{1F64F}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91F}\u{1F926}\u{1F930}-\u{1F939}\u{1F93C}-\u{1F93E}\u{1F9B5}\u{1F9B6}\u{1F9B8}\u{1F9B9}\u{1F9BB}\u{1F9CD}-\u{1F9CF}\u{1F9D1}-\u{1F9DD}]/gu; +}; diff --git a/node_modules/emoji-regex/es2015/text.js b/node_modules/emoji-regex/es2015/text.js new file mode 100644 index 0000000..780309d --- /dev/null +++ b/node_modules/emoji-regex/es2015/text.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = () => { + // https://mths.be/emoji + return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0065}\u{E006E}\u{E0067}|\u{E0073}\u{E0063}\u{E0074}|\u{E0077}\u{E006C}\u{E0073})\u{E007F}|\u{1F468}(?:\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}\u{1F3FB}|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708])\uFE0F|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3
}\u{1F9BC}\u{1F9BD}]|[\u{1F3FB}-\u{1F3FF}])|(?:\u{1F9D1}\u{1F3FB}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F469})\u{1F3FB}|\u{1F9D1}(?:\u{1F3FF}\u200D\u{1F91D}\u200D\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u200D\u{1F91D}\u200D\u{1F9D1})|(?:\u{1F9D1}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}\u{1F3FC}]|\u{1F469}(?:\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FB}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|(?:\u{1F9D1}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}-\u{1F3FD}]|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1F5E8}|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}]\uFE0F|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}](?:[\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\u{1F3F4}\u200D\u2620)\uFE0F|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F415}\u200D\u{1F9BA}|\u{1F469}\u200D\u{1F466}|\u{1F469}\u200D\u{1F467}|\u{1F1FD}\u{1F1F0}|\u{1F1F4}\u{1F1F2}|\u{1F1F6}\u{1F1E6}|[#\*0-9]\uFE0F\u20E3|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F9D1}[\
u{1F3FB}-\u{1F3FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F469}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270A-\u270D\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F470}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F4AA}\u{1F574}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F936}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}-\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F393}\u{1F3A0}-\u{1F3CA}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F4}\u{1F3F8}-\u{1F43E}\u{1F440}\u{1F442}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F57A}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5FB}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CC}\u{1F6D0}-\u{1F6D2}\u{1F6D5}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u260
4\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]\uFE0F?|[\u261D\u26F9\u270A-\u270D\u{1F385}\u{1F3C2}-\u{1F3C4}\u{1F3C7}\u{1F3CA}-\u{1F3CC}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}-\u{1F478}\u{1F47C}\u{1F481}-\u{1F483}\u{1F485}-\u{1F487}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F574}\u{1F575}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F645}-\u{1F647}\u{1F64B}-\u{1F64F}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91F}\u{1F926}\u{1F930}-\u{1F939}\u{1F93C}-\u{1F93E}\u{1F9B5}\u{1F9B6}\u{1F9B8}\u{1F9B9}\u{1F9BB}\u{1F9CD}-\u{1F9CF}\u{1F9D1}-\u{1F9DD}]/gu; +}; diff --git a/node_modules/emoji-regex/index.d.ts b/node_modules/emoji-regex/index.d.ts new file mode 100644 index 0000000..1955b47 --- /dev/null +++ b/node_modules/emoji-regex/index.d.ts @@ -0,0 +1,23 @@ +declare module 'emoji-regex' { + function emojiRegex(): RegExp; + + export default emojiRegex; +} + +declare module 'emoji-regex/text' { + function emojiRegex(): RegExp; + + export default emojiRegex; +} + +declare module 'emoji-regex/es2015' { + function emojiRegex(): RegExp; + + export default emojiRegex; +} + +declare module 'emoji-regex/es2015/text' { + function emojiRegex(): RegExp; + + export default emojiRegex; +} diff --git a/node_modules/emoji-regex/index.js b/node_modules/emoji-regex/index.js new file mode 100644 index 0000000..d993a3a --- /dev/null +++ b/node_modules/emoji-regex/index.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = function () { + // https://mths.be/emoji + return 
/\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73)\uDB40\uDC7F|\uD83D\uDC68(?:\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68\uD83C\uDFFB|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|[\u2695\u2696\u2708]\uFE0F|\uD83D[\uDC66\uDC67]|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708])\uFE0F|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C[\uDFFB-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)\uD83C\uDFFB|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB\uDFFC])|\uD83D\uDC69(?:\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD8
3C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB-\uDFFD])|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)\uFE0F|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\uD83C\uDFF4\u200D\u2620)\uFE0F|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF4\uD83C\uDDF2|\uD83C\uDDF6\uD83C\uDDE6|[#\*0-9]\uFE0F\u20E3|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83D\uDC69(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDE9(?:\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\u
DD75)(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270A-\u270D]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC70\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDCAA\uDD74\uDD7A\uDD90\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD36\uDDB5\uDDB6\uDDBB\uDDD2-\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF93\uDFA0-\uDFCA\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF4\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC3E\uDC40\uDC42-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDD7A\uDD95\uDD96\uDDA4\uDDFB-\uDE4F\uDE80-\uDEC5\uDECC\uDED0-\uDED2\uDED5\uDEEB\uDEEC\uDEF4-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\uDD8D\uDD90\uDD95\uDD96\uDDA4\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])\uFE0F|(?:[\u261D\u26F9\u270A-\u270D]|\uD83C[\uDF85\uDFC2-\uDFC4\uDFC7\uDFCA-\uDFCC]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66-\uDC78\uDC7C\uDC81-\uDC83\uDC85-\uDC87\uDC8F\uDC91\uDCAA\uDD74\uDD75\uDD7A\uDD90\uDD95\uDD96\uDE45-\uDE47\uDE4B-\uDE4F\uDEA3\uDEB4-\uDEB6\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1F\uDD26\uDD30-\uDD39\uDD3C-\uDD3E\uDDB5\uDDB6\uDDB8\uDDB9\uDDBB\uDDCD-\uDDCF\uDDD1-\uDDDD])/g; +}; diff --git a/node_modules/emoji-regex/package.json b/node_modules/emoji-regex/package.json new file mode 100644 index 0000000..6d32352 --- /dev/null +++ b/node_modules/emoji-regex/package.json @@ -0,0 +1,50 @@ +{ + "name": "emoji-regex", + "version": "8.0.0", + "description": "A regular expression to match all Emoji-only symbols as per the Unicode Standard.", + "homepage": "https://mths.be/emoji-regex", + "main": "index.js", + "types": "index.d.ts", + 
"keywords": [ + "unicode", + "regex", + "regexp", + "regular expressions", + "code points", + "symbols", + "characters", + "emoji" + ], + "license": "MIT", + "author": { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + "repository": { + "type": "git", + "url": "https://github.com/mathiasbynens/emoji-regex.git" + }, + "bugs": "https://github.com/mathiasbynens/emoji-regex/issues", + "files": [ + "LICENSE-MIT.txt", + "index.js", + "index.d.ts", + "text.js", + "es2015/index.js", + "es2015/text.js" + ], + "scripts": { + "build": "rm -rf -- es2015; babel src -d .; NODE_ENV=es2015 babel src -d ./es2015; node script/inject-sequences.js", + "test": "mocha", + "test:watch": "npm run test -- --watch" + }, + "devDependencies": { + "@babel/cli": "^7.2.3", + "@babel/core": "^7.3.4", + "@babel/plugin-proposal-unicode-property-regex": "^7.2.0", + "@babel/preset-env": "^7.3.4", + "mocha": "^6.0.2", + "regexgen": "^1.3.0", + "unicode-12.0.0": "^0.7.9" + } +} diff --git a/node_modules/emoji-regex/text.js b/node_modules/emoji-regex/text.js new file mode 100644 index 0000000..0a55ce2 --- /dev/null +++ b/node_modules/emoji-regex/text.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = function () { + // https://mths.be/emoji + return /\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73)\uDB40\uDC7F|\uD83D\uDC68(?:\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68\uD83C\uDFFB|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|[\u2695\u2696\u2708]\uFE0F|\uD83D[\uDC66\uDC67]|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708])\uFE0F|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C[\uDFFB-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)\uD83C\uDFFB|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD
1\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB\uDFFC])|\uD83D\uDC69(?:\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB-\uDFFD])|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)\uFE0F|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\uD83C\uDFF4\u200D\u2620)\uFE0F|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF4\uD83C\uDDF2|\uD83C\uDDF6\uD83C\uDDE6|[#\*0-9]\uFE0F\u20E3|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83D\uDC69(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDE9
(?:\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270A-\u270D]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC70\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDCAA\uDD74\uDD7A\uDD90\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD36\uDDB5\uDDB6\uDDBB\uDDD2-\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF93\uDFA0-\uDFCA\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF4\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC3E\uDC40\uDC42-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDD7A\uDD95\uDD96\uDDA4\uDDFB-\uDE4F\uDE80-\uDEC5\uDECC\uDED0-\uDED2\uDED5\uDEEB\uDEEC\uDEF4-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\uDD8D
\uDD90\uDD95\uDD96\uDDA4\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])\uFE0F?|(?:[\u261D\u26F9\u270A-\u270D]|\uD83C[\uDF85\uDFC2-\uDFC4\uDFC7\uDFCA-\uDFCC]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66-\uDC78\uDC7C\uDC81-\uDC83\uDC85-\uDC87\uDC8F\uDC91\uDCAA\uDD74\uDD75\uDD7A\uDD90\uDD95\uDD96\uDE45-\uDE47\uDE4B-\uDE4F\uDEA3\uDEB4-\uDEB6\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1F\uDD26\uDD30-\uDD39\uDD3C-\uDD3E\uDDB5\uDDB6\uDDB8\uDDB9\uDDBB\uDDCD-\uDDCF\uDDD1-\uDDDD])/g; +}; diff --git a/node_modules/encoding/.prettierrc.js b/node_modules/encoding/.prettierrc.js new file mode 100644 index 0000000..3f83654 --- /dev/null +++ b/node_modules/encoding/.prettierrc.js @@ -0,0 +1,8 @@ +module.exports = { + printWidth: 160, + tabWidth: 4, + singleQuote: true, + endOfLine: 'lf', + trailingComma: 'none', + arrowParens: 'avoid' +}; diff --git a/node_modules/encoding/.travis.yml b/node_modules/encoding/.travis.yml new file mode 100644 index 0000000..abc4f48 --- /dev/null +++ b/node_modules/encoding/.travis.yml @@ -0,0 +1,25 @@ +language: node_js +sudo: false +node_js: + - "0.10" + - 0.12 + - iojs + - 4 + - 5 +env: + - CXX=g++-4.8 +addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - g++-4.8 +notifications: + email: + - andris@kreata.ee + webhooks: + urls: + - https://webhooks.gitter.im/e/0ed18fd9b3e529b3c2cc + on_success: change # options: [always|never|change] default: always + on_failure: always # options: [always|never|change] default: always + on_start: false # default: false diff --git a/node_modules/encoding/LICENSE b/node_modules/encoding/LICENSE new file mode 100644 index 0000000..33f5a9a --- /dev/null +++ b/node_modules/encoding/LICENSE @@ -0,0 +1,16 @@ +Copyright (c) 2012-2014 Andris Reinman + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/encoding/README.md b/node_modules/encoding/README.md new file mode 100644 index 0000000..6188918 --- /dev/null +++ b/node_modules/encoding/README.md @@ -0,0 +1,41 @@ +# Encoding + +**encoding** is a simple wrapper around [iconv-lite](https://github.com/ashtuchkin/iconv-lite/) to convert strings from one encoding to another. 
+ +[![Build Status](https://secure.travis-ci.org/andris9/encoding.svg)](http://travis-ci.org/andris9/Nodemailer) +[![npm version](https://badge.fury.io/js/encoding.svg)](http://badge.fury.io/js/encoding) + +Initially _encoding_ was a wrapper around _node-iconv_ (main) and _iconv-lite_ (fallback) and was used as the encoding layer for Nodemailer/mailparser. Somehow it also ended up as a dependency for a bunch of other projects, none of which actually use _node-iconv_. The loading mechanics caused issues for front-end projects and Nodemailer/mailparser had moved on, so _node-iconv_ was removed. + +## Install + +Install through npm + + npm install encoding + +## Usage + +Require the module + + var encoding = require("encoding"); + +Convert with encoding.convert() + + var resultBuffer = encoding.convert(text, toCharset, fromCharset); + +Where + +- **text** is either a Buffer or a String to be converted +- **toCharset** is the character set to convert the string to +- **fromCharset** (_optional_, defaults to UTF-8) is the source charset + +Output of the conversion is always a Buffer object. + +Example + + var result = encoding.convert("ÕÄÖÜ", "Latin_1"); + console.log(result); // <Buffer d5 c4 d6 dc> + +## License + +**MIT** diff --git a/node_modules/encoding/lib/encoding.js b/node_modules/encoding/lib/encoding.js new file mode 100644 index 0000000..865c24b --- /dev/null +++ b/node_modules/encoding/lib/encoding.js @@ -0,0 +1,83 @@ +'use strict'; + +var iconvLite = require('iconv-lite'); + +// Expose to the world +module.exports.convert = convert; + +/** + * Convert encoding of a UTF-8 string or a buffer + * + * @param {String|Buffer} str String to be converted + * @param {String} to Encoding to be converted to + * @param {String} [from='UTF-8'] Encoding to be converted from + * @return {Buffer} Encoded string + */ +function convert(str, to, from) { + from = checkEncoding(from || 'UTF-8'); + to = checkEncoding(to || 'UTF-8'); + str = str || ''; + + var result; + + if (from !== 'UTF-8' && typeof str === 'string') { + str = Buffer.from(str, 'binary'); + } + + if (from === to) { + if (typeof str === 'string') { + result = Buffer.from(str); + } else { + result = str; + } + } else { + try { + result = convertIconvLite(str, to, from); + } catch (E) { + console.error(E); + result = str; + } + } + + if (typeof result === 'string') { + result = Buffer.from(result, 'utf-8'); + } + + return result; +} + +/** + * Convert encoding of a string with iconv-lite + * + * @param {String|Buffer} str String to be converted + * @param {String} to Encoding to be converted to + * @param {String} [from='UTF-8'] Encoding to be converted from + * @return {Buffer} Encoded string + */ +function convertIconvLite(str, to, from) { + if (to === 'UTF-8') { + return iconvLite.decode(str, from); + } else if (from === 'UTF-8') { + return iconvLite.encode(str, to); + } else { + return iconvLite.encode(iconvLite.decode(str, from), to); + } +} + +/** + * Converts charset name if needed + * + * @param {String} name Character set + * @return {String} Character set name + */ +function checkEncoding(name) { + return (name || '') + .toString() + .trim() + .replace(/^latin[\-_]?(\d+)$/i, 'ISO-8859-$1') + .replace(/^win(?:dows)?[\-_]?(\d+)$/i, 'WINDOWS-$1') + .replace(/^utf[\-_]?(\d+)$/i, 'UTF-$1') + .replace(/^ks_c_5601\-1987$/i, 'CP949') + .replace(/^us[\-_]?ascii$/i, 'ASCII') + .toUpperCase(); +} diff --git a/node_modules/encoding/package.json b/node_modules/encoding/package.json new file mode 100644 index 0000000..773a943 --- /dev/null +++
b/node_modules/encoding/package.json @@ -0,0 +1,18 @@ +{ + "name": "encoding", + "version": "0.1.13", + "description": "Convert encodings, uses iconv-lite", + "main": "lib/encoding.js", + "scripts": { + "test": "nodeunit test" + }, + "repository": "https://github.com/andris9/encoding.git", + "author": "Andris Reinman", + "license": "MIT", + "dependencies": { + "iconv-lite": "^0.6.2" + }, + "devDependencies": { + "nodeunit": "0.11.3" + } +} diff --git a/node_modules/encoding/test/test.js b/node_modules/encoding/test/test.js new file mode 100644 index 0000000..3a7dfee --- /dev/null +++ b/node_modules/encoding/test/test.js @@ -0,0 +1,49 @@ +'use strict'; + +var encoding = require('../lib/encoding'); + +exports['General tests'] = { + 'From UTF-8 to Latin_1': function (test) { + var input = 'ÕÄÖÜ', + expected = Buffer.from([0xd5, 0xc4, 0xd6, 0xdc]); + test.deepEqual(encoding.convert(input, 'latin1'), expected); + test.done(); + }, + + 'From Latin_1 to UTF-8': function (test) { + var input = Buffer.from([0xd5, 0xc4, 0xd6, 0xdc]), + expected = 'ÕÄÖÜ'; + test.deepEqual(encoding.convert(input, 'utf-8', 'latin1').toString(), expected); + test.done(); + }, + + 'From UTF-8 to UTF-8': function (test) { + var input = 'ÕÄÖÜ', + expected = Buffer.from('ÕÄÖÜ'); + test.deepEqual(encoding.convert(input, 'utf-8', 'utf-8'), expected); + test.done(); + }, + + 'From Latin_13 to Latin_15': function (test) { + var input = Buffer.from([0xd5, 0xc4, 0xd6, 0xdc, 0xd0]), + expected = Buffer.from([0xd5, 0xc4, 0xd6, 0xdc, 0xa6]); + test.deepEqual(encoding.convert(input, 'latin_15', 'latin13'), expected); + test.done(); + } + + /* + // ISO-2022-JP is not supported by iconv-lite + "From ISO-2022-JP to UTF-8 with Iconv": function (test) { + var input = Buffer.from( + "GyRCM1g5OzU7PVEwdzgmPSQ4IUYkMnFKczlwGyhC", + "base64" + ), + expected = Buffer.from( + "5a2m5qCh5oqA6KGT5ZOh56CU5L+u5qSc6KiO5Lya5aCx5ZGK", + "base64" + ); + test.deepEqual(encoding.convert(input, "utf-8", "ISO-2022-JP"), expected); + test.done(); + }, + */ +}; diff --git a/node_modules/env-paths/index.d.ts b/node_modules/env-paths/index.d.ts new file mode 100644 index 0000000..277ddc0 --- /dev/null +++ b/node_modules/env-paths/index.d.ts @@ -0,0 +1,101 @@ +declare namespace envPaths { + export interface Options { + /** + __Don't use this option unless you really have to!__ + + Suffix appended to the project name to avoid name conflicts with native apps. Pass an empty string to disable it. + + @default 'nodejs' + */ + readonly suffix?: string; + } + + export interface Paths { + /** + Directory for data files. + + Example locations (with the default `nodejs` suffix): + + - macOS: `~/Library/Application Support/MyApp-nodejs` + - Windows: `%LOCALAPPDATA%\MyApp-nodejs\Data` (for example, `C:\Users\USERNAME\AppData\Local\MyApp-nodejs\Data`) + - Linux: `~/.local/share/MyApp-nodejs` (or `$XDG_DATA_HOME/MyApp-nodejs`) + */ + readonly data: string; + + /** + Directory for config files. + + Example locations (with the default `nodejs` suffix): + + - macOS: `~/Library/Preferences/MyApp-nodejs` + - Windows: `%APPDATA%\MyApp-nodejs\Config` (for example, `C:\Users\USERNAME\AppData\Roaming\MyApp-nodejs\Config`) + - Linux: `~/.config/MyApp-nodejs` (or `$XDG_CONFIG_HOME/MyApp-nodejs`) + */ + readonly config: string; + + /** + Directory for non-essential data files.
+ + Example locations (with the default `nodejs` suffix): + + - macOS: `~/Library/Caches/MyApp-nodejs` + - Windows: `%LOCALAPPDATA%\MyApp-nodejs\Cache` (for example, `C:\Users\USERNAME\AppData\Local\MyApp-nodejs\Cache`) + - Linux: `~/.cache/MyApp-nodejs` (or `$XDG_CACHE_HOME/MyApp-nodejs`) + */ + readonly cache: string; + + /** + Directory for log files. + + Example locations (with the default `nodejs` suffix): + + - macOS: `~/Library/Logs/MyApp-nodejs` + - Windows: `%LOCALAPPDATA%\MyApp-nodejs\Log` (for example, `C:\Users\USERNAME\AppData\Local\MyApp-nodejs\Log`) + - Linux: `~/.local/state/MyApp-nodejs` (or `$XDG_STATE_HOME/MyApp-nodejs`) + */ + readonly log: string; + + /** + Directory for temporary files. + + Example locations (with the default `nodejs` suffix): + + - macOS: `/var/folders/jf/f2twvvvs5jl_m49tf034ffpw0000gn/T/MyApp-nodejs` + - Windows: `%LOCALAPPDATA%\Temp\MyApp-nodejs` (for example, `C:\Users\USERNAME\AppData\Local\Temp\MyApp-nodejs`) + - Linux: `/tmp/USERNAME/MyApp-nodejs` + */ + readonly temp: string; + } +} + +declare const envPaths: { + /** + Get paths for storing things like data, config, cache, etc. + + Note: It only generates the path strings. It doesn't create the directories for you. You could use [`make-dir`](https://github.com/sindresorhus/make-dir) to create the directories. + + @param name - Name of your project. Used to generate the paths. + @returns The paths to use for your project on current OS. + + @example + ``` + import envPaths = require('env-paths'); + + const paths = envPaths('MyApp'); + + paths.data; + //=> '/home/sindresorhus/.local/share/MyApp-nodejs' + + paths.config + //=> '/home/sindresorhus/.config/MyApp-nodejs' + ``` + */ + (name: string, options?: envPaths.Options): envPaths.Paths; + + // TODO: Remove this for the next major release, refactor the whole definition to: + // declare function envPaths(name: string, options?: envPaths.Options): envPaths.Paths; + // export = envPaths; + default: typeof envPaths; +}; + +export = envPaths; diff --git a/node_modules/env-paths/index.js b/node_modules/env-paths/index.js new file mode 100644 index 0000000..7e7b50b --- /dev/null +++ b/node_modules/env-paths/index.js @@ -0,0 +1,74 @@ +'use strict'; +const path = require('path'); +const os = require('os'); + +const homedir = os.homedir(); +const tmpdir = os.tmpdir(); +const {env} = process; + +const macos = name => { + const library = path.join(homedir, 'Library'); + + return { + data: path.join(library, 'Application Support', name), + config: path.join(library, 'Preferences', name), + cache: path.join(library, 'Caches', name), + log: path.join(library, 'Logs', name), + temp: path.join(tmpdir, name) + }; +}; + +const windows = name => { + const appData = env.APPDATA || path.join(homedir, 'AppData', 'Roaming'); + const localAppData = env.LOCALAPPDATA || path.join(homedir, 'AppData', 'Local'); + + return { + // Data/config/cache/log are invented by me as Windows isn't opinionated about this + data: path.join(localAppData, name, 'Data'), + config: path.join(appData, name, 'Config'), + cache: path.join(localAppData, name, 'Cache'), + log: path.join(localAppData, name, 'Log'), + temp: path.join(tmpdir, name) + }; +}; + +// https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html +const linux = name => { + const username = path.basename(homedir); + + return { + data: path.join(env.XDG_DATA_HOME || path.join(homedir, '.local', 'share'), name), + config: path.join(env.XDG_CONFIG_HOME || path.join(homedir, '.config'), name), + cache: 
path.join(env.XDG_CACHE_HOME || path.join(homedir, '.cache'), name), + // https://wiki.debian.org/XDGBaseDirectorySpecification#state + log: path.join(env.XDG_STATE_HOME || path.join(homedir, '.local', 'state'), name), + temp: path.join(tmpdir, username, name) + }; +}; + +const envPaths = (name, options) => { + if (typeof name !== 'string') { + throw new TypeError(`Expected string, got ${typeof name}`); + } + + options = Object.assign({suffix: 'nodejs'}, options); + + if (options.suffix) { + // Add suffix to prevent possible conflict with native apps + name += `-${options.suffix}`; + } + + if (process.platform === 'darwin') { + return macos(name); + } + + if (process.platform === 'win32') { + return windows(name); + } + + return linux(name); +}; + +module.exports = envPaths; +// TODO: Remove this for the next major release +module.exports.default = envPaths; diff --git a/node_modules/env-paths/license b/node_modules/env-paths/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/env-paths/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/env-paths/package.json b/node_modules/env-paths/package.json new file mode 100644 index 0000000..fae4ebc --- /dev/null +++ b/node_modules/env-paths/package.json @@ -0,0 +1,45 @@ +{ + "name": "env-paths", + "version": "2.2.1", + "description": "Get paths for storing things like data, config, cache, etc", + "license": "MIT", + "repository": "sindresorhus/env-paths", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "common", + "user", + "paths", + "env", + "environment", + "directory", + "dir", + "appdir", + "path", + "data", + "config", + "cache", + "logs", + "temp", + "linux", + "unix" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.1", + "xo": "^0.24.0" + } +} diff --git a/node_modules/env-paths/readme.md b/node_modules/env-paths/readme.md new file mode 100644 index 0000000..b66d571 --- /dev/null +++ b/node_modules/env-paths/readme.md @@ -0,0 +1,115 @@ +# env-paths + +> Get paths for storing things like data, config, cache, etc + +Uses the correct OS-specific paths. Most developers get this wrong. 
+ + +## Install + +``` +$ npm install env-paths +``` + + +## Usage + +```js +const envPaths = require('env-paths'); + +const paths = envPaths('MyApp'); + +paths.data; +//=> '/home/sindresorhus/.local/share/MyApp-nodejs' + +paths.config +//=> '/home/sindresorhus/.config/MyApp-nodejs' +``` + + +## API + +### paths = envPaths(name, options?) + +Note: It only generates the path strings. It doesn't create the directories for you. You could use [`make-dir`](https://github.com/sindresorhus/make-dir) to create the directories. + +#### name + +Type: `string` + +Name of your project. Used to generate the paths. + +#### options + +Type: `object` + +##### suffix + +Type: `string`
+Default: `'nodejs'` + +**Don't use this option unless you really have to!**
+Suffix appended to the project name to avoid name conflicts with native +apps. Pass an empty string to disable it. + +### paths.data + +Directory for data files. + +Example locations (with the default `nodejs` [suffix](#suffix)): + +- macOS: `~/Library/Application Support/MyApp-nodejs` +- Windows: `%LOCALAPPDATA%\MyApp-nodejs\Data` (for example, `C:\Users\USERNAME\AppData\Local\MyApp-nodejs\Data`) +- Linux: `~/.local/share/MyApp-nodejs` (or `$XDG_DATA_HOME/MyApp-nodejs`) + +### paths.config + +Directory for config files. + +Example locations (with the default `nodejs` [suffix](#suffix)): + +- macOS: `~/Library/Preferences/MyApp-nodejs` +- Windows: `%APPDATA%\MyApp-nodejs\Config` (for example, `C:\Users\USERNAME\AppData\Roaming\MyApp-nodejs\Config`) +- Linux: `~/.config/MyApp-nodejs` (or `$XDG_CONFIG_HOME/MyApp-nodejs`) + +### paths.cache + +Directory for non-essential data files. + +Example locations (with the default `nodejs` [suffix](#suffix)): + +- macOS: `~/Library/Caches/MyApp-nodejs` +- Windows: `%LOCALAPPDATA%\MyApp-nodejs\Cache` (for example, `C:\Users\USERNAME\AppData\Local\MyApp-nodejs\Cache`) +- Linux: `~/.cache/MyApp-nodejs` (or `$XDG_CACHE_HOME/MyApp-nodejs`) + +### paths.log + +Directory for log files. + +Example locations (with the default `nodejs` [suffix](#suffix)): + +- macOS: `~/Library/Logs/MyApp-nodejs` +- Windows: `%LOCALAPPDATA%\MyApp-nodejs\Log` (for example, `C:\Users\USERNAME\AppData\Local\MyApp-nodejs\Log`) +- Linux: `~/.local/state/MyApp-nodejs` (or `$XDG_STATE_HOME/MyApp-nodejs`) + +### paths.temp + +Directory for temporary files. + +Example locations (with the default `nodejs` [suffix](#suffix)): + +- macOS: `/var/folders/jf/f2twvvvs5jl_m49tf034ffpw0000gn/T/MyApp-nodejs` +- Windows: `%LOCALAPPDATA%\Temp\MyApp-nodejs` (for example, `C:\Users\USERNAME\AppData\Local\Temp\MyApp-nodejs`) +- Linux: `/tmp/USERNAME/MyApp-nodejs` + +--- + +Get professional support for this package with a Tidelift subscription. Tidelift helps make open source sustainable for maintainers while giving companies assurances about security, maintenance, and licensing for their dependencies.
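The readme above documents the `suffix` option but gives it no code sample; here is a minimal hedged sketch of both the default and the disabled-suffix behavior. The `MyCLI` project name is hypothetical, and the printed paths assume a Linux home directory:

```js
const envPaths = require('env-paths');

// Default: the 'nodejs' suffix is appended to avoid clashes with native apps.
const paths = envPaths('MyCLI');
console.log(paths.cache);  // e.g. '/home/user/.cache/MyCLI-nodejs'

// An empty-string suffix is falsy, so nothing is appended (per the option docs above).
const bare = envPaths('MyCLI', { suffix: '' });
console.log(bare.config);  // e.g. '/home/user/.config/MyCLI'
```

As the readme notes, env-paths only builds the path strings; it never creates the directories.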
diff --git a/node_modules/err-code/.editorconfig b/node_modules/err-code/.editorconfig new file mode 100644 index 0000000..829280b --- /dev/null +++ b/node_modules/err-code/.editorconfig @@ -0,0 +1,12 @@ +root = true + +[*] +indent_style = space +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[package.json] +indent_size = 2 diff --git a/node_modules/err-code/.eslintrc.json b/node_modules/err-code/.eslintrc.json new file mode 100644 index 0000000..4829595 --- /dev/null +++ b/node_modules/err-code/.eslintrc.json @@ -0,0 +1,7 @@ +{ + "root": true, + "extends": [ + "@satazor/eslint-config/es6", + "@satazor/eslint-config/addons/node" + ] +} \ No newline at end of file diff --git a/node_modules/err-code/.travis.yml b/node_modules/err-code/.travis.yml new file mode 100644 index 0000000..b29cf66 --- /dev/null +++ b/node_modules/err-code/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "4" + - "6" diff --git a/node_modules/err-code/README.md b/node_modules/err-code/README.md new file mode 100644 index 0000000..5afdab0 --- /dev/null +++ b/node_modules/err-code/README.md @@ -0,0 +1,70 @@ +# err-code + +[![NPM version][npm-image]][npm-url] [![Downloads][downloads-image]][npm-url] [![Build Status][travis-image]][travis-url] [![Dependency status][david-dm-image]][david-dm-url] [![Dev Dependency status][david-dm-dev-image]][david-dm-dev-url] [![Greenkeeper badge][greenkeeper-image]][greenkeeper-url] + +[npm-url]:https://npmjs.org/package/err-code +[downloads-image]:http://img.shields.io/npm/dm/err-code.svg +[npm-image]:http://img.shields.io/npm/v/err-code.svg +[travis-url]:https://travis-ci.org/IndigoUnited/js-err-code +[travis-image]:http://img.shields.io/travis/IndigoUnited/js-err-code/master.svg +[david-dm-url]:https://david-dm.org/IndigoUnited/js-err-code +[david-dm-image]:https://img.shields.io/david/IndigoUnited/js-err-code.svg +[david-dm-dev-url]:https://david-dm.org/IndigoUnited/js-err-code?type=dev +[david-dm-dev-image]:https://img.shields.io/david/dev/IndigoUnited/js-err-code.svg +[greenkeeper-image]:https://badges.greenkeeper.io/IndigoUnited/js-err-code.svg +[greenkeeper-url]:https://greenkeeper.io/ + +Create new error instances with a code and additional properties. + + +## Installation + +```console +$ npm install err-code +// or +$ bower install err-code +``` + +The browser file is named index.umd.js which supports CommonJS, AMD and globals (errCode). + + +## Why + +I find myself doing this repeatedly: + +```js +var err = new Error('My message'); +err.code = 'SOMECODE'; +err.detail = 'Additional information about the error'; +throw err; +``` + + +## Usage + +Simple usage. + +```js +var errcode = require('err-code'); + +// fill error with message + code +throw errcode(new Error('My message'), 'ESOMECODE'); +// fill error with message + code + props +throw errcode(new Error('My message'), 'ESOMECODE', { detail: 'Additional information about the error' }); +// fill error with message + props +throw errcode(new Error('My message'), { detail: 'Additional information about the error' }); +``` + +## Pre-existing fields + +If the passed `Error` already has a `.code` field, or fields specified in the third argument to `errcode` they will be overwritten, unless the fields are read only or otherwise throw during assignment in which case a new object will be created that shares a prototype chain with the original `Error`. 
The `.stack` and `.message` properties will be carried over from the original error and `.code` or any passed properties will be set on it. + + +## Tests + +`$ npm test` + + +## License + +Released under the [MIT License](http://www.opensource.org/licenses/mit-license.php). diff --git a/node_modules/err-code/bower.json b/node_modules/err-code/bower.json new file mode 100644 index 0000000..a39cb70 --- /dev/null +++ b/node_modules/err-code/bower.json @@ -0,0 +1,30 @@ +{ + "name": "err-code", + "version": "1.1.1", + "description": "Create new error instances with a code and additional properties", + "main": "index.umd.js", + "homepage": "https://github.com/IndigoUnited/js-err-code", + "authors": [ + "IndigoUnited (http://indigounited.com)" + ], + "moduleType": [ + "amd", + "globals", + "node" + ], + "keywords": [ + "error", + "err", + "code", + "properties", + "property" + ], + "license": "MIT", + "ignore": [ + "**/.*", + "node_modules", + "bower_components", + "test", + "tests" + ] +} diff --git a/node_modules/err-code/index.js b/node_modules/err-code/index.js new file mode 100644 index 0000000..9ff3e9c --- /dev/null +++ b/node_modules/err-code/index.js @@ -0,0 +1,47 @@ +'use strict'; + +function assign(obj, props) { + for (const key in props) { + Object.defineProperty(obj, key, { + value: props[key], + enumerable: true, + configurable: true, + }); + } + + return obj; +} + +function createError(err, code, props) { + if (!err || typeof err === 'string') { + throw new TypeError('Please pass an Error to err-code'); + } + + if (!props) { + props = {}; + } + + if (typeof code === 'object') { + props = code; + code = undefined; + } + + if (code != null) { + props.code = code; + } + + try { + return assign(err, props); + } catch (_) { + props.message = err.message; + props.stack = err.stack; + + const ErrClass = function () {}; + + ErrClass.prototype = Object.create(Object.getPrototypeOf(err)); + + return assign(new ErrClass(), props); + } +} + +module.exports = createError; diff --git a/node_modules/err-code/index.umd.js b/node_modules/err-code/index.umd.js new file mode 100644 index 0000000..4100726 --- /dev/null +++ b/node_modules/err-code/index.umd.js @@ -0,0 +1,51 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.errCode = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i index.umd.js" + }, + "bugs": { + "url": "https://github.com/IndigoUnited/js-err-code/issues/" + }, + "repository": { + "type": "git", + "url": "git://github.com/IndigoUnited/js-err-code.git" + }, + "keywords": [ + "error", + "err", + "code", + "properties", + "property" + ], + "author": "IndigoUnited (http://indigounited.com)", + "license": "MIT", + "devDependencies": { + "@satazor/eslint-config": "^3.0.0", + "browserify": "^16.5.1", + "eslint": "^7.2.0", + "expect.js": "^0.3.1", + "mocha": "^8.0.1" + } +} diff --git a/node_modules/err-code/test/.eslintrc.json 
b/node_modules/err-code/test/.eslintrc.json new file mode 100644 index 0000000..f9fbb2d --- /dev/null +++ b/node_modules/err-code/test/.eslintrc.json @@ -0,0 +1,5 @@ +{ + "env": { + "mocha": true + } +} \ No newline at end of file diff --git a/node_modules/err-code/test/test.js b/node_modules/err-code/test/test.js new file mode 100644 index 0000000..22ba0a8 --- /dev/null +++ b/node_modules/err-code/test/test.js @@ -0,0 +1,159 @@ +'use strict'; + +const errcode = require('../index'); +const expect = require('expect.js'); + +describe('errcode', () => { + describe('string as first argument', () => { + it('should throw an error', () => { + expect(() => { errcode('my message'); }).to.throwError((err) => { + expect(err).to.be.a(TypeError); + }); + }); + }); + + describe('error as first argument', () => { + it('should accept an error and do nothing', () => { + const myErr = new Error('my message'); + const err = errcode(myErr); + + expect(err).to.be(myErr); + expect(err.hasOwnProperty(err.code)).to.be(false); + }); + + it('should accept an error and add a code', () => { + const myErr = new Error('my message'); + const err = errcode(myErr, 'ESOME'); + + expect(err).to.be(myErr); + expect(err.code).to.be('ESOME'); + }); + + it('should accept an error object and add code & properties', () => { + const myErr = new Error('my message'); + const err = errcode(myErr, 'ESOME', { foo: 'bar', bar: 'foo' }); + + expect(err).to.be.an(Error); + expect(err.code).to.be('ESOME'); + expect(err.foo).to.be('bar'); + expect(err.bar).to.be('foo'); + }); + + it('should create an error object without code but with properties', () => { + const myErr = new Error('my message'); + const err = errcode(myErr, { foo: 'bar', bar: 'foo' }); + + expect(err).to.be.an(Error); + expect(err.code).to.be(undefined); + expect(err.foo).to.be('bar'); + expect(err.bar).to.be('foo'); + }); + + it('should set a non-writable field', () => { + const myErr = new Error('my message'); + + Object.defineProperty(myErr, 'code', { + value: 'derp', + writable: false, + }); + const err = errcode(myErr, 'ERR_WAT'); + + expect(err).to.be.an(Error); + expect(err.stack).to.equal(myErr.stack); + expect(err.code).to.be('ERR_WAT'); + }); + + it('should add a code to frozen object', () => { + const myErr = new Error('my message'); + const err = errcode(Object.freeze(myErr), 'ERR_WAT'); + + expect(err).to.be.an(Error); + expect(err.stack).to.equal(myErr.stack); + expect(err.code).to.be('ERR_WAT'); + }); + + it('should to set a field that throws at assignment time', () => { + const myErr = new Error('my message'); + + Object.defineProperty(myErr, 'code', { + enumerable: true, + set() { + throw new Error('Nope!'); + }, + get() { + return 'derp'; + }, + }); + const err = errcode(myErr, 'ERR_WAT'); + + expect(err).to.be.an(Error); + expect(err.stack).to.equal(myErr.stack); + expect(err.code).to.be('ERR_WAT'); + }); + + it('should retain error type', () => { + const myErr = new TypeError('my message'); + + Object.defineProperty(myErr, 'code', { + value: 'derp', + writable: false, + }); + const err = errcode(myErr, 'ERR_WAT'); + + expect(err).to.be.a(TypeError); + expect(err.stack).to.equal(myErr.stack); + expect(err.code).to.be('ERR_WAT'); + }); + + it('should add a code to a class that extends Error', () => { + class CustomError extends Error { + set code(val) { + throw new Error('Nope!'); + } + } + + const myErr = new CustomError('my message'); + + Object.defineProperty(myErr, 'code', { + value: 'derp', + writable: false, + configurable: false, + }); + const err = 
errcode(myErr, 'ERR_WAT'); + + expect(err).to.be.a(CustomError); + expect(err.stack).to.equal(myErr.stack); + expect(err.code).to.be('ERR_WAT'); + + // original prototype chain should be intact + expect(() => { + const otherErr = new CustomError('my message'); + + otherErr.code = 'derp'; + }).to.throwError(); + }); + + it('should support errors that are not Errors', () => { + const err = errcode({ + message: 'Oh noes!', + }, 'ERR_WAT'); + + expect(err.message).to.be('Oh noes!'); + expect(err.code).to.be('ERR_WAT'); + }); + }); + + describe('falsy first arguments', () => { + it('should not allow passing null as the first argument', () => { + expect(() => { errcode(null); }).to.throwError((err) => { + expect(err).to.be.a(TypeError); + }); + }); + + it('should not allow passing undefined as the first argument', () => { + expect(() => { errcode(undefined); }).to.throwError((err) => { + expect(err).to.be.a(TypeError); + }); + }); + }); +}); diff --git a/node_modules/error-ex/LICENSE b/node_modules/error-ex/LICENSE new file mode 100644 index 0000000..0a5f461 --- /dev/null +++ b/node_modules/error-ex/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 JD Ballard + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
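Tying the err-code pieces above together: `createError` decorates an existing error in place (or clones it onto a stand-in when assignment fails), and the test suite exercises the edge cases: frozen errors, non-writable `code` fields, throwing setters. A minimal usage sketch follows; the `'ECONN'` code and `host` property are made-up values for illustration, not part of the package:

```js
const errcode = require('err-code');

// Decorate an existing error with a code and extra properties.
// 'ECONN' and `host` are illustrative values.
const err = errcode(new Error('connection refused'), 'ECONN', { host: 'example.org' });

console.log(err.code);             //=> 'ECONN'
console.log(err.host);             //=> 'example.org'
console.log(err instanceof Error); //=> true
```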
diff --git a/node_modules/error-ex/README.md b/node_modules/error-ex/README.md new file mode 100644 index 0000000..97f744a --- /dev/null +++ b/node_modules/error-ex/README.md @@ -0,0 +1,144 @@ +# node-error-ex [![Travis-CI.org Build Status](https://img.shields.io/travis/Qix-/node-error-ex.svg?style=flat-square)](https://travis-ci.org/Qix-/node-error-ex) [![Coveralls.io Coverage Rating](https://img.shields.io/coveralls/Qix-/node-error-ex.svg?style=flat-square)](https://coveralls.io/r/Qix-/node-error-ex) +> Easily subclass and customize new Error types + +## Examples +To include in your project: +```javascript +var errorEx = require('error-ex'); +``` + +To create an error message type with a specific name (note that `ErrorFn.name` +will not reflect this): +```javascript +var JSONError = errorEx('JSONError'); + +var err = new JSONError('error'); +err.name; //-> JSONError +throw err; //-> JSONError: error +``` + +To add a stack line: +```javascript +var JSONError = errorEx('JSONError', {fileName: errorEx.line('in %s')}); + +var err = new JSONError('error'); +err.fileName = '/a/b/c/foo.json'; +throw err; //-> (line 2)-> in /a/b/c/foo.json +``` + +To append to the error message: +```javascript +var JSONError = errorEx('JSONError', {fileName: errorEx.append('in %s')}); + +var err = new JSONError('error'); +err.fileName = '/a/b/c/foo.json'; +throw err; //-> JSONError: error in /a/b/c/foo.json +``` + +## API + +#### `errorEx([name], [properties])` +Creates a new ErrorEx error type + +- `name`: the name of the new type (appears in the error message upon throw; + defaults to `Error.name`) +- `properties`: if supplied, used as a key/value dictionary of properties to + use when building up the stack message. Keys are property names that are + looked up on the error message, and then passed to function values. + - `line`: if specified and is a function, return value is added as a stack + entry (error-ex will indent for you). Passed the property value given + the key. + - `stack`: if specified and is a function, passed the value of the property + using the key, and the raw stack lines as a second argument. Takes no + return value (but the stack can be modified directly). + - `message`: if specified and is a function, return value is used as new + `.message` value upon get. Passed the property value of the property named + by key, and the existing message is passed as the second argument as an + array of lines (suitable for multi-line messages). + +Returns a constructor (Function) that can be used just like the regular Error +constructor. + +```javascript +var errorEx = require('error-ex'); + +var BasicError = errorEx(); + +var NamedError = errorEx('NamedError'); + +// -- + +var AdvancedError = errorEx('AdvancedError', { + foo: { + line: function (value, stack) { + if (value) { + return 'bar ' + value; + } + return null; + } + } +}); + +var err = new AdvancedError('hello, world'); +err.foo = 'baz'; +throw err; + +/* + AdvancedError: hello, world + bar baz + at tryReadme() (readme.js:20:1) +*/ +``` + +#### `errorEx.line(str)` +Creates a stack line using a delimiter + +> This is a helper function. It is to be used in lieu of writing a value object +> for `properties` values.
+ +- `str`: The string to create + - Use the delimiter `%s` to specify where in the string the value should go + +```javascript +var errorEx = require('error-ex'); + +var FileError = errorEx('FileError', {fileName: errorEx.line('in %s')}); + +var err = new FileError('problem reading file'); +err.fileName = '/a/b/c/d/foo.js'; +throw err; + +/* + FileError: problem reading file + in /a/b/c/d/foo.js + at tryReadme() (readme.js:7:1) +*/ +``` + +#### `errorEx.append(str)` +Appends to the `error.message` string + +> This is a helper function. It is to be used in lieu of writing a value object +> for `properties` values. + +- `str`: The string to append + - Use the delimiter `%s` to specify where in the string the value should go + +```javascript +var errorEx = require('error-ex'); + +var SyntaxError = errorEx('SyntaxError', {fileName: errorEx.append('in %s')}); + +var err = new SyntaxError('improper indentation'); +err.fileName = '/a/b/c/d/foo.js'; +throw err; + +/* + SyntaxError: improper indentation in /a/b/c/d/foo.js + at tryReadme() (readme.js:7:1) +*/ +``` + +## License +Licensed under the [MIT License](http://opensource.org/licenses/MIT). +You can find a copy of it in [LICENSE](LICENSE). diff --git a/node_modules/error-ex/index.js b/node_modules/error-ex/index.js new file mode 100644 index 0000000..4fb20b4 --- /dev/null +++ b/node_modules/error-ex/index.js @@ -0,0 +1,141 @@ +'use strict'; + +var util = require('util'); +var isArrayish = require('is-arrayish'); + +var errorEx = function errorEx(name, properties) { + if (!name || name.constructor !== String) { + properties = name || {}; + name = Error.name; + } + + var errorExError = function ErrorEXError(message) { + if (!this) { + return new ErrorEXError(message); + } + + message = message instanceof Error + ? message.message + : (message || this.message); + + Error.call(this, message); + Error.captureStackTrace(this, errorExError); + + this.name = name; + + Object.defineProperty(this, 'message', { + configurable: true, + enumerable: false, + get: function () { + var newMessage = message.split(/\r?\n/g); + + for (var key in properties) { + if (!properties.hasOwnProperty(key)) { + continue; + } + + var modifier = properties[key]; + + if ('message' in modifier) { + newMessage = modifier.message(this[key], newMessage) || newMessage; + if (!isArrayish(newMessage)) { + newMessage = [newMessage]; + } + } + } + + return newMessage.join('\n'); + }, + set: function (v) { + message = v; + } + }); + + var overwrittenStack = null; + + var stackDescriptor = Object.getOwnPropertyDescriptor(this, 'stack'); + var stackGetter = stackDescriptor.get; + var stackValue = stackDescriptor.value; + delete stackDescriptor.value; + delete stackDescriptor.writable; + + stackDescriptor.set = function (newstack) { + overwrittenStack = newstack; + }; + + stackDescriptor.get = function () { + var stack = (overwrittenStack || ((stackGetter) + ? stackGetter.call(this) + : stackValue)).split(/\r?\n+/g); + + // starting in Node 7, the stack builder caches the message. + // just replace it. 
+ if (!overwrittenStack) { + stack[0] = this.name + ': ' + this.message; + } + + var lineCount = 1; + for (var key in properties) { + if (!properties.hasOwnProperty(key)) { + continue; + } + + var modifier = properties[key]; + + if ('line' in modifier) { + var line = modifier.line(this[key]); + if (line) { + stack.splice(lineCount++, 0, ' ' + line); + } + } + + if ('stack' in modifier) { + modifier.stack(this[key], stack); + } + } + + return stack.join('\n'); + }; + + Object.defineProperty(this, 'stack', stackDescriptor); + }; + + if (Object.setPrototypeOf) { + Object.setPrototypeOf(errorExError.prototype, Error.prototype); + Object.setPrototypeOf(errorExError, Error); + } else { + util.inherits(errorExError, Error); + } + + return errorExError; +}; + +errorEx.append = function (str, def) { + return { + message: function (v, message) { + v = v || def; + + if (v) { + message[0] += ' ' + str.replace('%s', v.toString()); + } + + return message; + } + }; +}; + +errorEx.line = function (str, def) { + return { + line: function (v) { + v = v || def; + + if (v) { + return str.replace('%s', v.toString()); + } + + return null; + } + }; +}; + +module.exports = errorEx; diff --git a/node_modules/error-ex/package.json b/node_modules/error-ex/package.json new file mode 100644 index 0000000..f3d9ae0 --- /dev/null +++ b/node_modules/error-ex/package.json @@ -0,0 +1,46 @@ +{ + "name": "error-ex", + "description": "Easy error subclassing and stack customization", + "version": "1.3.2", + "maintainers": [ + "Josh Junon (github.com/qix-)", + "Sindre Sorhus (sindresorhus.com)" + ], + "keywords": [ + "error", + "errors", + "extend", + "extending", + "extension", + "subclass", + "stack", + "custom" + ], + "license": "MIT", + "scripts": { + "pretest": "xo", + "test": "mocha --compilers coffee:coffee-script/register" + }, + "xo": { + "rules": { + "operator-linebreak": [ + 0 + ] + } + }, + "repository": "qix-/node-error-ex", + "files": [ + "index.js" + ], + "devDependencies": { + "coffee-script": "^1.9.3", + "coveralls": "^2.11.2", + "istanbul": "^0.3.17", + "mocha": "^2.2.5", + "should": "^7.0.1", + "xo": "^0.7.1" + }, + "dependencies": { + "is-arrayish": "^0.2.1" + } +} diff --git a/node_modules/escalade/dist/index.js b/node_modules/escalade/dist/index.js new file mode 100644 index 0000000..ad236c4 --- /dev/null +++ b/node_modules/escalade/dist/index.js @@ -0,0 +1,22 @@ +const { dirname, resolve } = require('path'); +const { readdir, stat } = require('fs'); +const { promisify } = require('util'); + +const toStats = promisify(stat); +const toRead = promisify(readdir); + +module.exports = async function (start, callback) { + let dir = resolve('.', start); + let tmp, stats = await toStats(dir); + + if (!stats.isDirectory()) { + dir = dirname(dir); + } + + while (true) { + tmp = await callback(dir, await toRead(dir)); + if (tmp) return resolve(dir, tmp); + dir = dirname(tmp = dir); + if (tmp === dir) break; + } +} diff --git a/node_modules/escalade/dist/index.mjs b/node_modules/escalade/dist/index.mjs new file mode 100644 index 0000000..bf95be0 --- /dev/null +++ b/node_modules/escalade/dist/index.mjs @@ -0,0 +1,22 @@ +import { dirname, resolve } from 'path'; +import { readdir, stat } from 'fs'; +import { promisify } from 'util'; + +const toStats = promisify(stat); +const toRead = promisify(readdir); + +export default async function (start, callback) { + let dir = resolve('.', start); + let tmp, stats = await toStats(dir); + + if (!stats.isDirectory()) { + dir = dirname(dir); + } + + while (true) { + tmp = await 
callback(dir, await toRead(dir)); + if (tmp) return resolve(dir, tmp); + dir = dirname(tmp = dir); + if (tmp === dir) break; + } +} diff --git a/node_modules/escalade/index.d.ts b/node_modules/escalade/index.d.ts new file mode 100644 index 0000000..283e398 --- /dev/null +++ b/node_modules/escalade/index.d.ts @@ -0,0 +1,3 @@ +type Promisable<T> = T | Promise<T>; +export type Callback = (directory: string, files: string[]) => Promisable<string | false | void>; +export default function (directory: string, callback: Callback): Promise<string | void>; diff --git a/node_modules/escalade/license b/node_modules/escalade/license new file mode 100644 index 0000000..fa6089f --- /dev/null +++ b/node_modules/escalade/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Luke Edwards <luke.edwards05@gmail.com> (lukeed.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
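One subtlety in the escalade loop above: `dir = dirname(tmp = dir)` stashes the current directory in `tmp` before ascending, and the walk stops once `dirname()` returns its own input, which only happens at the filesystem root. A standalone sketch of that termination idiom (the starting path is illustrative):

```js
const { dirname } = require('path');

// Ascend until dirname() is a fixed point, i.e. the root directory.
let dir = '/Users/lukeed/oss/escalade';
while (true) {
  const parent = dirname(dir);
  if (parent === dir) break; // dirname('/') === '/'
  dir = parent;
}
console.log(dir); //=> '/'
```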
diff --git a/node_modules/escalade/package.json b/node_modules/escalade/package.json new file mode 100644 index 0000000..0d1bfce --- /dev/null +++ b/node_modules/escalade/package.json @@ -0,0 +1,61 @@ +{ + "name": "escalade", + "version": "3.1.1", + "repository": "lukeed/escalade", + "description": "A tiny (183B to 210B) and fast utility to ascend parent directories", + "module": "dist/index.mjs", + "main": "dist/index.js", + "types": "index.d.ts", + "license": "MIT", + "author": { + "name": "Luke Edwards", + "email": "luke.edwards05@gmail.com", + "url": "https://lukeed.com" + }, + "exports": { + ".": [ + { + "import": "./dist/index.mjs", + "require": "./dist/index.js" + }, + "./dist/index.js" + ], + "./sync": [ + { + "import": "./sync/index.mjs", + "require": "./sync/index.js" + }, + "./sync/index.js" + ] + }, + "files": [ + "*.d.ts", + "dist", + "sync" + ], + "modes": { + "sync": "src/sync.js", + "default": "src/async.js" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "build": "bundt", + "pretest": "npm run build", + "test": "uvu -r esm test -i fixtures" + }, + "keywords": [ + "find", + "parent", + "parents", + "directory", + "search", + "walk" + ], + "devDependencies": { + "bundt": "1.1.1", + "esm": "3.2.25", + "uvu": "0.3.3" + } +} diff --git a/node_modules/escalade/readme.md b/node_modules/escalade/readme.md new file mode 100644 index 0000000..4e2195c --- /dev/null +++ b/node_modules/escalade/readme.md @@ -0,0 +1,211 @@ +# escalade [![CI](https://github.com/lukeed/escalade/workflows/CI/badge.svg)](https://github.com/lukeed/escalade/actions) [![codecov](https://badgen.now.sh/codecov/c/github/lukeed/escalade)](https://codecov.io/gh/lukeed/escalade) + +> A tiny (183B to 210B) and [fast](#benchmarks) utility to ascend parent directories + +With [escalade](https://en.wikipedia.org/wiki/Escalade), you can scale parent directories until you've found what you're looking for.
Given an input file or directory, `escalade` will continue executing your callback function until either: + +1) the callback returns a truthy value +2) `escalade` has reached the system root directory (eg, `/`) + +> **Important:**<br>
Please note that `escalade` only deals with direct ancestry – it will not dive into parents' sibling directories. + +--- + +**Notice:** As of v3.1.0, `escalade` now includes [Deno support](http://deno.land/x/escalade)! Please see [Deno Usage](#deno) below. + +--- + +## Install + +``` +$ npm install --save escalade +``` + + +## Modes + +There are two "versions" of `escalade` available: + +#### "async" +> **Node.js:** >= 8.x<br>
+> **Size (gzip):** 210 bytes<br>
+> **Availability:** [CommonJS](https://unpkg.com/escalade/dist/index.js), [ES Module](https://unpkg.com/escalade/dist/index.mjs) + +This is the primary/default mode. It makes use of `async`/`await` and [`util.promisify`](https://nodejs.org/api/util.html#util_util_promisify_original). + +#### "sync" +> **Node.js:** >= 6.x<br>
+> **Size (gzip):** 183 bytes<br>
+> **Availability:** [CommonJS](https://unpkg.com/escalade/sync/index.js), [ES Module](https://unpkg.com/escalade/sync/index.mjs) + +This is the opt-in mode, ideal for scenarios where `async` usage cannot be supported. + + +## Usage + +***Example Structure*** + +``` +/Users/lukeed + └── oss + ├── license + └── escalade + ├── package.json + └── test + └── fixtures + ├── index.js + └── foobar + └── demo.js +``` + +***Example Usage*** + +```js +//~> demo.js +import { join } from 'path'; +import escalade from 'escalade'; + +const input = join(__dirname, 'demo.js'); +// or: const input = __dirname; + +const pkg = await escalade(input, (dir, names) => { + console.log('~> dir:', dir); + console.log('~> names:', names); + console.log('---'); + + if (names.includes('package.json')) { + // will be resolved into absolute + return 'package.json'; + } +}); + +//~> dir: /Users/lukeed/oss/escalade/test/fixtures/foobar +//~> names: ['demo.js'] +//--- +//~> dir: /Users/lukeed/oss/escalade/test/fixtures +//~> names: ['index.js', 'foobar'] +//--- +//~> dir: /Users/lukeed/oss/escalade/test +//~> names: ['fixtures'] +//--- +//~> dir: /Users/lukeed/oss/escalade +//~> names: ['package.json', 'test'] +//--- + +console.log(pkg); +//=> /Users/lukeed/oss/escalade/package.json + +// Now search for "missing123.txt" +// (Assume it doesn't exist anywhere!) +const missing = await escalade(input, (dir, names) => { + console.log('~> dir:', dir); + return names.includes('missing123.txt') && 'missing123.txt'; +}); + +//~> dir: /Users/lukeed/oss/escalade/test/fixtures/foobar +//~> dir: /Users/lukeed/oss/escalade/test/fixtures +//~> dir: /Users/lukeed/oss/escalade/test +//~> dir: /Users/lukeed/oss/escalade +//~> dir: /Users/lukeed/oss +//~> dir: /Users/lukeed +//~> dir: /Users +//~> dir: / + +console.log(missing); +//=> undefined +``` + +> **Note:** To run the above example with "sync" mode, import from `escalade/sync` and remove the `await` keyword. + + +## API + +### escalade(input, callback) +Returns: `string|void` or `Promise` + +When your `callback` locates a file, `escalade` will resolve/return with an absolute path.
+If your `callback` was never satisfied, then `escalade` will resolve/return with nothing (undefined). + +> **Important:**<br>
The `sync` and `async` versions share the same API.<br>
+The **only** difference is that `sync` is not Promise-based. + +#### input +Type: `string` + +The path from which to start ascending. + +This may be a file or a directory path.<br>
+However, when `input` is a file, `escalade` will begin with its parent directory. + +> **Important:** Unless given an absolute path, `input` will be resolved from the `process.cwd()` location. + +#### callback +Type: `Function` + +The callback to execute for each ancestry level. It is always given two arguments: + +1) `dir` - an absolute path of the current parent directory +2) `names` - a list (`string[]`) of contents _relative to_ the `dir` parent + +> **Note:** The `names` list can contain names of files _and_ directories. + +When your callback returns a _falsey_ value, then `escalade` will continue with `dir`'s parent directory, re-invoking your callback with new argument values. + +When your callback returns a string, then `escalade` stops iteration immediately.<br>
+If the string is an absolute path, then it's left as is. Otherwise, the string is resolved into an absolute path _from_ the `dir` that housed the satisfying condition. + +> **Important:** Your `callback` can be a `Promise/AsyncFunction` when using the "async" version of `escalade`. + +## Benchmarks + +> Running on Node.js v10.13.0 + +``` +# Load Time + find-up 3.891ms + escalade 0.485ms + escalade/sync 0.309ms + +# Levels: 6 (target = "foo.txt"): + find-up x 24,856 ops/sec ±6.46% (55 runs sampled) + escalade x 73,084 ops/sec ±4.23% (73 runs sampled) + find-up.sync x 3,663 ops/sec ±1.12% (83 runs sampled) + escalade/sync x 9,360 ops/sec ±0.62% (88 runs sampled) + +# Levels: 12 (target = "package.json"): + find-up x 29,300 ops/sec ±10.68% (70 runs sampled) + escalade x 73,685 ops/sec ± 5.66% (66 runs sampled) + find-up.sync x 1,707 ops/sec ± 0.58% (91 runs sampled) + escalade/sync x 4,667 ops/sec ± 0.68% (94 runs sampled) + +# Levels: 18 (target = "missing123.txt"): + find-up x 21,818 ops/sec ±17.37% (14 runs sampled) + escalade x 67,101 ops/sec ±21.60% (20 runs sampled) + find-up.sync x 1,037 ops/sec ± 2.86% (88 runs sampled) + escalade/sync x 1,248 ops/sec ± 0.50% (93 runs sampled) +``` + +## Deno + +As of v3.1.0, `escalade` is available on the Deno registry. + +Please note that the [API](#api) is identical and that there are still [two modes](#modes) from which to choose: + +```ts +// Choose "async" mode +import escalade from 'https://deno.land/escalade/async.ts'; + +// Choose "sync" mode +import escalade from 'https://deno.land/escalade/sync.ts'; +``` + +> **Important:** The `allow-read` permission is required! + + +## Related + +- [premove](https://github.com/lukeed/premove) - A tiny (247B) utility to remove items recursively +- [totalist](https://github.com/lukeed/totalist) - A tiny (195B to 224B) utility to recursively list all (total) files in a directory +- [mk-dirs](https://github.com/lukeed/mk-dirs) - A tiny (420B) utility to make a directory and its parents, recursively + +## License + +MIT © [Luke Edwards](https://lukeed.com) diff --git a/node_modules/escalade/sync/index.d.ts b/node_modules/escalade/sync/index.d.ts new file mode 100644 index 0000000..746ddd3 --- /dev/null +++ b/node_modules/escalade/sync/index.d.ts @@ -0,0 +1,2 @@ +export type Callback = (directory: string, files: string[]) => string | false | void; +export default function (directory: string, callback: Callback): string | void; diff --git a/node_modules/escalade/sync/index.js b/node_modules/escalade/sync/index.js new file mode 100644 index 0000000..902cc46 --- /dev/null +++ b/node_modules/escalade/sync/index.js @@ -0,0 +1,18 @@ +const { dirname, resolve } = require('path'); +const { readdirSync, statSync } = require('fs'); + +module.exports = function (start, callback) { + let dir = resolve('.', start); + let tmp, stats = statSync(dir); + + if (!stats.isDirectory()) { + dir = dirname(dir); + } + + while (true) { + tmp = callback(dir, readdirSync(dir)); + if (tmp) return resolve(dir, tmp); + dir = dirname(tmp = dir); + if (tmp === dir) break; + } +} diff --git a/node_modules/escalade/sync/index.mjs b/node_modules/escalade/sync/index.mjs new file mode 100644 index 0000000..3cdc5bd --- /dev/null +++ b/node_modules/escalade/sync/index.mjs @@ -0,0 +1,18 @@ +import { dirname, resolve } from 'path'; +import { readdirSync, statSync } from 'fs'; + +export default function (start, callback) { + let dir = resolve('.', start); + let tmp, stats = statSync(dir); + + if (!stats.isDirectory()) { + dir = dirname(dir); + } + + 
while (true) { + tmp = callback(dir, readdirSync(dir)); + if (tmp) return resolve(dir, tmp); + dir = dirname(tmp = dir); + if (tmp === dir) break; + } +} diff --git a/node_modules/escape-string-regexp/index.js b/node_modules/escape-string-regexp/index.js new file mode 100644 index 0000000..7834bf9 --- /dev/null +++ b/node_modules/escape-string-regexp/index.js @@ -0,0 +1,11 @@ +'use strict'; + +var matchOperatorsRe = /[|\\{}()[\]^$+*?.]/g; + +module.exports = function (str) { + if (typeof str !== 'string') { + throw new TypeError('Expected a string'); + } + + return str.replace(matchOperatorsRe, '\\$&'); +}; diff --git a/node_modules/escape-string-regexp/license b/node_modules/escape-string-regexp/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/escape-string-regexp/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
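The one-liner in escape-string-regexp's `index.js` above leans on the `$&` replacement pattern, which re-inserts the matched character, so each special character becomes a backslash followed by itself. A quick demonstration of just that mechanism:

```js
const matchOperatorsRe = /[|\\{}()[\]^$+*?.]/g;

// '$&' refers to the whole match, so '$' becomes '\$' and '?' becomes '\?'.
console.log('how much $ for a unicorn?'.replace(matchOperatorsRe, '\\$&'));
//=> 'how much \$ for a unicorn\?'
```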
diff --git a/node_modules/escape-string-regexp/package.json b/node_modules/escape-string-regexp/package.json new file mode 100644 index 0000000..f307df3 --- /dev/null +++ b/node_modules/escape-string-regexp/package.json @@ -0,0 +1,41 @@ +{ + "name": "escape-string-regexp", + "version": "1.0.5", + "description": "Escape RegExp special characters", + "license": "MIT", + "repository": "sindresorhus/escape-string-regexp", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "maintainers": [ + "Sindre Sorhus (sindresorhus.com)", + "Joshua Boy Nicolai Appelman (jbna.nl)" + ], + "engines": { + "node": ">=0.8.0" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "escape", + "regex", + "regexp", + "re", + "regular", + "expression", + "string", + "str", + "special", + "characters" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/escape-string-regexp/readme.md b/node_modules/escape-string-regexp/readme.md new file mode 100644 index 0000000..87ac82d --- /dev/null +++ b/node_modules/escape-string-regexp/readme.md @@ -0,0 +1,27 @@ +# escape-string-regexp [![Build Status](https://travis-ci.org/sindresorhus/escape-string-regexp.svg?branch=master)](https://travis-ci.org/sindresorhus/escape-string-regexp) + +> Escape RegExp special characters + + +## Install + +``` +$ npm install --save escape-string-regexp +``` + + +## Usage + +```js +const escapeStringRegexp = require('escape-string-regexp'); + +const escapedString = escapeStringRegexp('how much $ for a unicorn?'); +//=> 'how much \$ for a unicorn\?' + +new RegExp(escapedString); +``` + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/find-up/index.d.ts b/node_modules/find-up/index.d.ts new file mode 100644 index 0000000..41e3192 --- /dev/null +++ b/node_modules/find-up/index.d.ts @@ -0,0 +1,137 @@ +import {Options as LocatePathOptions} from 'locate-path'; + +declare const stop: unique symbol; + +declare namespace findUp { + interface Options extends LocatePathOptions {} + + type StopSymbol = typeof stop; + + type Match = string | StopSymbol | undefined; +} + +declare const findUp: { + /** + Find a file or directory by walking up parent directories. + + @param name - Name of the file or directory to find. Can be multiple. + @returns The first path found (by respecting the order of `name`s) or `undefined` if none could be found. + + @example + ``` + // / + // └── Users + // └── sindresorhus + // ├── unicorn.png + // └── foo + // └── bar + // ├── baz + // └── example.js + + // example.js + import findUp = require('find-up'); + + (async () => { + console.log(await findUp('unicorn.png')); + //=> '/Users/sindresorhus/unicorn.png' + + console.log(await findUp(['rainbow.png', 'unicorn.png'])); + //=> '/Users/sindresorhus/unicorn.png' + })(); + ``` + */ + (name: string | string[], options?: findUp.Options): Promise; + + /** + Find a file or directory by walking up parent directories. + + @param matcher - Called for each directory in the search. Return a path or `findUp.stop` to stop the search. + @returns The first path found or `undefined` if none could be found. 
+ + @example + ``` + import path = require('path'); + import findUp = require('find-up'); + + (async () => { + console.log(await findUp(async directory => { + const hasUnicorns = await findUp.exists(path.join(directory, 'unicorn.png')); + return hasUnicorns && directory; + }, {type: 'directory'})); + //=> '/Users/sindresorhus' + })(); + ``` + */ + (matcher: (directory: string) => (findUp.Match | Promise<findUp.Match>), options?: findUp.Options): Promise<string | undefined>; + + sync: { + /** + Synchronously find a file or directory by walking up parent directories. + + @param name - Name of the file or directory to find. Can be multiple. + @returns The first path found (by respecting the order of `name`s) or `undefined` if none could be found. + */ + (name: string | string[], options?: findUp.Options): string | undefined; + + /** + Synchronously find a file or directory by walking up parent directories. + + @param matcher - Called for each directory in the search. Return a path or `findUp.stop` to stop the search. + @returns The first path found or `undefined` if none could be found. + + @example + ``` + import path = require('path'); + import findUp = require('find-up'); + + console.log(findUp.sync(directory => { + const hasUnicorns = findUp.sync.exists(path.join(directory, 'unicorn.png')); + return hasUnicorns && directory; + }, {type: 'directory'})); + //=> '/Users/sindresorhus' + ``` + */ + (matcher: (directory: string) => findUp.Match, options?: findUp.Options): string | undefined; + + /** + Synchronously check if a path exists. + + @param path - Path to the file or directory. + @returns Whether the path exists. + + @example + ``` + import findUp = require('find-up'); + + console.log(findUp.sync.exists('/Users/sindresorhus/unicorn.png')); + //=> true + ``` + */ + exists(path: string): boolean; + } + + /** + Check if a path exists. + + @param path - Path to a file or directory. + @returns Whether the path exists. + + @example + ``` + import findUp = require('find-up'); + + (async () => { + console.log(await findUp.exists('/Users/sindresorhus/unicorn.png')); + //=> true + })(); + ``` + */ + exists(path: string): Promise<boolean>; + + /** + Return this in a `matcher` function to stop the search and force `findUp` to immediately return `undefined`.
+ */ + readonly stop: findUp.StopSymbol; +}; + +export = findUp; diff --git a/node_modules/find-up/index.js b/node_modules/find-up/index.js new file mode 100644 index 0000000..ce564e5 --- /dev/null +++ b/node_modules/find-up/index.js @@ -0,0 +1,89 @@ +'use strict'; +const path = require('path'); +const locatePath = require('locate-path'); +const pathExists = require('path-exists'); + +const stop = Symbol('findUp.stop'); + +module.exports = async (name, options = {}) => { + let directory = path.resolve(options.cwd || ''); + const {root} = path.parse(directory); + const paths = [].concat(name); + + const runMatcher = async locateOptions => { + if (typeof name !== 'function') { + return locatePath(paths, locateOptions); + } + + const foundPath = await name(locateOptions.cwd); + if (typeof foundPath === 'string') { + return locatePath([foundPath], locateOptions); + } + + return foundPath; + }; + + // eslint-disable-next-line no-constant-condition + while (true) { + // eslint-disable-next-line no-await-in-loop + const foundPath = await runMatcher({...options, cwd: directory}); + + if (foundPath === stop) { + return; + } + + if (foundPath) { + return path.resolve(directory, foundPath); + } + + if (directory === root) { + return; + } + + directory = path.dirname(directory); + } +}; + +module.exports.sync = (name, options = {}) => { + let directory = path.resolve(options.cwd || ''); + const {root} = path.parse(directory); + const paths = [].concat(name); + + const runMatcher = locateOptions => { + if (typeof name !== 'function') { + return locatePath.sync(paths, locateOptions); + } + + const foundPath = name(locateOptions.cwd); + if (typeof foundPath === 'string') { + return locatePath.sync([foundPath], locateOptions); + } + + return foundPath; + }; + + // eslint-disable-next-line no-constant-condition + while (true) { + const foundPath = runMatcher({...options, cwd: directory}); + + if (foundPath === stop) { + return; + } + + if (foundPath) { + return path.resolve(directory, foundPath); + } + + if (directory === root) { + return; + } + + directory = path.dirname(directory); + } +}; + +module.exports.exists = pathExists; + +module.exports.sync.exists = pathExists.sync; + +module.exports.stop = stop; diff --git a/node_modules/find-up/license b/node_modules/find-up/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/find-up/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
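A small detail in the find-up implementation above: `[].concat(name)` normalizes both a single string and an array of names into a `string[]`, so `locatePath` always receives a list regardless of how the caller shaped the argument. The idiom in isolation:

```js
// [].concat(x) wraps scalars and flattens exactly one array level.
console.log([].concat('package.json'));       //=> ['package.json']
console.log([].concat(['a.json', 'b.json'])); //=> ['a.json', 'b.json']
```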
diff --git a/node_modules/find-up/package.json b/node_modules/find-up/package.json new file mode 100644 index 0000000..cd50281 --- /dev/null +++ b/node_modules/find-up/package.json @@ -0,0 +1,53 @@ +{ + "name": "find-up", + "version": "4.1.0", + "description": "Find a file or directory by walking up parent directories", + "license": "MIT", + "repository": "sindresorhus/find-up", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "find", + "up", + "find-up", + "findup", + "look-up", + "look", + "file", + "search", + "match", + "package", + "resolve", + "parent", + "parents", + "folder", + "directory", + "walk", + "walking", + "path" + ], + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "devDependencies": { + "ava": "^2.1.0", + "is-path-inside": "^2.1.0", + "tempy": "^0.3.0", + "tsd": "^0.7.3", + "xo": "^0.24.0" + } +} diff --git a/node_modules/find-up/readme.md b/node_modules/find-up/readme.md new file mode 100644 index 0000000..d6a21e5 --- /dev/null +++ b/node_modules/find-up/readme.md @@ -0,0 +1,156 @@ +# find-up [![Build Status](https://travis-ci.org/sindresorhus/find-up.svg?branch=master)](https://travis-ci.org/sindresorhus/find-up) + +> Find a file or directory by walking up parent directories + + +## Install + +``` +$ npm install find-up +``` + + +## Usage + +``` +/ +└── Users + └── sindresorhus + ├── unicorn.png + └── foo + └── bar + ├── baz + └── example.js +``` + +`example.js` + +```js +const path = require('path'); +const findUp = require('find-up'); + +(async () => { + console.log(await findUp('unicorn.png')); + //=> '/Users/sindresorhus/unicorn.png' + + console.log(await findUp(['rainbow.png', 'unicorn.png'])); + //=> '/Users/sindresorhus/unicorn.png' + + console.log(await findUp(async directory => { + const hasUnicorns = await findUp.exists(path.join(directory, 'unicorn.png')); + return hasUnicorns && directory; + }, {type: 'directory'})); + //=> '/Users/sindresorhus' +})(); +``` + + +## API + +### findUp(name, options?) +### findUp(matcher, options?) + +Returns a `Promise` for either the path or `undefined` if it couldn't be found. + +### findUp([...name], options?) + +Returns a `Promise` for either the first path found (by respecting the order of the array) or `undefined` if none could be found. + +### findUp.sync(name, options?) +### findUp.sync(matcher, options?) + +Returns a path or `undefined` if it couldn't be found. + +### findUp.sync([...name], options?) + +Returns the first path found (by respecting the order of the array) or `undefined` if none could be found. + +#### name + +Type: `string` + +Name of the file or directory to find. + +#### matcher + +Type: `Function` + +A function that will be called with each directory until it returns a `string` with the path, which stops the search, or the root directory has been reached and nothing was found. Useful if you want to match files with certain patterns, set of permissions, or other advanced use-cases. + +When using async mode, the `matcher` may optionally be an async or promise-returning function that returns the path. + +#### options + +Type: `object` + +##### cwd + +Type: `string`
+Default: `process.cwd()` + +Directory to start from. + +##### type + +Type: `string`<br>
+Default: `'file'`<br>
+Values: `'file'` `'directory'` + +The type of paths that can match. + +##### allowSymlinks + +Type: `boolean`<br>
+Default: `true` + +Allow symbolic links to match if they point to the chosen path type. + +### findUp.exists(path) + +Returns a `Promise<boolean>` of whether the path exists. + +### findUp.sync.exists(path) + +Returns a `boolean` of whether the path exists. + +#### path + +Type: `string` + +Path to a file or directory. + +### findUp.stop + +A [`Symbol`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol) that can be returned by a `matcher` function to stop the search and cause `findUp` to immediately return `undefined`. Useful as a performance optimization in case the current working directory is deeply nested in the filesystem. + +```js +const path = require('path'); +const findUp = require('find-up'); + +(async () => { + await findUp(directory => { + return path.basename(directory) === 'work' ? findUp.stop : 'logo.png'; + }); +})(); +``` + + +## Related + +- [find-up-cli](https://github.com/sindresorhus/find-up-cli) - CLI for this module +- [pkg-up](https://github.com/sindresorhus/pkg-up) - Find the closest package.json file +- [pkg-dir](https://github.com/sindresorhus/pkg-dir) - Find the root directory of an npm package +- [resolve-from](https://github.com/sindresorhus/resolve-from) - Resolve the path of a module like `require.resolve()` but from a given path
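The `type` and `allowSymlinks` options documented above never appear together in the readme's own examples; a minimal sketch of combining them (the `.git` target and the printed path are illustrative):

```js
const findUp = require('find-up');

(async () => {
	// Locate the nearest enclosing `.git` directory, rejecting
	// symlinks that merely point at a directory.
	const gitDir = await findUp('.git', {type: 'directory', allowSymlinks: false});
	console.log(gitDir); //=> '/Users/you/project/.git' (or undefined)
})();
```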
diff --git a/node_modules/fs-minipass/LICENSE b/node_modules/fs-minipass/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fs-minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fs-minipass/README.md b/node_modules/fs-minipass/README.md new file mode 100644 index 0000000..1e61241 --- /dev/null +++ b/node_modules/fs-minipass/README.md @@ -0,0 +1,70 @@ +# fs-minipass + +Filesystem streams based on [minipass](http://npm.im/minipass). + +4 classes are exported: + +- ReadStream +- ReadStreamSync +- WriteStream +- WriteStreamSync + +When using `ReadStreamSync`, all of the data is made available +immediately upon consuming the stream. Nothing is buffered in memory +when the stream is constructed. If the stream is piped to a writer, +then it will synchronously `read()` and emit data into the writer as +fast as the writer can consume it. (That is, it will respect +backpressure.) If you call `stream.read()` then it will read the +entire file and return the contents. + +When using `WriteStreamSync`, every write is flushed to the file +synchronously. If your writes all come in a single tick, then it'll +write it all out in a single tick. It's as synchronous as you are. + +The async versions work much like their node builtin counterparts, +with the exception of introducing significantly less Stream machinery +overhead. + +## USAGE + +It's just streams, you pipe them or read() them or write() to them. + +```js +const fsm = require('fs-minipass') +const readStream = new fsm.ReadStream('file.txt') +const writeStream = new fsm.WriteStream('output.txt') +writeStream.write('some file header or whatever\n') +readStream.pipe(writeStream) +``` + +## ReadStream(path, options) + +Path string is required, but somewhat irrelevant if an open file +descriptor is passed in as an option. + +Options: + +- `fd` Pass in a numeric file descriptor, if the file is already open. +- `readSize` The size of reads to do, defaults to 16MB +- `size` The size of the file, if known. Prevents zero-byte read() + call at the end. +- `autoClose` Set to `false` to prevent the file descriptor from being + closed when the file is done being read. + +## WriteStream(path, options) + +Path string is required, but somewhat irrelevant if an open file +descriptor is passed in as an option. + +Options: + +- `fd` Pass in a numeric file descriptor, if the file is already open. +- `mode` The mode to create the file with. Defaults to `0o666`. +- `start` The position in the file to start reading. If not + specified, then the file will start writing at position zero, and be + truncated by default. +- `autoClose` Set to `false` to prevent the file descriptor from being + closed when the stream is ended. 
+- `flags` Flags to use when opening the file. Irrelevant if `fd` is + passed in, since file won't be opened in that case. Defaults to + `'a'` if a `pos` is specified, or `'w'` otherwise. diff --git a/node_modules/fs-minipass/index.js b/node_modules/fs-minipass/index.js new file mode 100644 index 0000000..9b0779c --- /dev/null +++ b/node_modules/fs-minipass/index.js @@ -0,0 +1,422 @@ +'use strict' +const MiniPass = require('minipass') +const EE = require('events').EventEmitter +const fs = require('fs') + +let writev = fs.writev +/* istanbul ignore next */ +if (!writev) { + // This entire block can be removed if support for earlier than Node.js + // 12.9.0 is not needed. + const binding = process.binding('fs') + const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback + + writev = (fd, iovec, pos, cb) => { + const done = (er, bw) => cb(er, bw, iovec) + const req = new FSReqWrap() + req.oncomplete = done + binding.writeBuffers(fd, iovec, pos, req) + } +} + +const _autoClose = Symbol('_autoClose') +const _close = Symbol('_close') +const _ended = Symbol('_ended') +const _fd = Symbol('_fd') +const _finished = Symbol('_finished') +const _flags = Symbol('_flags') +const _flush = Symbol('_flush') +const _handleChunk = Symbol('_handleChunk') +const _makeBuf = Symbol('_makeBuf') +const _mode = Symbol('_mode') +const _needDrain = Symbol('_needDrain') +const _onerror = Symbol('_onerror') +const _onopen = Symbol('_onopen') +const _onread = Symbol('_onread') +const _onwrite = Symbol('_onwrite') +const _open = Symbol('_open') +const _path = Symbol('_path') +const _pos = Symbol('_pos') +const _queue = Symbol('_queue') +const _read = Symbol('_read') +const _readSize = Symbol('_readSize') +const _reading = Symbol('_reading') +const _remain = Symbol('_remain') +const _size = Symbol('_size') +const _write = Symbol('_write') +const _writing = Symbol('_writing') +const _defaultFlag = Symbol('_defaultFlag') +const _errored = Symbol('_errored') + +class ReadStream extends MiniPass { + constructor (path, opt) { + opt = opt || {} + super(opt) + + this.readable = true + this.writable = false + + if (typeof path !== 'string') + throw new TypeError('path must be a string') + + this[_errored] = false + this[_fd] = typeof opt.fd === 'number' ? opt.fd : null + this[_path] = path + this[_readSize] = opt.readSize || 16*1024*1024 + this[_reading] = false + this[_size] = typeof opt.size === 'number' ? opt.size : Infinity + this[_remain] = this[_size] + this[_autoClose] = typeof opt.autoClose === 'boolean' ? 
+ opt.autoClose : true + + if (typeof this[_fd] === 'number') + this[_read]() + else + this[_open]() + } + + get fd () { return this[_fd] } + get path () { return this[_path] } + + write () { + throw new TypeError('this is a readable stream') + } + + end () { + throw new TypeError('this is a readable stream') + } + + [_open] () { + fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)) + } + + [_onopen] (er, fd) { + if (er) + this[_onerror](er) + else { + this[_fd] = fd + this.emit('open', fd) + this[_read]() + } + } + + [_makeBuf] () { + return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])) + } + + [_read] () { + if (!this[_reading]) { + this[_reading] = true + const buf = this[_makeBuf]() + /* istanbul ignore if */ + if (buf.length === 0) + return process.nextTick(() => this[_onread](null, 0, buf)) + fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) => + this[_onread](er, br, buf)) + } + } + + [_onread] (er, br, buf) { + this[_reading] = false + if (er) + this[_onerror](er) + else if (this[_handleChunk](br, buf)) + this[_read]() + } + + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) + } + } + + [_onerror] (er) { + this[_reading] = true + this[_close]() + this.emit('error', er) + } + + [_handleChunk] (br, buf) { + let ret = false + // no effect if infinite + this[_remain] -= br + if (br > 0) + ret = super.write(br < buf.length ? buf.slice(0, br) : buf) + + if (br === 0 || this[_remain] <= 0) { + ret = false + this[_close]() + super.end() + } + + return ret + } + + emit (ev, data) { + switch (ev) { + case 'prefinish': + case 'finish': + break + + case 'drain': + if (typeof this[_fd] === 'number') + this[_read]() + break + + case 'error': + if (this[_errored]) + return + this[_errored] = true + return super.emit(ev, data) + + default: + return super.emit(ev, data) + } + } +} + +class ReadStreamSync extends ReadStream { + [_open] () { + let threw = true + try { + this[_onopen](null, fs.openSync(this[_path], 'r')) + threw = false + } finally { + if (threw) + this[_close]() + } + } + + [_read] () { + let threw = true + try { + if (!this[_reading]) { + this[_reading] = true + do { + const buf = this[_makeBuf]() + /* istanbul ignore next */ + const br = buf.length === 0 ? 0 + : fs.readSync(this[_fd], buf, 0, buf.length, null) + if (!this[_handleChunk](br, buf)) + break + } while (true) + this[_reading] = false + } + threw = false + } finally { + if (threw) + this[_close]() + } + } + + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.closeSync(fd) + this.emit('close') + } + } +} + +class WriteStream extends EE { + constructor (path, opt) { + opt = opt || {} + super(opt) + this.readable = false + this.writable = true + this[_errored] = false + this[_writing] = false + this[_ended] = false + this[_needDrain] = false + this[_queue] = [] + this[_path] = path + this[_fd] = typeof opt.fd === 'number' ? opt.fd : null + this[_mode] = opt.mode === undefined ? 0o666 : opt.mode + this[_pos] = typeof opt.start === 'number' ? opt.start : null + this[_autoClose] = typeof opt.autoClose === 'boolean' ? + opt.autoClose : true + + // truncating makes no sense when writing into the middle + const defaultFlag = this[_pos] !== null ? 'r+' : 'w' + this[_defaultFlag] = opt.flags === undefined + this[_flags] = this[_defaultFlag] ? 
defaultFlag : opt.flags + + if (this[_fd] === null) + this[_open]() + } + + emit (ev, data) { + if (ev === 'error') { + if (this[_errored]) + return + this[_errored] = true + } + return super.emit(ev, data) + } + + + get fd () { return this[_fd] } + get path () { return this[_path] } + + [_onerror] (er) { + this[_close]() + this[_writing] = true + this.emit('error', er) + } + + [_open] () { + fs.open(this[_path], this[_flags], this[_mode], + (er, fd) => this[_onopen](er, fd)) + } + + [_onopen] (er, fd) { + if (this[_defaultFlag] && + this[_flags] === 'r+' && + er && er.code === 'ENOENT') { + this[_flags] = 'w' + this[_open]() + } else if (er) + this[_onerror](er) + else { + this[_fd] = fd + this.emit('open', fd) + this[_flush]() + } + } + + end (buf, enc) { + if (buf) + this.write(buf, enc) + + this[_ended] = true + + // synthetic after-write logic, where drain/finish live + if (!this[_writing] && !this[_queue].length && + typeof this[_fd] === 'number') + this[_onwrite](null, 0) + return this + } + + write (buf, enc) { + if (typeof buf === 'string') + buf = Buffer.from(buf, enc) + + if (this[_ended]) { + this.emit('error', new Error('write() after end()')) + return false + } + + if (this[_fd] === null || this[_writing] || this[_queue].length) { + this[_queue].push(buf) + this[_needDrain] = true + return false + } + + this[_writing] = true + this[_write](buf) + return true + } + + [_write] (buf) { + fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => + this[_onwrite](er, bw)) + } + + [_onwrite] (er, bw) { + if (er) + this[_onerror](er) + else { + if (this[_pos] !== null) + this[_pos] += bw + if (this[_queue].length) + this[_flush]() + else { + this[_writing] = false + + if (this[_ended] && !this[_finished]) { + this[_finished] = true + this[_close]() + this.emit('finish') + } else if (this[_needDrain]) { + this[_needDrain] = false + this.emit('drain') + } + } + } + } + + [_flush] () { + if (this[_queue].length === 0) { + if (this[_ended]) + this[_onwrite](null, 0) + } else if (this[_queue].length === 1) + this[_write](this[_queue].pop()) + else { + const iovec = this[_queue] + this[_queue] = [] + writev(this[_fd], iovec, this[_pos], + (er, bw) => this[_onwrite](er, bw)) + } + } + + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) + } + } +} + +class WriteStreamSync extends WriteStream { + [_open] () { + let fd + // only wrap in a try{} block if we know we'll retry, to avoid + // the rethrow obscuring the error's source frame in most cases. 
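+    // `_defaultFlag` is true when the caller passed no `flags`; the default
+    // is 'r+' only when a `start` position was given, so a missing file
+    // raises ENOENT below and is retried with 'w', mirroring [_onopen].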
+ if (this[_defaultFlag] && this[_flags] === 'r+') { + try { + fd = fs.openSync(this[_path], this[_flags], this[_mode]) + } catch (er) { + if (er.code === 'ENOENT') { + this[_flags] = 'w' + return this[_open]() + } else + throw er + } + } else + fd = fs.openSync(this[_path], this[_flags], this[_mode]) + + this[_onopen](null, fd) + } + + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.closeSync(fd) + this.emit('close') + } + } + + [_write] (buf) { + // throw the original, but try to close if it fails + let threw = true + try { + this[_onwrite](null, + fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos])) + threw = false + } finally { + if (threw) + try { this[_close]() } catch (_) {} + } + } +} + +exports.ReadStream = ReadStream +exports.ReadStreamSync = ReadStreamSync + +exports.WriteStream = WriteStream +exports.WriteStreamSync = WriteStreamSync diff --git a/node_modules/fs-minipass/package.json b/node_modules/fs-minipass/package.json new file mode 100644 index 0000000..2f2436c --- /dev/null +++ b/node_modules/fs-minipass/package.json @@ -0,0 +1,39 @@ +{ + "name": "fs-minipass", + "version": "2.1.0", + "main": "index.js", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags" + }, + "keywords": [], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/fs-minipass.git" + }, + "bugs": { + "url": "https://github.com/npm/fs-minipass/issues" + }, + "homepage": "https://github.com/npm/fs-minipass#readme", + "description": "fs read and write streams based on minipass", + "dependencies": { + "minipass": "^3.0.0" + }, + "devDependencies": { + "mutate-fs": "^2.0.1", + "tap": "^14.6.4" + }, + "files": [ + "index.js" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">= 8" + } +} diff --git a/node_modules/fs.realpath/LICENSE b/node_modules/fs.realpath/LICENSE new file mode 100644 index 0000000..5bd884c --- /dev/null +++ b/node_modules/fs.realpath/LICENSE @@ -0,0 +1,43 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---- + +This library bundles a version of the `fs.realpath` and `fs.realpathSync` +methods from Node.js v0.10 under the terms of the Node.js MIT license. + +Node's license follows, also included at the header of `old.js` which contains +the licensed code: + + Copyright Joyent, Inc. and other Node contributors. 
+ + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. diff --git a/node_modules/fs.realpath/README.md b/node_modules/fs.realpath/README.md new file mode 100644 index 0000000..a42ceac --- /dev/null +++ b/node_modules/fs.realpath/README.md @@ -0,0 +1,33 @@ +# fs.realpath + +A backwards-compatible fs.realpath for Node v6 and above + +In Node v6, the JavaScript implementation of fs.realpath was replaced +with a faster (but less resilient) native implementation. That raises +new and platform-specific errors and cannot handle long or excessively +symlink-looping paths. + +This module handles those cases by detecting the new errors and +falling back to the JavaScript implementation. On versions of Node +prior to v6, it has no effect. + +## USAGE + +```js +var rp = require('fs.realpath') + +// async version +rp.realpath(someLongAndLoopingPath, function (er, real) { + // the ELOOP was handled, but it was a bit slower +}) + +// sync version +var real = rp.realpathSync(someLongAndLoopingPath) + +// monkeypatch at your own risk! 
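+// (this affects every caller in the process, not just this module)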
+// This replaces the fs.realpath/fs.realpathSync builtins +rp.monkeypatch() + +// un-do the monkeypatching +rp.unmonkeypatch() +``` diff --git a/node_modules/fs.realpath/index.js b/node_modules/fs.realpath/index.js new file mode 100644 index 0000000..b09c7c7 --- /dev/null +++ b/node_modules/fs.realpath/index.js @@ -0,0 +1,66 @@ +module.exports = realpath +realpath.realpath = realpath +realpath.sync = realpathSync +realpath.realpathSync = realpathSync +realpath.monkeypatch = monkeypatch +realpath.unmonkeypatch = unmonkeypatch + +var fs = require('fs') +var origRealpath = fs.realpath +var origRealpathSync = fs.realpathSync + +var version = process.version +var ok = /^v[0-5]\./.test(version) +var old = require('./old.js') + +function newError (er) { + return er && er.syscall === 'realpath' && ( + er.code === 'ELOOP' || + er.code === 'ENOMEM' || + er.code === 'ENAMETOOLONG' + ) +} + +function realpath (p, cache, cb) { + if (ok) { + return origRealpath(p, cache, cb) + } + + if (typeof cache === 'function') { + cb = cache + cache = null + } + origRealpath(p, cache, function (er, result) { + if (newError(er)) { + old.realpath(p, cache, cb) + } else { + cb(er, result) + } + }) +} + +function realpathSync (p, cache) { + if (ok) { + return origRealpathSync(p, cache) + } + + try { + return origRealpathSync(p, cache) + } catch (er) { + if (newError(er)) { + return old.realpathSync(p, cache) + } else { + throw er + } + } +} + +function monkeypatch () { + fs.realpath = realpath + fs.realpathSync = realpathSync +} + +function unmonkeypatch () { + fs.realpath = origRealpath + fs.realpathSync = origRealpathSync +} diff --git a/node_modules/fs.realpath/old.js b/node_modules/fs.realpath/old.js new file mode 100644 index 0000000..b40305e --- /dev/null +++ b/node_modules/fs.realpath/old.js @@ -0,0 +1,303 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var pathModule = require('path'); +var isWindows = process.platform === 'win32'; +var fs = require('fs'); + +// JavaScript implementation of realpath, ported from node pre-v6 + +var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); + +function rethrow() { + // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and + // is fairly slow to generate. 
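+  // rethrow() builds the callback used when an fs call was given none: in
+  // DEBUG mode it captures a backtrace up front so the eventual error points
+  // at the original call site; otherwise it logs (or throws under
+  // --throw-deprecation) when an error would have been silently dropped.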
+  var callback;
+  if (DEBUG) {
+    var backtrace = new Error;
+    callback = debugCallback;
+  } else
+    callback = missingCallback;
+
+  return callback;
+
+  function debugCallback(err) {
+    if (err) {
+      backtrace.message = err.message;
+      err = backtrace;
+      missingCallback(err);
+    }
+  }
+
+  function missingCallback(err) {
+    if (err) {
+      if (process.throwDeprecation)
+        throw err;  // Forgot a callback but don't know where? Use NODE_DEBUG=fs
+      else if (!process.noDeprecation) {
+        var msg = 'fs: missing callback ' + (err.stack || err.message);
+        if (process.traceDeprecation)
+          console.trace(msg);
+        else
+          console.error(msg);
+      }
+    }
+  }
+}
+
+function maybeCallback(cb) {
+  return typeof cb === 'function' ? cb : rethrow();
+}
+
+var normalize = pathModule.normalize;
+
+// Regexp that finds the next portion of a (partial) path
+// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
+if (isWindows) {
+  var nextPartRe = /(.*?)(?:[\/\\]+|$)/g;
+} else {
+  var nextPartRe = /(.*?)(?:[\/]+|$)/g;
+}
+
+// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
+if (isWindows) {
+  var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/;
+} else {
+  var splitRootRe = /^[\/]*/;
+}
+
+exports.realpathSync = function realpathSync(p, cache) {
+  // make sure p is absolute
+  p = pathModule.resolve(p);
+
+  if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
+    return cache[p];
+  }
+
+  var original = p,
+      seenLinks = {},
+      knownHard = {};
+
+  // current character position in p
+  var pos;
+  // the partial path so far, including a trailing slash if any
+  var current;
+  // the partial path without a trailing slash (except when pointing at a root)
+  var base;
+  // the partial path scanned in the previous round, with slash
+  var previous;
+
+  start();
+
+  function start() {
+    // Skip over roots
+    var m = splitRootRe.exec(p);
+    pos = m[0].length;
+    current = m[0];
+    base = m[0];
+    previous = '';
+
+    // On windows, check that the root exists. On unix there is no need.
+    if (isWindows && !knownHard[base]) {
+      fs.lstatSync(base);
+      knownHard[base] = true;
+    }
+  }
+
+  // walk down the path, swapping out linked pathparts for their real
+  // values
+  // NB: p.length changes.
+  while (pos < p.length) {
+    // find the next part
+    nextPartRe.lastIndex = pos;
+    var result = nextPartRe.exec(p);
+    previous = current;
+    current += result[0];
+    base = previous + result[1];
+    pos = nextPartRe.lastIndex;
+
+    // continue if not a symlink
+    if (knownHard[base] || (cache && cache[base] === base)) {
+      continue;
+    }
+
+    var resolvedLink;
+    if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
+      // some known symbolic link.  no need to stat again.
+      resolvedLink = cache[base];
+    } else {
+      var stat = fs.lstatSync(base);
+      if (!stat.isSymbolicLink()) {
+        knownHard[base] = true;
+        if (cache) cache[base] = base;
+        continue;
+      }
+
+      // read the link if it wasn't read before
+      // dev/ino always return 0 on windows, so skip the check.
+      var linkTarget = null;
+      if (!isWindows) {
+        var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
+        if (seenLinks.hasOwnProperty(id)) {
+          linkTarget = seenLinks[id];
+        }
+      }
+      if (linkTarget === null) {
+        fs.statSync(base);
+        linkTarget = fs.readlinkSync(base);
+      }
+      resolvedLink = pathModule.resolve(previous, linkTarget);
+      // track this, if given a cache.
+      if (cache) cache[base] = resolvedLink;
+      if (!isWindows) seenLinks[id] = linkTarget;
+    }
+
+    // resolve the link, then start over
+    p = pathModule.resolve(resolvedLink, p.slice(pos));
+    start();
+  }
+
+  if (cache) cache[original] = p;
+
+  return p;
+};
+
+
+exports.realpath = function realpath(p, cache, cb) {
+  if (typeof cb !== 'function') {
+    cb = maybeCallback(cache);
+    cache = null;
+  }
+
+  // make sure p is absolute
+  p = pathModule.resolve(p);
+
+  if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
+    return process.nextTick(cb.bind(null, null, cache[p]));
+  }
+
+  var original = p,
+      seenLinks = {},
+      knownHard = {};
+
+  // current character position in p
+  var pos;
+  // the partial path so far, including a trailing slash if any
+  var current;
+  // the partial path without a trailing slash (except when pointing at a root)
+  var base;
+  // the partial path scanned in the previous round, with slash
+  var previous;
+
+  start();
+
+  function start() {
+    // Skip over roots
+    var m = splitRootRe.exec(p);
+    pos = m[0].length;
+    current = m[0];
+    base = m[0];
+    previous = '';
+
+    // On windows, check that the root exists. On unix there is no need.
+    if (isWindows && !knownHard[base]) {
+      fs.lstat(base, function(err) {
+        if (err) return cb(err);
+        knownHard[base] = true;
+        LOOP();
+      });
+    } else {
+      process.nextTick(LOOP);
+    }
+  }
+
+  // walk down the path, swapping out linked pathparts for their real
+  // values
+  function LOOP() {
+    // stop if scanned past end of path
+    if (pos >= p.length) {
+      if (cache) cache[original] = p;
+      return cb(null, p);
+    }
+
+    // find the next part
+    nextPartRe.lastIndex = pos;
+    var result = nextPartRe.exec(p);
+    previous = current;
+    current += result[0];
+    base = previous + result[1];
+    pos = nextPartRe.lastIndex;
+
+    // continue if not a symlink
+    if (knownHard[base] || (cache && cache[base] === base)) {
+      return process.nextTick(LOOP);
+    }
+
+    if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
+      // known symbolic link.  no need to stat again.
+      return gotResolvedLink(cache[base]);
+    }
+
+    return fs.lstat(base, gotStat);
+  }
+
+  function gotStat(err, stat) {
+    if (err) return cb(err);
+
+    // if not a symlink, skip to the next path part
+    if (!stat.isSymbolicLink()) {
+      knownHard[base] = true;
+      if (cache) cache[base] = base;
+      return process.nextTick(LOOP);
+    }
+
+    // stat & read the link if not read before
+    // call gotTarget as soon as the link target is known
+    // dev/ino always return 0 on windows, so skip the check.
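+    // (seenLinks memoizes link targets keyed by dev:ino, so each distinct
+    //  symlink is only read once per resolution)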
+ if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + return gotTarget(null, seenLinks[id], base); + } + } + fs.stat(base, function(err) { + if (err) return cb(err); + + fs.readlink(base, function(err, target) { + if (!isWindows) seenLinks[id] = target; + gotTarget(err, target); + }); + }); + } + + function gotTarget(err, target, base) { + if (err) return cb(err); + + var resolvedLink = pathModule.resolve(previous, target); + if (cache) cache[base] = resolvedLink; + gotResolvedLink(resolvedLink); + } + + function gotResolvedLink(resolvedLink) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } +}; diff --git a/node_modules/fs.realpath/package.json b/node_modules/fs.realpath/package.json new file mode 100644 index 0000000..3edc57d --- /dev/null +++ b/node_modules/fs.realpath/package.json @@ -0,0 +1,26 @@ +{ + "name": "fs.realpath", + "version": "1.0.0", + "description": "Use node's fs.realpath, but fall back to the JS implementation if the native one fails", + "main": "index.js", + "dependencies": {}, + "devDependencies": {}, + "scripts": { + "test": "tap test/*.js --cov" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/fs.realpath.git" + }, + "keywords": [ + "realpath", + "fs", + "polyfill" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "old.js", + "index.js" + ] +} diff --git a/node_modules/function-bind/.editorconfig b/node_modules/function-bind/.editorconfig new file mode 100644 index 0000000..ac29ade --- /dev/null +++ b/node_modules/function-bind/.editorconfig @@ -0,0 +1,20 @@ +root = true + +[*] +indent_style = tab +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +max_line_length = 120 + +[CHANGELOG.md] +indent_style = space +indent_size = 2 + +[*.json] +max_line_length = off + +[Makefile] +max_line_length = off diff --git a/node_modules/function-bind/.eslintrc b/node_modules/function-bind/.eslintrc new file mode 100644 index 0000000..9b33d8e --- /dev/null +++ b/node_modules/function-bind/.eslintrc @@ -0,0 +1,15 @@ +{ + "root": true, + + "extends": "@ljharb", + + "rules": { + "func-name-matching": 0, + "indent": [2, 4], + "max-nested-callbacks": [2, 3], + "max-params": [2, 3], + "max-statements": [2, 20], + "no-new-func": [1], + "strict": [0] + } +} diff --git a/node_modules/function-bind/.jscs.json b/node_modules/function-bind/.jscs.json new file mode 100644 index 0000000..8c44794 --- /dev/null +++ b/node_modules/function-bind/.jscs.json @@ -0,0 +1,176 @@ +{ + "es3": true, + + "additionalRules": [], + + "requireSemicolons": true, + + "disallowMultipleSpaces": true, + + "disallowIdentifierNames": [], + + "requireCurlyBraces": { + "allExcept": [], + "keywords": ["if", "else", "for", "while", "do", "try", "catch"] + }, + + "requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch", "function"], + + "disallowSpaceAfterKeywords": [], + + "disallowSpaceBeforeComma": true, + "disallowSpaceAfterComma": false, + "disallowSpaceBeforeSemicolon": true, + + "disallowNodeTypes": [ + "DebuggerStatement", + "ForInStatement", + "LabeledStatement", + "SwitchCase", + "SwitchStatement", + "WithStatement" + ], + + "requireObjectKeysOnNewLine": { "allExcept": ["sameLine"] }, + + "requireSpacesInAnonymousFunctionExpression": { "beforeOpeningRoundBrace": true, "beforeOpeningCurlyBrace": true }, + 
"requireSpacesInNamedFunctionExpression": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInNamedFunctionExpression": { "beforeOpeningRoundBrace": true }, + "requireSpacesInFunctionDeclaration": { "beforeOpeningCurlyBrace": true }, + "disallowSpacesInFunctionDeclaration": { "beforeOpeningRoundBrace": true }, + + "requireSpaceBetweenArguments": true, + + "disallowSpacesInsideParentheses": true, + + "disallowSpacesInsideArrayBrackets": true, + + "disallowQuotedKeysInObjects": { "allExcept": ["reserved"] }, + + "disallowSpaceAfterObjectKeys": true, + + "requireCommaBeforeLineBreak": true, + + "disallowSpaceAfterPrefixUnaryOperators": ["++", "--", "+", "-", "~", "!"], + "requireSpaceAfterPrefixUnaryOperators": [], + + "disallowSpaceBeforePostfixUnaryOperators": ["++", "--"], + "requireSpaceBeforePostfixUnaryOperators": [], + + "disallowSpaceBeforeBinaryOperators": [], + "requireSpaceBeforeBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + + "requireSpaceAfterBinaryOperators": ["+", "-", "/", "*", "=", "==", "===", "!=", "!=="], + "disallowSpaceAfterBinaryOperators": [], + + "disallowImplicitTypeConversion": ["binary", "string"], + + "disallowKeywords": ["with", "eval"], + + "requireKeywordsOnNewLine": [], + "disallowKeywordsOnNewLine": ["else"], + + "requireLineFeedAtFileEnd": true, + + "disallowTrailingWhitespace": true, + + "disallowTrailingComma": true, + + "excludeFiles": ["node_modules/**", "vendor/**"], + + "disallowMultipleLineStrings": true, + + "requireDotNotation": { "allExcept": ["keywords"] }, + + "requireParenthesesAroundIIFE": true, + + "validateLineBreaks": "LF", + + "validateQuoteMarks": { + "escape": true, + "mark": "'" + }, + + "disallowOperatorBeforeLineBreak": [], + + "requireSpaceBeforeKeywords": [ + "do", + "for", + "if", + "else", + "switch", + "case", + "try", + "catch", + "finally", + "while", + "with", + "return" + ], + + "validateAlignedFunctionParameters": { + "lineBreakAfterOpeningBraces": true, + "lineBreakBeforeClosingBraces": true + }, + + "requirePaddingNewLinesBeforeExport": true, + + "validateNewlineAfterArrayElements": { + "maximum": 8 + }, + + "requirePaddingNewLinesAfterUseStrict": true, + + "disallowArrowFunctions": true, + + "disallowMultiLineTernary": true, + + "validateOrderInObjectKeys": "asc-insensitive", + + "disallowIdenticalDestructuringNames": true, + + "disallowNestedTernaries": { "maxLevel": 1 }, + + "requireSpaceAfterComma": { "allExcept": ["trailing"] }, + "requireAlignedMultilineParams": false, + + "requireSpacesInGenerator": { + "afterStar": true + }, + + "disallowSpacesInGenerator": { + "beforeStar": true + }, + + "disallowVar": false, + + "requireArrayDestructuring": false, + + "requireEnhancedObjectLiterals": false, + + "requireObjectDestructuring": false, + + "requireEarlyReturn": false, + + "requireCapitalizedConstructorsNew": { + "allExcept": ["Function", "String", "Object", "Symbol", "Number", "Date", "RegExp", "Error", "Boolean", "Array"] + }, + + "requireImportAlphabetized": false, + + "requireSpaceBeforeObjectValues": true, + "requireSpaceBeforeDestructuredValues": true, + + "disallowSpacesInsideTemplateStringPlaceholders": true, + + "disallowArrayDestructuringReturn": false, + + "requireNewlineBeforeSingleStatementsInIf": false, + + "disallowUnusedVariables": true, + + "requireSpacesInsideImportedObjectBraces": true, + + "requireUseStrict": true +} + diff --git a/node_modules/function-bind/.npmignore b/node_modules/function-bind/.npmignore new file mode 100644 index 0000000..dbb555f --- /dev/null +++ 
b/node_modules/function-bind/.npmignore @@ -0,0 +1,22 @@ +# gitignore +.DS_Store +.monitor +.*.swp +.nodemonignore +releases +*.log +*.err +fleet.json +public/browserify +bin/*.json +.bin +build +compile +.lock-wscript +coverage +node_modules + +# Only apps should have lockfiles +npm-shrinkwrap.json +package-lock.json +yarn.lock diff --git a/node_modules/function-bind/.travis.yml b/node_modules/function-bind/.travis.yml new file mode 100644 index 0000000..85f70d2 --- /dev/null +++ b/node_modules/function-bind/.travis.yml @@ -0,0 +1,168 @@ +language: node_js +os: + - linux +node_js: + - "8.4" + - "7.10" + - "6.11" + - "5.12" + - "4.8" + - "iojs-v3.3" + - "iojs-v2.5" + - "iojs-v1.8" + - "0.12" + - "0.10" + - "0.8" +before_install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ]; then npm install -g npm@1.3 ; elif [ "${TRAVIS_NODE_VERSION}" != "0.9" ]; then case "$(npm --version)" in 1.*) npm install -g npm@1.4.28 ;; 2.*) npm install -g npm@2 ;; esac ; fi' + - 'if [ "${TRAVIS_NODE_VERSION}" != "0.6" ] && [ "${TRAVIS_NODE_VERSION}" != "0.9" ]; then if [ "${TRAVIS_NODE_VERSION%${TRAVIS_NODE_VERSION#[0-9]}}" = "0" ] || [ "${TRAVIS_NODE_VERSION:0:4}" = "iojs" ]; then npm install -g npm@4.5 ; else npm install -g npm; fi; fi' +install: + - 'if [ "${TRAVIS_NODE_VERSION}" = "0.6" ]; then nvm install 0.8 && npm install -g npm@1.3 && npm install -g npm@1.4.28 && npm install -g npm@2 && npm install && nvm use "${TRAVIS_NODE_VERSION}"; else npm install; fi;' +script: + - 'if [ -n "${PRETEST-}" ]; then npm run pretest ; fi' + - 'if [ -n "${POSTTEST-}" ]; then npm run posttest ; fi' + - 'if [ -n "${COVERAGE-}" ]; then npm run coverage ; fi' + - 'if [ -n "${TEST-}" ]; then npm run tests-only ; fi' +sudo: false +env: + - TEST=true +matrix: + fast_finish: true + include: + - node_js: "node" + env: PRETEST=true + - node_js: "4" + env: COVERAGE=true + - node_js: "8.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "8.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "7.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "6.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.10" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.8" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.6" + env: TEST=true 
ALLOW_FAILURE=true + - node_js: "5.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "5.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "4.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v3.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v2.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.7" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.5" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.4" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.3" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.2" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.1" + env: TEST=true ALLOW_FAILURE=true + - node_js: "iojs-v1.0" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.11" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.9" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.6" + env: TEST=true ALLOW_FAILURE=true + - node_js: "0.4" + env: TEST=true ALLOW_FAILURE=true + allow_failures: + - os: osx + - env: TEST=true ALLOW_FAILURE=true diff --git a/node_modules/function-bind/LICENSE b/node_modules/function-bind/LICENSE new file mode 100644 index 0000000..62d6d23 --- /dev/null +++ b/node_modules/function-bind/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2013 Raynos. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
+ diff --git a/node_modules/function-bind/README.md b/node_modules/function-bind/README.md new file mode 100644 index 0000000..81862a0 --- /dev/null +++ b/node_modules/function-bind/README.md @@ -0,0 +1,48 @@ +# function-bind + + + + + +Implementation of function.prototype.bind + +## Example + +I mainly do this for unit tests I run on phantomjs. +PhantomJS does not have Function.prototype.bind :( + +```js +Function.prototype.bind = require("function-bind") +``` + +## Installation + +`npm install function-bind` + +## Contributors + + - Raynos + +## MIT Licenced + + [travis-svg]: https://travis-ci.org/Raynos/function-bind.svg + [travis-url]: https://travis-ci.org/Raynos/function-bind + [npm-badge-svg]: https://badge.fury.io/js/function-bind.svg + [npm-url]: https://npmjs.org/package/function-bind + [5]: https://coveralls.io/repos/Raynos/function-bind/badge.png + [6]: https://coveralls.io/r/Raynos/function-bind + [7]: https://gemnasium.com/Raynos/function-bind.png + [8]: https://gemnasium.com/Raynos/function-bind + [deps-svg]: https://david-dm.org/Raynos/function-bind.svg + [deps-url]: https://david-dm.org/Raynos/function-bind + [dev-deps-svg]: https://david-dm.org/Raynos/function-bind/dev-status.svg + [dev-deps-url]: https://david-dm.org/Raynos/function-bind#info=devDependencies + [11]: https://ci.testling.com/Raynos/function-bind.png + [12]: https://ci.testling.com/Raynos/function-bind diff --git a/node_modules/function-bind/implementation.js b/node_modules/function-bind/implementation.js new file mode 100644 index 0000000..cc4daec --- /dev/null +++ b/node_modules/function-bind/implementation.js @@ -0,0 +1,52 @@ +'use strict'; + +/* eslint no-invalid-this: 1 */ + +var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible '; +var slice = Array.prototype.slice; +var toStr = Object.prototype.toString; +var funcType = '[object Function]'; + +module.exports = function bind(that) { + var target = this; + if (typeof target !== 'function' || toStr.call(target) !== funcType) { + throw new TypeError(ERROR_MESSAGE + target); + } + var args = slice.call(arguments, 1); + + var bound; + var binder = function () { + if (this instanceof bound) { + var result = target.apply( + this, + args.concat(slice.call(arguments)) + ); + if (Object(result) === result) { + return result; + } + return this; + } else { + return target.apply( + that, + args.concat(slice.call(arguments)) + ); + } + }; + + var boundLength = Math.max(0, target.length - args.length); + var boundArgs = []; + for (var i = 0; i < boundLength; i++) { + boundArgs.push('$' + i); + } + + bound = Function('binder', 'return function (' + boundArgs.join(',') + '){ return binder.apply(this,arguments); }')(binder); + + if (target.prototype) { + var Empty = function Empty() {}; + Empty.prototype = target.prototype; + bound.prototype = new Empty(); + Empty.prototype = null; + } + + return bound; +}; diff --git a/node_modules/function-bind/index.js b/node_modules/function-bind/index.js new file mode 100644 index 0000000..3bb6b96 --- /dev/null +++ b/node_modules/function-bind/index.js @@ -0,0 +1,5 @@ +'use strict'; + +var implementation = require('./implementation'); + +module.exports = Function.prototype.bind || implementation; diff --git a/node_modules/function-bind/package.json b/node_modules/function-bind/package.json new file mode 100644 index 0000000..20a1727 --- /dev/null +++ b/node_modules/function-bind/package.json @@ -0,0 +1,63 @@ +{ + "name": "function-bind", + "version": "1.1.1", + "description": "Implementation of 
Function.prototype.bind", + "keywords": [ + "function", + "bind", + "shim", + "es5" + ], + "author": "Raynos ", + "repository": "git://github.com/Raynos/function-bind.git", + "main": "index", + "homepage": "https://github.com/Raynos/function-bind", + "contributors": [ + { + "name": "Raynos" + }, + { + "name": "Jordan Harband", + "url": "https://github.com/ljharb" + } + ], + "bugs": { + "url": "https://github.com/Raynos/function-bind/issues", + "email": "raynos2@gmail.com" + }, + "dependencies": {}, + "devDependencies": { + "@ljharb/eslint-config": "^12.2.1", + "covert": "^1.1.0", + "eslint": "^4.5.0", + "jscs": "^3.0.7", + "tape": "^4.8.0" + }, + "license": "MIT", + "scripts": { + "pretest": "npm run lint", + "test": "npm run tests-only", + "posttest": "npm run coverage -- --quiet", + "tests-only": "node test", + "coverage": "covert test/*.js", + "lint": "npm run jscs && npm run eslint", + "jscs": "jscs *.js */*.js", + "eslint": "eslint *.js */*.js" + }, + "testling": { + "files": "test/index.js", + "browsers": [ + "ie/8..latest", + "firefox/16..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/function-bind/test/.eslintrc b/node_modules/function-bind/test/.eslintrc new file mode 100644 index 0000000..8a56d5b --- /dev/null +++ b/node_modules/function-bind/test/.eslintrc @@ -0,0 +1,9 @@ +{ + "rules": { + "array-bracket-newline": 0, + "array-element-newline": 0, + "max-statements-per-line": [2, { "max": 2 }], + "no-invalid-this": 0, + "no-magic-numbers": 0, + } +} diff --git a/node_modules/function-bind/test/index.js b/node_modules/function-bind/test/index.js new file mode 100644 index 0000000..2edecce --- /dev/null +++ b/node_modules/function-bind/test/index.js @@ -0,0 +1,252 @@ +// jscs:disable requireUseStrict + +var test = require('tape'); + +var functionBind = require('../implementation'); +var getCurrentContext = function () { return this; }; + +test('functionBind is a function', function (t) { + t.equal(typeof functionBind, 'function'); + t.end(); +}); + +test('non-functions', function (t) { + var nonFunctions = [true, false, [], {}, 42, 'foo', NaN, /a/g]; + t.plan(nonFunctions.length); + for (var i = 0; i < nonFunctions.length; ++i) { + try { functionBind.call(nonFunctions[i]); } catch (ex) { + t.ok(ex instanceof TypeError, 'throws when given ' + String(nonFunctions[i])); + } + } + t.end(); +}); + +test('without a context', function (t) { + t.test('binds properly', function (st) { + var args, context; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + context = this; + }) + }; + namespace.func(1, 2, 3); + st.deepEqual(args, [1, 2, 3]); + st.equal(context, getCurrentContext.call()); + st.end(); + }); + + t.test('binds properly, and still supplies bound arguments', function (st) { + var args, context; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + context = this; + }, undefined, 1, 2, 3) + }; + namespace.func(4, 5, 6); + st.deepEqual(args, [1, 2, 3, 4, 5, 6]); + st.equal(context, getCurrentContext.call()); + st.end(); + }); + + t.test('returns properly', function (st) { + var args; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + return this; + }, null) + }; + var context = namespace.func(1, 2, 3); + 
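+            // `this` was bound to null, so the returned context should be the
+            // default (global) context, i.e. whatever getCurrentContext.call() yields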
st.equal(context, getCurrentContext.call(), 'returned context is namespaced context'); + st.deepEqual(args, [1, 2, 3], 'passed arguments are correct'); + st.end(); + }); + + t.test('returns properly with bound arguments', function (st) { + var args; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + return this; + }, null, 1, 2, 3) + }; + var context = namespace.func(4, 5, 6); + st.equal(context, getCurrentContext.call(), 'returned context is namespaced context'); + st.deepEqual(args, [1, 2, 3, 4, 5, 6], 'passed arguments are correct'); + st.end(); + }); + + t.test('called as a constructor', function (st) { + var thunkify = function (value) { + return function () { return value; }; + }; + st.test('returns object value', function (sst) { + var expectedReturnValue = [1, 2, 3]; + var Constructor = functionBind.call(thunkify(expectedReturnValue), null); + var result = new Constructor(); + sst.equal(result, expectedReturnValue); + sst.end(); + }); + + st.test('does not return primitive value', function (sst) { + var Constructor = functionBind.call(thunkify(42), null); + var result = new Constructor(); + sst.notEqual(result, 42); + sst.end(); + }); + + st.test('object from bound constructor is instance of original and bound constructor', function (sst) { + var A = function (x) { + this.name = x || 'A'; + }; + var B = functionBind.call(A, null, 'B'); + + var result = new B(); + sst.ok(result instanceof B, 'result is instance of bound constructor'); + sst.ok(result instanceof A, 'result is instance of original constructor'); + sst.end(); + }); + + st.end(); + }); + + t.end(); +}); + +test('with a context', function (t) { + t.test('with no bound arguments', function (st) { + var args, context; + var boundContext = {}; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + context = this; + }, boundContext) + }; + namespace.func(1, 2, 3); + st.equal(context, boundContext, 'binds a context properly'); + st.deepEqual(args, [1, 2, 3], 'supplies passed arguments'); + st.end(); + }); + + t.test('with bound arguments', function (st) { + var args, context; + var boundContext = {}; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + context = this; + }, boundContext, 1, 2, 3) + }; + namespace.func(4, 5, 6); + st.equal(context, boundContext, 'binds a context properly'); + st.deepEqual(args, [1, 2, 3, 4, 5, 6], 'supplies bound and passed arguments'); + st.end(); + }); + + t.test('returns properly', function (st) { + var boundContext = {}; + var args; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + return this; + }, boundContext) + }; + var context = namespace.func(1, 2, 3); + st.equal(context, boundContext, 'returned context is bound context'); + st.notEqual(context, getCurrentContext.call(), 'returned context is not lexical context'); + st.deepEqual(args, [1, 2, 3], 'passed arguments are correct'); + st.end(); + }); + + t.test('returns properly with bound arguments', function (st) { + var boundContext = {}; + var args; + var namespace = { + func: functionBind.call(function () { + args = Array.prototype.slice.call(arguments); + return this; + }, boundContext, 1, 2, 3) + }; + var context = namespace.func(4, 5, 6); + st.equal(context, boundContext, 'returned context is bound context'); + st.notEqual(context, getCurrentContext.call(), 'returned context is not lexical context'); + 
st.deepEqual(args, [1, 2, 3, 4, 5, 6], 'passed arguments are correct'); + st.end(); + }); + + t.test('passes the correct arguments when called as a constructor', function (st) { + var expected = { name: 'Correct' }; + var namespace = { + Func: functionBind.call(function (arg) { + return arg; + }, { name: 'Incorrect' }) + }; + var returned = new namespace.Func(expected); + st.equal(returned, expected, 'returns the right arg when called as a constructor'); + st.end(); + }); + + t.test('has the new instance\'s context when called as a constructor', function (st) { + var actualContext; + var expectedContext = { foo: 'bar' }; + var namespace = { + Func: functionBind.call(function () { + actualContext = this; + }, expectedContext) + }; + var result = new namespace.Func(); + st.equal(result instanceof namespace.Func, true); + st.notEqual(actualContext, expectedContext); + st.end(); + }); + + t.end(); +}); + +test('bound function length', function (t) { + t.test('sets a correct length without thisArg', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }); + st.equal(subject.length, 3); + st.equal(subject(1, 2, 3), 6); + st.end(); + }); + + t.test('sets a correct length with thisArg', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, {}); + st.equal(subject.length, 3); + st.equal(subject(1, 2, 3), 6); + st.end(); + }); + + t.test('sets a correct length without thisArg and first argument', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, undefined, 1); + st.equal(subject.length, 2); + st.equal(subject(2, 3), 6); + st.end(); + }); + + t.test('sets a correct length with thisArg and first argument', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, {}, 1); + st.equal(subject.length, 2); + st.equal(subject(2, 3), 6); + st.end(); + }); + + t.test('sets a correct length without thisArg and too many arguments', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, undefined, 1, 2, 3, 4); + st.equal(subject.length, 0); + st.equal(subject(), 6); + st.end(); + }); + + t.test('sets a correct length with thisArg and too many arguments', function (st) { + var subject = functionBind.call(function (a, b, c) { return a + b + c; }, {}, 1, 2, 3, 4); + st.equal(subject.length, 0); + st.equal(subject(), 6); + st.end(); + }); +}); diff --git a/node_modules/gauge/LICENSE.md b/node_modules/gauge/LICENSE.md new file mode 100644 index 0000000..5fc208f --- /dev/null +++ b/node_modules/gauge/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/gauge/README.md b/node_modules/gauge/README.md
new file mode 100644
index 0000000..cbef5a5
--- /dev/null
+++ b/node_modules/gauge/README.md
@@ -0,0 +1,402 @@
+gauge
+=====
+
+A nearly stateless terminal based horizontal gauge / progress bar.
+
+```javascript
+var Gauge = require("gauge")
+
+var gauge = new Gauge()
+
+gauge.show("working…", 0)
+setTimeout(() => { gauge.pulse(); gauge.show("working…", 0.25) }, 500)
+setTimeout(() => { gauge.pulse(); gauge.show("working…", 0.50) }, 1000)
+setTimeout(() => { gauge.pulse(); gauge.show("working…", 0.75) }, 1500)
+setTimeout(() => { gauge.pulse(); gauge.show("working…", 0.99) }, 2000)
+setTimeout(() => gauge.hide(), 2300)
+```
+
+See also the [demos](docs/demo.js):
+
+![](./docs/gauge-demo.gif)
+
+
+### CHANGES FROM 1.x
+
+Gauge 2.x is a breaking release; please see the [changelog] for details on
+what's changed if you were previously a user of this module.
+
+[changelog]: CHANGELOG.md
+
+### THE GAUGE CLASS
+
+This is the typical interface to the module– it provides a pretty
+fire-and-forget interface to displaying your status information.
+
+```
+var Gauge = require("gauge")
+
+var gauge = new Gauge([stream], [options])
+```
+
+* **stream** – *(optional, default STDERR)* A stream that progress bar
+  updates are to be written to.  Gauge honors backpressure and will pause
+  most writing if it is indicated.
+* **options** – *(optional)* An option object.
+
+Constructs a new gauge.  Gauges are drawn on a single line, and are not
+drawn if **stream** isn't a tty and a tty isn't explicitly provided.
+
+If **stream** is a terminal or if you pass in **tty** to **options** then we
+will detect terminal resizes and redraw to fit.  We do this by watching for
+`resize` events on the tty.  (To work around a bug in versions of Node prior
+to 2.5.0, we watch for them on stdout if the tty is stderr.)  Resizes to
+larger window sizes will be clean, but shrinking the window will always
+result in some cruft.
+
+**IMPORTANT:** If you previously were passing in a non-tty stream but you
+still want output (for example, a stream wrapped by the `ansi` module) then
+you need to pass in the **tty** option below, as `gauge` needs access to
+the underlying tty in order to do things like terminal resizes and terminal
+width detection.
+
+The **options** object can have the following properties, all of which are
+optional:
+
+* **updateInterval**: How often gauge updates should be drawn, in
+  milliseconds.
+* **fixedFramerate**: Defaults to false on node 0.8, true on everything
+  else.  When this is true a timer is created to trigger once every
+  `updateInterval` ms; when false, updates are printed as soon as they come
+  in, but updates more often than `updateInterval` are ignored.  The reason
+  0.8 doesn't have this set to true is that it can't `unref` its timer and
+  so it would stop your program from exiting– if you want to use this
+  feature with 0.8 just make sure you call `gauge.disable()` before you
+  expect your program to exit.
+* **themes**: A themeset to use when selecting the theme to use.  Defaults
+  to `gauge/themes`, see the [themes] documentation for details.
+* **theme**: Select a theme for use, it can be a:
+  * Theme object, in which case the **themes** option is not used.
+  * The name of a theme, which will be looked up in the current *themes*
+    object.
+  * A configuration object with any of `hasUnicode`, `hasColor` or
+    `platform` keys, which, if provided, will be used to override our
+    guesses when making a default theme selection.
+ + If no theme is selected then a default is picked using a combination of our + best guesses at your OS, color support and unicode support. +* **template**: Describes what you want your gauge to look like. The + default is what npm uses. Detailed [documentation] is later in this + document. +* **hideCursor**: Defaults to true. If true, then the cursor will be hidden + while the gauge is displayed. +* **tty**: The tty that you're ultimately writing to. Defaults to the same + as **stream**. This is used for detecting the width of the terminal and + resizes. The width used is `tty.columns - 1`. If no tty is available then + a width of `79` is assumed. +* **enabled**: Defaults to true if `tty` is a TTY, false otherwise. If true + the gauge starts enabled. If disabled then all update commands are + ignored and no gauge will be printed until you call `.enable()`. +* **Plumbing**: The class to use to actually generate the gauge for + printing. This defaults to `require('gauge/plumbing')` and ordinarily you + shouldn't need to override this. +* **cleanupOnExit**: Defaults to true. Ordinarily we register an exit + handler to make sure your cursor is turned back on and the progress bar + erased when your process exits, even if you Ctrl-C out or otherwise exit + unexpectedly. You can disable this and it won't register the exit handler. + +[has-unicode]: https://www.npmjs.com/package/has-unicode +[themes]: #themes +[documentation]: #templates + +#### `gauge.show(section | status, [completed])` + +The first argument is either the section, the name of the current thing +contributing to progress, or an object with keys like **section**, +**subsection** & **completed** (or any others you have types for in a custom +template). If you don't want to update or set any of these you can pass +`null` and it will be ignored. + +The second argument is the percent completed as a value between 0 and 1. +Without it, completion is just not updated. You'll also note that completion +can be passed in as part of a status object as the first argument. If both +it and the completed argument are passed in, the completed argument wins. + +#### `gauge.hide([cb])` + +Removes the gauge from the terminal. Optionally, callback `cb` after IO has +had an opportunity to happen (currently this just means after `setImmediate` +has called back.) + +It turns out this is important when you're pausing the progress bar on one +filehandle and printing to another– otherwise (with a big enough print) node +can end up printing the "end progress bar" bits to the progress bar filehandle +while other stuff is printing to another filehandle. These getting interleaved +can cause corruption in some terminals. + +#### `gauge.pulse([subsection])` + +* **subsection** – *(optional)* The specific thing that triggered this pulse + +Spins the spinner in the gauge to show output. If **subsection** is +included then it will be combined with the last name passed to `gauge.show`. + +#### `gauge.disable()` + +Hides the gauge and ignores further calls to `show` or `pulse`. + +#### `gauge.enable()` + +Shows the gauge and resumes updating when `show` or `pulse` is called. + +#### `gauge.isEnabled()` + +Returns true if the gauge is enabled. + +#### `gauge.setThemeset(themes)` + +Change the themeset to select a theme from. The same as the `themes` option +used in the constructor. The theme will be reselected from this themeset. + +#### `gauge.setTheme(theme)` + +Change the active theme, will be displayed with the next show or pulse. 
This can be:
+
+* Theme object, in which case the **themes** option is not used.
+* The name of a theme, which will be looked up in the current *themes*
+  object.
+* A configuration object with any of `hasUnicode`, `hasColor` or
+  `platform` keys, which, if provided, will be used to override our guesses
+  when making a default theme selection.
+
+If no theme is selected then a default is picked using a combination of our
+best guesses at your OS, color support and unicode support.
+
+#### `gauge.setTemplate(template)`
+
+Change the active template; it will be displayed with the next show or pulse.
+
+### Tracking Completion
+
+If you have more than one thing going on that you want to track completion
+of, you may find the related [are-we-there-yet] helpful.  Its `change`
+event can be wired up to the `show` method to get a more traditional
+progress bar interface.
+
+[are-we-there-yet]: https://www.npmjs.com/package/are-we-there-yet
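+
+For example, a minimal sketch of that wiring, assuming are-we-there-yet's
+documented `TrackerGroup` API (`newItem`, `completeWork`, and a `change`
+event that emits the unit name and overall completion):
+
+```javascript
+var Gauge = require("gauge")
+var TrackerGroup = require("are-we-there-yet").TrackerGroup
+
+var gauge = new Gauge()
+var top = new TrackerGroup()
+
+// `change` fires with the name of the unit that progressed and the overall
+// completion as a value between 0 and 1, which is what gauge.show expects
+top.on("change", function (name, completed) {
+  gauge.show(name, completed)
+})
+
+var files = top.newItem("files", 10) // a sub-tracker with 10 units of work
+files.completeWork(5)                // gauge now shows "files" at 50%
+```
+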
+### THEMES
+
+```
+var themes = require('gauge/themes')
+
+// fetch the default color unicode theme for this platform
+var ourTheme = themes({hasUnicode: true, hasColor: true})
+
+// fetch the default non-color unicode theme for osx
+var ourTheme = themes({hasUnicode: true, hasColor: false, platform: 'darwin'})
+
+// create a new theme based on the color ascii theme for this platform
+// that brackets the progress bar with arrows
+var ourTheme = themes.newTheme(themes({hasUnicode: false, hasColor: true}), {
+  preProgressbar: '→',
+  postProgressbar: '←'
+})
+```
+
+The object returned by `gauge/themes` is an instance of the `ThemeSet` class.
+
+```
+var ThemeSet = require('gauge/theme-set')
+var themes = new ThemeSet()
+// or
+var themes = require('gauge/themes')
+var mythemes = themes.newThemeSet() // creates a new themeset based on the default themes
+```
+
+#### themes(opts)
+#### themes.getDefault(opts)
+
+A themeset is itself a function: calling it fetches the default theme based
+on platform, unicode and color support.
+
+Options is an object with the following properties:
+
+* **hasUnicode** - If true, fetch a unicode theme; if no unicode theme is
+  available then a non-unicode theme will be used.
+* **hasColor** - If true, fetch a color theme; if no color theme is
+  available a non-color theme will be used.
+* **platform** (optional) - Defaults to `process.platform`.  If no
+  platform match is available then `fallback` is used instead.
+
+If no compatible theme can be found then an error will be thrown with a
+`code` of `EMISSINGTHEME`.
+
+#### themes.addTheme(themeName, themeObj)
+#### themes.addTheme(themeName, [parentTheme], newTheme)
+
+Adds a named theme to the themeset.  You can pass in either a theme object,
+as returned by `themes.newTheme`, or the arguments you'd pass to
+`themes.newTheme`.
+
+#### themes.getThemeNames()
+
+Return a list of all of the names of the themes in this themeset.  Suitable
+for use in `themes.getTheme(…)`.
+
+#### themes.getTheme(name)
+
+Returns the theme object from this theme set named `name`.
+
+If `name` does not exist in this themeset an error will be thrown with
+a `code` of `EMISSINGTHEME`.
+
+#### themes.setDefault([opts], themeName)
+
+`opts` is an object with the following properties.
+
+* **platform** - Defaults to `'fallback'`.  If your theme is
+  platform-specific, specify that here with the platform from
+  `process.platform`, e.g., `win32`, `darwin`, etc.
+* **hasUnicode** - Defaults to `false`.  If your theme uses unicode you
+  should set this to true.
+* **hasColor** - Defaults to `false`.  If your theme uses color you should
+  set this to true.
+
+`themeName` is the name of the theme (as given to `addTheme`) to use for
+this set of `opts`.
+
+#### themes.newTheme([parentTheme,] newTheme)
+
+Create a new theme object based on `parentTheme`.  If no `parentTheme` is
+provided then a minimal parentTheme that defines functions for rendering the
+activity indicator (spinner) and progress bar will be defined. (This
+fallback parent is defined in `gauge/base-theme`.)
+
+newTheme should be a bare object– we'll start by discussing the properties
+defined by the default themes:
+
+* **preProgressbar** - displayed prior to the progress bar, if the progress
+  bar is displayed.
+* **postProgressbar** - displayed after the progress bar, if the progress bar
+  is displayed.
+* **progressBarTheme** - The subtheme passed through to the progress bar
+  renderer; it's an object with `complete` and `remaining` properties
+  that are the strings you want repeated for those sections of the progress
+  bar.
+* **activityIndicatorTheme** - The theme for the activity indicator (spinner);
+  this can either be a string, in which case each character is a different
+  step, or an array of strings.
+* **preSubsection** - Displayed as a separator between the `section` and
+  `subsection` when the latter is printed.
+
+More generally, themes can have any value that would be a valid value when
+rendering templates.  The properties in the theme are used when their name
+matches a type in the template.  Their values can be:
+
+* **strings & numbers** - They'll be included as is
+* **function (values, theme, width)** - Should return what you want in your
+  output.  *values* is an object with values provided via `gauge.show`,
+  *theme* is the theme specific to this item (see below) or this theme
+  object, and *width* is the number of characters wide your result should be.
+
+There are a couple of special prefixes:
+
+* **pre** - Is shown prior to the property, if it's displayed.
+* **post** - Is shown after the property, if it's displayed.
+
+And one special suffix:
+
+* **Theme** - Its value is passed to a function-type item as the theme.
+
+#### themes.addToAllThemes(theme)
+
+This *mixes-in* `theme` into all themes currently defined.  It also adds it
+to the default parent theme for this themeset, so future themes added to
+this themeset will get the values from `theme` by default.
+
+#### themes.newThemeSet()
+
+Copy the current themeset into a new one.  This allows you to easily inherit
+one themeset from another.
+
+### TEMPLATES
+
+A template is an array of objects and strings that, after being evaluated,
+will be turned into the gauge line.  The default template is:
+
+```javascript
+[
+  {type: 'progressbar', length: 20},
+  {type: 'activityIndicator', kerning: 1, length: 1},
+  {type: 'section', kerning: 1, default: ''},
+  {type: 'subsection', kerning: 1, default: ''}
+]
+```
+
+The various template elements can either be **plain strings**, in which case
+they will be included verbatim in the output, or objects with the following
+properties:
+
+* *type* can be any of the following plus any keys you pass into `gauge.show` plus
+  any keys you have on a custom theme.
+  * `section` – What big thing you're working on now.
+  * `subsection` – What component of that thing is currently working.
+  * `activityIndicator` – Shows a spinner using the `activityIndicatorTheme`
+    from your active theme.
+  * `progressbar` – A progress bar representing your current `completed`
+    using the `progressbarTheme` from your active theme.
+* *kerning* – Number of spaces that must be between this item and other
+  items, if this item is displayed at all.
+* *maxLength* – The maximum length for this element.  If its value is longer
+  it will be truncated.
+* *minLength* – The minimum length for this element.  If its value is shorter
+  it will be padded according to the *align* value.
+* *align* – (Default: left) Possible values "left", "right" and "center".
+  Works as you'd expect from word processors.
+* *length* – Provides a single value for both *minLength* and *maxLength*.
+  If both *length* and *minLength* or *maxLength* are specified then the
+  latter take precedence.
+* *value* – A literal value to use for this template item.
+* *default* – A default value to use for this template item if a value
+  wasn't otherwise passed in.
+
+### PLUMBING
+
+This is the super simple, assume nothing, do no magic internals used by
+gauge to implement its ordinary interface.
+
+```
+var Plumbing = require('gauge/plumbing')
+var gauge = new Plumbing(theme, template, width)
+```
+
+* **theme**: The theme to use.
+* **template**: The template to use.
+* **width**: How wide your gauge should be.
+
+#### `gauge.setTheme(theme)`
+
+Change the active theme.
+
+#### `gauge.setTemplate(template)`
+
+Change the active template.
+
+#### `gauge.setWidth(width)`
+
+Change the width to render at.
+
+#### `gauge.hide()`
+
+Return the string necessary to hide the progress bar.
+
+#### `gauge.hideCursor()`
+
+Return a string to hide the cursor.
+
+#### `gauge.showCursor()`
+
+Return a string to show the cursor.
+
+#### `gauge.show(status)`
+
+Using `status` for values, render the provided template with the theme and
+return a string that is suitable for printing to update the gauge.
diff --git a/node_modules/gauge/lib/base-theme.js b/node_modules/gauge/lib/base-theme.js
new file mode 100644
index 0000000..00bf568
--- /dev/null
+++ b/node_modules/gauge/lib/base-theme.js
@@ -0,0 +1,18 @@
+'use strict'
+var spin = require('./spin.js')
+var progressBar = require('./progress-bar.js')
+
+module.exports = {
+  activityIndicator: function (values, theme, width) {
+    if (values.spun == null) {
+      return
+    }
+    return spin(theme, values.spun)
+  },
+  progressbar: function (values, theme, width) {
+    if (values.completed == null) {
+      return
+    }
+    return progressBar(theme, width, values.completed)
+  },
+}
diff --git a/node_modules/gauge/lib/error.js b/node_modules/gauge/lib/error.js
new file mode 100644
index 0000000..d9914ba
--- /dev/null
+++ b/node_modules/gauge/lib/error.js
@@ -0,0 +1,24 @@
+'use strict'
+var util = require('util')
+
+var User = exports.User = function User (msg) {
+  var err = new Error(msg)
+  Error.captureStackTrace(err, User)
+  err.code = 'EGAUGE'
+  return err
+}
+
+exports.MissingTemplateValue = function MissingTemplateValue (item, values) {
+  var err = new User(util.format('Missing template value "%s"', item.type))
+  Error.captureStackTrace(err, MissingTemplateValue)
+  err.template = item
+  err.values = values
+  return err
+}
+
+exports.Internal = function Internal (msg) {
+  var err = new Error(msg)
+  Error.captureStackTrace(err, Internal)
+  err.code = 'EGAUGEINTERNAL'
+  return err
+}
diff --git a/node_modules/gauge/lib/has-color.js b/node_modules/gauge/lib/has-color.js
new file mode 100644
index 0000000..16cba0e
--- /dev/null
+++ b/node_modules/gauge/lib/has-color.js
@@ -0,0 +1,4 @@
+'use strict'
+var colorSupport = require('color-support')
+
+module.exports = colorSupport().hasBasic
diff --git a/node_modules/gauge/lib/index.js
b/node_modules/gauge/lib/index.js new file mode 100644 index 0000000..37fc5ac --- /dev/null +++ b/node_modules/gauge/lib/index.js @@ -0,0 +1,289 @@ +'use strict' +var Plumbing = require('./plumbing.js') +var hasUnicode = require('has-unicode') +var hasColor = require('./has-color.js') +var onExit = require('signal-exit') +var defaultThemes = require('./themes') +var setInterval = require('./set-interval.js') +var process = require('./process.js') +var setImmediate = require('./set-immediate') + +module.exports = Gauge + +function callWith (obj, method) { + return function () { + return method.call(obj) + } +} + +function Gauge (arg1, arg2) { + var options, writeTo + if (arg1 && arg1.write) { + writeTo = arg1 + options = arg2 || {} + } else if (arg2 && arg2.write) { + writeTo = arg2 + options = arg1 || {} + } else { + writeTo = process.stderr + options = arg1 || arg2 || {} + } + + this._status = { + spun: 0, + section: '', + subsection: '', + } + this._paused = false // are we paused for back pressure? + this._disabled = true // are all progress bar updates disabled? + this._showing = false // do we WANT the progress bar on screen + this._onScreen = false // IS the progress bar on screen + this._needsRedraw = false // should we print something at next tick? + this._hideCursor = options.hideCursor == null ? true : options.hideCursor + this._fixedFramerate = options.fixedFramerate == null + ? !(/^v0\.8\./.test(process.version)) + : options.fixedFramerate + this._lastUpdateAt = null + this._updateInterval = options.updateInterval == null ? 50 : options.updateInterval + + this._themes = options.themes || defaultThemes + this._theme = options.theme + var theme = this._computeTheme(options.theme) + var template = options.template || [ + { type: 'progressbar', length: 20 }, + { type: 'activityIndicator', kerning: 1, length: 1 }, + { type: 'section', kerning: 1, default: '' }, + { type: 'subsection', kerning: 1, default: '' }, + ] + this.setWriteTo(writeTo, options.tty) + var PlumbingClass = options.Plumbing || Plumbing + this._gauge = new PlumbingClass(theme, template, this.getWidth()) + + this._$$doRedraw = callWith(this, this._doRedraw) + this._$$handleSizeChange = callWith(this, this._handleSizeChange) + + this._cleanupOnExit = options.cleanupOnExit == null || options.cleanupOnExit + this._removeOnExit = null + + if (options.enabled || (options.enabled == null && this._tty && this._tty.isTTY)) { + this.enable() + } else { + this.disable() + } +} +Gauge.prototype = {} + +Gauge.prototype.isEnabled = function () { + return !this._disabled +} + +Gauge.prototype.setTemplate = function (template) { + this._gauge.setTemplate(template) + if (this._showing) { + this._requestRedraw() + } +} + +Gauge.prototype._computeTheme = function (theme) { + if (!theme) { + theme = {} + } + if (typeof theme === 'string') { + theme = this._themes.getTheme(theme) + } else if ( + Object.keys(theme).length === 0 || theme.hasUnicode != null || theme.hasColor != null + ) { + var useUnicode = theme.hasUnicode == null ? hasUnicode() : theme.hasUnicode + var useColor = theme.hasColor == null ? 
hasColor : theme.hasColor + theme = this._themes.getDefault({ + hasUnicode: useUnicode, + hasColor: useColor, + platform: theme.platform, + }) + } + return theme +} + +Gauge.prototype.setThemeset = function (themes) { + this._themes = themes + this.setTheme(this._theme) +} + +Gauge.prototype.setTheme = function (theme) { + this._gauge.setTheme(this._computeTheme(theme)) + if (this._showing) { + this._requestRedraw() + } + this._theme = theme +} + +Gauge.prototype._requestRedraw = function () { + this._needsRedraw = true + if (!this._fixedFramerate) { + this._doRedraw() + } +} + +Gauge.prototype.getWidth = function () { + return ((this._tty && this._tty.columns) || 80) - 1 +} + +Gauge.prototype.setWriteTo = function (writeTo, tty) { + var enabled = !this._disabled + if (enabled) { + this.disable() + } + this._writeTo = writeTo + this._tty = tty || + (writeTo === process.stderr && process.stdout.isTTY && process.stdout) || + (writeTo.isTTY && writeTo) || + this._tty + if (this._gauge) { + this._gauge.setWidth(this.getWidth()) + } + if (enabled) { + this.enable() + } +} + +Gauge.prototype.enable = function () { + if (!this._disabled) { + return + } + this._disabled = false + if (this._tty) { + this._enableEvents() + } + if (this._showing) { + this.show() + } +} + +Gauge.prototype.disable = function () { + if (this._disabled) { + return + } + if (this._showing) { + this._lastUpdateAt = null + this._showing = false + this._doRedraw() + this._showing = true + } + this._disabled = true + if (this._tty) { + this._disableEvents() + } +} + +Gauge.prototype._enableEvents = function () { + if (this._cleanupOnExit) { + this._removeOnExit = onExit(callWith(this, this.disable)) + } + this._tty.on('resize', this._$$handleSizeChange) + if (this._fixedFramerate) { + this.redrawTracker = setInterval(this._$$doRedraw, this._updateInterval) + if (this.redrawTracker.unref) { + this.redrawTracker.unref() + } + } +} + +Gauge.prototype._disableEvents = function () { + this._tty.removeListener('resize', this._$$handleSizeChange) + if (this._fixedFramerate) { + clearInterval(this.redrawTracker) + } + if (this._removeOnExit) { + this._removeOnExit() + } +} + +Gauge.prototype.hide = function (cb) { + if (this._disabled) { + return cb && process.nextTick(cb) + } + if (!this._showing) { + return cb && process.nextTick(cb) + } + this._showing = false + this._doRedraw() + cb && setImmediate(cb) +} + +Gauge.prototype.show = function (section, completed) { + this._showing = true + if (typeof section === 'string') { + this._status.section = section + } else if (typeof section === 'object') { + var sectionKeys = Object.keys(section) + for (var ii = 0; ii < sectionKeys.length; ++ii) { + var key = sectionKeys[ii] + this._status[key] = section[key] + } + } + if (completed != null) { + this._status.completed = completed + } + if (this._disabled) { + return + } + this._requestRedraw() +} + +Gauge.prototype.pulse = function (subsection) { + this._status.subsection = subsection || '' + this._status.spun++ + if (this._disabled) { + return + } + if (!this._showing) { + return + } + this._requestRedraw() +} + +Gauge.prototype._handleSizeChange = function () { + this._gauge.setWidth(this._tty.columns - 1) + this._requestRedraw() +} + +Gauge.prototype._doRedraw = function () { + if (this._disabled || this._paused) { + return + } + if (!this._fixedFramerate) { + var now = Date.now() + if (this._lastUpdateAt && now - this._lastUpdateAt < this._updateInterval) { + return + } + this._lastUpdateAt = now + } + if (!this._showing && 
this._onScreen) { + this._onScreen = false + var result = this._gauge.hide() + if (this._hideCursor) { + result += this._gauge.showCursor() + } + return this._writeTo.write(result) + } + if (!this._showing && !this._onScreen) { + return + } + if (this._showing && !this._onScreen) { + this._onScreen = true + this._needsRedraw = true + if (this._hideCursor) { + this._writeTo.write(this._gauge.hideCursor()) + } + } + if (!this._needsRedraw) { + return + } + if (!this._writeTo.write(this._gauge.show(this._status))) { + this._paused = true + this._writeTo.on('drain', callWith(this, function () { + this._paused = false + this._doRedraw() + })) + } +} diff --git a/node_modules/gauge/lib/plumbing.js b/node_modules/gauge/lib/plumbing.js new file mode 100644 index 0000000..c4dc3e0 --- /dev/null +++ b/node_modules/gauge/lib/plumbing.js @@ -0,0 +1,50 @@ +'use strict' +var consoleControl = require('console-control-strings') +var renderTemplate = require('./render-template.js') +var validate = require('aproba') + +var Plumbing = module.exports = function (theme, template, width) { + if (!width) { + width = 80 + } + validate('OAN', [theme, template, width]) + this.showing = false + this.theme = theme + this.width = width + this.template = template +} +Plumbing.prototype = {} + +Plumbing.prototype.setTheme = function (theme) { + validate('O', [theme]) + this.theme = theme +} + +Plumbing.prototype.setTemplate = function (template) { + validate('A', [template]) + this.template = template +} + +Plumbing.prototype.setWidth = function (width) { + validate('N', [width]) + this.width = width +} + +Plumbing.prototype.hide = function () { + return consoleControl.gotoSOL() + consoleControl.eraseLine() +} + +Plumbing.prototype.hideCursor = consoleControl.hideCursor + +Plumbing.prototype.showCursor = consoleControl.showCursor + +Plumbing.prototype.show = function (status) { + var values = Object.create(this.theme) + for (var key in status) { + values[key] = status[key] + } + + return renderTemplate(this.width, this.template, values).trim() + + consoleControl.color('reset') + + consoleControl.eraseLine() + consoleControl.gotoSOL() +} diff --git a/node_modules/gauge/lib/process.js b/node_modules/gauge/lib/process.js new file mode 100644 index 0000000..05e8569 --- /dev/null +++ b/node_modules/gauge/lib/process.js @@ -0,0 +1,3 @@ +'use strict' +// this exists so we can replace it during testing +module.exports = process diff --git a/node_modules/gauge/lib/progress-bar.js b/node_modules/gauge/lib/progress-bar.js new file mode 100644 index 0000000..184ff25 --- /dev/null +++ b/node_modules/gauge/lib/progress-bar.js @@ -0,0 +1,41 @@ +'use strict' +var validate = require('aproba') +var renderTemplate = require('./render-template.js') +var wideTruncate = require('./wide-truncate') +var stringWidth = require('string-width') + +module.exports = function (theme, width, completed) { + validate('ONN', [theme, width, completed]) + if (completed < 0) { + completed = 0 + } + if (completed > 1) { + completed = 1 + } + if (width <= 0) { + return '' + } + var sofar = Math.round(width * completed) + var rest = width - sofar + var template = [ + { type: 'complete', value: repeat(theme.complete, sofar), length: sofar }, + { type: 'remaining', value: repeat(theme.remaining, rest), length: rest }, + ] + return renderTemplate(width, template, theme) +} + +// lodash's way of repeating +function repeat (string, width) { + var result = '' + var n = width + do { + if (n % 2) { + result += string + } + n = Math.floor(n / 2) + /* eslint 
no-self-assign: 0 */ + string += string + } while (n && stringWidth(result) < width) + + return wideTruncate(result, width) +} diff --git a/node_modules/gauge/lib/render-template.js b/node_modules/gauge/lib/render-template.js new file mode 100644 index 0000000..d1b52c0 --- /dev/null +++ b/node_modules/gauge/lib/render-template.js @@ -0,0 +1,222 @@ +'use strict' +var align = require('wide-align') +var validate = require('aproba') +var wideTruncate = require('./wide-truncate') +var error = require('./error') +var TemplateItem = require('./template-item') + +function renderValueWithValues (values) { + return function (item) { + return renderValue(item, values) + } +} + +var renderTemplate = module.exports = function (width, template, values) { + var items = prepareItems(width, template, values) + var rendered = items.map(renderValueWithValues(values)).join('') + return align.left(wideTruncate(rendered, width), width) +} + +function preType (item) { + var cappedTypeName = item.type[0].toUpperCase() + item.type.slice(1) + return 'pre' + cappedTypeName +} + +function postType (item) { + var cappedTypeName = item.type[0].toUpperCase() + item.type.slice(1) + return 'post' + cappedTypeName +} + +function hasPreOrPost (item, values) { + if (!item.type) { + return + } + return values[preType(item)] || values[postType(item)] +} + +function generatePreAndPost (baseItem, parentValues) { + var item = Object.assign({}, baseItem) + var values = Object.create(parentValues) + var template = [] + var pre = preType(item) + var post = postType(item) + if (values[pre]) { + template.push({ value: values[pre] }) + values[pre] = null + } + item.minLength = null + item.length = null + item.maxLength = null + template.push(item) + values[item.type] = values[item.type] + if (values[post]) { + template.push({ value: values[post] }) + values[post] = null + } + return function ($1, $2, length) { + return renderTemplate(length, template, values) + } +} + +function prepareItems (width, template, values) { + function cloneAndObjectify (item, index, arr) { + var cloned = new TemplateItem(item, width) + var type = cloned.type + if (cloned.value == null) { + if (!(type in values)) { + if (cloned.default == null) { + throw new error.MissingTemplateValue(cloned, values) + } else { + cloned.value = cloned.default + } + } else { + cloned.value = values[type] + } + } + if (cloned.value == null || cloned.value === '') { + return null + } + cloned.index = index + cloned.first = index === 0 + cloned.last = index === arr.length - 1 + if (hasPreOrPost(cloned, values)) { + cloned.value = generatePreAndPost(cloned, values) + } + return cloned + } + + var output = template.map(cloneAndObjectify).filter(function (item) { + return item != null + }) + + var remainingSpace = width + var variableCount = output.length + + function consumeSpace (length) { + if (length > remainingSpace) { + length = remainingSpace + } + remainingSpace -= length + } + + function finishSizing (item, length) { + if (item.finished) { + throw new error.Internal('Tried to finish template item that was already finished') + } + if (length === Infinity) { + throw new error.Internal('Length of template item cannot be infinity') + } + if (length != null) { + item.length = length + } + item.minLength = null + item.maxLength = null + --variableCount + item.finished = true + if (item.length == null) { + item.length = item.getBaseLength() + } + if (item.length == null) { + throw new error.Internal('Finished template items must have a length') + } + consumeSpace(item.getLength()) 
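+    // getLength() includes padLeft/padRight, so an item's kerning padding is
+    // reserved out of remainingSpace along with its own width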
+ } + + output.forEach(function (item) { + if (!item.kerning) { + return + } + var prevPadRight = item.first ? 0 : output[item.index - 1].padRight + if (!item.first && prevPadRight < item.kerning) { + item.padLeft = item.kerning - prevPadRight + } + if (!item.last) { + item.padRight = item.kerning + } + }) + + // Finish any that have a fixed (literal or intuited) length + output.forEach(function (item) { + if (item.getBaseLength() == null) { + return + } + finishSizing(item) + }) + + var resized = 0 + var resizing + var hunkSize + do { + resizing = false + hunkSize = Math.round(remainingSpace / variableCount) + output.forEach(function (item) { + if (item.finished) { + return + } + if (!item.maxLength) { + return + } + if (item.getMaxLength() < hunkSize) { + finishSizing(item, item.maxLength) + resizing = true + } + }) + } while (resizing && resized++ < output.length) + if (resizing) { + throw new error.Internal('Resize loop iterated too many times while determining maxLength') + } + + resized = 0 + do { + resizing = false + hunkSize = Math.round(remainingSpace / variableCount) + output.forEach(function (item) { + if (item.finished) { + return + } + if (!item.minLength) { + return + } + if (item.getMinLength() >= hunkSize) { + finishSizing(item, item.minLength) + resizing = true + } + }) + } while (resizing && resized++ < output.length) + if (resizing) { + throw new error.Internal('Resize loop iterated too many times while determining minLength') + } + + hunkSize = Math.round(remainingSpace / variableCount) + output.forEach(function (item) { + if (item.finished) { + return + } + finishSizing(item, hunkSize) + }) + + return output +} + +function renderFunction (item, values, length) { + validate('OON', arguments) + if (item.type) { + return item.value(values, values[item.type + 'Theme'] || {}, length) + } else { + return item.value(values, {}, length) + } +} + +function renderValue (item, values) { + var length = item.getBaseLength() + var value = typeof item.value === 'function' ? renderFunction(item, values, length) : item.value + if (value == null || value === '') { + return '' + } + var alignWith = align[item.align] || align.left + var leftPadding = item.padLeft ? align.left('', item.padLeft) : '' + var rightPadding = item.padRight ? 
align.right('', item.padRight) : '' + var truncated = wideTruncate(String(value), length) + var aligned = alignWith(truncated, length) + return leftPadding + aligned + rightPadding +} diff --git a/node_modules/gauge/lib/set-immediate.js b/node_modules/gauge/lib/set-immediate.js new file mode 100644 index 0000000..6650a48 --- /dev/null +++ b/node_modules/gauge/lib/set-immediate.js @@ -0,0 +1,7 @@ +'use strict' +var process = require('./process') +try { + module.exports = setImmediate +} catch (ex) { + module.exports = process.nextTick +} diff --git a/node_modules/gauge/lib/set-interval.js b/node_modules/gauge/lib/set-interval.js new file mode 100644 index 0000000..5761987 --- /dev/null +++ b/node_modules/gauge/lib/set-interval.js @@ -0,0 +1,3 @@ +'use strict' +// this exists so we can replace it during testing +module.exports = setInterval diff --git a/node_modules/gauge/lib/spin.js b/node_modules/gauge/lib/spin.js new file mode 100644 index 0000000..34142ee --- /dev/null +++ b/node_modules/gauge/lib/spin.js @@ -0,0 +1,5 @@ +'use strict' + +module.exports = function spin (spinstr, spun) { + return spinstr[spun % spinstr.length] +} diff --git a/node_modules/gauge/lib/template-item.js b/node_modules/gauge/lib/template-item.js new file mode 100644 index 0000000..e307e9b --- /dev/null +++ b/node_modules/gauge/lib/template-item.js @@ -0,0 +1,87 @@ +'use strict' +var stringWidth = require('string-width') + +module.exports = TemplateItem + +function isPercent (num) { + if (typeof num !== 'string') { + return false + } + return num.slice(-1) === '%' +} + +function percent (num) { + return Number(num.slice(0, -1)) / 100 +} + +function TemplateItem (values, outputLength) { + this.overallOutputLength = outputLength + this.finished = false + this.type = null + this.value = null + this.length = null + this.maxLength = null + this.minLength = null + this.kerning = null + this.align = 'left' + this.padLeft = 0 + this.padRight = 0 + this.index = null + this.first = null + this.last = null + if (typeof values === 'string') { + this.value = values + } else { + for (var prop in values) { + this[prop] = values[prop] + } + } + // Realize percents + if (isPercent(this.length)) { + this.length = Math.round(this.overallOutputLength * percent(this.length)) + } + if (isPercent(this.minLength)) { + this.minLength = Math.round(this.overallOutputLength * percent(this.minLength)) + } + if (isPercent(this.maxLength)) { + this.maxLength = Math.round(this.overallOutputLength * percent(this.maxLength)) + } + return this +} + +TemplateItem.prototype = {} + +TemplateItem.prototype.getBaseLength = function () { + var length = this.length + if ( + length == null && + typeof this.value === 'string' && + this.maxLength == null && + this.minLength == null + ) { + length = stringWidth(this.value) + } + return length +} + +TemplateItem.prototype.getLength = function () { + var length = this.getBaseLength() + if (length == null) { + return null + } + return length + this.padLeft + this.padRight +} + +TemplateItem.prototype.getMaxLength = function () { + if (this.maxLength == null) { + return null + } + return this.maxLength + this.padLeft + this.padRight +} + +TemplateItem.prototype.getMinLength = function () { + if (this.minLength == null) { + return null + } + return this.minLength + this.padLeft + this.padRight +} diff --git a/node_modules/gauge/lib/theme-set.js b/node_modules/gauge/lib/theme-set.js new file mode 100644 index 0000000..643d7db --- /dev/null +++ b/node_modules/gauge/lib/theme-set.js @@ -0,0 +1,122 @@ +'use strict' + 
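+// A theme set is itself callable: invoking it with options returns the
+// default theme for that platform/unicode/color combination (see getDefault
+// below), while its attached methods manage named themes and defaults.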
+module.exports = function () { + return ThemeSetProto.newThemeSet() +} + +var ThemeSetProto = {} + +ThemeSetProto.baseTheme = require('./base-theme.js') + +ThemeSetProto.newTheme = function (parent, theme) { + if (!theme) { + theme = parent + parent = this.baseTheme + } + return Object.assign({}, parent, theme) +} + +ThemeSetProto.getThemeNames = function () { + return Object.keys(this.themes) +} + +ThemeSetProto.addTheme = function (name, parent, theme) { + this.themes[name] = this.newTheme(parent, theme) +} + +ThemeSetProto.addToAllThemes = function (theme) { + var themes = this.themes + Object.keys(themes).forEach(function (name) { + Object.assign(themes[name], theme) + }) + Object.assign(this.baseTheme, theme) +} + +ThemeSetProto.getTheme = function (name) { + if (!this.themes[name]) { + throw this.newMissingThemeError(name) + } + return this.themes[name] +} + +ThemeSetProto.setDefault = function (opts, name) { + if (name == null) { + name = opts + opts = {} + } + var platform = opts.platform == null ? 'fallback' : opts.platform + var hasUnicode = !!opts.hasUnicode + var hasColor = !!opts.hasColor + if (!this.defaults[platform]) { + this.defaults[platform] = { true: {}, false: {} } + } + this.defaults[platform][hasUnicode][hasColor] = name +} + +ThemeSetProto.getDefault = function (opts) { + if (!opts) { + opts = {} + } + var platformName = opts.platform || process.platform + var platform = this.defaults[platformName] || this.defaults.fallback + var hasUnicode = !!opts.hasUnicode + var hasColor = !!opts.hasColor + if (!platform) { + throw this.newMissingDefaultThemeError(platformName, hasUnicode, hasColor) + } + if (!platform[hasUnicode][hasColor]) { + if (hasUnicode && hasColor && platform[!hasUnicode][hasColor]) { + hasUnicode = false + } else if (hasUnicode && hasColor && platform[hasUnicode][!hasColor]) { + hasColor = false + } else if (hasUnicode && hasColor && platform[!hasUnicode][!hasColor]) { + hasUnicode = false + hasColor = false + } else if (hasUnicode && !hasColor && platform[!hasUnicode][hasColor]) { + hasUnicode = false + } else if (!hasUnicode && hasColor && platform[hasUnicode][!hasColor]) { + hasColor = false + } else if (platform === this.defaults.fallback) { + throw this.newMissingDefaultThemeError(platformName, hasUnicode, hasColor) + } + } + if (platform[hasUnicode][hasColor]) { + return this.getTheme(platform[hasUnicode][hasColor]) + } else { + return this.getDefault(Object.assign({}, opts, { platform: 'fallback' })) + } +} + +ThemeSetProto.newMissingThemeError = function newMissingThemeError (name) { + var err = new Error('Could not find a gauge theme named "' + name + '"') + Error.captureStackTrace.call(err, newMissingThemeError) + err.theme = name + err.code = 'EMISSINGTHEME' + return err +} + +ThemeSetProto.newMissingDefaultThemeError = + function newMissingDefaultThemeError (platformName, hasUnicode, hasColor) { + var err = new Error( + 'Could not find a gauge theme for your platform/unicode/color use combo:\n' + + ' platform = ' + platformName + '\n' + + ' hasUnicode = ' + hasUnicode + '\n' + + ' hasColor = ' + hasColor) + Error.captureStackTrace.call(err, newMissingDefaultThemeError) + err.platform = platformName + err.hasUnicode = hasUnicode + err.hasColor = hasColor + err.code = 'EMISSINGTHEME' + return err + } + +ThemeSetProto.newThemeSet = function () { + var themeset = function (opts) { + return themeset.getDefault(opts) + } + return Object.assign(themeset, ThemeSetProto, { + themes: Object.assign({}, this.themes), + baseTheme: Object.assign({}, 
this.baseTheme), + defaults: JSON.parse(JSON.stringify(this.defaults || {})), + }) +} diff --git a/node_modules/gauge/lib/themes.js b/node_modules/gauge/lib/themes.js new file mode 100644 index 0000000..d2e62bb --- /dev/null +++ b/node_modules/gauge/lib/themes.js @@ -0,0 +1,56 @@ +'use strict' +var color = require('console-control-strings').color +var ThemeSet = require('./theme-set.js') + +var themes = module.exports = new ThemeSet() + +themes.addTheme('ASCII', { + preProgressbar: '[', + postProgressbar: ']', + progressbarTheme: { + complete: '#', + remaining: '.', + }, + activityIndicatorTheme: '-\\|/', + preSubsection: '>', +}) + +themes.addTheme('colorASCII', themes.getTheme('ASCII'), { + progressbarTheme: { + preComplete: color('bgBrightWhite', 'brightWhite'), + complete: '#', + postComplete: color('reset'), + preRemaining: color('bgBrightBlack', 'brightBlack'), + remaining: '.', + postRemaining: color('reset'), + }, +}) + +themes.addTheme('brailleSpinner', { + preProgressbar: '(', + postProgressbar: ')', + progressbarTheme: { + complete: '#', + remaining: '⠂', + }, + activityIndicatorTheme: '⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏', + preSubsection: '>', +}) + +themes.addTheme('colorBrailleSpinner', themes.getTheme('brailleSpinner'), { + progressbarTheme: { + preComplete: color('bgBrightWhite', 'brightWhite'), + complete: '#', + postComplete: color('reset'), + preRemaining: color('bgBrightBlack', 'brightBlack'), + remaining: '⠂', + postRemaining: color('reset'), + }, +}) + +themes.setDefault({}, 'ASCII') +themes.setDefault({ hasColor: true }, 'colorASCII') +themes.setDefault({ platform: 'darwin', hasUnicode: true }, 'brailleSpinner') +themes.setDefault({ platform: 'darwin', hasUnicode: true, hasColor: true }, 'colorBrailleSpinner') +themes.setDefault({ platform: 'linux', hasUnicode: true }, 'brailleSpinner') +themes.setDefault({ platform: 'linux', hasUnicode: true, hasColor: true }, 'colorBrailleSpinner') diff --git a/node_modules/gauge/lib/wide-truncate.js b/node_modules/gauge/lib/wide-truncate.js new file mode 100644 index 0000000..5284a69 --- /dev/null +++ b/node_modules/gauge/lib/wide-truncate.js @@ -0,0 +1,31 @@ +'use strict' +var stringWidth = require('string-width') +var stripAnsi = require('strip-ansi') + +module.exports = wideTruncate + +function wideTruncate (str, target) { + if (stringWidth(str) === 0) { + return str + } + if (target <= 0) { + return '' + } + if (stringWidth(str) <= target) { + return str + } + + // We compute the number of bytes of ansi sequences here and add + // that to our initial truncation to ensure that we don't slice one + // that we want to keep in half. + var noAnsi = stripAnsi(str) + var ansiSize = str.length + noAnsi.length + var truncated = str.slice(0, target + ansiSize) + + // we have to shrink the result to account for our ansi sequence buffer + // (if an ansi sequence was truncated) and double width characters. + while (stringWidth(truncated) > target) { + truncated = truncated.slice(0, -1) + } + return truncated +} diff --git a/node_modules/gauge/node_modules/.bin/color-support b/node_modules/gauge/node_modules/.bin/color-support new file mode 100644 index 0000000..78d2037 --- /dev/null +++ b/node_modules/gauge/node_modules/.bin/color-support @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../color-support/bin.js" "$@" + ret=$? +else + node "$basedir/../../../color-support/bin.js" "$@" + ret=$? 
+fi +exit $ret diff --git a/node_modules/gauge/node_modules/.bin/color-support.cmd b/node_modules/gauge/node_modules/.bin/color-support.cmd new file mode 100644 index 0000000..16ea753 --- /dev/null +++ b/node_modules/gauge/node_modules/.bin/color-support.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\color-support\bin.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\color-support\bin.js" %* +) \ No newline at end of file diff --git a/node_modules/gauge/package.json b/node_modules/gauge/package.json new file mode 100644 index 0000000..bce3e68 --- /dev/null +++ b/node_modules/gauge/package.json @@ -0,0 +1,66 @@ +{ + "name": "gauge", + "version": "4.0.4", + "description": "A terminal based horizontal gauge", + "main": "lib", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "snap": "tap", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/gauge.git" + }, + "keywords": [ + "progressbar", + "progress", + "gauge" + ], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/gauge/issues" + }, + "homepage": "https://github.com/npm/gauge", + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.3", + "console-control-strings": "^1.1.0", + "has-unicode": "^2.0.1", + "signal-exit": "^3.0.7", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.5" + }, + "devDependencies": { + "@npmcli/eslint-config": "^3.0.1", + "@npmcli/template-oss": "3.2.0", + "readable-stream": "^3.6.0", + "tap": "^16.0.1" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + }, + "tap": { + "branches": 79, + "statements": 89, + "functions": 92, + "lines": 90 + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "3.2.0" + } +} diff --git a/node_modules/gaze/LICENSE-MIT b/node_modules/gaze/LICENSE-MIT new file mode 100644 index 0000000..0733a63 --- /dev/null +++ b/node_modules/gaze/LICENSE-MIT @@ -0,0 +1,22 @@ +Copyright (c) 2018 Kyle Robinson Young + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/gaze/README.md b/node_modules/gaze/README.md
new file mode 100644
index 0000000..d7debb5
--- /dev/null
+++ b/node_modules/gaze/README.md
@@ -0,0 +1,196 @@
+# gaze [![Build Status](http://img.shields.io/travis/shama/gaze.svg)](https://travis-ci.org/shama/gaze) [![Build status](https://ci.appveyor.com/api/projects/status/vtx65w9eg511tgo4)](https://ci.appveyor.com/project/shama/gaze)
+
+A globbing `fs.watch` wrapper built from the best parts of other fine watch libs.
+Compatible with Node.js >= 4.x, Windows, macOS, and Linux.
+
+![gaze](http://dontkry.com/images/repos/gaze.png)
+
+[![NPM](https://nodei.co/npm/gaze.png?downloads=true)](https://nodei.co/npm/gaze/)
+
+## Usage
+Install the module with `npm install gaze`, or add it to your `package.json`
+and run `npm install`.
+
+```javascript
+var gaze = require('gaze');
+
+// Watch all .js files/dirs in process.cwd()
+gaze('**/*.js', function(err, watcher) {
+  // Files have all started watching
+  // watcher === this
+
+  // Get all watched files
+  var watched = this.watched();
+
+  // On file changed
+  this.on('changed', function(filepath) {
+    console.log(filepath + ' was changed');
+  });
+
+  // On file added
+  this.on('added', function(filepath) {
+    console.log(filepath + ' was added');
+  });
+
+  // On file deleted
+  this.on('deleted', function(filepath) {
+    console.log(filepath + ' was deleted');
+  });
+
+  // On changed/added/deleted
+  this.on('all', function(event, filepath) {
+    console.log(filepath + ' was ' + event);
+  });
+
+  // Get watched files with relative paths
+  var files = this.relative();
+});
+
+// Also accepts an array of patterns
+gaze(['stylesheets/*.css', 'images/**/*.png'], function() {
+  // Add more patterns later to be watched
+  this.add(['js/*.js']);
+});
+```
+
+### Alternate Interface
+
+```javascript
+var Gaze = require('gaze').Gaze;
+
+var gaze = new Gaze('**/*');
+
+// Files have all started watching
+gaze.on('ready', function(watcher) { });
+
+// An added/changed/deleted event has occurred
+gaze.on('all', function(event, filepath) { });
+```
+
+### Errors
+
+```javascript
+gaze('**/*', function(error, watcher) {
+  if (error) {
+    // Handle error if it occurred while starting up
+  }
+});
+
+// Or with the alternative interface
+var gaze = new Gaze();
+gaze.on('error', function(error) {
+  // Handle error here
+});
+gaze.add('**/*');
+```
+
+### Minimatch / Glob
+
+See [isaacs's `minimatch`](https://github.com/isaacs/minimatch) for more
+information on glob patterns.
+
+## Documentation
+
+### gaze([patterns, options, callback])
+
+* `patterns` {`String`|`Array`} File patterns to be matched
+* `options` {`Object`}
+* `callback` {`Function`}
+  * `err` {`Error` | `null`}
+  * `watcher` {`Object`} Instance of the `Gaze` watcher
+
+### Class: `gaze.Gaze`
+
+Create a `Gaze` object by instantiating the `gaze.Gaze` class.
+
+```javascript
+var Gaze = require('gaze').Gaze;
+var gaze = new Gaze(pattern, options, callback);
+```
+
+#### Properties
+
+* `options` The options object passed in.
+  * `interval` {integer} Interval to pass to `fs.watchFile`
+  * `debounceDelay` {integer} Delay for events called in succession for the same
+    file/event in milliseconds
+  * `mode` {string} Force the watch mode. Either `'auto'` (default), `'watch'` (force native events), or `'poll'` (force stat polling).
+  * `cwd` {string} The current working directory to base file patterns from. Default is `process.cwd()`.
+
+#### Events
+
+* `ready(watcher)` When files have been globbed and watching has begun.
+* `all(event, filepath)` When an `added`, `changed`, `renamed`, or `deleted` event occurs. +* `added(filepath)` When a file has been added to a watch directory. +* `changed(filepath)` When a file has been changed. +* `deleted(filepath)` When a file has been deleted. +* `renamed(newPath, oldPath)` When a file has been renamed. +* `end()` When the watcher is closed and watches have been removed. +* `error(err)` When an error occurs. +* `nomatch` When no files have been matched. + +#### Methods + +* `emit(event, [...])` Wrapper for `EventEmitter.emit`. + `added`|`changed`|`renamed`|`deleted` events will also trigger the `all` event. +* `close()` Unwatch all files and reset the watch instance. +* `add(patterns, callback)` Adds file(s) `patterns` to be watched. +* `remove(filepath)` Removes a file or directory from being watched. Does not + recurse directories. +* `watched()` Returns the currently watched files. +* `relative([dir, unixify])` Returns the currently watched files with relative paths. + * `dir` {string} Only return relative files for this directory. + * `unixify` {boolean} Return paths with `/` instead of `\\` if on Windows. + +## Similar Projects + +Other great watch libraries to try are: + +* [paulmillr's `chokidar`](https://github.com/paulmillr/chokidar) +* [amasad's `sane`](https://github.com/amasad/sane) +* [mikeal's `watch`](https://github.com/mikeal/watch) +* [github's `pathwatcher`](https://github.com/atom/node-pathwatcher) +* [bevry's `watchr`](https://github.com/bevry/watchr) + +## Contributing +In lieu of a formal styleguide, take care to maintain the existing coding style. +Add unit tests for any new or changed functionality. Lint and test your code +using [grunt](http://gruntjs.com/). + +## Release History +* 1.1.3 - Fix for Node 10 support (@aredridel). Officially dropping support for Node < 4. +* 1.1.2 - Prevent more `ENOENT` errors from escaping (@alexgorbatchev). +* 1.1.1 - Prevent `fs.watch` errors from escaping error handler (@rosen-vladimirov). Fix `_addToWatched` without `path.sep` (@wyicwx). +* 1.1.0 - Update to `globule@1.0.0` with `minimatch >= 3.0.0`. +* 1.0.0 - Revert back to 0.5.2. Drop support for Node.js v0.8. Fix for `maxListeners`. Update `globule` to `0.2.0`. +* 0.6.4 - Catch and emit `error` from `readdir` (@oconnore). Fix for `0 maxListeners`. Use `graceful-fs` to avoid `EMFILE` errors in other places `fs` is used. Better method to determine if `pathwatcher` was built. Fix keeping process alive too much, only init `pathwatcher` if a file is being watched. Set min required to Windows Vista when building on Windows (@pvolok). +* 0.6.3 - Add support for Node.js v0.11 +* 0.6.2 - Fix argument error with `watched()`. Fix for erroneous `added` events on folders. Ignore `msvs` build error 4244. +* 0.6.1 - Fix for absolute paths. +* 0.6.0 - Uses native OS events (fork of `pathwatcher`) but can fall back to stat polling. Everything is async to avoid blocking, including `relative()` and `watched()`. Better error handling. Update to `globule@0.2.0`. No longer watches `cwd` by default. Added `mode` option. Better `EMFILE` message. Avoids `ENOENT` errors with symlinks. All constructor arguments are optional. +* 0.5.2 - Fix for `ENOENT` error with non-existent symlinks [BACKPORTED]. +* 0.5.1 - Use `setImmediate` (`process.nextTick` for Node.js v0.8) to defer `ready`/`nomatch` events (@amasad). +* 0.5.0 - Process is now kept alive while watching files. Emits a `nomatch` event when no files are matching. 
+* 0.4.3 - Track file additions in newly created folders (@brett-shwom). +* 0.4.2 - Fix `.remove()` method to remove a single file in a directory (@kaelzhang). Fixing “`Cannot call method 'call' of undefined`” (@krasimir). Track new file additions within folders (@brett-shwom). +* 0.4.1 - Fix `watchDir` not respecting close in race condition (@chrisirhc). +* 0.4.0 - Drop support for Node.js v0.6. Use `globule` for file matching. Avoid Node.js v0.10 `path.resolve`/`join` errors. Register new files when added to non-existent folder. Multiple instances can now poll the same files (@jpommerening). +* 0.3.4 - Code clean up. Fix “`path must be strings`” errors (@groner). Fix incorrect `added` events (@groner). +* 0.3.3 - Fix for multiple patterns with negate. +* 0.3.2 - Emit `end` before `removeAllListeners`. +* 0.3.1 - Fix `added` events within subfolder patterns. +* 0.3.0 - Handle safewrite events, `forceWatchMethod` option removed, bug fixes and watch optimizations (@rgaskill). +* 0.2.2 - Fix issue where subsequent `add` calls dont get watched (@samcday). `removeAllListeners` on `close`. +* 0.2.1 - Fix issue with invalid `added` events in current working dir. +* 0.2.0 - Support and mark folders with `path.sep`. Add `forceWatchMethod` option. Support `renamed` events. +* 0.1.6 - Recognize the `cwd` option properly +* 0.1.5 - Catch “`too many open file`” errors +* 0.1.4 - Really fix the race condition with 2 watches +* 0.1.3 - Fix race condition with 2 watches +* 0.1.2 - Read triggering changed event fix +* 0.1.1 - Minor fixes +* 0.1.0 - Initial release + +## License +Copyright (c) 2018 Kyle Robinson Young +Licensed under the MIT license. diff --git a/node_modules/gaze/lib/gaze.js b/node_modules/gaze/lib/gaze.js new file mode 100644 index 0000000..1f556a6 --- /dev/null +++ b/node_modules/gaze/lib/gaze.js @@ -0,0 +1,469 @@ +/* + * gaze + * https://github.com/shama/gaze + * + * Copyright (c) 2018 Kyle Robinson Young + * Licensed under the MIT license. 
+ */ + +'use strict'; + +// libs +var util = require('util'); +var EE = require('events').EventEmitter; +var fs = require('fs'); +var path = require('path'); +var globule = require('globule'); +var helper = require('./helper'); + +// shim setImmediate for node v0.8 +var setImmediate = require('timers').setImmediate; +if (typeof setImmediate !== 'function') { + setImmediate = process.nextTick; +} + +// globals +var delay = 10; + +// `Gaze` EventEmitter object to return in the callback +function Gaze (patterns, opts, done) { + var self = this; + EE.call(self); + + // If second arg is the callback + if (typeof opts === 'function') { + done = opts; + opts = {}; + } + + // Default options + opts = opts || {}; + opts.mark = true; + opts.interval = opts.interval || 100; + opts.debounceDelay = opts.debounceDelay || 500; + opts.cwd = opts.cwd || process.cwd(); + this.options = opts; + + // Default done callback + done = done || function () {}; + + // Remember our watched dir:files + this._watched = Object.create(null); + + // Store watchers + this._watchers = Object.create(null); + + // Store watchFile listeners + this._pollers = Object.create(null); + + // Store patterns + this._patterns = []; + + // Cached events for debouncing + this._cached = Object.create(null); + + // Set maxListeners + if (this.options.maxListeners != null) { + this.setMaxListeners(this.options.maxListeners); + Gaze.super_.prototype.setMaxListeners(this.options.maxListeners); + delete this.options.maxListeners; + } + + // Initialize the watch on files + if (patterns) { + this.add(patterns, done); + } + + // keep the process alive + this._keepalive = setInterval(function () {}, 200); + + return this; +} +util.inherits(Gaze, EE); + +// Main entry point. Start watching and call done when setup +module.exports = function gaze (patterns, opts, done) { + return new Gaze(patterns, opts, done); +}; +module.exports.Gaze = Gaze; + +// Override the emit function to emit `all` events +// and debounce on duplicate events per file +Gaze.prototype.emit = function () { + var self = this; + var args = arguments; + + var e = args[0]; + var filepath = args[1]; + var timeoutId; + + // If not added/deleted/changed/renamed then just emit the event + if (e.slice(-2) !== 'ed') { + Gaze.super_.prototype.emit.apply(self, args); + return this; + } + + // Detect rename event, if added and previous deleted is in the cache + if (e === 'added') { + Object.keys(this._cached).forEach(function (oldFile) { + if (self._cached[oldFile].indexOf('deleted') !== -1) { + args[0] = e = 'renamed'; + [].push.call(args, oldFile); + delete self._cached[oldFile]; + return false; + } + }); + } + + // If cached doesnt exist, create a delay before running the next + // then emit the event + var cache = this._cached[filepath] || []; + if (cache.indexOf(e) === -1) { + helper.objectPush(self._cached, filepath, e); + clearTimeout(timeoutId); + timeoutId = setTimeout(function () { + delete self._cached[filepath]; + }, this.options.debounceDelay); + // Emit the event and `all` event + Gaze.super_.prototype.emit.apply(self, args); + Gaze.super_.prototype.emit.apply(self, ['all', e].concat([].slice.call(args, 1))); + } + + // Detect if new folder added to trigger for matching files within folder + if (e === 'added') { + if (helper.isDir(filepath)) { + // It's possible that between `isDir` and `readdirSync()` calls the `filepath` + // gets removed, which will result in `ENOENT` exception + + var files; + + try { + files = fs.readdirSync(filepath); + } catch (e) { + // Rethrow the 
error if it's anything other than `ENOENT` + if (e.code !== 'ENOENT') { + throw e; + } + + files = []; + } + + files.map(function (file) { + return path.join(filepath, file); + }).filter(function (file) { + return globule.isMatch(self._patterns, file, self.options); + }).forEach(function (file) { + self.emit('added', file); + }); + } + } + + return this; +}; + +// Close watchers +Gaze.prototype.close = function (_reset) { + var self = this; + Object.keys(self._watchers).forEach(function (file) { + self._watchers[file].close(); + }); + self._watchers = Object.create(null); + Object.keys(this._watched).forEach(function (dir) { + self._unpollDir(dir); + }); + if (_reset !== false) { + self._watched = Object.create(null); + setTimeout(function () { + self.emit('end'); + self.removeAllListeners(); + clearInterval(self._keepalive); + }, delay + 100); + } + return self; +}; + +// Add file patterns to be watched +Gaze.prototype.add = function (files, done) { + if (typeof files === 'string') { files = [files]; } + this._patterns = helper.unique.apply(null, [this._patterns, files]); + files = globule.find(this._patterns, this.options); + this._addToWatched(files); + this.close(false); + this._initWatched(done); +}; + +// Dont increment patterns and dont call done if nothing added +Gaze.prototype._internalAdd = function (file, done) { + var files = []; + if (helper.isDir(file)) { + files = [helper.markDir(file)].concat(globule.find(this._patterns, this.options)); + } else { + if (globule.isMatch(this._patterns, file, this.options)) { + files = [file]; + } + } + if (files.length > 0) { + this._addToWatched(files); + this.close(false); + this._initWatched(done); + } +}; + +// Remove file/dir from `watched` +Gaze.prototype.remove = function (file) { + var self = this; + if (this._watched[file]) { + // is dir, remove all files + this._unpollDir(file); + delete this._watched[file]; + } else { + // is a file, find and remove + Object.keys(this._watched).forEach(function (dir) { + var index = self._watched[dir].indexOf(file); + if (index !== -1) { + self._unpollFile(file); + self._watched[dir].splice(index, 1); + return false; + } + }); + } + if (this._watchers[file]) { + this._watchers[file].close(); + } + return this; +}; + +// Return watched files +Gaze.prototype.watched = function () { + return this._watched; +}; + +// Returns `watched` files with relative paths to process.cwd() +Gaze.prototype.relative = function (dir, unixify) { + var self = this; + var relative = Object.create(null); + var relDir, relFile, unixRelDir; + var cwd = this.options.cwd || process.cwd(); + if (dir === '') { dir = '.'; } + dir = helper.markDir(dir); + unixify = unixify || false; + Object.keys(this._watched).forEach(function (dir) { + relDir = path.relative(cwd, dir) + path.sep; + if (relDir === path.sep) { relDir = '.'; } + unixRelDir = unixify ? helper.unixifyPathSep(relDir) : relDir; + relative[unixRelDir] = self._watched[dir].map(function (file) { + relFile = path.relative(path.join(cwd, relDir) || '', file || ''); + if (helper.isDir(file)) { + relFile = helper.markDir(relFile); + } + if (unixify) { + relFile = helper.unixifyPathSep(relFile); + } + return relFile; + }); + }); + if (dir && unixify) { + dir = helper.unixifyPathSep(dir); + } + return dir ? 
relative[dir] || [] : relative; +}; + +// Adds files and dirs to watched +Gaze.prototype._addToWatched = function (files) { + var dirs = []; + + for (var i = 0; i < files.length; i++) { + var file = files[i]; + var filepath = path.resolve(this.options.cwd, file); + + var dirname = (helper.isDir(file)) ? filepath : path.dirname(filepath); + dirname = helper.markDir(dirname); + + // If a new dir is added + if (helper.isDir(file) && !(dirname in this._watched)) { + helper.objectPush(this._watched, dirname, []); + } + + if (file.slice(-1) === '/') { filepath += path.sep; } + helper.objectPush(this._watched, path.dirname(filepath) + path.sep, filepath); + + dirs.push(dirname); + } + + dirs = helper.unique(dirs); + + for (var k = 0; k < dirs.length; k++) { + dirname = dirs[k]; + // add folders into the mix + var readdir = fs.readdirSync(dirname); + for (var j = 0; j < readdir.length; j++) { + var dirfile = path.join(dirname, readdir[j]); + if (fs.lstatSync(dirfile).isDirectory()) { + helper.objectPush(this._watched, dirname, dirfile + path.sep); + } + } + } + + return this; +}; + +Gaze.prototype._watchDir = function (dir, done) { + var self = this; + var timeoutId; + try { + this._watchers[dir] = fs.watch(dir, function (event) { + // race condition. Let's give the fs a little time to settle down. so we + // don't fire events on non existent files. + clearTimeout(timeoutId); + timeoutId = setTimeout(function () { + // race condition. Ensure that this directory is still being watched + // before continuing. + if ((dir in self._watchers) && fs.existsSync(dir)) { + done(null, dir); + } + }, delay + 100); + }); + + this._watchers[dir].on('error', function (err) { + self._handleError(err); + }); + } catch (err) { + return this._handleError(err); + } + return this; +}; + +Gaze.prototype._unpollFile = function (file) { + if (this._pollers[file]) { + fs.unwatchFile(file, this._pollers[file]); + delete this._pollers[file]; + } + return this; +}; + +Gaze.prototype._unpollDir = function (dir) { + this._unpollFile(dir); + for (var i = 0; i < this._watched[dir].length; i++) { + this._unpollFile(this._watched[dir][i]); + } +}; + +Gaze.prototype._pollFile = function (file, done) { + var opts = { persistent: true, interval: this.options.interval }; + if (!this._pollers[file]) { + this._pollers[file] = function (curr, prev) { + done(null, file); + }; + try { + fs.watchFile(file, opts, this._pollers[file]); + } catch (err) { + return this._handleError(err); + } + } + return this; +}; + +// Initialize the actual watch on `watched` files +Gaze.prototype._initWatched = function (done) { + var self = this; + var cwd = this.options.cwd || process.cwd(); + var curWatched = Object.keys(self._watched); + + // if no matching files + if (curWatched.length < 1) { + // Defer to emitting to give a chance to attach event handlers. + setImmediate(function () { + self.emit('ready', self); + if (done) { done.call(self, null, self); } + self.emit('nomatch'); + }); + return; + } + + helper.forEachSeries(curWatched, function (dir, next) { + dir = dir || ''; + var files = self._watched[dir]; + // Triggered when a watched dir has an event + self._watchDir(dir, function (event, dirpath) { + var relDir = cwd === dir ? '.' : path.relative(cwd, dir); + relDir = relDir || ''; + + fs.readdir(dirpath, function (err, current) { + if (err) { return self.emit('error', err); } + if (!current) { return; } + + try { + // append path.sep to directories so they match previous. 
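+          // (an entry may vanish between the readdir above and the lstat
+          // below; the catch treats that race as benign)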
+ current = current.map(function (curPath) { + if (fs.existsSync(path.join(dir, curPath)) && fs.lstatSync(path.join(dir, curPath)).isDirectory()) { + return curPath + path.sep; + } else { + return curPath; + } + }); + } catch (err) { + // race condition-- sometimes the file no longer exists + } + + // Get watched files for this dir + var previous = self.relative(relDir); + + // If file was deleted + previous.filter(function (file) { + return current.indexOf(file) < 0; + }).forEach(function (file) { + if (!helper.isDir(file)) { + var filepath = path.join(dir, file); + self.remove(filepath); + self.emit('deleted', filepath); + } + }); + + // If file was added + current.filter(function (file) { + return previous.indexOf(file) < 0; + }).forEach(function (file) { + // Is it a matching pattern? + var relFile = path.join(relDir, file); + // Add to watch then emit event + self._internalAdd(relFile, function () { + self.emit('added', path.join(dir, file)); + }); + }); + }); + }); + + // Watch for change/rename events on files + files.forEach(function (file) { + if (helper.isDir(file)) { return; } + self._pollFile(file, function (err, filepath) { + if (err) { + self.emit('error', err); + return; + } + // Only emit changed if the file still exists + // Prevents changed/deleted duplicate events + if (fs.existsSync(filepath)) { + self.emit('changed', filepath); + } + }); + }); + + next(); + }, function () { + // Return this instance of Gaze + // delay before ready solves a lot of issues + setTimeout(function () { + self.emit('ready', self); + if (done) { done.call(self, null, self); } + }, delay + 100); + }); +}; + +// If an error, handle it here +Gaze.prototype._handleError = function (err) { + if (err.code === 'EMFILE') { + return this.emit('error', new Error('EMFILE: Too many opened files.')); + } + return this.emit('error', err); +}; diff --git a/node_modules/gaze/lib/helper.js b/node_modules/gaze/lib/helper.js new file mode 100644 index 0000000..8e5727d --- /dev/null +++ b/node_modules/gaze/lib/helper.js @@ -0,0 +1,84 @@ +'use strict'; + +var path = require('path'); +var helper = module.exports = {}; + +// Returns boolean whether filepath is dir terminated +helper.isDir = function isDir (dir) { + if (typeof dir !== 'string') { + return false; + } + return (dir.slice(-(path.sep.length)) === path.sep); +}; + +// Create a `key:[]` if doesnt exist on `obj` then push or concat the `val` +helper.objectPush = function objectPush (obj, key, val) { + if (obj[key] == null) { + obj[key] = []; + } + if (Array.isArray(val)) { + obj[key] = obj[key].concat(val); + } else if (val) { + obj[key].push(val); + } + obj[key] = helper.unique(obj[key]); + return obj[key]; +}; + +// Ensures the dir is marked with path.sep +helper.markDir = function markDir (dir) { + if (typeof dir === 'string' && + dir.slice(-(path.sep.length)) !== path.sep && + dir !== '.') { + dir += path.sep; + } + return dir; +}; + +// Changes path.sep to unix ones for testing +helper.unixifyPathSep = function unixifyPathSep (filepath) { + return (process.platform === 'win32') ? String(filepath).replace(/\\/g, '/') : filepath; +}; + +/** + * Lo-Dash 1.0.1 + * Copyright 2012-2013 The Dojo Foundation + * Based on Underscore.js 1.4.4 + * Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud Inc. 
+ * Available under MIT license + */ +helper.unique = function unique () { + var array = Array.prototype.concat.apply(Array.prototype, arguments); + var result = []; + for (var i = 0; i < array.length; i++) { + if (result.indexOf(array[i]) === -1) { + result.push(array[i]); + } + } + return result; +}; + +/** + * Copyright (c) 2010 Caolan McMahon + * Available under MIT license + */ +helper.forEachSeries = function forEachSeries (arr, iterator, callback) { + if (!arr.length) { return callback(); } + var completed = 0; + var iterate = function () { + iterator(arr[completed], function (err) { + if (err) { + callback(err); + callback = function () {}; + } else { + completed += 1; + if (completed === arr.length) { + callback(null); + } else { + iterate(); + } + } + }); + }; + iterate(); +}; diff --git a/node_modules/gaze/package.json b/node_modules/gaze/package.json new file mode 100644 index 0000000..85dbfb9 --- /dev/null +++ b/node_modules/gaze/package.json @@ -0,0 +1,54 @@ +{ + "name": "gaze", + "description": "A globbing fs.watch wrapper built from the best parts of other fine watch libs.", + "version": "1.1.3", + "homepage": "https://github.com/shama/gaze", + "author": { + "name": "Kyle Robinson Young", + "email": "kyle@dontkry.com" + }, + "repository": { + "type": "git", + "url": "https://github.com/shama/gaze.git" + }, + "bugs": { + "url": "https://github.com/shama/gaze/issues" + }, + "license": "MIT", + "main": "lib/gaze", + "engines": { + "node": ">= 4.0.0" + }, + "scripts": { + "test": "semistandard && grunt nodeunit -v" + }, + "dependencies": { + "globule": "^1.0.0" + }, + "devDependencies": { + "async": "^2.6.1", + "grunt": "^1.0.1", + "grunt-benchmark": "^1.0.0", + "grunt-cli": "^1.2.0", + "grunt-contrib-jshint": "^1.1.0", + "grunt-contrib-nodeunit": "^2.0.0", + "rimraf": "^2.5.2", + "semistandard": "^12.0.1" + }, + "keywords": [ + "watch", + "glob" + ], + "files": [ + "lib", + "LICENSE-MIT" + ], + "semistandard": { + "ignore": [ + "benchmarks", + "experiments", + "build", + "test" + ] + } +} diff --git a/node_modules/get-caller-file/LICENSE.md b/node_modules/get-caller-file/LICENSE.md new file mode 100644 index 0000000..bf3e1c0 --- /dev/null +++ b/node_modules/get-caller-file/LICENSE.md @@ -0,0 +1,6 @@ +ISC License (ISC) +Copyright 2018 Stefan Penner + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/get-caller-file/README.md b/node_modules/get-caller-file/README.md
new file mode 100644
index 0000000..a7d8c07
--- /dev/null
+++ b/node_modules/get-caller-file/README.md
@@ -0,0 +1,41 @@
+# get-caller-file
+
+[![Build Status](https://travis-ci.org/stefanpenner/get-caller-file.svg?branch=master)](https://travis-ci.org/stefanpenner/get-caller-file)
+[![Build status](https://ci.appveyor.com/api/projects/status/ol2q94g1932cy14a/branch/master?svg=true)](https://ci.appveyor.com/project/embercli/get-caller-file/branch/master)
+
+This is a utility that allows a function to figure out from which file it was invoked. It does so by inspecting V8's stack trace at the time it is invoked.
+
+Inspired by http://stackoverflow.com/questions/13227489
+
+*note: this relies on Node/V8-specific APIs, so other runtimes may not work*
+
+## Installation
+
+```bash
+yarn add get-caller-file
+```
+
+## Usage
+
+Given:
+
+```js
+// ./foo.js
+const getCallerFile = require('get-caller-file');
+
+module.exports = function() {
+  return getCallerFile(); // figures out who called it
+};
+```
+
+```js
+// index.js
+const foo = require('./foo');
+
+foo() // => /full/path/to/this/file/index.js
+```
+
+
+## Options:
+
+* `getCallerFile(position = 2)`: where `position` is the stack frame whose fileName we want.
diff --git a/node_modules/get-caller-file/index.d.ts b/node_modules/get-caller-file/index.d.ts
new file mode 100644
index 0000000..babed69
--- /dev/null
+++ b/node_modules/get-caller-file/index.d.ts
@@ -0,0 +1,2 @@
+declare const _default: (position?: number) => any;
+export = _default;
diff --git a/node_modules/get-caller-file/index.js b/node_modules/get-caller-file/index.js
new file mode 100644
index 0000000..57304f8
--- /dev/null
+++ b/node_modules/get-caller-file/index.js
@@ -0,0 +1,22 @@
+"use strict";
+// Call this function in another function to find out the file from
+// which that function was called. (Inspects the V8 stack trace.)
+//
+// Inspired by http://stackoverflow.com/questions/13227489
+module.exports = function getCallerFile(position) {
+    if (position === void 0) { position = 2; }
+    if (position >= Error.stackTraceLimit) {
+        throw new TypeError('getCallerFile(position) requires position be less than Error.stackTraceLimit but position was: `' + position + '` and Error.stackTraceLimit was: `' + Error.stackTraceLimit + '`');
+    }
+    var oldPrepareStackTrace = Error.prepareStackTrace;
+    Error.prepareStackTrace = function (_, stack) { return stack; };
+    var stack = new Error().stack;
+    Error.prepareStackTrace = oldPrepareStackTrace;
+    if (stack !== null && typeof stack === 'object') {
+        // stack[0] holds this file
+        // stack[1] holds where this function was called
+        // stack[2] holds the file we're interested in
+        return stack[position] ?
stack[position].getFileName() : undefined; + } +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/get-caller-file/index.js.map b/node_modules/get-caller-file/index.js.map new file mode 100644 index 0000000..89c655c --- /dev/null +++ b/node_modules/get-caller-file/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":";AAAA,qEAAqE;AACrE,qEAAqE;AACrE,EAAE;AACF,0DAA0D;AAE1D,iBAAS,SAAS,aAAa,CAAC,QAAY;IAAZ,yBAAA,EAAA,YAAY;IAC1C,IAAI,QAAQ,IAAI,KAAK,CAAC,eAAe,EAAE;QACrC,MAAM,IAAI,SAAS,CAAC,kGAAkG,GAAG,QAAQ,GAAG,oCAAoC,GAAG,KAAK,CAAC,eAAe,GAAG,GAAG,CAAC,CAAC;KACzM;IAED,IAAM,oBAAoB,GAAG,KAAK,CAAC,iBAAiB,CAAC;IACrD,KAAK,CAAC,iBAAiB,GAAG,UAAC,CAAC,EAAE,KAAK,IAAM,OAAA,KAAK,EAAL,CAAK,CAAC;IAC/C,IAAM,KAAK,GAAG,IAAI,KAAK,EAAE,CAAC,KAAK,CAAC;IAChC,KAAK,CAAC,iBAAiB,GAAG,oBAAoB,CAAC;IAG/C,IAAI,KAAK,KAAK,IAAI,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC/C,2BAA2B;QAC3B,gDAAgD;QAChD,8CAA8C;QAC9C,OAAO,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAE,KAAK,CAAC,QAAQ,CAAS,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;KAC7E;AACH,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/get-caller-file/package.json b/node_modules/get-caller-file/package.json new file mode 100644 index 0000000..b0dd571 --- /dev/null +++ b/node_modules/get-caller-file/package.json @@ -0,0 +1,42 @@ +{ + "name": "get-caller-file", + "version": "2.0.5", + "description": "", + "main": "index.js", + "directories": { + "test": "tests" + }, + "files": [ + "index.js", + "index.js.map", + "index.d.ts" + ], + "scripts": { + "prepare": "tsc", + "test": "mocha test", + "test:debug": "mocha test" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/stefanpenner/get-caller-file.git" + }, + "author": "Stefan Penner", + "license": "ISC", + "bugs": { + "url": "https://github.com/stefanpenner/get-caller-file/issues" + }, + "homepage": "https://github.com/stefanpenner/get-caller-file#readme", + "devDependencies": { + "@types/chai": "^4.1.7", + "@types/ensure-posix-path": "^1.0.0", + "@types/mocha": "^5.2.6", + "@types/node": "^11.10.5", + "chai": "^4.1.2", + "ensure-posix-path": "^1.0.1", + "mocha": "^5.2.0", + "typescript": "^3.3.3333" + }, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } +} diff --git a/node_modules/get-stdin/index.js b/node_modules/get-stdin/index.js new file mode 100644 index 0000000..0f1aeb3 --- /dev/null +++ b/node_modules/get-stdin/index.js @@ -0,0 +1,49 @@ +'use strict'; + +module.exports = function (cb) { + var stdin = process.stdin; + var ret = ''; + + if (stdin.isTTY) { + setImmediate(cb, ''); + return; + } + + stdin.setEncoding('utf8'); + + stdin.on('readable', function () { + var chunk; + + while (chunk = stdin.read()) { + ret += chunk; + } + }); + + stdin.on('end', function () { + cb(ret); + }); +}; + +module.exports.buffer = function (cb) { + var stdin = process.stdin; + var ret = []; + var len = 0; + + if (stdin.isTTY) { + setImmediate(cb, new Buffer('')); + return; + } + + stdin.on('readable', function () { + var chunk; + + while (chunk = stdin.read()) { + ret.push(chunk); + len += chunk.length; + } + }); + + stdin.on('end', function () { + cb(Buffer.concat(ret, len)); + }); +}; diff --git a/node_modules/get-stdin/package.json b/node_modules/get-stdin/package.json new file mode 100644 index 0000000..9a3e42a --- /dev/null +++ b/node_modules/get-stdin/package.json @@ -0,0 +1,35 @@ +{ + "name": "get-stdin", + "version": "4.0.1", + "description": "Easier stdin", + "license": "MIT", + "repository": 
"sindresorhus/get-stdin", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "http://sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node test.js && node test-buffer.js && echo unicorns | node test-real.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "std", + "stdin", + "stdio", + "concat", + "buffer", + "stream", + "process", + "stream" + ], + "devDependencies": { + "ava": "0.0.4", + "buffer-equal": "0.0.1" + } +} diff --git a/node_modules/get-stdin/readme.md b/node_modules/get-stdin/readme.md new file mode 100644 index 0000000..bc1d32a --- /dev/null +++ b/node_modules/get-stdin/readme.md @@ -0,0 +1,44 @@ +# get-stdin [![Build Status](https://travis-ci.org/sindresorhus/get-stdin.svg?branch=master)](https://travis-ci.org/sindresorhus/get-stdin) + +> Easier stdin + + +## Install + +```sh +$ npm install --save get-stdin +``` + + +## Usage + +```js +// example.js +var stdin = require('get-stdin'); + +stdin(function (data) { + console.log(data); + //=> unicorns +}); +``` + +```sh +$ echo unicorns | node example.js +unicorns +``` + + +## API + +### stdin(callback) + +Get `stdin` as a string. + +### stdin.buffer(callback) + +Get `stdin` as a buffer. + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/glob/LICENSE b/node_modules/glob/LICENSE new file mode 100644 index 0000000..42ca266 --- /dev/null +++ b/node_modules/glob/LICENSE @@ -0,0 +1,21 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +## Glob Logo + +Glob's logo created by Tanya Brassie , licensed +under a Creative Commons Attribution-ShareAlike 4.0 International License +https://creativecommons.org/licenses/by-sa/4.0/ diff --git a/node_modules/glob/README.md b/node_modules/glob/README.md new file mode 100644 index 0000000..83f0c83 --- /dev/null +++ b/node_modules/glob/README.md @@ -0,0 +1,378 @@ +# Glob + +Match files using the patterns the shell uses, like stars and stuff. + +[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Build Status](https://ci.appveyor.com/api/projects/status/kd7f3yftf7unxlsx?svg=true)](https://ci.appveyor.com/project/isaacs/node-glob) [![Coverage Status](https://coveralls.io/repos/isaacs/node-glob/badge.svg?branch=master&service=github)](https://coveralls.io/github/isaacs/node-glob?branch=master) + +This is a glob implementation in JavaScript. It uses the `minimatch` +library to do its matching. + +![a fun cartoon logo made of glob characters](logo/glob.png) + +## Usage + +Install with npm + +``` +npm i glob +``` + +```javascript +var glob = require("glob") + +// options is optional +glob("**/*.js", options, function (er, files) { + // files is an array of filenames. 
+ // If the `nonull` option is set, and nothing + // was found, then files is ["**/*.js"] + // er is an error object or null. +}) +``` + +## Glob Primer + +"Globs" are the patterns you type when you do stuff like `ls *.js` on +the command line, or put `build/*` in a `.gitignore` file. + +Before parsing the path part patterns, braced sections are expanded +into a set. Braced sections start with `{` and end with `}`, with any +number of comma-delimited sections within. Braced sections may contain +slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`. + +The following characters have special magic meaning when used in a +path portion: + +* `*` Matches 0 or more characters in a single path portion +* `?` Matches 1 character +* `[...]` Matches a range of characters, similar to a RegExp range. + If the first character of the range is `!` or `^` then it matches + any character not in the range. +* `!(pattern|pattern|pattern)` Matches anything that does not match + any of the patterns provided. +* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the + patterns provided. +* `+(pattern|pattern|pattern)` Matches one or more occurrences of the + patterns provided. +* `*(a|b|c)` Matches zero or more occurrences of the patterns provided +* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns + provided +* `**` If a "globstar" is alone in a path portion, then it matches + zero or more directories and subdirectories searching for matches. + It does not crawl symlinked directories. + +### Dots + +If a file or directory path portion has a `.` as the first character, +then it will not match any glob pattern unless that pattern's +corresponding path part also has a `.` as its first character. + +For example, the pattern `a/.*/c` would match the file at `a/.b/c`. +However the pattern `a/*/c` would not, because `*` does not start with +a dot character. + +You can make glob treat dots as normal characters by setting +`dot:true` in the options. + +### Basename Matching + +If you set `matchBase:true` in the options, and the pattern has no +slashes in it, then it will seek for any file anywhere in the tree +with a matching basename. For example, `*.js` would match +`test/simple/basic.js`. + +### Empty Sets + +If no matching files are found, then an empty array is returned. This +differs from the shell, where the pattern itself is returned. For +example: + + $ echo a*s*d*f + a*s*d*f + +To get the bash-style behavior, set the `nonull:true` in the options. + +### See Also: + +* `man sh` +* `man bash` (Search for "Pattern Matching") +* `man 3 fnmatch` +* `man 5 gitignore` +* [minimatch documentation](https://github.com/isaacs/minimatch) + +## glob.hasMagic(pattern, [options]) + +Returns `true` if there are any special characters in the pattern, and +`false` otherwise. + +Note that the options affect the results. If `noext:true` is set in +the options object, then `+(a|b)` will not be considered a magic +pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}` +then that is considered magical, unless `nobrace:true` is set in the +options. + +## glob(pattern, [options], cb) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* `cb` `{Function}` + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Perform an asynchronous glob search. 
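+
+For example, a search can be scoped and filtered via the options object (a minimal sketch; the option values here are illustrative):
+
+```javascript
+var glob = require("glob")
+
+// search under ./src, and exclude directories from the results
+glob("**/*.js", { cwd: "src", nodir: true }, function (er, files) {
+  if (er) throw er
+  console.log(files) // filenames are resolved relative to the cwd option
+})
+```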
+ +## glob.sync(pattern, [options]) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* return: `{Array}` filenames found matching the pattern + +Perform a synchronous glob search. + +## Class: glob.Glob + +Create a Glob object by instantiating the `glob.Glob` class. + +```javascript +var Glob = require("glob").Glob +var mg = new Glob(pattern, options, cb) +``` + +It's an EventEmitter, and starts walking the filesystem to find matches +immediately. + +### new glob.Glob(pattern, [options], [cb]) + +* `pattern` `{String}` pattern to search for +* `options` `{Object}` +* `cb` `{Function}` Called when an error occurs, or matches are found + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Note that if the `sync` flag is set in the options, then matches will +be immediately available on the `g.found` member. + +### Properties + +* `minimatch` The minimatch object that the glob uses. +* `options` The options object passed in. +* `aborted` Boolean which is set to true when calling `abort()`. There + is no way at this time to continue a glob search after aborting, but + you can re-use the statCache to avoid having to duplicate syscalls. +* `cache` Convenience object. Each field has the following possible + values: + * `false` - Path does not exist + * `true` - Path exists + * `'FILE'` - Path exists, and is not a directory + * `'DIR'` - Path exists, and is a directory + * `[file, entries, ...]` - Path exists, is a directory, and the + array value is the results of `fs.readdir` +* `statCache` Cache of `fs.stat` results, to prevent statting the same + path multiple times. +* `symlinks` A record of which paths are symbolic links, which is + relevant in resolving `**` patterns. +* `realpathCache` An optional object which is passed to `fs.realpath` + to minimize unnecessary syscalls. It is stored on the instantiated + Glob object, and may be re-used. + +### Events + +* `end` When the matching is finished, this is emitted with all the + matches found. If the `nonull` option is set, and no match was found, + then the `matches` list contains the original pattern. The matches + are sorted, unless the `nosort` flag is set. +* `match` Every time a match is found, this is emitted with the specific + thing that matched. It is not deduplicated or resolved to a realpath. +* `error` Emitted when an unexpected error is encountered, or whenever + any fs error occurs if `options.strict` is set. +* `abort` When `abort()` is called, this event is raised. + +### Methods + +* `pause` Temporarily stop the search +* `resume` Resume the search +* `abort` Stop the search forever + +### Options + +All the options that can be passed to Minimatch can also be passed to +Glob to change pattern matching behavior. Also, some have been added, +or have glob-specific ramifications. + +All options are false by default, unless otherwise noted. + +All options are added to the Glob object, as well. + +If you are running many `glob` operations, you can pass a Glob object +as the `options` argument to a subsequent operation to shortcut some +`stat` and `readdir` calls. At the very least, you may pass in shared +`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that +parallel glob operations will be sped up by sharing information about +the filesystem. + +* `cwd` The current working directory in which to search. Defaults + to `process.cwd()`. +* `root` The place where patterns starting with `/` will be mounted + onto. 
Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix + systems, and `C:\` or some such on Windows.) +* `dot` Include `.dot` files in normal matches and `globstar` matches. + Note that an explicit dot in a portion of the pattern will always + match dot files. +* `nomount` By default, a pattern starting with a forward-slash will be + "mounted" onto the root setting, so that a valid filesystem path is + returned. Set this flag to disable that behavior. +* `mark` Add a `/` character to directory matches. Note that this + requires additional stat calls. +* `nosort` Don't sort the results. +* `stat` Set to true to stat *all* results. This reduces performance + somewhat, and is completely unnecessary, unless `readdir` is presumed + to be an untrustworthy indicator of file existence. +* `silent` When an unusual error is encountered when attempting to + read a directory, a warning will be printed to stderr. Set the + `silent` option to true to suppress these warnings. +* `strict` When an unusual error is encountered when attempting to + read a directory, the process will just continue on in search of + other matches. Set the `strict` option to raise an error in these + cases. +* `cache` See `cache` property above. Pass in a previously generated + cache object to save some fs calls. +* `statCache` A cache of results of filesystem information, to prevent + unnecessary stat calls. While it should not normally be necessary + to set this, you may pass the statCache from one glob() call to the + options object of another, if you know that the filesystem will not + change between calls. (See "Race Conditions" below.) +* `symlinks` A cache of known symbolic links. You may pass in a + previously generated `symlinks` object to save `lstat` calls when + resolving `**` matches. +* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead. +* `nounique` In some cases, brace-expanded patterns can result in the + same file showing up multiple times in the result set. By default, + this implementation prevents duplicates in the result set. Set this + flag to disable that behavior. +* `nonull` Set to never return an empty set, instead returning a set + containing the pattern itself. This is the default in glob(3). +* `debug` Set to enable debug logging in minimatch and glob. +* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets. +* `noglobstar` Do not match `**` against multiple filenames. (Ie, + treat it as a normal `*` instead.) +* `noext` Do not match `+(a|b)` "extglob" patterns. +* `nocase` Perform a case-insensitive match. Note: on + case-insensitive filesystems, non-magic patterns will match by + default, since `stat` and `readdir` will not raise errors. +* `matchBase` Perform a basename-only match if the pattern does not + contain any slash characters. That is, `*.js` would be treated as + equivalent to `**/*.js`, matching all js files in all directories. +* `nodir` Do not match directories, only files. (Note: to match + *only* directories, simply put a `/` at the end of the pattern.) +* `ignore` Add a pattern or an array of glob patterns to exclude matches. + Note: `ignore` patterns are *always* in `dot:true` mode, regardless + of any other settings. +* `follow` Follow symlinked directories when expanding `**` patterns. + Note that this can result in a lot of duplicate references in the + presence of cyclic links. +* `realpath` Set to true to call `fs.realpath` on all of the results. 
+ In the case of a symlink that cannot be resolved, the full absolute + path to the matched entry is returned (though it will usually be a + broken symlink) +* `absolute` Set to true to always receive absolute paths for matched + files. Unlike `realpath`, this also affects the values returned in + the `match` event. +* `fs` File-system object with Node's `fs` API. By default, the built-in + `fs` module will be used. Set to a volume provided by a library like + `memfs` to avoid using the "real" file-system. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between node-glob and other +implementations, and are intentional. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.3, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +Note that symlinked directories are not crawled as part of a `**`, +though their contents may match against subsequent portions of the +pattern. This prevents infinite loops and duplicates and the like. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then glob returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +### Comments and Negation + +Previously, this module let you mark a pattern as a "comment" if it +started with a `#` character, or a "negated" pattern if it started +with a `!` character. + +These options were deprecated in version 5, and removed in version 6. + +To specify things that should not match, use the `ignore` option. + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes will always +be interpreted as escape characters, not path separators. + +Results from absolute patterns such as `/foo/*` are mounted onto the +root setting using `path.join`. On windows, this will by default result +in `/foo/*` matching `C:\foo\bar.txt`. + +## Race Conditions + +Glob searching, by its very nature, is susceptible to race conditions, +since it relies on directory walking and such. + +As a result, it is possible that a file that exists when glob looks for +it may have been deleted or modified by the time it returns the result. + +As part of its internal implementation, this program caches all stat +and readdir calls that it makes, in order to cut down on system +overhead. However, this also makes it even more susceptible to races, +especially if the cache or statCache objects are reused between glob +calls. + +Users are thus advised not to use a glob result as a guarantee of +filesystem state in the face of rapid changes. 
For the vast majority +of operations, this is never a problem. + +## Glob Logo +Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo). + +The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/). + +## Contributing + +Any change to behavior (including bugfixes) must come with a test. + +Patches that fail tests or reduce performance will be rejected. + +``` +# to run tests +npm test + +# to re-generate test fixtures +npm run test-regen + +# to benchmark against bash/zsh +npm run bench + +# to profile javascript +npm run prof +``` + +![](oh-my-glob.gif) diff --git a/node_modules/glob/common.js b/node_modules/glob/common.js new file mode 100644 index 0000000..424c46e --- /dev/null +++ b/node_modules/glob/common.js @@ -0,0 +1,238 @@ +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var fs = require("fs") +var path = require("path") +var minimatch = require("minimatch") +var isAbsolute = require("path-is-absolute") +var Minimatch = minimatch.Minimatch + +function alphasort (a, b) { + return a.localeCompare(b, 'en') +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) + } + + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. 
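+  // e.g. a matchBase pattern like "*.js" is rewritten to "**/*.js" below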
+ if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute + self.fs = options.fs || fs + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") + + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) + if (process.platform === "win32") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + self.nomount = !!options.nomount + + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. + options.nonegate = true + options.nocomment = true + // always treat \ in patterns as escapes, not path separators + options.allowWindowsEscape = false + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +function finish (self) { + var nou = self.nounique + var all = nou ? 
[] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') + + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} diff --git a/node_modules/glob/glob.js b/node_modules/glob/glob.js new file mode 100644 index 0000000..37a4d7e --- /dev/null +++ b/node_modules/glob/glob.js @@ -0,0 +1,790 @@ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. 
+// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. + +module.exports = glob + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var inherits = require('inherits') +var EE = require('events').EventEmitter +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var globSync = require('./sync.js') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = require('inflight') +var util = require('util') +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = require('once') + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin +} + +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + + if (!pattern) + return false + + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. 
+ this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + this._processing = 0 + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false + + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } + } + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
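+  // (e.g. for the pattern "a/b/*.js" the parsed set item is
+  // ["a", "b", <RegExp for *.js>], so n === 2 and prefix becomes "a/b")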
+ + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
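+  // e.g. with the pattern "*/*.js" and matched entry "lib", the next
+  // pattern processed is ["lib", <RegExp for *.js>]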
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = isAbsolute(e) ? e : this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) + e = abs + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + self.fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + self.fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
+ var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + self.fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return self.fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) +} diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json new file mode 100644 index 0000000..5940b64 --- /dev/null +++ b/node_modules/glob/package.json @@ -0,0 +1,55 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "glob", + "description": "a little globber", + "version": "7.2.3", + "publishConfig": { + "tag": "v7-legacy" + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "glob.js", + "files": [ + "glob.js", + "sync.js", + "common.js" + ], + "engines": { + "node": "*" + }, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "devDependencies": { + "memfs": "^3.2.0", + "mkdirp": "0", + "rimraf": "^2.2.8", + "tap": "^15.0.6", + "tick": "0.0.6" + }, + "tap": { + "before": "test/00-setup.js", + "after": "test/zz-cleanup.js", + "jobs": 1 + }, + "scripts": { + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test": "tap", + "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", + "bench": "bash benchmark.sh", + "prof": "bash prof.sh && cat profile.txt", + "benchclean": "node benchclean.js" + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } +} diff --git a/node_modules/glob/sync.js b/node_modules/glob/sync.js new file mode 100644 index 0000000..2c4f480 --- /dev/null +++ b/node_modules/glob/sync.js @@ -0,0 +1,486 @@ +module.exports = globSync +globSync.GlobSync = GlobSync + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var Glob = require('./glob.js').Glob +var util = require('util') +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert.ok(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert.ok(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return + + var abs = this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) { + e = abs + } + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null + } + } + + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, this.fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. 
+ var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } + + if (lstat && lstat.isSymbolicLink()) { + try { + stat = this.fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = true + if (stat) + c = stat.isDirectory() ? 
'DIR' : 'FILE' + + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} diff --git a/node_modules/globule/LICENSE b/node_modules/globule/LICENSE new file mode 100644 index 0000000..5c0695e --- /dev/null +++ b/node_modules/globule/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2018 "Cowboy" Ben Alman + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/globule/README.md b/node_modules/globule/README.md new file mode 100644 index 0000000..fe87e3b --- /dev/null +++ b/node_modules/globule/README.md @@ -0,0 +1,131 @@ +# globule [![Build Status: Linux](https://travis-ci.org/cowboy/node-globule.svg?branch=master)](https://travis-ci.org/cowboy/node-globule) [![Build Status: Windows](https://ci.appveyor.com/api/projects/status/i9fnc38q952r9nc0/branch/master?svg=true)](https://ci.appveyor.com/project/gruntjs/node-globule/branch/master) + +An easy-to-use wildcard globbing library. + +## Getting Started +Install the module with: `npm install globule` + +```javascript +var globule = require('globule'); +var filepaths = globule.find('**/*.js'); +``` + +## Documentation + +### globule.find +Returns a unique array of all file or directory paths that match the given globbing pattern(s). This method accepts either comma separated globbing patterns or an array of globbing patterns. Paths matching patterns that begin with `!` will be excluded from the returned array. Patterns are processed in order, so inclusion and exclusion order is significant. Patterns may be specified as function arguments or as a `src` property of the options object. + +```js +globule.find(patterns [, patterns [, ...]] [, options]) +globule.find({src: patterns, /* other options */}) +``` + +The `options` object supports all [glob][] library options, along with a few extras. These are the most commonly used: + +* `src` This property may be used instead of specifying patterns as function arguments. +* `filter` Either a valid [fs.Stats method name](http://nodejs.org/docs/latest/api/fs.html#fs_class_fs_stats) or a function that will be passed the matched `src` filepath and `options` object as arguments. This function should return a `Boolean` value. +* `nonull` Retain globbing patterns in result set even if they fail to match files. +* `matchBase` Patterns without slashes will match just the basename part. Eg. 
this makes `*.js` work like `**/*.js`. +* `srcBase` Patterns will be matched relative to the specified path instead of the current working directory. This is a synonym for `cwd`. +* `prefixBase` Any specified `srcBase` will be prefixed to all returned filepaths. + +[glob]: https://github.com/isaacs/node-glob + +### globule.match +Match one or more globbing patterns against one or more file paths. Returns a uniqued array of all file paths that match any of the specified globbing patterns. Both the `patterns` and `filepaths` arguments can be a single string or array of strings. Paths matching patterns that begin with `!` will be excluded from the returned array. Patterns are processed in order, so inclusion and exclusion order is significant. + +```js +globule.match(patterns, filepaths [, options]) +``` + +### globule.isMatch +This method contains the same signature and logic as the `globule.match` method, but returns `true` if any files were matched, otherwise `false`. + +```js +globule.isMatch(patterns, filepaths [, options]) +``` + +### globule.mapping +Given a set of source file paths, returns an array of src-dest file mapping objects. Both src and dest paths may be renamed, depending on the options specified. Patterns may be specified as function arguments or as a `src` property of the options object. + +```js +globule.mapping(filepaths [, filepaths [, ...]] [, options]) +globule.mapping({src: filepaths, /* other options */}) +``` + +In addition to the options the `globule.find` method supports, the options object also supports these properties: + +* `srcBase` The directory from which patterns are matched. Any string specified as `srcBase` is effectively stripped from the beginning of all matched paths. +* `destBase` The specified path is prefixed to all `dest` filepaths. +* `ext` Remove anything after (and including) the first `.` in the destination path, then append this value. +* `extDot` Change the behavior of `ext`, `"first"` and `"last"` will remove anything after the first or last `.` in the destination filename, respectively. Defaults to `"first"`. +* `flatten` Remove the path component from all matched src files. The src file path is still joined to the specified destBase. +* `rename` If specified, this function will be responsible for returning the final `dest` filepath. By default, it flattens paths (if specified), changes extensions (if specified) and joins the matched path to the `destBase`. + +### globule.findMapping +This method is a convenience wrapper around the `globule.find` and `globule.mapping` methods. 
+ +```js +globule.findMapping(patterns [, options]) +``` + + +## Examples + +Given the files `foo/a.js` and `foo/b.js`: + +### srcBase and destBase + +```js +globule.find("foo/*.js") +// ["foo/a.js", "foo/b.js"] + +globule.find("*.js", {srcBase: "foo"}) +// ["a.js", "b.js"] + +globule.find({src: "*.js", srcBase: "foo", prefixBase: true}) +// ["foo/a.js", "foo/b.js"] +``` + +```js +globule.findMapping("foo/*.js") +// [{src: ["foo/a.js"], dest: "foo/a.js"}, {src: ["foo/b.js"], dest: "foo/b.js"}] + +globule.findMapping("foo/*.js", {destBase: "bar"}) +// [{src: ["foo/a.js"], dest: "bar/foo/a.js"}, {src: ["foo/b.js"], dest: "bar/foo/b.js"}] + +globule.findMapping({src: "*.js", srcBase: "foo", destBase: "bar"}) +// [{src: ["foo/a.js"], dest: "bar/a.js"}, {src: ["foo/b.js"], dest: "bar/b.js"}] +``` + +```js +globule.mapping(["foo/a.js", "foo/b.js"]) +// [{src: ["foo/a.js"], dest: "foo/a.js"}, {src: ["foo/b.js"], dest: "foo/b.js"}] + +globule.mapping(["foo/a.js", "foo/b.js"], {destBase: "bar"}) +// [{src: ["foo/a.js"], dest: "bar/foo/a.js"}, {src: ["foo/b.js"], dest: "bar/foo/b.js"}] + +globule.mapping("foo/a.js", "foo/b.js", {destBase: "bar"}) +// [{src: ["foo/a.js"], dest: "bar/foo/a.js"}, {src: ["foo/b.js"], dest: "bar/foo/b.js"}] + +globule.mapping(["a.js", "b.js"], {srcBase: "foo", destBase: "bar"}) +// [{src: ["foo/a.js"], dest: "bar/a.js"}, {src: ["foo/b.js"], dest: "bar/b.js"}] + +globule.mapping({src: ["a.js", "b.js"], srcBase: "foo", destBase: "bar"}) +// [{src: ["foo/a.js"], dest: "bar/a.js"}, {src: ["foo/b.js"], dest: "bar/b.js"}] +``` + +## Contributing +In lieu of a formal styleguide, take care to maintain the existing coding style. Add unit tests for any new or changed functionality. Lint and test your code using [Grunt](http://gruntjs.com/). + +## Release History +2018-05-29 - v1.2.1 - Update dependencies, lodash@4.17.10 to avoid errant security warnings. +2017-06-10 - v1.2.0 - Update dependencies, lodash@4.17. +2016-10-23 - v1.1.0 - Update dependencies, lodash@4.16, glob@7.1. +2016-04-14 - v1.0.0 - Update dependencies, lodash@4.9, glob@7.0, minimatch@3.0. Paths are unix-style with prefixBase enabled. +2014-01-07 - v0.2.0 - Updated dependencies. Added `src` option. Improved exclusion speed significantly. +2013-04-11 - v0.1.0 - Initial release. + +## License +Copyright (c) 2018 "Cowboy" Ben Alman +Licensed under the MIT license. diff --git a/node_modules/globule/lib/globule.js b/node_modules/globule/lib/globule.js new file mode 100644 index 0000000..2efe361 --- /dev/null +++ b/node_modules/globule/lib/globule.js @@ -0,0 +1,192 @@ +/* + * globule + * https://github.com/cowboy/node-globule + * + * Copyright (c) 2018 "Cowboy" Ben Alman + * Licensed under the MIT license. + */ + +'use strict'; + +var fs = require('fs'); +var path = require('path'); + +var _ = require('lodash'); +var glob = require('glob'); +var minimatch = require('minimatch'); + +// The module. +var globule = exports; + +// Process specified wildcard glob patterns or filenames against a +// callback, excluding and uniquing files in the result set. +function processPatterns(patterns, options, fn) { + var result = []; + _.each(patterns, function(pattern) { + // The first character is not ! (inclusion). Add all matching filepaths + // to the result set. + if (pattern.indexOf('!') !== 0) { + result = _.union(result, fn(pattern)); + return; + } + // The first character is ! (exclusion). Remove any filepaths from the + // result set that match this pattern, sans leading !. 
+ var filterFn = minimatch.filter(pattern.slice(1), options); + result = _.filter(result, function(filepath) { + return !filterFn(filepath); + }); + }); + return result; +} + +// Normalize paths to be unix-style. +var pathSeparatorRe = /[\/\\]/g; +function normalizePath(path) { + return path.replace(pathSeparatorRe, '/'); +} + +// Match a filepath or filepaths against one or more wildcard patterns. Returns +// all matching filepaths. This behaves just like minimatch.match, but supports +// any number of patterns. +globule.match = function(patterns, filepaths, options) { + // Return empty set if either patterns or filepaths was omitted. + if (patterns == null || filepaths == null) { return []; } + // Normalize patterns and filepaths to flattened arrays. + patterns = _.isArray(patterns) ? _.flattenDeep(patterns) : [patterns]; + filepaths = _.isArray(filepaths) ? _.flattenDeep(filepaths) : [filepaths]; + // Return empty set if there are no patterns or filepaths. + if (patterns.length === 0 || filepaths.length === 0) { return []; } + // Return all matching filepaths. + return processPatterns(patterns, options, function(pattern) { + return minimatch.match(filepaths, pattern, options || {}); + }); +}; + +// Match a filepath or filepaths against one or more wildcard patterns. Returns +// true if any of the patterns match. +globule.isMatch = function() { + return globule.match.apply(null, arguments).length > 0; +}; + +// Return an array of all file paths that match the given wildcard patterns. +globule.find = function() { + var args = _.toArray(arguments); + // If the last argument is an options object, remove it from args. + var options = _.isPlainObject(args[args.length - 1]) ? args.pop() : {}; + // If options.src was specified, use it. Otherwise, use all non-options + // arguments. Flatten nested arrays. + var patterns; + if (options.src) { + patterns = _.isArray(options.src) ? _.flattenDeep(options.src) : [options.src]; + } else { + patterns = _.flattenDeep(args); + } + // Return empty set if there are no patterns. + if (patterns.length === 0) { return []; } + var srcBase = options.srcBase || options.cwd; + // Create glob-specific options object. + var globOptions = _.extend({}, options); + if (srcBase) { + globOptions.cwd = srcBase; + } + // Get all matching filepaths. + var matches = processPatterns(patterns, options, function(pattern) { + return glob.sync(pattern, globOptions); + }); + // If srcBase and prefixBase were specified, prefix srcBase to matched paths. + if (srcBase && options.prefixBase) { + matches = matches.map(function(filepath) { + return normalizePath(path.join(srcBase, filepath)); + }); + } + // Filter result set? + if (options.filter) { + matches = matches.filter(function(filepath) { + // If srcBase was specified but prefixBase was NOT, prefix srcBase + // temporarily, for filtering. + if (srcBase && !options.prefixBase) { + filepath = normalizePath(path.join(srcBase, filepath)); + } + try { + if (_.isFunction(options.filter)) { + return options.filter(filepath, options); + } else { + // If the file is of the right type and exists, this should work. + return fs.statSync(filepath)[options.filter](); + } + } catch(err) { + // Otherwise, it's probably not the right type. + return false; + } + }); + } + return matches; +}; + +var extDotRe = { + first: /(\.[^\/]*)?$/, + last: /(\.[^\/\.]*)?$/, +}; +function rename(dest, options) { + // Flatten path? + if (options.flatten) { + dest = path.basename(dest); + } + // Change the extension? 
+ if (options.ext) { + dest = dest.replace(extDotRe[options.extDot], options.ext); + } + // Join dest and destBase? + if (options.destBase) { + dest = path.join(options.destBase, dest); + } + return dest; +} + +// Build a mapping of src-dest filepaths from the given set of filepaths. +globule.mapping = function(filepaths, options) { + // Return empty set if filepaths was omitted. + if (filepaths == null) { return []; } + options = _.defaults({}, options, { + extDot: 'first', + rename: rename, + }); + var files = []; + var fileByDest = {}; + // Find all files matching pattern, using passed-in options. + filepaths.forEach(function(src) { + // Generate destination filename. + var dest = options.rename(src, options); + // Prepend srcBase to all src paths. + if (options.srcBase) { + src = path.join(options.srcBase, src); + } + // Normalize filepaths to be unix-style. + dest = normalizePath(dest); + src = normalizePath(src); + // Map correct src path to dest path. + if (fileByDest[dest]) { + // If dest already exists, push this src onto that dest's src array. + fileByDest[dest].src.push(src); + } else { + // Otherwise create a new src-dest file mapping object. + files.push({ + src: [src], + dest: dest, + }); + // And store a reference for later use. + fileByDest[dest] = files[files.length - 1]; + } + }); + return files; +}; + +// Return a mapping of src-dest filepaths from files matching the given +// wildcard patterns. +globule.findMapping = function() { + var args = _.toArray(arguments); + // If the last argument is an options object, remove it from args. + var options = _.isPlainObject(args[args.length - 1]) ? args.pop() : {}; + // Generate mapping from found filepaths. + return globule.mapping(globule.find(args, options), options); +}; diff --git a/node_modules/globule/node_modules/glob/LICENSE b/node_modules/globule/node_modules/glob/LICENSE new file mode 100644 index 0000000..42ca266 --- /dev/null +++ b/node_modules/globule/node_modules/glob/LICENSE @@ -0,0 +1,21 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +## Glob Logo + +Glob's logo created by Tanya Brassie , licensed +under a Creative Commons Attribution-ShareAlike 4.0 International License +https://creativecommons.org/licenses/by-sa/4.0/ diff --git a/node_modules/globule/node_modules/glob/README.md b/node_modules/globule/node_modules/glob/README.md new file mode 100644 index 0000000..2dde30a --- /dev/null +++ b/node_modules/globule/node_modules/glob/README.md @@ -0,0 +1,375 @@ +# Glob + +Match files using the patterns the shell uses, like stars and stuff. 
+ +[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Build Status](https://ci.appveyor.com/api/projects/status/kd7f3yftf7unxlsx?svg=true)](https://ci.appveyor.com/project/isaacs/node-glob) [![Coverage Status](https://coveralls.io/repos/isaacs/node-glob/badge.svg?branch=master&service=github)](https://coveralls.io/github/isaacs/node-glob?branch=master) + +This is a glob implementation in JavaScript. It uses the `minimatch` +library to do its matching. + +![a fun cartoon logo made of glob characters](logo/glob.png) + +## Usage + +Install with npm + +``` +npm i glob +``` + +```javascript +var glob = require("glob") + +// options is optional +glob("**/*.js", options, function (er, files) { + // files is an array of filenames. + // If the `nonull` option is set, and nothing + // was found, then files is ["**/*.js"] + // er is an error object or null. +}) +``` + +## Glob Primer + +"Globs" are the patterns you type when you do stuff like `ls *.js` on +the command line, or put `build/*` in a `.gitignore` file. + +Before parsing the path part patterns, braced sections are expanded +into a set. Braced sections start with `{` and end with `}`, with any +number of comma-delimited sections within. Braced sections may contain +slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`. + +The following characters have special magic meaning when used in a +path portion: + +* `*` Matches 0 or more characters in a single path portion +* `?` Matches 1 character +* `[...]` Matches a range of characters, similar to a RegExp range. + If the first character of the range is `!` or `^` then it matches + any character not in the range. +* `!(pattern|pattern|pattern)` Matches anything that does not match + any of the patterns provided. +* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the + patterns provided. +* `+(pattern|pattern|pattern)` Matches one or more occurrences of the + patterns provided. +* `*(a|b|c)` Matches zero or more occurrences of the patterns provided +* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns + provided +* `**` If a "globstar" is alone in a path portion, then it matches + zero or more directories and subdirectories searching for matches. + It does not crawl symlinked directories. + +### Dots + +If a file or directory path portion has a `.` as the first character, +then it will not match any glob pattern unless that pattern's +corresponding path part also has a `.` as its first character. + +For example, the pattern `a/.*/c` would match the file at `a/.b/c`. +However the pattern `a/*/c` would not, because `*` does not start with +a dot character. + +You can make glob treat dots as normal characters by setting +`dot:true` in the options. + +### Basename Matching + +If you set `matchBase:true` in the options, and the pattern has no +slashes in it, then it will seek for any file anywhere in the tree +with a matching basename. For example, `*.js` would match +`test/simple/basic.js`. + +### Empty Sets + +If no matching files are found, then an empty array is returned. This +differs from the shell, where the pattern itself is returned. For +example: + + $ echo a*s*d*f + a*s*d*f + +To get the bash-style behavior, set the `nonull:true` in the options. 
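+
+A minimal sketch of the difference (the `*.nope` pattern is illustrative
+and assumed to match nothing):
+
+```javascript
+var glob = require("glob")
+
+// Default: an unmatched pattern yields an empty array.
+glob.sync("*.nope")                   // []
+
+// With nonull, the pattern itself is returned instead.
+glob.sync("*.nope", { nonull: true }) // ["*.nope"]
+```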
+ +### See Also: + +* `man sh` +* `man bash` (Search for "Pattern Matching") +* `man 3 fnmatch` +* `man 5 gitignore` +* [minimatch documentation](https://github.com/isaacs/minimatch) + +## glob.hasMagic(pattern, [options]) + +Returns `true` if there are any special characters in the pattern, and +`false` otherwise. + +Note that the options affect the results. If `noext:true` is set in +the options object, then `+(a|b)` will not be considered a magic +pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}` +then that is considered magical, unless `nobrace:true` is set in the +options. + +## glob(pattern, [options], cb) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* `cb` `{Function}` + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Perform an asynchronous glob search. + +## glob.sync(pattern, [options]) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* return: `{Array}` filenames found matching the pattern + +Perform a synchronous glob search. + +## Class: glob.Glob + +Create a Glob object by instantiating the `glob.Glob` class. + +```javascript +var Glob = require("glob").Glob +var mg = new Glob(pattern, options, cb) +``` + +It's an EventEmitter, and starts walking the filesystem to find matches +immediately. + +### new glob.Glob(pattern, [options], [cb]) + +* `pattern` `{String}` pattern to search for +* `options` `{Object}` +* `cb` `{Function}` Called when an error occurs, or matches are found + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Note that if the `sync` flag is set in the options, then matches will +be immediately available on the `g.found` member. + +### Properties + +* `minimatch` The minimatch object that the glob uses. +* `options` The options object passed in. +* `aborted` Boolean which is set to true when calling `abort()`. There + is no way at this time to continue a glob search after aborting, but + you can re-use the statCache to avoid having to duplicate syscalls. +* `cache` Convenience object. Each field has the following possible + values: + * `false` - Path does not exist + * `true` - Path exists + * `'FILE'` - Path exists, and is not a directory + * `'DIR'` - Path exists, and is a directory + * `[file, entries, ...]` - Path exists, is a directory, and the + array value is the results of `fs.readdir` +* `statCache` Cache of `fs.stat` results, to prevent statting the same + path multiple times. +* `symlinks` A record of which paths are symbolic links, which is + relevant in resolving `**` patterns. +* `realpathCache` An optional object which is passed to `fs.realpath` + to minimize unnecessary syscalls. It is stored on the instantiated + Glob object, and may be re-used. + +### Events + +* `end` When the matching is finished, this is emitted with all the + matches found. If the `nonull` option is set, and no match was found, + then the `matches` list contains the original pattern. The matches + are sorted, unless the `nosort` flag is set. +* `match` Every time a match is found, this is emitted with the specific + thing that matched. It is not deduplicated or resolved to a realpath. +* `error` Emitted when an unexpected error is encountered, or whenever + any fs error occurs if `options.strict` is set. +* `abort` When `abort()` is called, this event is raised. 
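+
+A minimal sketch of wiring up these events (the pattern and handler
+bodies are illustrative):
+
+```javascript
+var Glob = require("glob").Glob
+
+var g = new Glob("**/*.js")
+g.on("match", function (file) {
+  // fired once per match; not deduplicated or resolved to a realpath
+  console.log("match:", file)
+})
+g.on("error", function (er) {
+  console.error("glob error", er)
+})
+g.on("end", function (matches) {
+  // all matches, sorted unless the nosort flag is set
+  console.log("found " + matches.length + " files")
+})
+```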
+ +### Methods + +* `pause` Temporarily stop the search +* `resume` Resume the search +* `abort` Stop the search forever + +### Options + +All the options that can be passed to Minimatch can also be passed to +Glob to change pattern matching behavior. Also, some have been added, +or have glob-specific ramifications. + +All options are false by default, unless otherwise noted. + +All options are added to the Glob object, as well. + +If you are running many `glob` operations, you can pass a Glob object +as the `options` argument to a subsequent operation to shortcut some +`stat` and `readdir` calls. At the very least, you may pass in shared +`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that +parallel glob operations will be sped up by sharing information about +the filesystem. + +* `cwd` The current working directory in which to search. Defaults + to `process.cwd()`. +* `root` The place where patterns starting with `/` will be mounted + onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix + systems, and `C:\` or some such on Windows.) +* `dot` Include `.dot` files in normal matches and `globstar` matches. + Note that an explicit dot in a portion of the pattern will always + match dot files. +* `nomount` By default, a pattern starting with a forward-slash will be + "mounted" onto the root setting, so that a valid filesystem path is + returned. Set this flag to disable that behavior. +* `mark` Add a `/` character to directory matches. Note that this + requires additional stat calls. +* `nosort` Don't sort the results. +* `stat` Set to true to stat *all* results. This reduces performance + somewhat, and is completely unnecessary, unless `readdir` is presumed + to be an untrustworthy indicator of file existence. +* `silent` When an unusual error is encountered when attempting to + read a directory, a warning will be printed to stderr. Set the + `silent` option to true to suppress these warnings. +* `strict` When an unusual error is encountered when attempting to + read a directory, the process will just continue on in search of + other matches. Set the `strict` option to raise an error in these + cases. +* `cache` See `cache` property above. Pass in a previously generated + cache object to save some fs calls. +* `statCache` A cache of results of filesystem information, to prevent + unnecessary stat calls. While it should not normally be necessary + to set this, you may pass the statCache from one glob() call to the + options object of another, if you know that the filesystem will not + change between calls. (See "Race Conditions" below.) +* `symlinks` A cache of known symbolic links. You may pass in a + previously generated `symlinks` object to save `lstat` calls when + resolving `**` matches. +* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead. +* `nounique` In some cases, brace-expanded patterns can result in the + same file showing up multiple times in the result set. By default, + this implementation prevents duplicates in the result set. Set this + flag to disable that behavior. +* `nonull` Set to never return an empty set, instead returning a set + containing the pattern itself. This is the default in glob(3). +* `debug` Set to enable debug logging in minimatch and glob. +* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets. +* `noglobstar` Do not match `**` against multiple filenames. (Ie, + treat it as a normal `*` instead.) +* `noext` Do not match `+(a|b)` "extglob" patterns. +* `nocase` Perform a case-insensitive match. 
Note: on + case-insensitive filesystems, non-magic patterns will match by + default, since `stat` and `readdir` will not raise errors. +* `matchBase` Perform a basename-only match if the pattern does not + contain any slash characters. That is, `*.js` would be treated as + equivalent to `**/*.js`, matching all js files in all directories. +* `nodir` Do not match directories, only files. (Note: to match + *only* directories, simply put a `/` at the end of the pattern.) +* `ignore` Add a pattern or an array of glob patterns to exclude matches. + Note: `ignore` patterns are *always* in `dot:true` mode, regardless + of any other settings. +* `follow` Follow symlinked directories when expanding `**` patterns. + Note that this can result in a lot of duplicate references in the + presence of cyclic links. +* `realpath` Set to true to call `fs.realpath` on all of the results. + In the case of a symlink that cannot be resolved, the full absolute + path to the matched entry is returned (though it will usually be a + broken symlink) +* `absolute` Set to true to always receive absolute paths for matched + files. Unlike `realpath`, this also affects the values returned in + the `match` event. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between node-glob and other +implementations, and are intentional. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.3, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +Note that symlinked directories are not crawled as part of a `**`, +though their contents may match against subsequent portions of the +pattern. This prevents infinite loops and duplicates and the like. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then glob returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +### Comments and Negation + +Previously, this module let you mark a pattern as a "comment" if it +started with a `#` character, or a "negated" pattern if it started +with a `!` character. + +These options were deprecated in version 5, and removed in version 6. + +To specify things that should not match, use the `ignore` option. + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes will always +be interpreted as escape characters, not path separators. + +Results from absolute patterns such as `/foo/*` are mounted onto the +root setting using `path.join`. On windows, this will by default result +in `/foo/*` matching `C:\foo\bar.txt`. 
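+
+A sketch of what that means in practice (the `src` directory is
+illustrative):
+
+```javascript
+var glob = require("glob")
+
+// Wrong on Windows: back-slashes are escape characters, not separators.
+// glob.sync("src\\*.js")
+
+// Right everywhere: always build patterns with forward-slashes.
+glob.sync("src/*.js")
+```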
+ +## Race Conditions + +Glob searching, by its very nature, is susceptible to race conditions, +since it relies on directory walking and such. + +As a result, it is possible that a file that exists when glob looks for +it may have been deleted or modified by the time it returns the result. + +As part of its internal implementation, this program caches all stat +and readdir calls that it makes, in order to cut down on system +overhead. However, this also makes it even more susceptible to races, +especially if the cache or statCache objects are reused between glob +calls. + +Users are thus advised not to use a glob result as a guarantee of +filesystem state in the face of rapid changes. For the vast majority +of operations, this is never a problem. + +## Glob Logo +Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo). + +The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/). + +## Contributing + +Any change to behavior (including bugfixes) must come with a test. + +Patches that fail tests or reduce performance will be rejected. + +``` +# to run tests +npm test + +# to re-generate test fixtures +npm run test-regen + +# to benchmark against bash/zsh +npm run bench + +# to profile javascript +npm run prof +``` + +![](oh-my-glob.gif) diff --git a/node_modules/globule/node_modules/glob/changelog.md b/node_modules/globule/node_modules/glob/changelog.md new file mode 100644 index 0000000..4163677 --- /dev/null +++ b/node_modules/globule/node_modules/glob/changelog.md @@ -0,0 +1,67 @@ +## 7.0 + +- Raise error if `options.cwd` is specified, and not a directory + +## 6.0 + +- Remove comment and negation pattern support +- Ignore patterns are always in `dot:true` mode + +## 5.0 + +- Deprecate comment and negation patterns +- Fix regression in `mark` and `nodir` options from making all cache + keys absolute path. +- Abort if `fs.readdir` returns an error that's unexpected +- Don't emit `match` events for ignored items +- Treat ENOTSUP like ENOTDIR in readdir + +## 4.5 + +- Add `options.follow` to always follow directory symlinks in globstar +- Add `options.realpath` to call `fs.realpath` on all results +- Always cache based on absolute path + +## 4.4 + +- Add `options.ignore` +- Fix handling of broken symlinks + +## 4.3 + +- Bump minimatch to 2.x +- Pass all tests on Windows + +## 4.2 + +- Add `glob.hasMagic` function +- Add `options.nodir` flag + +## 4.1 + +- Refactor sync and async implementations for performance +- Throw if callback provided to sync glob function +- Treat symbolic links in globstar results the same as Bash 4.3 + +## 4.0 + +- Use `^` for dependency versions (bumped major because this breaks + older npm versions) +- Ensure callbacks are only ever called once +- switch to ISC license + +## 3.x + +- Rewrite in JavaScript +- Add support for setting root, cwd, and windows support +- Cache many fs calls +- Add globstar support +- emit match events + +## 2.x + +- Use `glob.h` and `fnmatch.h` from NetBSD + +## 1.x + +- `glob.h` static binding. 
diff --git a/node_modules/globule/node_modules/glob/common.js b/node_modules/globule/node_modules/glob/common.js new file mode 100644 index 0000000..d14157a --- /dev/null +++ b/node_modules/globule/node_modules/glob/common.js @@ -0,0 +1,234 @@ +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var path = require("path") +var minimatch = require("minimatch") +var isAbsolute = require("path-is-absolute") +var Minimatch = minimatch.Minimatch + +function alphasort (a, b) { + return a.localeCompare(b, 'en') +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) + } + + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. + if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") + + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) + if (process.platform === "win32") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + self.nomount = !!options.nomount + + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. 
+ options.nonegate = true + options.nocomment = true + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +function finish (self) { + var nou = self.nounique + var all = nou ? [] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') + + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} diff --git a/node_modules/globule/node_modules/glob/glob.js b/node_modules/globule/node_modules/glob/glob.js new file mode 100644 index 0000000..dc27aef --- /dev/null +++ b/node_modules/globule/node_modules/glob/glob.js @@ -0,0 +1,788 @@ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. 
+// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. + +module.exports = glob + +var fs = require('fs') +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var inherits = require('inherits') +var EE = require('events').EventEmitter +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var globSync = require('./sync.js') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = require('inflight') +var util = require('util') +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = require('once') + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin +} + +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + + if (!pattern) + return false + + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. 
+ this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + this._processing = 0 + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false + + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } + } + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. 
+ + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. 
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = isAbsolute(e) ? e : this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) + e = abs + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
+ var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) +} diff --git a/node_modules/globule/node_modules/glob/node_modules/minimatch/LICENSE b/node_modules/globule/node_modules/glob/node_modules/minimatch/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/globule/node_modules/glob/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/globule/node_modules/glob/node_modules/minimatch/README.md b/node_modules/globule/node_modules/glob/node_modules/minimatch/README.md new file mode 100644 index 0000000..33ede1d --- /dev/null +++ b/node_modules/globule/node_modules/glob/node_modules/minimatch/README.md @@ -0,0 +1,230 @@ +# minimatch + +A minimal matching utility. + +[![Build Status](https://travis-ci.org/isaacs/minimatch.svg?branch=master)](http://travis-ci.org/isaacs/minimatch) + + +This is the matching library used internally by npm. + +It works by converting glob expressions into JavaScript `RegExp` +objects. + +## Usage + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.foo", "*.foo") // true! +minimatch("bar.foo", "*.bar") // false! +minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy! +``` + +## Features + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` + +## Minimatch Class + +Create a minimatch object by instantiating the `minimatch.Minimatch` class. + +```javascript +var Minimatch = require("minimatch").Minimatch +var mm = new Minimatch(pattern, options) +``` + +### Properties + +* `pattern` The original pattern the minimatch object represents. +* `options` The options supplied to the constructor. +* `set` A 2-dimensional array of regexp or string expressions. + Each row in the + array corresponds to a brace-expanded pattern. Each item in the row + corresponds to a single path-part. For example, the pattern + `{a,b/c}/d` would expand to a set of patterns like: + + [ [ a, d ] + , [ b, c, d ] ] + + If a portion of the pattern doesn't have any "magic" in it + (that is, it's something like `"foo"` rather than `fo*o?`), then it + will be left as a string rather than converted to a regular + expression. + +* `regexp` Created by the `makeRe` method. A single regular expression + expressing the entire pattern. This is useful in cases where you wish + to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. +* `negate` True if the pattern is negated. +* `comment` True if the pattern is a comment. +* `empty` True if the pattern is `""`. + +### Methods + +* `makeRe` Generate the `regexp` member if necessary, and return it. + Will return `false` if the pattern is invalid. +* `match(fname)` Return true if the filename matches the pattern, or + false otherwise. +* `matchOne(fileArray, patternArray, partial)` Take a `/`-split + filename, and match it against a single row in the `regExpSet`. This + method is mainly for internal use, but is exposed so that it can be + used by a glob-walker that needs to avoid excessive filesystem calls. + +All other methods are internal, and will be called as necessary. + +### minimatch(path, pattern, options) + +Main export. Tests a path against the pattern using the options. + +```javascript +var isJS = minimatch(file, "*.js", { matchBase: true }) +``` + +### minimatch.filter(pattern, options) + +Returns a function that tests its +supplied argument, suitable for use with `Array.filter`. 
Example: + +```javascript +var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) +``` + +### minimatch.match(list, pattern, options) + +Match against the list of +files, in the style of fnmatch or glob. If nothing is matched, and +options.nonull is set, then return a list containing the pattern itself. + +```javascript +var javascripts = minimatch.match(fileList, "*.js", {matchBase: true}) +``` + +### minimatch.makeRe(pattern, options) + +Make a regular expression object from the pattern. + +## Options + +All options are `false` by default. + +### debug + +Dump a ton of stuff to stderr. + +### nobrace + +Do not expand `{a,b}` and `{1..3}` brace sets. + +### noglobstar + +Disable `**` matching against multiple folder names. + +### dot + +Allow patterns to match filenames starting with a period, even if +the pattern does not explicitly have a period in that spot. + +Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` +is set. + +### noext + +Disable "extglob" style patterns like `+(a|b)`. + +### nocase + +Perform a case-insensitive match. + +### nonull + +When a match is not found by `minimatch.match`, return a list containing +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. + +### matchBase + +If set, then patterns without slashes will be matched +against the basename of the path if the path contains slashes. For example, +`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + +### nocomment + +Suppress the behavior of treating `#` at the start of a pattern as a +comment. + +### nonegate + +Suppress the behavior of treating a leading `!` character as negation. + +### flipNegate + +Returns from negate expressions the same as if they were not negated. +(I.e., true on a hit, false on a miss.) + +### partial + +Compare a partial path to a pattern. As long as the parts of the path that +are present are not contradicted by the pattern, it will be treated as a +match. This is useful in applications where you're walking through a +folder structure, and don't yet have the full path, but want to ensure that +you do not walk down paths that can never be a match. + +For example, + +```js +minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d +minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d +minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a +``` + +### allowWindowsEscape + +The Windows path separator `\` is by default converted to `/`, which +prohibits the usage of `\` as an escape character. This flag skips that +behavior and allows using the escape character. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between minimatch and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior.
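+ +As a short sketch of the negation and comment rules above (the expected results follow from the rules as just described; `minimatch` is the main export shown earlier): + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.js", "!*.js") // false: the leading ! negates the match +minimatch("bar.css", "!*.js") // true +minimatch("bar.js", "!!*.js") // true: two ! cancel out + +minimatch("#note", "#note") // false: # turns the pattern into a comment +minimatch("#note", "\\#note") // true: \# matches a literal # +```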
+ +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then minimatch.match returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. diff --git a/node_modules/globule/node_modules/glob/node_modules/minimatch/minimatch.js b/node_modules/globule/node_modules/glob/node_modules/minimatch/minimatch.js new file mode 100644 index 0000000..fda45ad --- /dev/null +++ b/node_modules/globule/node_modules/glob/node_modules/minimatch/minimatch.js @@ -0,0 +1,947 @@ +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = (function () { try { return require('path') } catch (e) {}}()) || { + sep: '/' +} +minimatch.sep = path.sep + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = require('brace-expansion') + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. 
+var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + b = b || {} + var t = {} + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch + } + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + m.Minimatch.defaults = function defaults (options) { + return orig.defaults(ext(def, options)).Minimatch + } + + m.filter = function filter (pattern, options) { + return orig.filter(pattern, ext(def, options)) + } + + m.defaults = function defaults (options) { + return orig.defaults(ext(def, options)) + } + + m.makeRe = function makeRe (pattern, options) { + return orig.makeRe(pattern, ext(def, options)) + } + + m.braceExpand = function braceExpand (pattern, options) { + return orig.braceExpand(pattern, ext(def, options)) + } + + m.match = function (list, pattern, options) { + return orig.match(list, pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + assertValidPattern(pattern) + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + assertValidPattern(pattern) + + if (!options) options = {} + + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (!options.allowWindowsEscape && path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + this.partial = !!options.partial + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. 
+ // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + assertValidPattern(pattern) + + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +var MAX_PATTERN_LENGTH = 1024 * 64 +var assertValidPattern = function (pattern) { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } + + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + assertValidPattern(pattern) + + var options = this.options + + // shortcuts + if (pattern === '**') { + if (!options.noglobstar) + return GLOBSTAR + else + pattern = '*' + } + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? 
'' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + /* istanbul ignore next */ + case '/': { + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + } + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + // split where the last [ was, make sure we don't have + // an invalid re. 
if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '[': case '.': case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. 
+ for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' 
+ re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = function match (f, partial) { + if (typeof partial === 'undefined') partial = this.partial + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + /* istanbul ignore if */ + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. 
+ // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + hit = f === p + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') + } + + // should be unreachable. 
+ /* istanbul ignore next */ + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} diff --git a/node_modules/globule/node_modules/glob/node_modules/minimatch/package.json b/node_modules/globule/node_modules/glob/node_modules/minimatch/package.json new file mode 100644 index 0000000..566efdf --- /dev/null +++ b/node_modules/globule/node_modules/glob/node_modules/minimatch/package.json @@ -0,0 +1,33 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "3.1.2", + "publishConfig": { + "tag": "v3-legacy" + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "engines": { + "node": "*" + }, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "devDependencies": { + "tap": "^15.1.6" + }, + "license": "ISC", + "files": [ + "minimatch.js" + ] +} diff --git a/node_modules/globule/node_modules/glob/package.json b/node_modules/globule/node_modules/glob/package.json new file mode 100644 index 0000000..b345ae1 --- /dev/null +++ b/node_modules/globule/node_modules/glob/package.json @@ -0,0 +1,51 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "glob", + "description": "a little globber", + "version": "7.1.7", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "glob.js", + "files": [ + "glob.js", + "sync.js", + "common.js" + ], + "engines": { + "node": "*" + }, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "devDependencies": { + "mkdirp": "0", + "rimraf": "^2.2.8", + "tap": "^15.0.6", + "tick": "0.0.6" + }, + "tap": { + "before": "test/00-setup.js", + "after": "test/zz-cleanup.js", + "jobs": 1 + }, + "scripts": { + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test": "tap", + "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", + "bench": "bash benchmark.sh", + "prof": "bash prof.sh && cat profile.txt", + "benchclean": "node benchclean.js" + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } +} diff --git a/node_modules/globule/node_modules/glob/sync.js b/node_modules/globule/node_modules/glob/sync.js new file mode 100644 index 0000000..10b0ed2 --- /dev/null +++ b/node_modules/globule/node_modules/glob/sync.js @@ -0,0 +1,484 @@ +module.exports = globSync +globSync.GlobSync = GlobSync + +var fs = require('fs') +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var Glob = require('./glob.js').Glob +var util = require('util') +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: 
https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. 
+ if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. + remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return + + var abs = this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) { + e = abs + } + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null + } + } + + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } + + if (lstat && lstat.isSymbolicLink()) { + try { + stat = fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = true + if (stat) + c = stat.isDirectory() ? 
'DIR' : 'FILE' + + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} diff --git a/node_modules/globule/node_modules/minimatch/LICENSE b/node_modules/globule/node_modules/minimatch/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/globule/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/globule/node_modules/minimatch/README.md b/node_modules/globule/node_modules/minimatch/README.md new file mode 100644 index 0000000..2e9bd0f --- /dev/null +++ b/node_modules/globule/node_modules/minimatch/README.md @@ -0,0 +1,224 @@ +# minimatch + +A minimal matching utility. + +[![Build Status](https://travis-ci.org/isaacs/minimatch.svg?branch=master)](http://travis-ci.org/isaacs/minimatch) + + +This is the matching library used internally by npm. + +It works by converting glob expressions into JavaScript `RegExp` +objects. + +## Usage + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.foo", "*.foo") // true! +minimatch("bar.foo", "*.bar") // false! +minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy! +``` + +## Features + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` + +## Minimatch Class + +Create a minimatch object by instantiating the `minimatch.Minimatch` class. + +```javascript +var Minimatch = require("minimatch").Minimatch +var mm = new Minimatch(pattern, options) +``` + +### Properties + +* `pattern` The original pattern the minimatch object represents. +* `options` The options supplied to the constructor. +* `set` A 2-dimensional array of regexp or string expressions. + Each row in the + array corresponds to a brace-expanded pattern. Each item in the row + corresponds to a single path-part. For example, the pattern + `{a,b/c}/d` would expand to a set of patterns like: + + [ [ a, d ] + , [ b, c, d ] ] + + If a portion of the pattern doesn't have any "magic" in it + (that is, it's something like `"foo"` rather than `fo*o?`), then it + will be left as a string rather than converted to a regular + expression. + +* `regexp` Created by the `makeRe` method. A single regular expression + expressing the entire pattern. This is useful in cases where you wish + to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. +* `negate` True if the pattern is negated. +* `comment` True if the pattern is a comment. +* `empty` True if the pattern is `""`. 
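+ +For instance, a small sketch of what these properties hold for the brace pattern shown above (the values follow from the expansion just described): + +```javascript +var Minimatch = require("minimatch").Minimatch +var mm = new Minimatch("{a,b/c}/d") + +mm.pattern // "{a,b/c}/d" +mm.set // [ [ 'a', 'd' ], [ 'b', 'c', 'd' ] ] (no magic, so parts stay strings) +mm.negate // false +mm.comment // false +```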
+ +### Methods + +* `makeRe` Generate the `regexp` member if necessary, and return it. + Will return `false` if the pattern is invalid. +* `match(fname)` Return true if the filename matches the pattern, or + false otherwise. +* `matchOne(fileArray, patternArray, partial)` Take a `/`-split + filename, and match it against a single row in the `regExpSet`. This + method is mainly for internal use, but is exposed so that it can be + used by a glob-walker that needs to avoid excessive filesystem calls. + +All other methods are internal, and will be called as necessary. + +### minimatch(path, pattern, options) + +Main export. Tests a path against the pattern using the options. + +```javascript +var isJS = minimatch(file, "*.js", { matchBase: true }) +``` + +### minimatch.filter(pattern, options) + +Returns a function that tests its +supplied argument, suitable for use with `Array.filter`. Example: + +```javascript +var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) +``` + +### minimatch.match(list, pattern, options) + +Match against the list of +files, in the style of fnmatch or glob. If nothing is matched, and +options.nonull is set, then return a list containing the pattern itself. + +```javascript +var javascripts = minimatch.match(fileList, "*.js", {matchBase: true}) +``` + +### minimatch.makeRe(pattern, options) + +Make a regular expression object from the pattern. + +## Options + +All options are `false` by default. + +### debug + +Dump a ton of stuff to stderr. + +### nobrace + +Do not expand `{a,b}` and `{1..3}` brace sets. + +### noglobstar + +Disable `**` matching against multiple folder names. + +### dot + +Allow patterns to match filenames starting with a period, even if +the pattern does not explicitly have a period in that spot. + +Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` +is set. + +### noext + +Disable "extglob" style patterns like `+(a|b)`. + +### nocase + +Perform a case-insensitive match. + +### nonull + +When a match is not found by `minimatch.match`, return a list containing +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. + +### matchBase + +If set, then patterns without slashes will be matched +against the basename of the path if the path contains slashes. For example, +`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + +### nocomment + +Suppress the behavior of treating `#` at the start of a pattern as a +comment. + +### nonegate + +Suppress the behavior of treating a leading `!` character as negation. + +### flipNegate + +Returns from negate expressions the same as if they were not negated. +(I.e., true on a hit, false on a miss.) + +### partial + +Compare a partial path to a pattern. As long as the parts of the path that +are present are not contradicted by the pattern, it will be treated as a +match. This is useful in applications where you're walking through a +folder structure, and don't yet have the full path, but want to ensure that +you do not walk down paths that can never be a match.
+ +For example, + +```js +minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d +minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d +minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a +``` + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between minimatch and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then minimatch.match returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. diff --git a/node_modules/globule/node_modules/minimatch/minimatch.js b/node_modules/globule/node_modules/minimatch/minimatch.js new file mode 100644 index 0000000..caf5607 --- /dev/null +++ b/node_modules/globule/node_modules/minimatch/minimatch.js @@ -0,0 +1,947 @@ +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = (function () { try { return require('path') } catch (e) {}}()) || { + sep: '/' +} +minimatch.sep = path.sep + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = require('brace-expansion') + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. 
+var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + b = b || {} + var t = {} + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch + } + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + m.Minimatch.defaults = function defaults (options) { + return orig.defaults(ext(def, options)).Minimatch + } + + m.filter = function filter (pattern, options) { + return orig.filter(pattern, ext(def, options)) + } + + m.defaults = function defaults (options) { + return orig.defaults(ext(def, options)) + } + + m.makeRe = function makeRe (pattern, options) { + return orig.makeRe(pattern, ext(def, options)) + } + + m.braceExpand = function braceExpand (pattern, options) { + return orig.braceExpand(pattern, ext(def, options)) + } + + m.match = function (list, pattern, options) { + return orig.match(list, pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + assertValidPattern(pattern) + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + assertValidPattern(pattern) + + if (!options) options = {} + + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + this.partial = !!options.partial + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. 
+ // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + assertValidPattern(pattern) + + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +var MAX_PATTERN_LENGTH = 1024 * 64 +var assertValidPattern = function (pattern) { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } + + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + assertValidPattern(pattern) + + var options = this.options + + // shortcuts + if (pattern === '**') { + if (!options.noglobstar) + return GLOBSTAR + else + pattern = '*' + } + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? 
'' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + /* istanbul ignore next */ + case '/': { + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + } + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + // split where the last [ was, make sure we don't have + // an invalid re. 
if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '[': case '.': case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. 
+ for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' 
+ re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = function match (f, partial) { + if (typeof partial === 'undefined') partial = this.partial + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + /* istanbul ignore if */ + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. 
+ // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + hit = f === p + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') + } + + // should be unreachable. 
+ /* istanbul ignore next */ + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} diff --git a/node_modules/globule/node_modules/minimatch/package.json b/node_modules/globule/node_modules/minimatch/package.json new file mode 100644 index 0000000..f339044 --- /dev/null +++ b/node_modules/globule/node_modules/minimatch/package.json @@ -0,0 +1,33 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "3.0.8", + "publishConfig": { + "tag": "v3.0-legacy" + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "engines": { + "node": "*" + }, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "devDependencies": { + "tap": "^15.1.6" + }, + "license": "ISC", + "files": [ + "minimatch.js" + ] +} diff --git a/node_modules/globule/package.json b/node_modules/globule/package.json new file mode 100644 index 0000000..4c1ab48 --- /dev/null +++ b/node_modules/globule/package.json @@ -0,0 +1,50 @@ +{ + "name": "globule", + "description": "An easy-to-use wildcard globbing library.", + "version": "1.3.4", + "homepage": "https://github.com/cowboy/node-globule", + "author": { + "name": "\"Cowboy\" Ben Alman", + "url": "http://benalman.com/" + }, + "repository": { + "type": "git", + "url": "git://github.com/cowboy/node-globule.git" + }, + "bugs": { + "url": "https://github.com/cowboy/node-globule/issues" + }, + "license": "MIT", + "main": "lib/globule", + "files": [ + "lib" + ], + "engines": { + "node": ">= 0.10" + }, + "scripts": { + "test": "grunt" + }, + "devDependencies": { + "grunt-contrib-jshint": "^1.0.0", + "grunt-contrib-nodeunit": "^2.0.0", + "grunt-contrib-watch": "^1.1.0", + "grunt": "^1.0.2" + }, + "keywords": [ + "glob", + "file", + "match", + "mapping", + "expand", + "wildcard", + "pattern", + "sync", + "awesome" + ], + "dependencies": { + "glob": "~7.1.1", + "lodash": "^4.17.21", + "minimatch": "~3.0.2" + } +} diff --git a/node_modules/graceful-fs/LICENSE b/node_modules/graceful-fs/LICENSE new file mode 100644 index 0000000..e906a25 --- /dev/null +++ b/node_modules/graceful-fs/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2011-2022 Isaac Z. Schlueter, Ben Noordhuis, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
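
The minimatch 3.0.8 copy vendored above under `globule/node_modules` is the legacy `v3.0-legacy` publish line, which satisfies globule's `"minimatch": "~3.0.2"` range. A minimal sketch, not part of the diff, of the API that the `minimatch.js` source above implements (all names come from that source, assuming `require('minimatch')` resolves to this copy):

```js
// Minimal sketch (not part of the diff): exercising the vendored minimatch API.
const minimatch = require('minimatch')

minimatch('bar.foo', '*.foo')    // true: '*' compiles to the [^/]*? "star" regexp
minimatch('a/x/y/b', 'a/**/b')   // true: a bare '**' part becomes GLOBSTAR and swallows x/y
minimatch('bar.foo', '!*.foo')   // false: parseNegate() flips the result for a leading '!'

// nonull: with no matches, match() returns the pattern as provided
minimatch.match([], '\\*a\\?', { nonull: true }) // ['\\*a\\?']

// makeRe: the compiled '^(?:...)$'-anchored regexp for a pattern
console.log(minimatch.makeRe('a/*/c').test('a/b/c')) // true
```
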
diff --git a/node_modules/graceful-fs/README.md b/node_modules/graceful-fs/README.md
new file mode 100644
index 0000000..82d6e4d
--- /dev/null
+++ b/node_modules/graceful-fs/README.md
@@ -0,0 +1,143 @@
+# graceful-fs
+
+graceful-fs functions as a drop-in replacement for the fs module,
+making various improvements.
+
+The improvements are meant to normalize behavior across different
+platforms and environments, and to make filesystem access more
+resilient to errors.
+
+## Improvements over [fs module](https://nodejs.org/api/fs.html)
+
+* Queues up `open` and `readdir` calls, and retries them once
+  something closes if there is an EMFILE error from too many file
+  descriptors.
+* fixes `lchmod` for Node versions prior to 0.6.2.
+* implements `fs.lutimes` if possible. Otherwise it becomes a noop.
+* ignores `EINVAL` and `EPERM` errors in `chown`, `fchown` or
+  `lchown` if the user isn't root.
+* makes `lchmod` and `lchown` become noops, if not available.
+* retries reading a file if `read` results in EAGAIN error.
+
+On Windows, it retries renaming a file for up to one second if `EACCES`
+or `EPERM` error occurs, likely because antivirus software has locked
+the directory.
+
+## USAGE
+
+```javascript
+// use just like fs
+var fs = require('graceful-fs')
+
+// now go and do stuff with it...
+fs.readFile('some-file-or-whatever', (err, data) => {
+  // Do stuff here.
+})
+```
+
+## Sync methods
+
+This module cannot intercept or handle `EMFILE` or `ENFILE` errors from sync
+methods. If you use sync methods which open file descriptors then you are
+responsible for dealing with any errors.
+
+This is a known limitation, not a bug.
+
+## Global Patching
+
+If you want to patch the global fs module (or any other fs-like
+module) you can do this:
+
+```javascript
+// Make sure to read the caveat below.
+var realFs = require('fs')
+var gracefulFs = require('graceful-fs')
+gracefulFs.gracefulify(realFs)
+```
+
+This should only ever be done at the top-level application layer, in
+order to delay on EMFILE errors from any fs-using dependencies. You
+should **not** do this in a library, because it can cause unexpected
+delays in other parts of the program.
+
+## Changes
+
+This module is fairly stable at this point, and used by a lot of
+things. That being said, because it implements a subtle behavior
+change in a core part of the node API, even modest changes can be
+extremely breaking, and the versioning is thus biased towards
+bumping the major when in doubt.
+
+The main change between major versions has been switching between
+providing a fully-patched `fs` module vs monkey-patching the node core
+builtin, and the approach by which a non-monkey-patched `fs` was
+created.
+
+The goal is to trade `EMFILE` errors for slower fs operations. So, if
+you try to open a zillion files, rather than crashing, `open`
+operations will be queued up and wait for something else to `close`.
+
+There are advantages to each approach. Monkey-patching the fs means
+that no `EMFILE` errors can possibly occur anywhere in your
+application, because everything is using the same core `fs` module,
+which is patched. However, it can also obviously cause undesirable
+side-effects, especially if the module is loaded multiple times.
+
+Implementing a separate-but-identical patched `fs` module is more
+surgical (and doesn't run the risk of patching multiple times), but
+also imposes the challenge of keeping in sync with the core module.
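
That queue-instead-of-crash trade-off is straightforward to exercise. A minimal sketch, not part of the diff, assuming this vendored copy; run it under a deliberately low descriptor limit (e.g. `ulimit -n 256`), where the plain `fs` builtin would start throwing `EMFILE`:

```js
// Minimal sketch (not part of the diff): stress the EMFILE queue.
// graceful-fs queues each open() that fails with EMFILE/ENFILE and
// retries it as earlier descriptors are closed, so this completes.
var fs = require('graceful-fs')

var pending = 10000
for (var i = 0; i < pending; i++) {
  fs.open(__filename, 'r', function (err, fd) {
    if (err) throw err // EMFILE is absorbed by the queue; other errors still surface
    fs.close(fd, function () {
      if (--pending === 0) console.log('all 10000 opens completed without EMFILE')
    })
  })
}
```
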
+
+The current approach loads the `fs` module, and then creates a
+lookalike object that has all the same methods, except a few that are
+patched. It is safe to use in all versions of Node from 0.8 through
+7.0.
+
+### v4
+
+* Do not monkey-patch the fs module. This module may now be used as a
+  drop-in dep, and users can opt into monkey-patching the fs builtin
+  if their app requires it.
+
+### v3
+
+* Monkey-patch fs, because the eval approach no longer works on recent
+  node.
+* fixed possible type-error throw if rename fails on windows
+* verify that we *never* get EMFILE errors
+* Ignore ENOSYS from chmod/chown
+* clarify that graceful-fs must be used as a drop-in
+
+### v2.1.0
+
+* Use eval rather than monkey-patching fs.
+* readdir: Always sort the results
+* win32: requeue a file if error has an OK status
+
+### v2.0
+
+* A return to monkey patching
+* wrap process.cwd
+
+### v1.1
+
+* wrap readFile
+* Wrap fs.writeFile.
+* readdir protection
+* Don't clobber the fs builtin
+* Handle fs.read EAGAIN errors by trying again
+* Expose the curOpen counter
+* No-op lchown/lchmod if not implemented
+* fs.rename patch only for win32
+* Patch fs.rename to handle AV software on Windows
+* Close #4 Chown should not fail on einval or eperm if non-root
+* Fix isaacs/fstream#1 Only wrap fs one time
+* Fix #3 Start at 1024 max files, then back off on EMFILE
+* lutimes that doesn't blow up on Linux
+* A full on-rewrite using a queue instead of just swallowing the EMFILE error
+* Wrap Read/Write streams as well
+
+### 1.0
+
+* Update engines for node 0.6
+* Be lstat-graceful on Windows
+* first
diff --git a/node_modules/graceful-fs/clone.js b/node_modules/graceful-fs/clone.js
new file mode 100644
index 0000000..dff3cc8
--- /dev/null
+++ b/node_modules/graceful-fs/clone.js
@@ -0,0 +1,23 @@
+'use strict'
+
+module.exports = clone
+
+var getPrototypeOf = Object.getPrototypeOf || function (obj) {
+  return obj.__proto__
+}
+
+function clone (obj) {
+  if (obj === null || typeof obj !== 'object')
+    return obj
+
+  if (obj instanceof Object)
+    var copy = { __proto__: getPrototypeOf(obj) }
+  else
+    var copy = Object.create(null)
+
+  Object.getOwnPropertyNames(obj).forEach(function (key) {
+    Object.defineProperty(copy, key, Object.getOwnPropertyDescriptor(obj, key))
+  })
+
+  return copy
+}
diff --git a/node_modules/graceful-fs/graceful-fs.js b/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 0000000..8d5b89e
--- /dev/null
+++ b/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,448 @@
+var fs = require('fs')
+var polyfills = require('./polyfills.js')
+var legacy = require('./legacy-streams.js')
+var clone = require('./clone.js')
+
+var util = require('util')
+
+/* istanbul ignore next - node 0.x polyfill */
+var gracefulQueue
+var previousSymbol
+
+/* istanbul ignore else - node 0.x polyfill */
+if (typeof Symbol === 'function' && typeof Symbol.for === 'function') {
+  gracefulQueue = Symbol.for('graceful-fs.queue')
+  // This is used in testing by future versions
+  previousSymbol = Symbol.for('graceful-fs.previous')
+} else {
+  gracefulQueue = '___graceful-fs.queue'
+  previousSymbol = '___graceful-fs.previous'
+}
+
+function noop () {}
+
+function publishQueue(context, queue) {
+  Object.defineProperty(context, gracefulQueue, {
+    get: function() {
+      return queue
+    }
+  })
+}
+
+var debug = noop
+if (util.debuglog)
+  debug = util.debuglog('gfs4')
+else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || ''))
+  debug = function() {
+    var m = util.format.apply(util, arguments)
+    m = 'GFS4: ' +
m.split(/\n/).join('\nGFS4: ') + console.error(m) + } + +// Once time initialization +if (!fs[gracefulQueue]) { + // This queue can be shared by multiple loaded instances + var queue = global[gracefulQueue] || [] + publishQueue(fs, queue) + + // Patch fs.close/closeSync to shared queue version, because we need + // to retry() whenever a close happens *anywhere* in the program. + // This is essential when multiple graceful-fs instances are + // in play at the same time. + fs.close = (function (fs$close) { + function close (fd, cb) { + return fs$close.call(fs, fd, function (err) { + // This function uses the graceful-fs shared queue + if (!err) { + resetQueue() + } + + if (typeof cb === 'function') + cb.apply(this, arguments) + }) + } + + Object.defineProperty(close, previousSymbol, { + value: fs$close + }) + return close + })(fs.close) + + fs.closeSync = (function (fs$closeSync) { + function closeSync (fd) { + // This function uses the graceful-fs shared queue + fs$closeSync.apply(fs, arguments) + resetQueue() + } + + Object.defineProperty(closeSync, previousSymbol, { + value: fs$closeSync + }) + return closeSync + })(fs.closeSync) + + if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { + process.on('exit', function() { + debug(fs[gracefulQueue]) + require('assert').equal(fs[gracefulQueue].length, 0) + }) + } +} + +if (!global[gracefulQueue]) { + publishQueue(global, fs[gracefulQueue]); +} + +module.exports = patch(clone(fs)) +if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { + module.exports = patch(fs) + fs.__patched = true; +} + +function patch (fs) { + // Everything that references the open() function needs to be in here + polyfills(fs) + fs.gracefulify = patch + + fs.createReadStream = createReadStream + fs.createWriteStream = createWriteStream + var fs$readFile = fs.readFile + fs.readFile = readFile + function readFile (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$readFile(path, options, cb) + + function go$readFile (path, options, cb, startTime) { + return fs$readFile(path, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$readFile, [path, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + var fs$writeFile = fs.writeFile + fs.writeFile = writeFile + function writeFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$writeFile(path, data, options, cb) + + function go$writeFile (path, data, options, cb, startTime) { + return fs$writeFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$writeFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + var fs$appendFile = fs.appendFile + if (fs$appendFile) + fs.appendFile = appendFile + function appendFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$appendFile(path, data, options, cb) + + function go$appendFile (path, data, options, cb, startTime) { + return fs$appendFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$appendFile, [path, data, options, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, 
arguments) + } + }) + } + } + + var fs$copyFile = fs.copyFile + if (fs$copyFile) + fs.copyFile = copyFile + function copyFile (src, dest, flags, cb) { + if (typeof flags === 'function') { + cb = flags + flags = 0 + } + return go$copyFile(src, dest, flags, cb) + + function go$copyFile (src, dest, flags, cb, startTime) { + return fs$copyFile(src, dest, flags, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$copyFile, [src, dest, flags, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + var fs$readdir = fs.readdir + fs.readdir = readdir + var noReaddirOptionVersions = /^v[0-5]\./ + function readdir (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + var go$readdir = noReaddirOptionVersions.test(process.version) + ? function go$readdir (path, options, cb, startTime) { + return fs$readdir(path, fs$readdirCallback( + path, options, cb, startTime + )) + } + : function go$readdir (path, options, cb, startTime) { + return fs$readdir(path, options, fs$readdirCallback( + path, options, cb, startTime + )) + } + + return go$readdir(path, options, cb) + + function fs$readdirCallback (path, options, cb, startTime) { + return function (err, files) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([ + go$readdir, + [path, options, cb], + err, + startTime || Date.now(), + Date.now() + ]) + else { + if (files && files.sort) + files.sort() + + if (typeof cb === 'function') + cb.call(this, err, files) + } + } + } + } + + if (process.version.substr(0, 4) === 'v0.8') { + var legStreams = legacy(fs) + ReadStream = legStreams.ReadStream + WriteStream = legStreams.WriteStream + } + + var fs$ReadStream = fs.ReadStream + if (fs$ReadStream) { + ReadStream.prototype = Object.create(fs$ReadStream.prototype) + ReadStream.prototype.open = ReadStream$open + } + + var fs$WriteStream = fs.WriteStream + if (fs$WriteStream) { + WriteStream.prototype = Object.create(fs$WriteStream.prototype) + WriteStream.prototype.open = WriteStream$open + } + + Object.defineProperty(fs, 'ReadStream', { + get: function () { + return ReadStream + }, + set: function (val) { + ReadStream = val + }, + enumerable: true, + configurable: true + }) + Object.defineProperty(fs, 'WriteStream', { + get: function () { + return WriteStream + }, + set: function (val) { + WriteStream = val + }, + enumerable: true, + configurable: true + }) + + // legacy names + var FileReadStream = ReadStream + Object.defineProperty(fs, 'FileReadStream', { + get: function () { + return FileReadStream + }, + set: function (val) { + FileReadStream = val + }, + enumerable: true, + configurable: true + }) + var FileWriteStream = WriteStream + Object.defineProperty(fs, 'FileWriteStream', { + get: function () { + return FileWriteStream + }, + set: function (val) { + FileWriteStream = val + }, + enumerable: true, + configurable: true + }) + + function ReadStream (path, options) { + if (this instanceof ReadStream) + return fs$ReadStream.apply(this, arguments), this + else + return ReadStream.apply(Object.create(ReadStream.prototype), arguments) + } + + function ReadStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + if (that.autoClose) + that.destroy() + + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + that.read() + } + }) + } + + function WriteStream (path, options) { + if (this instanceof 
WriteStream) + return fs$WriteStream.apply(this, arguments), this + else + return WriteStream.apply(Object.create(WriteStream.prototype), arguments) + } + + function WriteStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + that.destroy() + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + } + }) + } + + function createReadStream (path, options) { + return new fs.ReadStream(path, options) + } + + function createWriteStream (path, options) { + return new fs.WriteStream(path, options) + } + + var fs$open = fs.open + fs.open = open + function open (path, flags, mode, cb) { + if (typeof mode === 'function') + cb = mode, mode = null + + return go$open(path, flags, mode, cb) + + function go$open (path, flags, mode, cb, startTime) { + return fs$open(path, flags, mode, function (err, fd) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$open, [path, flags, mode, cb], err, startTime || Date.now(), Date.now()]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + } + }) + } + } + + return fs +} + +function enqueue (elem) { + debug('ENQUEUE', elem[0].name, elem[1]) + fs[gracefulQueue].push(elem) + retry() +} + +// keep track of the timeout between retry() calls +var retryTimer + +// reset the startTime and lastTime to now +// this resets the start of the 60 second overall timeout as well as the +// delay between attempts so that we'll retry these jobs sooner +function resetQueue () { + var now = Date.now() + for (var i = 0; i < fs[gracefulQueue].length; ++i) { + // entries that are only a length of 2 are from an older version, don't + // bother modifying those since they'll be retried anyway. + if (fs[gracefulQueue][i].length > 2) { + fs[gracefulQueue][i][3] = now // startTime + fs[gracefulQueue][i][4] = now // lastTime + } + } + // call retry to make sure we're actively processing the queue + retry() +} + +function retry () { + // clear the timer and remove it to help prevent unintended concurrency + clearTimeout(retryTimer) + retryTimer = undefined + + if (fs[gracefulQueue].length === 0) + return + + var elem = fs[gracefulQueue].shift() + var fn = elem[0] + var args = elem[1] + // these items may be unset if they were added by an older graceful-fs + var err = elem[2] + var startTime = elem[3] + var lastTime = elem[4] + + // if we don't have a startTime we have no way of knowing if we've waited + // long enough, so go ahead and retry this item now + if (startTime === undefined) { + debug('RETRY', fn.name, args) + fn.apply(null, args) + } else if (Date.now() - startTime >= 60000) { + // it's been more than 60 seconds total, bail now + debug('TIMEOUT', fn.name, args) + var cb = args.pop() + if (typeof cb === 'function') + cb.call(null, err) + } else { + // the amount of time between the last attempt and right now + var sinceAttempt = Date.now() - lastTime + // the amount of time between when we first tried, and when we last tried + // rounded up to at least 1 + var sinceStart = Math.max(lastTime - startTime, 1) + // backoff. 
wait longer than the total time we've been retrying, but only + // up to a maximum of 100ms + var desiredDelay = Math.min(sinceStart * 1.2, 100) + // it's been long enough since the last retry, do it again + if (sinceAttempt >= desiredDelay) { + debug('RETRY', fn.name, args) + fn.apply(null, args.concat([startTime])) + } else { + // if we can't do this job yet, push it to the end of the queue + // and let the next iteration check again + fs[gracefulQueue].push(elem) + } + } + + // schedule our next run if one isn't already scheduled + if (retryTimer === undefined) { + retryTimer = setTimeout(retry, 0) + } +} diff --git a/node_modules/graceful-fs/legacy-streams.js b/node_modules/graceful-fs/legacy-streams.js new file mode 100644 index 0000000..d617b50 --- /dev/null +++ b/node_modules/graceful-fs/legacy-streams.js @@ -0,0 +1,118 @@ +var Stream = require('stream').Stream + +module.exports = legacy + +function legacy (fs) { + return { + ReadStream: ReadStream, + WriteStream: WriteStream + } + + function ReadStream (path, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path, options); + + Stream.call(this); + + var self = this; + + this.path = path; + this.fd = null; + this.readable = true; + this.paused = false; + + this.flags = 'r'; + this.mode = 438; /*=0666*/ + this.bufferSize = 64 * 1024; + + options = options || {}; + + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } + + if (this.encoding) this.setEncoding(this.encoding); + + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.end === undefined) { + this.end = Infinity; + } else if ('number' !== typeof this.end) { + throw TypeError('end must be a Number'); + } + + if (this.start > this.end) { + throw new Error('start must be <= end'); + } + + this.pos = this.start; + } + + if (this.fd !== null) { + process.nextTick(function() { + self._read(); + }); + return; + } + + fs.open(this.path, this.flags, this.mode, function (err, fd) { + if (err) { + self.emit('error', err); + self.readable = false; + return; + } + + self.fd = fd; + self.emit('open', fd); + self._read(); + }) + } + + function WriteStream (path, options) { + if (!(this instanceof WriteStream)) return new WriteStream(path, options); + + Stream.call(this); + + this.path = path; + this.fd = null; + this.writable = true; + + this.flags = 'w'; + this.encoding = 'binary'; + this.mode = 438; /*=0666*/ + this.bytesWritten = 0; + + options = options || {}; + + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } + + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); + } + if (this.start < 0) { + throw new Error('start must be >= zero'); + } + + this.pos = this.start; + } + + this.busy = false; + this._queue = []; + + if (this.fd === null) { + this._open = fs.open; + this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); + this.flush(); + } + } +} diff --git a/node_modules/graceful-fs/package.json b/node_modules/graceful-fs/package.json new file mode 100644 index 0000000..87babf0 --- /dev/null +++ b/node_modules/graceful-fs/package.json @@ -0,0 +1,53 @@ +{ + "name": "graceful-fs", + "description": "A drop-in replacement for fs, 
making various improvements.", + "version": "4.2.11", + "repository": { + "type": "git", + "url": "https://github.com/isaacs/node-graceful-fs" + }, + "main": "graceful-fs.js", + "directories": { + "test": "test" + }, + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "test": "nyc --silent node test.js | tap -c -", + "posttest": "nyc report" + }, + "keywords": [ + "fs", + "module", + "reading", + "retry", + "retries", + "queue", + "error", + "errors", + "handling", + "EMFILE", + "EAGAIN", + "EINVAL", + "EPERM", + "EACCESS" + ], + "license": "ISC", + "devDependencies": { + "import-fresh": "^2.0.0", + "mkdirp": "^0.5.0", + "rimraf": "^2.2.8", + "tap": "^16.3.4" + }, + "files": [ + "fs.js", + "graceful-fs.js", + "legacy-streams.js", + "polyfills.js", + "clone.js" + ], + "tap": { + "reporter": "classic" + } +} diff --git a/node_modules/graceful-fs/polyfills.js b/node_modules/graceful-fs/polyfills.js new file mode 100644 index 0000000..453f1a9 --- /dev/null +++ b/node_modules/graceful-fs/polyfills.js @@ -0,0 +1,355 @@ +var constants = require('constants') + +var origCwd = process.cwd +var cwd = null + +var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform + +process.cwd = function() { + if (!cwd) + cwd = origCwd.call(process) + return cwd +} +try { + process.cwd() +} catch (er) {} + +// This check is needed until node.js 12 is required +if (typeof process.chdir === 'function') { + var chdir = process.chdir + process.chdir = function (d) { + cwd = null + chdir.call(process, d) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(process.chdir, chdir) +} + +module.exports = patch + +function patch (fs) { + // (re-)implement some things that are known busted or missing. + + // lchmod, broken prior to 0.6.2 + // back-port the fix here. + if (constants.hasOwnProperty('O_SYMLINK') && + process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { + patchLchmod(fs) + } + + // lutimes implementation, or no-op + if (!fs.lutimes) { + patchLutimes(fs) + } + + // https://github.com/isaacs/node-graceful-fs/issues/4 + // Chown should not fail on einval or eperm if non-root. + // It should not fail on enosys ever, as this just indicates + // that a fs doesn't support the intended operation. + + fs.chown = chownFix(fs.chown) + fs.fchown = chownFix(fs.fchown) + fs.lchown = chownFix(fs.lchown) + + fs.chmod = chmodFix(fs.chmod) + fs.fchmod = chmodFix(fs.fchmod) + fs.lchmod = chmodFix(fs.lchmod) + + fs.chownSync = chownFixSync(fs.chownSync) + fs.fchownSync = chownFixSync(fs.fchownSync) + fs.lchownSync = chownFixSync(fs.lchownSync) + + fs.chmodSync = chmodFixSync(fs.chmodSync) + fs.fchmodSync = chmodFixSync(fs.fchmodSync) + fs.lchmodSync = chmodFixSync(fs.lchmodSync) + + fs.stat = statFix(fs.stat) + fs.fstat = statFix(fs.fstat) + fs.lstat = statFix(fs.lstat) + + fs.statSync = statFixSync(fs.statSync) + fs.fstatSync = statFixSync(fs.fstatSync) + fs.lstatSync = statFixSync(fs.lstatSync) + + // if lchmod/lchown do not exist, then make them no-ops + if (fs.chmod && !fs.lchmod) { + fs.lchmod = function (path, mode, cb) { + if (cb) process.nextTick(cb) + } + fs.lchmodSync = function () {} + } + if (fs.chown && !fs.lchown) { + fs.lchown = function (path, uid, gid, cb) { + if (cb) process.nextTick(cb) + } + fs.lchownSync = function () {} + } + + // on Windows, A/V software can lock the directory, causing this + // to fail with an EACCES or EPERM if the directory contains newly + // created files. Try again on failure, for up to 60 seconds. 
+ + // Set the timeout this long because some Windows Anti-Virus, such as Parity + // bit9, may lock files for up to a minute, causing npm package install + // failures. Also, take care to yield the scheduler. Windows scheduling gives + // CPU to a busy looping process, which can cause the program causing the lock + // contention to be starved of CPU by node, so the contention doesn't resolve. + if (platform === "win32") { + fs.rename = typeof fs.rename !== 'function' ? fs.rename + : (function (fs$rename) { + function rename (from, to, cb) { + var start = Date.now() + var backoff = 0; + fs$rename(from, to, function CB (er) { + if (er + && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY") + && Date.now() - start < 60000) { + setTimeout(function() { + fs.stat(to, function (stater, st) { + if (stater && stater.code === "ENOENT") + fs$rename(from, to, CB); + else + cb(er) + }) + }, backoff) + if (backoff < 100) + backoff += 10; + return; + } + if (cb) cb(er) + }) + } + if (Object.setPrototypeOf) Object.setPrototypeOf(rename, fs$rename) + return rename + })(fs.rename) + } + + // if read() returns EAGAIN, then just try it again. + fs.read = typeof fs.read !== 'function' ? fs.read + : (function (fs$read) { + function read (fd, buffer, offset, length, position, callback_) { + var callback + if (callback_ && typeof callback_ === 'function') { + var eagCounter = 0 + callback = function (er, _, __) { + if (er && er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + callback_.apply(this, arguments) + } + } + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + + // This ensures `util.promisify` works as it does for native `fs.read`. + if (Object.setPrototypeOf) Object.setPrototypeOf(read, fs$read) + return read + })(fs.read) + + fs.readSync = typeof fs.readSync !== 'function' ? fs.readSync + : (function (fs$readSync) { return function (fd, buffer, offset, length, position) { + var eagCounter = 0 + while (true) { + try { + return fs$readSync.call(fs, fd, buffer, offset, length, position) + } catch (er) { + if (er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + continue + } + throw er + } + } + }})(fs.readSync) + + function patchLchmod (fs) { + fs.lchmod = function (path, mode, callback) { + fs.open( path + , constants.O_WRONLY | constants.O_SYMLINK + , mode + , function (err, fd) { + if (err) { + if (callback) callback(err) + return + } + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + fs.fchmod(fd, mode, function (err) { + fs.close(fd, function(err2) { + if (callback) callback(err || err2) + }) + }) + }) + } + + fs.lchmodSync = function (path, mode) { + var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) + + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. 
+ var threw = true + var ret + try { + ret = fs.fchmodSync(fd, mode) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } + } + + function patchLutimes (fs) { + if (constants.hasOwnProperty("O_SYMLINK") && fs.futimes) { + fs.lutimes = function (path, at, mt, cb) { + fs.open(path, constants.O_SYMLINK, function (er, fd) { + if (er) { + if (cb) cb(er) + return + } + fs.futimes(fd, at, mt, function (er) { + fs.close(fd, function (er2) { + if (cb) cb(er || er2) + }) + }) + }) + } + + fs.lutimesSync = function (path, at, mt) { + var fd = fs.openSync(path, constants.O_SYMLINK) + var ret + var threw = true + try { + ret = fs.futimesSync(fd, at, mt) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } + + } else if (fs.futimes) { + fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } + fs.lutimesSync = function () {} + } + } + + function chmodFix (orig) { + if (!orig) return orig + return function (target, mode, cb) { + return orig.call(fs, target, mode, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } + } + + function chmodFixSync (orig) { + if (!orig) return orig + return function (target, mode) { + try { + return orig.call(fs, target, mode) + } catch (er) { + if (!chownErOk(er)) throw er + } + } + } + + + function chownFix (orig) { + if (!orig) return orig + return function (target, uid, gid, cb) { + return orig.call(fs, target, uid, gid, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } + } + + function chownFixSync (orig) { + if (!orig) return orig + return function (target, uid, gid) { + try { + return orig.call(fs, target, uid, gid) + } catch (er) { + if (!chownErOk(er)) throw er + } + } + } + + function statFix (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + function callback (er, stats) { + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + if (cb) cb.apply(this, arguments) + } + return options ? orig.call(fs, target, options, callback) + : orig.call(fs, target, callback) + } + } + + function statFixSync (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options) { + var stats = options ? orig.call(fs, target, options) + : orig.call(fs, target) + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + return stats; + } + } + + // ENOSYS means that the fs doesn't support the op. Just ignore + // that, because it doesn't matter. + // + // if there's no getuid, or if getuid() is something other + // than 0, and the error is EINVAL or EPERM, then just ignore + // it. + // + // This specific case is a silent failure in cp, install, tar, + // and most other unix tools that manage permissions. + // + // When running as root, or if other types of errors are + // encountered, then it's strict. 
+ function chownErOk (er) { + if (!er) + return true + + if (er.code === "ENOSYS") + return true + + var nonroot = !process.getuid || process.getuid() !== 0 + if (nonroot) { + if (er.code === "EINVAL" || er.code === "EPERM") + return true + } + + return false + } +} diff --git a/node_modules/hard-rejection/index.d.ts b/node_modules/hard-rejection/index.d.ts new file mode 100644 index 0000000..6f49890 --- /dev/null +++ b/node_modules/hard-rejection/index.d.ts @@ -0,0 +1,15 @@ +declare const hardRejection: { + /** + Make unhandled promise rejections fail hard right away instead of the default [silent fail](https://gist.github.com/benjamingr/0237932cee84712951a2). + + @param log - Custom logging function to print the rejected promise. Receives the error stack. Default: `console.error`. + */ + (log?: (stack?: string) => void): void; + + // TODO: Remove this for the next major release, refactor the whole definition to: + // declare function hardRejection(log?: (stack?: string) => void): void; + // export = hardRejection; + default: typeof hardRejection; +}; + +export = hardRejection; diff --git a/node_modules/hard-rejection/index.js b/node_modules/hard-rejection/index.js new file mode 100644 index 0000000..ba0205d --- /dev/null +++ b/node_modules/hard-rejection/index.js @@ -0,0 +1,25 @@ +'use strict'; +const util = require('util'); + +let installed = false; + +const hardRejection = (log = console.error) => { + if (installed) { + return; + } + + installed = true; + + process.on('unhandledRejection', error => { + if (!(error instanceof Error)) { + error = new Error(`Promise rejected with value: ${util.inspect(error)}`); + } + + log(error.stack); + process.exit(1); + }); +}; + +module.exports = hardRejection; +// TODO: Remove this for the next major release +module.exports.default = hardRejection; diff --git a/node_modules/hard-rejection/license b/node_modules/hard-rejection/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/hard-rejection/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
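
The `hard-rejection` handler in `index.js` above wraps non-`Error` rejection values in an `Error` (via `util.inspect`) so there is always a stack to print, then exits with code 1. A minimal sketch, not part of the diff, of that path:

```js
// Minimal sketch (not part of the diff): the non-Error rejection path.
const hardRejection = require('hard-rejection');

hardRejection(); // install the process-wide handler (the `installed` guard makes this idempotent)

// index.js wraps this value as new Error("Promise rejected with value: { code: 'EFAIL' }"),
// logs its stack via console.error, and calls process.exit(1).
Promise.reject({code: 'EFAIL'});
```
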
diff --git a/node_modules/hard-rejection/package.json b/node_modules/hard-rejection/package.json new file mode 100644 index 0000000..179b407 --- /dev/null +++ b/node_modules/hard-rejection/package.json @@ -0,0 +1,47 @@ +{ + "name": "hard-rejection", + "version": "2.1.0", + "description": "Make unhandled promise rejections fail hard right away instead of the default silent fail", + "license": "MIT", + "repository": "sindresorhus/hard-rejection", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=6" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts", + "register.js" + ], + "keywords": [ + "promise", + "promises", + "unhandled", + "uncaught", + "rejection", + "hard", + "fail", + "catch", + "throw", + "handler", + "exit", + "debug", + "debugging", + "verbose", + "immediate", + "immediately" + ], + "devDependencies": { + "ava": "^1.4.1", + "execa": "^1.0.0", + "tsd": "^0.7.1", + "xo": "^0.24.0" + } +} diff --git a/node_modules/hard-rejection/readme.md b/node_modules/hard-rejection/readme.md new file mode 100644 index 0000000..b66e475 --- /dev/null +++ b/node_modules/hard-rejection/readme.md @@ -0,0 +1,77 @@ +# hard-rejection [![Build Status](https://travis-ci.org/sindresorhus/hard-rejection.svg?branch=master)](https://travis-ci.org/sindresorhus/hard-rejection) + +> Make unhandled promise rejections fail hard right away instead of the default [silent fail](https://gist.github.com/benjamingr/0237932cee84712951a2) + +Promises fail silently if you don't attach a `.catch()` handler. + +This module exits the process with an error message right away when an unhandled rejection is encountered.
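+For example, a minimal sketch of the effect (the rejection here is deliberate):
+
+```js
+require('hard-rejection')();
+
+Promise.reject(new Error('boom'));
+// The stack is printed with `console.error` and the process exits with code 1.
+```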
+**Note: This might not be desirable, as unhandled rejections can be [handled at a future point in time](https://nodejs.org/api/process.html#process_event_unhandledrejection), although that is uncommon. You've been warned.**
+
+Intended for top-level long-running processes like servers, **but not for reusable modules.**
+For command-line apps and tests, see [`loud-rejection`](https://github.com/sindresorhus/loud-rejection).
+
+
+## Install
+
+```
+$ npm install hard-rejection
+```
+
+
+## Usage
+
+```js
+const hardRejection = require('hard-rejection');
+const promiseFunction = require('some-promise-fn');
+
+// Install the handler
+hardRejection();
+
+promiseFunction();
+```
+
+Without this module it's more verbose and you might even miss some rejections that will fail silently:
+
+```js
+const promiseFunction = require('some-promise-fn');
+
+function error(error) {
+	console.error(error.stack);
+	process.exit(1);
+}
+
+promiseFunction().catch(error);
+```
+
+### Register script
+
+As an alternative to the above, you can simply require `hard-rejection/register` and the handler will be automagically installed for you.
+
+This is handy for ES2015 imports:
+
+```js
+import 'hard-rejection/register';
+```
+
+
+## API
+
+### hardRejection([log])
+
+#### log
+
+Type: `Function`
+Default: `console.error` + +Custom logging function to print the rejected promise. Receives the error stack. + + +## Related + +- [loud-rejection](https://github.com/sindresorhus/loud-rejection) - Make unhandled promise rejections fail loudly instead of the default silent fail +- [More…](https://github.com/sindresorhus/promise-fun) + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/hard-rejection/register.js b/node_modules/hard-rejection/register.js new file mode 100644 index 0000000..29f49d7 --- /dev/null +++ b/node_modules/hard-rejection/register.js @@ -0,0 +1,2 @@ +'use strict'; +require('.')(); diff --git a/node_modules/has-flag/index.js b/node_modules/has-flag/index.js new file mode 100644 index 0000000..5139728 --- /dev/null +++ b/node_modules/has-flag/index.js @@ -0,0 +1,8 @@ +'use strict'; +module.exports = (flag, argv) => { + argv = argv || process.argv; + const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--'); + const pos = argv.indexOf(prefix + flag); + const terminatorPos = argv.indexOf('--'); + return pos !== -1 && (terminatorPos === -1 ? true : pos < terminatorPos); +}; diff --git a/node_modules/has-flag/license b/node_modules/has-flag/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/has-flag/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/has-flag/package.json b/node_modules/has-flag/package.json new file mode 100644 index 0000000..e1eb17a --- /dev/null +++ b/node_modules/has-flag/package.json @@ -0,0 +1,44 @@ +{ + "name": "has-flag", + "version": "3.0.0", + "description": "Check if argv has a specific flag", + "license": "MIT", + "repository": "sindresorhus/has-flag", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "has", + "check", + "detect", + "contains", + "find", + "flag", + "cli", + "command-line", + "argv", + "process", + "arg", + "args", + "argument", + "arguments", + "getopt", + "minimist", + "optimist" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + } +} diff --git a/node_modules/has-flag/readme.md b/node_modules/has-flag/readme.md new file mode 100644 index 0000000..677893c --- /dev/null +++ b/node_modules/has-flag/readme.md @@ -0,0 +1,70 @@ +# has-flag [![Build Status](https://travis-ci.org/sindresorhus/has-flag.svg?branch=master)](https://travis-ci.org/sindresorhus/has-flag) + +> Check if [`argv`](https://nodejs.org/docs/latest/api/process.html#process_process_argv) has a specific flag + +Correctly stops looking after an `--` argument terminator. + + +## Install + +``` +$ npm install has-flag +``` + + +## Usage + +```js +// foo.js +const hasFlag = require('has-flag'); + +hasFlag('unicorn'); +//=> true + +hasFlag('--unicorn'); +//=> true + +hasFlag('f'); +//=> true + +hasFlag('-f'); +//=> true + +hasFlag('foo=bar'); +//=> true + +hasFlag('foo'); +//=> false + +hasFlag('rainbow'); +//=> false +``` + +``` +$ node foo.js -f --unicorn --foo=bar -- --rainbow +``` + + +## API + +### hasFlag(flag, [argv]) + +Returns a boolean for whether the flag exists. + +#### flag + +Type: `string` + +CLI flag to look for. The `--` prefix is optional. + +#### argv + +Type: `string[]`
+Default: `process.argv` + +CLI arguments. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/has-unicode/LICENSE b/node_modules/has-unicode/LICENSE new file mode 100644 index 0000000..d42e25e --- /dev/null +++ b/node_modules/has-unicode/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2014, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/has-unicode/README.md b/node_modules/has-unicode/README.md new file mode 100644 index 0000000..5a03e59 --- /dev/null +++ b/node_modules/has-unicode/README.md @@ -0,0 +1,43 @@ +has-unicode +=========== + +Try to guess if your terminal supports unicode + +```javascript +var hasUnicode = require("has-unicode") + +if (hasUnicode()) { + // the terminal probably has unicode support +} +``` +```javascript +var hasUnicode = require("has-unicode").tryHarder +hasUnicode(function(unicodeSupported) { + if (unicodeSupported) { + // the terminal probably has unicode support + } +}) +``` + +## Detecting Unicode + +What we actually detect is UTF-8 support, as that's what Node itself supports. +If you have a UTF-16 locale then you won't be detected as unicode capable. + +### Windows + +Since at least Windows 7, `cmd` and `powershell` have been unicode capable, +but unfortunately even then it's not guaranteed. In many localizations it +still uses legacy code pages and there's no facility short of running +programs or linking C++ that will let us detect this. As such, we +report any Windows installation as NOT unicode capable, and recommend +that you encourage your users to override this via config. + +### Unix Like Operating Systems + +We look at the environment variables `LC_ALL`, `LC_CTYPE`, and `LANG` in +that order. For `LC_ALL` and `LANG`, it looks for `.UTF-8` in the value. +For `LC_CTYPE` it looks to see if the value is `UTF-8`. This is sufficient +for most POSIX systems. While locale data can be put in `/etc/locale.conf` +as well, AFAIK it's always copied into the environment. + diff --git a/node_modules/has-unicode/index.js b/node_modules/has-unicode/index.js new file mode 100644 index 0000000..9b0fe44 --- /dev/null +++ b/node_modules/has-unicode/index.js @@ -0,0 +1,16 @@ +"use strict" +var os = require("os") + +var hasUnicode = module.exports = function () { + // Recent Win32 platforms (>XP) CAN support unicode in the console but + // don't have to, and in non-english locales often use traditional local + // code pages. There's no way, short of windows system calls or execing + // the chcp command line program to figure this out. As such, we default + // this to false and encourage your users to override it via config if + // appropriate. 
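+  //
+  // For example (illustrative values): LANG=en_US.UTF-8 is reported as
+  // unicode capable, while LANG=C or LANG=en_US.ISO8859-1 is not.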
+ if (os.type() == "Windows_NT") { return false } + + var isUTF8 = /UTF-?8$/i + var ctype = process.env.LC_ALL || process.env.LC_CTYPE || process.env.LANG + return isUTF8.test(ctype) +} diff --git a/node_modules/has-unicode/package.json b/node_modules/has-unicode/package.json new file mode 100644 index 0000000..ebe9d76 --- /dev/null +++ b/node_modules/has-unicode/package.json @@ -0,0 +1,30 @@ +{ + "name": "has-unicode", + "version": "2.0.1", + "description": "Try to guess if your terminal supports unicode", + "main": "index.js", + "scripts": { + "test": "tap test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/has-unicode" + }, + "keywords": [ + "unicode", + "terminal" + ], + "files": [ + "index.js" + ], + "author": "Rebecca Turner ", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/has-unicode/issues" + }, + "homepage": "https://github.com/iarna/has-unicode", + "devDependencies": { + "require-inject": "^1.3.0", + "tap": "^2.3.1" + } +} diff --git a/node_modules/has/LICENSE-MIT b/node_modules/has/LICENSE-MIT new file mode 100644 index 0000000..ae7014d --- /dev/null +++ b/node_modules/has/LICENSE-MIT @@ -0,0 +1,22 @@ +Copyright (c) 2013 Thiago de Arruda + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/has/README.md b/node_modules/has/README.md new file mode 100644 index 0000000..635e3a4 --- /dev/null +++ b/node_modules/has/README.md @@ -0,0 +1,18 @@ +# has + +> Object.prototype.hasOwnProperty.call shortcut + +## Installation + +```sh +npm install --save has +``` + +## Usage + +```js +var has = require('has'); + +has({}, 'hasOwnProperty'); // false +has(Object.prototype, 'hasOwnProperty'); // true +``` diff --git a/node_modules/has/package.json b/node_modules/has/package.json new file mode 100644 index 0000000..7c4592f --- /dev/null +++ b/node_modules/has/package.json @@ -0,0 +1,48 @@ +{ + "name": "has", + "description": "Object.prototype.hasOwnProperty.call shortcut", + "version": "1.0.3", + "homepage": "https://github.com/tarruda/has", + "author": { + "name": "Thiago de Arruda", + "email": "tpadilha84@gmail.com" + }, + "contributors": [ + { + "name": "Jordan Harband", + "email": "ljharb@gmail.com", + "url": "http://ljharb.codes" + } + ], + "repository": { + "type": "git", + "url": "git://github.com/tarruda/has.git" + }, + "bugs": { + "url": "https://github.com/tarruda/has/issues" + }, + "license": "MIT", + "licenses": [ + { + "type": "MIT", + "url": "https://github.com/tarruda/has/blob/master/LICENSE-MIT" + } + ], + "main": "./src", + "dependencies": { + "function-bind": "^1.1.1" + }, + "devDependencies": { + "@ljharb/eslint-config": "^12.2.1", + "eslint": "^4.19.1", + "tape": "^4.9.0" + }, + "engines": { + "node": ">= 0.4.0" + }, + "scripts": { + "lint": "eslint .", + "pretest": "npm run lint", + "test": "tape test" + } +} diff --git a/node_modules/has/src/index.js b/node_modules/has/src/index.js new file mode 100644 index 0000000..dd92dd9 --- /dev/null +++ b/node_modules/has/src/index.js @@ -0,0 +1,5 @@ +'use strict'; + +var bind = require('function-bind'); + +module.exports = bind.call(Function.call, Object.prototype.hasOwnProperty); diff --git a/node_modules/has/test/index.js b/node_modules/has/test/index.js new file mode 100644 index 0000000..43d480b --- /dev/null +++ b/node_modules/has/test/index.js @@ -0,0 +1,10 @@ +'use strict'; + +var test = require('tape'); +var has = require('../'); + +test('has', function (t) { + t.equal(has({}, 'hasOwnProperty'), false, 'object literal does not have own property "hasOwnProperty"'); + t.equal(has(Object.prototype, 'hasOwnProperty'), true, 'Object.prototype has own property "hasOwnProperty"'); + t.end(); +}); diff --git a/node_modules/hosted-git-info/LICENSE b/node_modules/hosted-git-info/LICENSE new file mode 100644 index 0000000..4505576 --- /dev/null +++ b/node_modules/hosted-git-info/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/hosted-git-info/README.md b/node_modules/hosted-git-info/README.md
new file mode 100644
index 0000000..8740406
--- /dev/null
+++ b/node_modules/hosted-git-info/README.md
@@ -0,0 +1,133 @@
+# hosted-git-info
+
+This will let you identify and transform various git host URLs between
+protocols. It can also tell you the URL of the raw path for a particular
+file, for direct access without git.
+
+## Example
+
+```javascript
+var hostedGitInfo = require("hosted-git-info")
+var info = hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git", opts)
+/* info looks like:
+{
+  type: "github",
+  domain: "github.com",
+  user: "npm",
+  project: "hosted-git-info"
+}
+*/
+```
+
+If the URL can't be matched with a git host, `null` will be returned. We
+can match git, ssh, and https URLs. Additionally, we can match ssh connect
+strings (`git@github.com:npm/hosted-git-info`) and shortcuts (eg,
+`github:npm/hosted-git-info`). GitHub, specifically, is also detected in a
+third, unprefixed form: `npm/hosted-git-info`.
+
+If it does match, the returned object has properties of:
+
+* info.type -- The short name of the service
+* info.domain -- The domain for git protocol use
+* info.user -- The name of the user/org on the git host
+* info.project -- The name of the project on the git host
+
+## Version Contract
+
+The major version will be bumped any time…
+
+* The constructor stops accepting URLs that it previously accepted.
+* A method is removed.
+* A method can no longer accept the number and type of arguments it previously accepted.
+* A method can return a different type than it currently returns.
+
+Implications:
+
+* I do not consider the specific format of the urls returned from, say
+  `.https()` to be a part of the contract. The contract is that it will
+  return a string that can be used to fetch the repo via HTTPS. But what
+  that string looks like, specifically, can change.
+* Dropping support for a hosted git provider would constitute a breaking
+  change.
+
+## Usage
+
+### var info = hostedGitInfo.fromUrl(gitSpecifier[, options])
+
+* *gitSpecifier* is a URL of a git repository or an SCP-style specifier of one.
+* *options* is an optional object. It can have the following properties:
+  * *noCommittish* — If true then committishes won't be included in generated URLs.
+  * *noGitPlus* — If true then `git+` won't be prefixed on URLs.
+
+## Methods
+
+All of the methods take the same options as the `fromUrl` factory. Options
+provided to a method override those provided to the constructor.
+
+* info.file(path, opts)
+
+Given the path of a file relative to the repository, returns a URL for
+directly fetching it from the githost. If no committish was set then
+`master` will be used as the default.
+
+For example `hostedGitInfo.fromUrl("git@github.com:npm/hosted-git-info.git#v1.0.0").file("package.json")`
+would return `https://raw.githubusercontent.com/npm/hosted-git-info/v1.0.0/package.json`
+
+* info.shortcut(opts)
+
+eg, `github:npm/hosted-git-info`
+
+* info.browse(path, fragment, opts)
+
+eg, `https://github.com/npm/hosted-git-info/tree/v1.2.0`,
+`https://github.com/npm/hosted-git-info/tree/v1.2.0/package.json`,
+`https://github.com/npm/hosted-git-info/tree/v1.2.0/README.md#supported-hosts`
+
+* info.bugs(opts)
+
+eg, `https://github.com/npm/hosted-git-info/issues`
+
+* info.docs(opts)
+
+eg, `https://github.com/npm/hosted-git-info/tree/v1.2.0#readme`
+
+* info.https(opts)
+
+eg, `git+https://github.com/npm/hosted-git-info.git`
+
+* info.sshurl(opts)
+
+eg, `git+ssh://git@github.com/npm/hosted-git-info.git`
+
+* info.ssh(opts)
+
+eg, `git@github.com:npm/hosted-git-info.git`
+
+* info.path(opts)
+
+eg, `npm/hosted-git-info`
+
+* info.tarball(opts)
+
+eg, `https://github.com/npm/hosted-git-info/archive/v1.2.0.tar.gz`
+
+* info.getDefaultRepresentation()
+
+Returns the default output type. The default output type is based on the
+string you passed in to be parsed.
+
+* info.toString(opts)
+
+Uses `getDefaultRepresentation()` to call one of the other methods to get a URL for
+this resource. As such `hostedGitInfo.fromUrl(url).toString()` will give
+you a normalized version of the URL that still uses the same protocol.
+
+Shortcuts will still be returned as shortcuts, but the special case github
+form of `org/project` will be normalized to `github:org/project`.
+
+SSH connect strings will be normalized into `git+ssh` URLs.
+
+## Supported hosts
+
+Currently this supports GitHub (including gists), Bitbucket, GitLab, and
+SourceHut. Pull requests for additional hosts are welcome.
diff --git a/node_modules/hosted-git-info/git-host-info.js b/node_modules/hosted-git-info/git-host-info.js
new file mode 100644
index 0000000..ba55248
--- /dev/null
+++ b/node_modules/hosted-git-info/git-host-info.js
@@ -0,0 +1,184 @@
+'use strict'
+const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
+const maybeEncode = (arg) => arg ?
encodeURIComponent(arg) : '' + +const defaults = { + sshtemplate: ({ domain, user, project, committish }) => `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, user, project, committish }) => `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + browsetemplate: ({ domain, user, project, committish, treepath }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`, + browsefiletemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'master')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + docstemplate: ({ domain, user, project, treepath, committish }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, + httpstemplate: ({ auth, domain, user, project, committish }) => `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ domain, user, project, committish, path }) => `https://${domain}/${user}/${project}/raw/${maybeEncode(committish) || 'master'}/${path}`, + shortcuttemplate: ({ type, user, project, committish }) => `${type}:${user}/${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ user, project, committish }) => `${user}/${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, user, project }) => `https://${domain}/${user}/${project}/issues`, + hashformat: formatHashFragment +} + +const gitHosts = {} +gitHosts.github = Object.assign({}, defaults, { + // First two are insecure and generally shouldn't be used any more, but + // they are still supported. + protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'github.com', + treepath: 'tree', + filetemplate: ({ auth, user, project, committish, path }) => `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish) || 'master'}/${path}`, + gittemplate: ({ auth, domain, user, project, committish }) => `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish) || 'master'}`, + extract: (url) => { + let [, user, project, type, committish] = url.pathname.split('/', 5) + if (type && type !== 'tree') { + return + } + + if (!type) { + committish = url.hash.slice(1) + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish } + } +}) + +gitHosts.bitbucket = Object.assign({}, defaults, { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'bitbucket.org', + treepath: 'src', + tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/get/${maybeEncode(committish) || 'master'}.tar.gz`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (['get'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + } +}) + +gitHosts.gitlab = Object.assign({}, defaults, { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gitlab.com', + treepath: 'tree', + httpstemplate: ({ 
auth, domain, user, project, committish }) => `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish) || 'master'}`, + extract: (url) => { + const path = url.pathname.slice(1) + if (path.includes('/-/') || path.includes('/archive.tar.gz')) { + return + } + + const segments = path.split('/') + let project = segments.pop() + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + const user = segments.join('/') + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + } +}) + +gitHosts.gist = Object.assign({}, defaults, { + protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gist.github.com', + sshtemplate: ({ domain, project, committish }) => `git@${domain}:${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, project, committish }) => `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`, + browsetemplate: ({ domain, project, committish }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + browsefiletemplate: ({ domain, project, committish, path, hashformat }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, + docstemplate: ({ domain, project, committish }) => `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + httpstemplate: ({ domain, project, committish }) => `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ user, project, committish, path }) => `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`, + shortcuttemplate: ({ type, project, committish }) => `${type}:${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ project, committish }) => `${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, project }) => `https://${domain}/${project}`, + gittemplate: ({ domain, project, committish }) => `git://${domain}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ project, committish }) => `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish) || 'master'}`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (aux === 'raw') { + return + } + + if (!project) { + if (!user) { + return + } + + project = user + user = null + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + return { user, project, committish: url.hash.slice(1) } + }, + hashformat: function (fragment) { + return fragment && 'file-' + formatHashFragment(fragment) + } +}) + +gitHosts.sourcehut = Object.assign({}, defaults, { + protocols: ['git+ssh:', 'https:'], + domain: 'git.sr.ht', + treepath: 'tree', + browsefiletemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'main')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + filetemplate: ({ domain, user, project, committish, path }) => `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'main'}/${path}`, + httpstemplate: ({ domain, user, project, committish }) => `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => 
`https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'main'}.tar.gz`, + bugstemplate: ({ domain, user, project }) => `https://todo.sr.ht/${user}/${project}`, + docstemplate: ({ domain, user, project, treepath, committish }) => `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + + // tarball url + if (['archive'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + } +}) + +const names = Object.keys(gitHosts) +gitHosts.byShortcut = {} +gitHosts.byDomain = {} +for (const name of names) { + gitHosts.byShortcut[`${name}:`] = name + gitHosts.byDomain[gitHosts[name].domain] = name +} + +function formatHashFragment (fragment) { + return fragment.toLowerCase().replace(/^\W+|\/|\W+$/g, '').replace(/\W+/g, '-') +} + +module.exports = gitHosts diff --git a/node_modules/hosted-git-info/git-host.js b/node_modules/hosted-git-info/git-host.js new file mode 100644 index 0000000..8a975e9 --- /dev/null +++ b/node_modules/hosted-git-info/git-host.js @@ -0,0 +1,110 @@ +'use strict' +const gitHosts = require('./git-host-info.js') + +class GitHost { + constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) { + Object.assign(this, gitHosts[type]) + this.type = type + this.user = user + this.auth = auth + this.project = project + this.committish = committish + this.default = defaultRepresentation + this.opts = opts + } + + hash () { + return this.committish ? `#${this.committish}` : '' + } + + ssh (opts) { + return this._fill(this.sshtemplate, opts) + } + + _fill (template, opts) { + if (typeof template === 'function') { + const options = { ...this, ...this.opts, ...opts } + + // the path should always be set so we don't end up with 'undefined' in urls + if (!options.path) { + options.path = '' + } + + // template functions will insert the leading slash themselves + if (options.path.startsWith('/')) { + options.path = options.path.slice(1) + } + + if (options.noCommittish) { + options.committish = null + } + + const result = template(options) + return options.noGitPlus && result.startsWith('git+') ? 
result.slice(4) : result + } + + return null + } + + sshurl (opts) { + return this._fill(this.sshurltemplate, opts) + } + + browse (path, fragment, opts) { + // not a string, treat path as opts + if (typeof path !== 'string') { + return this._fill(this.browsetemplate, path) + } + + if (typeof fragment !== 'string') { + opts = fragment + fragment = null + } + return this._fill(this.browsefiletemplate, { ...opts, fragment, path }) + } + + docs (opts) { + return this._fill(this.docstemplate, opts) + } + + bugs (opts) { + return this._fill(this.bugstemplate, opts) + } + + https (opts) { + return this._fill(this.httpstemplate, opts) + } + + git (opts) { + return this._fill(this.gittemplate, opts) + } + + shortcut (opts) { + return this._fill(this.shortcuttemplate, opts) + } + + path (opts) { + return this._fill(this.pathtemplate, opts) + } + + tarball (opts) { + return this._fill(this.tarballtemplate, { ...opts, noCommittish: false }) + } + + file (path, opts) { + return this._fill(this.filetemplate, { ...opts, path }) + } + + getDefaultRepresentation () { + return this.default + } + + toString (opts) { + if (this.default && typeof this[this.default] === 'function') { + return this[this.default](opts) + } + + return this.sshurl(opts) + } +} +module.exports = GitHost diff --git a/node_modules/hosted-git-info/index.js b/node_modules/hosted-git-info/index.js new file mode 100644 index 0000000..f35c570 --- /dev/null +++ b/node_modules/hosted-git-info/index.js @@ -0,0 +1,237 @@ +'use strict' +const url = require('url') +const gitHosts = require('./git-host-info.js') +const GitHost = module.exports = require('./git-host.js') +const LRU = require('lru-cache') +const cache = new LRU({ max: 1000 }) + +const protocolToRepresentationMap = { + 'git+ssh:': 'sshurl', + 'git+https:': 'https', + 'ssh:': 'sshurl', + 'git:': 'git' +} + +function protocolToRepresentation (protocol) { + return protocolToRepresentationMap[protocol] || protocol.slice(0, -1) +} + +const authProtocols = { + 'git:': true, + 'https:': true, + 'git+https:': true, + 'http:': true, + 'git+http:': true +} + +const knownProtocols = Object.keys(gitHosts.byShortcut).concat(['http:', 'https:', 'git:', 'git+ssh:', 'git+https:', 'ssh:']) + +module.exports.fromUrl = function (giturl, opts) { + if (typeof giturl !== 'string') { + return + } + + const key = giturl + JSON.stringify(opts || {}) + + if (!cache.has(key)) { + cache.set(key, fromUrl(giturl, opts)) + } + + return cache.get(key) +} + +function fromUrl (giturl, opts) { + if (!giturl) { + return + } + + const url = isGitHubShorthand(giturl) ? 'github:' + giturl : correctProtocol(giturl) + const parsed = parseGitUrl(url) + if (!parsed) { + return parsed + } + + const gitHostShortcut = gitHosts.byShortcut[parsed.protocol] + const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.') ? parsed.hostname.slice(4) : parsed.hostname] + const gitHostName = gitHostShortcut || gitHostDomain + if (!gitHostName) { + return + } + + const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain] + let auth = null + if (authProtocols[parsed.protocol] && (parsed.username || parsed.password)) { + auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}` + } + + let committish = null + let user = null + let project = null + let defaultRepresentation = null + + try { + if (gitHostShortcut) { + let pathname = parsed.pathname.startsWith('/') ? 
parsed.pathname.slice(1) : parsed.pathname + const firstAt = pathname.indexOf('@') + // we ignore auth for shortcuts, so just trim it out + if (firstAt > -1) { + pathname = pathname.slice(firstAt + 1) + } + + const lastSlash = pathname.lastIndexOf('/') + if (lastSlash > -1) { + user = decodeURIComponent(pathname.slice(0, lastSlash)) + // we want nulls only, never empty strings + if (!user) { + user = null + } + project = decodeURIComponent(pathname.slice(lastSlash + 1)) + } else { + project = decodeURIComponent(pathname) + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (parsed.hash) { + committish = decodeURIComponent(parsed.hash.slice(1)) + } + + defaultRepresentation = 'shortcut' + } else { + if (!gitHostInfo.protocols.includes(parsed.protocol)) { + return + } + + const segments = gitHostInfo.extract(parsed) + if (!segments) { + return + } + + user = segments.user && decodeURIComponent(segments.user) + project = decodeURIComponent(segments.project) + committish = decodeURIComponent(segments.committish) + defaultRepresentation = protocolToRepresentation(parsed.protocol) + } + } catch (err) { + /* istanbul ignore else */ + if (err instanceof URIError) { + return + } else { + throw err + } + } + + return new GitHost(gitHostName, user, auth, project, committish, defaultRepresentation, opts) +} + +// accepts input like git:github.com:user/repo and inserts the // after the first : +const correctProtocol = (arg) => { + const firstColon = arg.indexOf(':') + const proto = arg.slice(0, firstColon + 1) + if (knownProtocols.includes(proto)) { + return arg + } + + const firstAt = arg.indexOf('@') + if (firstAt > -1) { + if (firstAt > firstColon) { + return `git+ssh://${arg}` + } else { + return arg + } + } + + const doubleSlash = arg.indexOf('//') + if (doubleSlash === firstColon + 1) { + return arg + } + + return arg.slice(0, firstColon + 1) + '//' + arg.slice(firstColon + 1) +} + +// look for github shorthand inputs, such as npm/cli +const isGitHubShorthand = (arg) => { + // it cannot contain whitespace before the first # + // it cannot start with a / because that's probably an absolute file path + // but it must include a slash since repos are username/repository + // it cannot start with a . because that's probably a relative file path + // it cannot start with an @ because that's a scoped package if it passes the other tests + // it cannot contain a : before a # because that tells us that there's a protocol + // a second / may not exist before a # + const firstHash = arg.indexOf('#') + const firstSlash = arg.indexOf('/') + const secondSlash = arg.indexOf('/', firstSlash + 1) + const firstColon = arg.indexOf(':') + const firstSpace = /\s/.exec(arg) + const firstAt = arg.indexOf('@') + + const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash) + const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash) + const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash) + const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash) + const hasSlash = firstSlash > 0 + // if a # is found, what we really want to know is that the character immediately before # is not a / + const doesNotEndWithSlash = firstHash > -1 ? 
arg[firstHash - 1] !== '/' : !arg.endsWith('/') + const doesNotStartWithDot = !arg.startsWith('.') + + return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash && doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash && secondSlashOnlyAfterHash +} + +// attempt to correct an scp style url so that it will parse with `new URL()` +const correctUrl = (giturl) => { + const firstAt = giturl.indexOf('@') + const lastHash = giturl.lastIndexOf('#') + let firstColon = giturl.indexOf(':') + let lastColon = giturl.lastIndexOf(':', lastHash > -1 ? lastHash : Infinity) + + let corrected + if (lastColon > firstAt) { + // the last : comes after the first @ (or there is no @) + // like it would in: + // proto://hostname.com:user/repo + // username@hostname.com:user/repo + // :password@hostname.com:user/repo + // username:password@hostname.com:user/repo + // proto://username@hostname.com:user/repo + // proto://:password@hostname.com:user/repo + // proto://username:password@hostname.com:user/repo + // then we replace the last : with a / to create a valid path + corrected = giturl.slice(0, lastColon) + '/' + giturl.slice(lastColon + 1) + // // and we find our new : positions + firstColon = corrected.indexOf(':') + lastColon = corrected.lastIndexOf(':') + } + + if (firstColon === -1 && giturl.indexOf('//') === -1) { + // we have no : at all + // as it would be in: + // username@hostname.com/user/repo + // then we prepend a protocol + corrected = `git+ssh://${corrected}` + } + + return corrected +} + +// try to parse the url as its given to us, if that throws +// then we try to clean the url and parse that result instead +// THIS FUNCTION SHOULD NEVER THROW +const parseGitUrl = (giturl) => { + let result + try { + result = new url.URL(giturl) + } catch (err) {} + + if (result) { + return result + } + + const correctedUrl = correctUrl(giturl) + try { + result = new url.URL(correctedUrl) + } catch (err) {} + + return result +} diff --git a/node_modules/hosted-git-info/package.json b/node_modules/hosted-git-info/package.json new file mode 100644 index 0000000..b145e62 --- /dev/null +++ b/node_modules/hosted-git-info/package.json @@ -0,0 +1,51 @@ +{ + "name": "hosted-git-info", + "version": "4.1.0", + "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", + "main": "index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/hosted-git-info.git" + }, + "keywords": [ + "git", + "github", + "bitbucket", + "gitlab" + ], + "author": "Rebecca Turner (http://re-becca.org)", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/hosted-git-info/issues" + }, + "homepage": "https://github.com/npm/hosted-git-info", + "scripts": { + "posttest": "standard", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preversion": "npm test", + "snap": "tap", + "test": "tap", + "test:coverage": "tap --coverage-report=html" + }, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "devDependencies": { + "standard": "^16.0.3", + "standard-version": "^9.1.0", + "tap": "^15.1.6" + }, + "files": [ + "index.js", + "git-host.js", + "git-host-info.js" + ], + "engines": { + "node": ">=10" + }, + "tap": { + "color": 1, + "coverage": true + } +} diff --git a/node_modules/http-cache-semantics/LICENSE b/node_modules/http-cache-semantics/LICENSE new file mode 100644 index 0000000..493d2ea --- /dev/null +++ b/node_modules/http-cache-semantics/LICENSE @@ -0,0 +1,9 @@ +Copyright 2016-2018 Kornel Lesiński + +Redistribution and 
use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/node_modules/http-cache-semantics/README.md b/node_modules/http-cache-semantics/README.md
new file mode 100644
index 0000000..685aa55
--- /dev/null
+++ b/node_modules/http-cache-semantics/README.md
@@ -0,0 +1,203 @@
+# Can I cache this? [![Build Status](https://travis-ci.org/kornelski/http-cache-semantics.svg?branch=master)](https://travis-ci.org/kornelski/http-cache-semantics)
+
+`CachePolicy` tells when responses can be reused from a cache, taking into account [HTTP RFC 7234](http://httpwg.org/specs/rfc7234.html) rules for user agents and shared caches.
+It also implements [RFC 5861](https://tools.ietf.org/html/rfc5861), adding support for `stale-if-error` and `stale-while-revalidate`.
+It's aware of many tricky details such as the `Vary` header, proxy revalidation, and authenticated responses.
+
+## Usage
+
+Cacheability of an HTTP response depends on how it was requested, so both `request` and `response` are required to create the policy.
+
+```js
+const policy = new CachePolicy(request, response, options);
+
+if (!policy.storable()) {
+    // throw the response away, it's not usable at all
+    return;
+}
+
+// Cache the data AND the policy object in your cache
+// (this is pseudocode, roll your own cache (lru-cache package works))
+letsPretendThisIsSomeCache.set(
+    request.url,
+    { policy, response },
+    policy.timeToLive()
+);
+```
+
+```js
+// And later, when you receive a new request:
+const { policy, response } = letsPretendThisIsSomeCache.get(newRequest.url);
+
+// It's not enough that it exists in the cache, it has to match the new request, too:
+if (policy && policy.satisfiesWithoutRevalidation(newRequest)) {
+    // OK, the previous response can be used to respond to the `newRequest`.
+    // Response headers have to be updated, e.g. to add Age and remove uncacheable headers.
+    response.headers = policy.responseHeaders();
+    return response;
+}
+```
+
+It may be surprising, but it's not enough for an HTTP response to be [fresh](#yo-fresh) to satisfy a request. It may need to match request headers specified in `Vary`. Even a matching fresh response may still not be usable if the new request restricted cacheability, etc.
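+As an illustrative sketch of the `Vary` case (header values invented for the example), a fresh response stored for one `accept-encoding` will not satisfy a request that sends a different one:
+
+```js
+const policy = new CachePolicy(
+    { url: '/', method: 'GET', headers: { 'accept-encoding': 'gzip' } },
+    { status: 200, headers: { 'cache-control': 'max-age=300', vary: 'accept-encoding' } }
+);
+
+policy.satisfiesWithoutRevalidation({
+    url: '/',
+    method: 'GET',
+    headers: { 'accept-encoding': 'br' },
+});
+//=> false, because the headers nominated by `Vary` do not match
+```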
+ +The key method is `satisfiesWithoutRevalidation(newRequest)`, which checks whether the `newRequest` is compatible with the original request and whether all caching conditions are met. + +### Constructor options + +Request and response must have a `headers` property with all header names in lower case. `url`, `status` and `method` are optional (defaults are any URL, status `200`, and `GET` method). + +```js +const request = { + url: '/', + method: 'GET', + headers: { + accept: '*/*', + }, +}; + +const response = { + status: 200, + headers: { + 'cache-control': 'public, max-age=7234', + }, +}; + +const options = { + shared: true, + cacheHeuristic: 0.1, + immutableMinTimeToLive: 24 * 3600 * 1000, // 24h + ignoreCargoCult: false, +}; +``` + +If `options.shared` is `true` (default), then the response is evaluated from a perspective of a shared cache (i.e. `private` is not cacheable and `s-maxage` is respected). If `options.shared` is `false`, then the response is evaluated from a perspective of a single-user cache (i.e. `private` is cacheable and `s-maxage` is ignored). `shared: true` is recommended for HTTP clients. + +`options.cacheHeuristic` is a fraction of response's age that is used as a fallback cache duration. The default is 0.1 (10%), e.g. if a file hasn't been modified for 100 days, it'll be cached for 100\*0.1 = 10 days. + +`options.immutableMinTimeToLive` is a number of milliseconds to assume as the default time to cache responses with `Cache-Control: immutable`. Note that [per RFC](http://httpwg.org/http-extensions/immutable.html) these can become stale, so `max-age` still overrides the default. + +If `options.ignoreCargoCult` is true, common anti-cache directives will be completely ignored if the non-standard `pre-check` and `post-check` directives are present. These two useless directives are most commonly found in bad StackOverflow answers and PHP's "session limiter" defaults. + +### `storable()` + +Returns `true` if the response can be stored in a cache. If it's `false` then you MUST NOT store either the request or the response. + +### `satisfiesWithoutRevalidation(newRequest)` + +This is the most important method. Use this method to check whether the cached response is still fresh in the context of the new request. + +If it returns `true`, then the given `request` matches the original response this cache policy has been created with, and the response can be reused without contacting the server. Note that the old response can't be returned without being updated, see `responseHeaders()`. + +If it returns `false`, then the response may not be matching at all (e.g. it's for a different URL or method), or may require to be refreshed first (see `revalidationHeaders()`). + +### `responseHeaders()` + +Returns updated, filtered set of response headers to return to clients receiving the cached response. This function is necessary, because proxies MUST always remove hop-by-hop headers (such as `TE` and `Connection`) and update response's `Age` to avoid doubling cache time. + +```js +cachedResponse.headers = cachePolicy.responseHeaders(cachedResponse); +``` + +### `timeToLive()` + +Returns approximate time in _milliseconds_ until the response becomes stale (i.e. not fresh). + +After that time (when `timeToLive() <= 0`) the response might not be usable without revalidation. However, there are exceptions, e.g. a client can explicitly allow stale responses, so always check with `satisfiesWithoutRevalidation()`. 
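+A minimal sketch of acting on this value (reusing the hypothetical cache from the usage example above):
+
+```js
+const entry = letsPretendThisIsSomeCache.get(newRequest.url);
+
+if (entry && entry.policy.timeToLive() <= 0) {
+    // Stale: don't serve it as-is; ask the origin to revalidate it instead
+    // (see `revalidationHeaders()` below).
+    newRequest.headers = entry.policy.revalidationHeaders(newRequest);
+}
+```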
+`stale-if-error` and `stale-while-revalidate` extend the time to live of the cache, which can still be used while stale.
+
+### `toObject()`/`fromObject(json)`
+
+Chances are you'll want to store the `CachePolicy` object along with the cached response. `obj = policy.toObject()` gives a plain JSON-serializable object. `policy = CachePolicy.fromObject(obj)` creates an instance from it.
+
+### Refreshing stale cache (revalidation)
+
+When a cached response has expired, it can be made fresh again by making a request to the origin server. The server may respond with status 304 (Not Modified) without sending the response body again, saving bandwidth.
+
+The following methods help perform the update efficiently and correctly.
+
+#### `revalidationHeaders(newRequest)`
+
+Returns updated, filtered set of request headers to send to the origin server to check if the cached response can be reused. These headers allow the origin server to return status 304 indicating the response is still fresh. All headers unrelated to caching are passed through as-is.
+
+Use this method when updating cache from the origin server.
+
+```js
+updateRequest.headers = cachePolicy.revalidationHeaders(updateRequest);
+```
+
+#### `revalidatedPolicy(revalidationRequest, revalidationResponse)`
+
+Use this method to update the cache after receiving a new response from the origin server. It returns an object with two keys:
+
+- `policy` — A new `CachePolicy` with HTTP headers updated from `revalidationResponse`. You can always replace the old cached `CachePolicy` with the new one.
+- `modified` — Boolean indicating whether the response body has changed.
+    - If `false`, then a valid 304 Not Modified response has been received, and you can reuse the old cached response body. This is also affected by `stale-if-error`.
+    - If `true`, you should use the new response's body (if present), or make another request to the origin server without any conditional headers (i.e. don't use `revalidationHeaders()` this time) to get the new resource.
+
+```js
+// When serving requests from cache:
+const { policy: oldPolicy, response: oldResponse } = letsPretendThisIsSomeCache.get(
+    newRequest.url
+);
+
+if (!oldPolicy.satisfiesWithoutRevalidation(newRequest)) {
+    // Change the request to ask the origin server if the cached response can be used
+    newRequest.headers = oldPolicy.revalidationHeaders(newRequest);
+
+    // Send request to the origin server. The server may respond with status 304
+    const newResponse = await makeRequest(newRequest);
+
+    // Create updated policy and combined response from the old and new data
+    const { policy, modified } = oldPolicy.revalidatedPolicy(
+        newRequest,
+        newResponse
+    );
+    const response = modified ? newResponse : oldResponse;
+
+    // Update the cache with the newer/fresher response
+    letsPretendThisIsSomeCache.set(
+        newRequest.url,
+        { policy, response },
+        policy.timeToLive()
+    );
+
+    // And proceed returning cached response as usual
+    response.headers = policy.responseHeaders();
+    return response;
+}
+```
+
+# Yo, FRESH
+
+![satisfiesWithoutRevalidation](fresh.jpg)
+
+## Used by
+
+- [ImageOptim API](https://imageoptim.com/api), [make-fetch-happen](https://github.com/zkat/make-fetch-happen), [cacheable-request](https://www.npmjs.com/package/cacheable-request) ([got](https://www.npmjs.com/package/got)), [npm/registry-fetch](https://github.com/npm/registry-fetch), [etc.](https://github.com/kornelski/http-cache-semantics/network/dependents)
+
+## Implemented
+
+- `Cache-Control` response header with all the quirks.
+- `Expires` with check for bad clocks.
+- `Pragma` response header.
+- `Age` response header.
+- `Vary` response header.
+- Default cacheability of statuses and methods.
+- Requests for stale data.
+- Filtering of hop-by-hop headers.
+- Basic revalidation request.
+- `stale-if-error`
+
+## Unimplemented
+
+- Merging of range requests, `If-Range` (but correctly supports them as non-cacheable)
+- Revalidation of multiple representations
+
+### Trusting server `Date`
+
+Per the RFC, the cache should take into account the time between server-supplied `Date` and the time it received the response. The RFC-mandated behavior creates two problems:
+
+ * Servers with incorrectly set timezone may add several hours to cache age (or more, if the clock is completely wrong).
+ * Even reasonably correct clocks may be off by a couple of seconds, breaking the `max-age=1` trick (which is useful for reverse proxies on high-traffic servers).
+
+Previous versions of this library had an option to ignore the server date if it was "too inaccurate". To support the `max-age=1` trick the library also has to ignore dates that are pretty accurate. There's no point in having an option to trust dates that are only a bit inaccurate, so this library won't trust any server dates. `max-age` will be interpreted from the time the response has been received, not from when it has been sent. This will affect only [RFC 1149 networks](https://tools.ietf.org/html/rfc1149).
diff --git a/node_modules/http-cache-semantics/index.js b/node_modules/http-cache-semantics/index.js
new file mode 100644
index 0000000..31fba48
--- /dev/null
+++ b/node_modules/http-cache-semantics/index.js
@@ -0,0 +1,674 @@
+'use strict';
+// rfc7231 6.1
+const statusCodeCacheableByDefault = new Set([
+    200,
+    203,
+    204,
+    206,
+    300,
+    301,
+    308,
+    404,
+    405,
+    410,
+    414,
+    501,
+]);
+
+// This implementation does not understand partial responses (206)
+const understoodStatuses = new Set([
+    200,
+    203,
+    204,
+    300,
+    301,
+    302,
+    303,
+    307,
+    308,
+    404,
+    405,
+    410,
+    414,
+    501,
+]);
+
+const errorStatusCodes = new Set([
+    500,
+    502,
+    503,
+    504,
+]);
+
+const hopByHopHeaders = {
+    date: true, // included, because we add Age and update Date
+    connection: true,
+    'keep-alive': true,
+    'proxy-authenticate': true,
+    'proxy-authorization': true,
+    te: true,
+    trailer: true,
+    'transfer-encoding': true,
+    upgrade: true,
+};
+
+const excludedFromRevalidationUpdate = {
+    // Since the old body is reused, it doesn't make sense to change properties of the body
+    'content-length': true,
+    'content-encoding': true,
+    'transfer-encoding': true,
+    'content-range': true,
+};
+
+function toNumberOrZero(s) {
+    const n = parseInt(s, 10);
+    return isFinite(n) ? n : 0;
+}
+
+// RFC 5861
+function isErrorResponse(response) {
+    // consider undefined response as faulty
+    if (!response) {
+        return true
+    }
+    return errorStatusCodes.has(response.status);
+}
+
+function parseCacheControl(header) {
+    const cc = {};
+    if (!header) return cc;
+
+    // TODO: When there is more than one value present for a given directive (e.g., two Expires header fields, multiple Cache-Control: max-age directives),
+    // the directive's value is considered invalid. Caches are encouraged to consider responses that have invalid freshness information to be stale
+    const parts = header.trim().split(/,/);
+    for (const part of parts) {
+        const [k, v] = part.split(/=/, 2);
+        cc[k.trim()] = v === undefined ?
true : v.trim().replace(/^"|"$/g, '');
+    }
+
+    return cc;
+}
+
+function formatCacheControl(cc) {
+    let parts = [];
+    for (const k in cc) {
+        const v = cc[k];
+        parts.push(v === true ? k : k + '=' + v);
+    }
+    if (!parts.length) {
+        return undefined;
+    }
+    return parts.join(', ');
+}
+
+module.exports = class CachePolicy {
+    constructor(
+        req,
+        res,
+        {
+            shared,
+            cacheHeuristic,
+            immutableMinTimeToLive,
+            ignoreCargoCult,
+            _fromObject,
+        } = {}
+    ) {
+        if (_fromObject) {
+            this._fromObject(_fromObject);
+            return;
+        }
+
+        if (!res || !res.headers) {
+            throw Error('Response headers missing');
+        }
+        this._assertRequestHasHeaders(req);
+
+        this._responseTime = this.now();
+        this._isShared = shared !== false;
+        this._cacheHeuristic =
+            undefined !== cacheHeuristic ? cacheHeuristic : 0.1; // 10% matches IE
+        this._immutableMinTtl =
+            undefined !== immutableMinTimeToLive
+                ? immutableMinTimeToLive
+                : 24 * 3600 * 1000;
+
+        this._status = 'status' in res ? res.status : 200;
+        this._resHeaders = res.headers;
+        this._rescc = parseCacheControl(res.headers['cache-control']);
+        this._method = 'method' in req ? req.method : 'GET';
+        this._url = req.url;
+        this._host = req.headers.host;
+        this._noAuthorization = !req.headers.authorization;
+        this._reqHeaders = res.headers.vary ? req.headers : null; // Don't keep all request headers if they won't be used
+        this._reqcc = parseCacheControl(req.headers['cache-control']);
+
+        // Assume that if someone uses legacy, non-standard unnecessary options they don't understand caching,
+        // so there's no point strictly adhering to the blindly copy&pasted directives.
+        if (
+            ignoreCargoCult &&
+            'pre-check' in this._rescc &&
+            'post-check' in this._rescc
+        ) {
+            delete this._rescc['pre-check'];
+            delete this._rescc['post-check'];
+            delete this._rescc['no-cache'];
+            delete this._rescc['no-store'];
+            delete this._rescc['must-revalidate'];
+            this._resHeaders = Object.assign({}, this._resHeaders, {
+                'cache-control': formatCacheControl(this._rescc),
+            });
+            delete this._resHeaders.expires;
+            delete this._resHeaders.pragma;
+        }
+
+        // When the Cache-Control header field is not present in a request, caches MUST consider the no-cache request pragma-directive
+        // as having the same effect as if "Cache-Control: no-cache" were present (see Section 5.2.1).
+        if (
+            res.headers['cache-control'] == null &&
+            /no-cache/.test(res.headers.pragma)
+        ) {
+            this._rescc['no-cache'] = true;
+        }
+    }
+
+    now() {
+        return Date.now();
+    }
+
+    storable() {
+        // The "no-store" request directive indicates that a cache MUST NOT store any part of either this request or any response to it.
+ return !!( + !this._reqcc['no-store'] && + // A cache MUST NOT store a response to any request, unless: + // The request method is understood by the cache and defined as being cacheable, and + ('GET' === this._method || + 'HEAD' === this._method || + ('POST' === this._method && this._hasExplicitExpiration())) && + // the response status code is understood by the cache, and + understoodStatuses.has(this._status) && + // the "no-store" cache directive does not appear in request or response header fields, and + !this._rescc['no-store'] && + // the "private" response directive does not appear in the response, if the cache is shared, and + (!this._isShared || !this._rescc.private) && + // the Authorization header field does not appear in the request, if the cache is shared, + (!this._isShared || + this._noAuthorization || + this._allowsStoringAuthenticated()) && + // the response either: + // contains an Expires header field, or + (this._resHeaders.expires || + // contains a max-age response directive, or + // contains a s-maxage response directive and the cache is shared, or + // contains a public response directive. + this._rescc['max-age'] || + (this._isShared && this._rescc['s-maxage']) || + this._rescc.public || + // has a status code that is defined as cacheable by default + statusCodeCacheableByDefault.has(this._status)) + ); + } + + _hasExplicitExpiration() { + // 4.2.1 Calculating Freshness Lifetime + return ( + (this._isShared && this._rescc['s-maxage']) || + this._rescc['max-age'] || + this._resHeaders.expires + ); + } + + _assertRequestHasHeaders(req) { + if (!req || !req.headers) { + throw Error('Request headers missing'); + } + } + + satisfiesWithoutRevalidation(req) { + this._assertRequestHasHeaders(req); + + // When presented with a request, a cache MUST NOT reuse a stored response, unless: + // the presented request does not contain the no-cache pragma (Section 5.4), nor the no-cache cache directive, + // unless the stored response is successfully validated (Section 4.3), and + const requestCC = parseCacheControl(req.headers['cache-control']); + if (requestCC['no-cache'] || /no-cache/.test(req.headers.pragma)) { + return false; + } + + if (requestCC['max-age'] && this.age() > requestCC['max-age']) { + return false; + } + + if ( + requestCC['min-fresh'] && + this.timeToLive() < 1000 * requestCC['min-fresh'] + ) { + return false; + } + + // the stored response is either: + // fresh, or allowed to be served stale + if (this.stale()) { + const allowsStale = + requestCC['max-stale'] && + !this._rescc['must-revalidate'] && + (true === requestCC['max-stale'] || + requestCC['max-stale'] > this.age() - this.maxAge()); + if (!allowsStale) { + return false; + } + } + + return this._requestMatches(req, false); + } + + _requestMatches(req, allowHeadMethod) { + // The presented effective request URI and that of the stored response match, and + return ( + (!this._url || this._url === req.url) && + this._host === req.headers.host && + // the request method associated with the stored response allows it to be used for the presented request, and + (!req.method || + this._method === req.method || + (allowHeadMethod && 'HEAD' === req.method)) && + // selecting header fields nominated by the stored response (if any) match those presented, and + this._varyMatches(req) + ); + } + + _allowsStoringAuthenticated() { + // following Cache-Control response directives (Section 5.2.2) have such an effect: must-revalidate, public, and s-maxage. 
+ return ( + this._rescc['must-revalidate'] || + this._rescc.public || + this._rescc['s-maxage'] + ); + } + + _varyMatches(req) { + if (!this._resHeaders.vary) { + return true; + } + + // A Vary header field-value of "*" always fails to match + if (this._resHeaders.vary === '*') { + return false; + } + + const fields = this._resHeaders.vary + .trim() + .toLowerCase() + .split(/\s*,\s*/); + for (const name of fields) { + if (req.headers[name] !== this._reqHeaders[name]) return false; + } + return true; + } + + _copyWithoutHopByHopHeaders(inHeaders) { + const headers = {}; + for (const name in inHeaders) { + if (hopByHopHeaders[name]) continue; + headers[name] = inHeaders[name]; + } + // 9.1. Connection + if (inHeaders.connection) { + const tokens = inHeaders.connection.trim().split(/\s*,\s*/); + for (const name of tokens) { + delete headers[name]; + } + } + if (headers.warning) { + const warnings = headers.warning.split(/,/).filter(warning => { + return !/^\s*1[0-9][0-9]/.test(warning); + }); + if (!warnings.length) { + delete headers.warning; + } else { + headers.warning = warnings.join(',').trim(); + } + } + return headers; + } + + responseHeaders() { + const headers = this._copyWithoutHopByHopHeaders(this._resHeaders); + const age = this.age(); + + // A cache SHOULD generate 113 warning if it heuristically chose a freshness + // lifetime greater than 24 hours and the response's age is greater than 24 hours. + if ( + age > 3600 * 24 && + !this._hasExplicitExpiration() && + this.maxAge() > 3600 * 24 + ) { + headers.warning = + (headers.warning ? `${headers.warning}, ` : '') + + '113 - "rfc7234 5.5.4"'; + } + headers.age = `${Math.round(age)}`; + headers.date = new Date(this.now()).toUTCString(); + return headers; + } + + /** + * Value of the Date response header or current time if Date was invalid + * @return timestamp + */ + date() { + const serverDate = Date.parse(this._resHeaders.date); + if (isFinite(serverDate)) { + return serverDate; + } + return this._responseTime; + } + + /** + * Value of the Age header, in seconds, updated for the current time. + * May be fractional. + * + * @return Number + */ + age() { + let age = this._ageValue(); + + const residentTime = (this.now() - this._responseTime) / 1000; + return age + residentTime; + } + + _ageValue() { + return toNumberOrZero(this._resHeaders.age); + } + + /** + * Value of applicable max-age (or heuristic equivalent) in seconds. This counts since response's `Date`. + * + * For an up-to-date value, see `timeToLive()`. + * + * @return Number + */ + maxAge() { + if (!this.storable() || this._rescc['no-cache']) { + return 0; + } + + // Shared responses with cookies are cacheable according to the RFC, but IMHO it'd be unwise to do so by default + // so this implementation requires explicit opt-in via public header + if ( + this._isShared && + (this._resHeaders['set-cookie'] && + !this._rescc.public && + !this._rescc.immutable) + ) { + return 0; + } + + if (this._resHeaders.vary === '*') { + return 0; + } + + if (this._isShared) { + if (this._rescc['proxy-revalidate']) { + return 0; + } + // if a response includes the s-maxage directive, a shared cache recipient MUST ignore the Expires field. + if (this._rescc['s-maxage']) { + return toNumberOrZero(this._rescc['s-maxage']); + } + } + + // If a response includes a Cache-Control field with the max-age directive, a recipient MUST ignore the Expires field. + if (this._rescc['max-age']) { + return toNumberOrZero(this._rescc['max-age']); + } + + const defaultMinTtl = this._rescc.immutable ? 
this._immutableMinTtl : 0; + + const serverDate = this.date(); + if (this._resHeaders.expires) { + const expires = Date.parse(this._resHeaders.expires); + // A cache recipient MUST interpret invalid date formats, especially the value "0", as representing a time in the past (i.e., "already expired"). + if (Number.isNaN(expires) || expires < serverDate) { + return 0; + } + return Math.max(defaultMinTtl, (expires - serverDate) / 1000); + } + + if (this._resHeaders['last-modified']) { + const lastModified = Date.parse(this._resHeaders['last-modified']); + if (isFinite(lastModified) && serverDate > lastModified) { + return Math.max( + defaultMinTtl, + ((serverDate - lastModified) / 1000) * this._cacheHeuristic + ); + } + } + + return defaultMinTtl; + } + + timeToLive() { + const age = this.maxAge() - this.age(); + const staleIfErrorAge = age + toNumberOrZero(this._rescc['stale-if-error']); + const staleWhileRevalidateAge = age + toNumberOrZero(this._rescc['stale-while-revalidate']); + return Math.max(0, age, staleIfErrorAge, staleWhileRevalidateAge) * 1000; + } + + stale() { + return this.maxAge() <= this.age(); + } + + _useStaleIfError() { + return this.maxAge() + toNumberOrZero(this._rescc['stale-if-error']) > this.age(); + } + + useStaleWhileRevalidate() { + return this.maxAge() + toNumberOrZero(this._rescc['stale-while-revalidate']) > this.age(); + } + + static fromObject(obj) { + return new this(undefined, undefined, { _fromObject: obj }); + } + + _fromObject(obj) { + if (this._responseTime) throw Error('Reinitialized'); + if (!obj || obj.v !== 1) throw Error('Invalid serialization'); + + this._responseTime = obj.t; + this._isShared = obj.sh; + this._cacheHeuristic = obj.ch; + this._immutableMinTtl = + obj.imm !== undefined ? obj.imm : 24 * 3600 * 1000; + this._status = obj.st; + this._resHeaders = obj.resh; + this._rescc = obj.rescc; + this._method = obj.m; + this._url = obj.u; + this._host = obj.h; + this._noAuthorization = obj.a; + this._reqHeaders = obj.reqh; + this._reqcc = obj.reqcc; + } + + toObject() { + return { + v: 1, + t: this._responseTime, + sh: this._isShared, + ch: this._cacheHeuristic, + imm: this._immutableMinTtl, + st: this._status, + resh: this._resHeaders, + rescc: this._rescc, + m: this._method, + u: this._url, + h: this._host, + a: this._noAuthorization, + reqh: this._reqHeaders, + reqcc: this._reqcc, + }; + } + + /** + * Headers for sending to the origin server to revalidate stale response. + * Allows server to return 304 to allow reuse of the previous response. + * + * Hop by hop headers are always stripped. + * Revalidation headers may be added or removed, depending on request. + */ + revalidationHeaders(incomingReq) { + this._assertRequestHasHeaders(incomingReq); + const headers = this._copyWithoutHopByHopHeaders(incomingReq.headers); + + // This implementation does not understand range requests + delete headers['if-range']; + + if (!this._requestMatches(incomingReq, true) || !this.storable()) { + // revalidation allowed via HEAD + // not for the same resource, or wasn't allowed to be cached anyway + delete headers['if-none-match']; + delete headers['if-modified-since']; + return headers; + } + + /* MUST send that entity-tag in any cache validation request (using If-Match or If-None-Match) if an entity-tag has been provided by the origin server. */ + if (this._resHeaders.etag) { + headers['if-none-match'] = headers['if-none-match'] + ? 
`${headers['if-none-match']}, ${this._resHeaders.etag}`
+                : this._resHeaders.etag;
+        }
+
+        // Clients MAY issue simple (non-subrange) GET requests with either weak validators or strong validators. Clients MUST NOT use weak validators in other forms of request.
+        const forbidsWeakValidators =
+            headers['accept-ranges'] ||
+            headers['if-match'] ||
+            headers['if-unmodified-since'] ||
+            (this._method && this._method != 'GET');
+
+        /* SHOULD send the Last-Modified value in non-subrange cache validation requests (using If-Modified-Since) if only a Last-Modified value has been provided by the origin server.
+        Note: This implementation does not understand partial responses (206) */
+        if (forbidsWeakValidators) {
+            delete headers['if-modified-since'];
+
+            if (headers['if-none-match']) {
+                const etags = headers['if-none-match']
+                    .split(/,/)
+                    .filter(etag => {
+                        return !/^\s*W\//.test(etag);
+                    });
+                if (!etags.length) {
+                    delete headers['if-none-match'];
+                } else {
+                    headers['if-none-match'] = etags.join(',').trim();
+                }
+            }
+        } else if (
+            this._resHeaders['last-modified'] &&
+            !headers['if-modified-since']
+        ) {
+            headers['if-modified-since'] = this._resHeaders['last-modified'];
+        }
+
+        return headers;
+    }
+
+    /**
+     * Creates a new CachePolicy with information combined from the previous response,
+     * and the new revalidation response.
+     *
+     * Returns {policy, modified} where modified is a boolean indicating
+     * whether the response body has been modified, and the old cached body can't be used.
+     *
+     * @return {Object} {policy: CachePolicy, modified: Boolean}
+     */
+    revalidatedPolicy(request, response) {
+        this._assertRequestHasHeaders(request);
+        if (this._useStaleIfError() && isErrorResponse(response)) { // Consider the revalidation request unsuccessful
+            return {
+                modified: false,
+                matches: false,
+                policy: this,
+            };
+        }
+        if (!response || !response.headers) {
+            throw Error('Response headers missing');
+        }
+
+        // These aren't going to be supported exactly, since one CachePolicy object
+        // doesn't know about all the other cached objects.
+        let matches = false;
+        if (response.status !== undefined && response.status != 304) {
+            matches = false;
+        } else if (
+            response.headers.etag &&
+            !/^\s*W\//.test(response.headers.etag)
+        ) {
+            // "All of the stored responses with the same strong validator are selected.
+            // If none of the stored responses contain the same strong validator,
+            // then the cache MUST NOT use the new response to update any stored responses."
+            matches =
+                this._resHeaders.etag &&
+                this._resHeaders.etag.replace(/^\s*W\//, '') ===
+                    response.headers.etag;
+        } else if (this._resHeaders.etag && response.headers.etag) {
+            // "If the new response contains a weak validator and that validator corresponds
+            // to one of the cache's stored responses,
+            // then the most recent of those matching stored responses is selected for update."
+            matches =
+                this._resHeaders.etag.replace(/^\s*W\//, '') ===
+                response.headers.etag.replace(/^\s*W\//, '');
+        } else if (this._resHeaders['last-modified']) {
+            matches =
+                this._resHeaders['last-modified'] ===
+                response.headers['last-modified'];
+        } else {
+            // If the new response does not include any form of validator (such as in the case where
+            // a client generates an If-Modified-Since request from a source other than the Last-Modified
+            // response header field), and there is only one stored response, and that stored response also
+            // lacks a validator, then that stored response is selected for update.
+            if (
+                !this._resHeaders.etag &&
+                !this._resHeaders['last-modified'] &&
+                !response.headers.etag &&
+                !response.headers['last-modified']
+            ) {
+                matches = true;
+            }
+        }
+
+        if (!matches) {
+            return {
+                policy: new this.constructor(request, response),
+                // A client receiving a 304 without a body, even if it's invalid/mismatched, has no option
+                // but to reuse a cached body. We don't have a good way to tell clients to do
+                // error recovery in such a case.
+                modified: response.status != 304,
+                matches: false,
+            };
+        }
+
+        // use other header fields provided in the 304 (Not Modified) response to replace all instances
+        // of the corresponding header fields in the stored response.
+        const headers = {};
+        for (const k in this._resHeaders) {
+            headers[k] =
+                k in response.headers && !excludedFromRevalidationUpdate[k]
+                    ? response.headers[k]
+                    : this._resHeaders[k];
+        }
+
+        const newResponse = Object.assign({}, response, {
+            status: this._status,
+            method: this._method,
+            headers,
+        });
+        return {
+            policy: new this.constructor(request, newResponse, {
+                shared: this._isShared,
+                cacheHeuristic: this._cacheHeuristic,
+                immutableMinTimeToLive: this._immutableMinTtl,
+            }),
+            modified: false,
+            matches: true,
+        };
+    }
+};
diff --git a/node_modules/http-cache-semantics/package.json b/node_modules/http-cache-semantics/package.json
new file mode 100644
index 0000000..defbb04
--- /dev/null
+++ b/node_modules/http-cache-semantics/package.json
@@ -0,0 +1,18 @@
+{
+    "name": "http-cache-semantics",
+    "version": "4.1.1",
+    "description": "Parses Cache-Control and other headers. Helps building correct HTTP caches and proxies",
+    "repository": "https://github.com/kornelski/http-cache-semantics.git",
+    "main": "index.js",
+    "scripts": {
+        "test": "mocha"
+    },
+    "files": [
+        "index.js"
+    ],
+    "author": "Kornel Lesiński (https://kornel.ski/)",
+    "license": "BSD-2-Clause",
+    "devDependencies": {
+        "mocha": "^10.0"
+    }
+}
diff --git a/node_modules/http-proxy-agent/README.md b/node_modules/http-proxy-agent/README.md
new file mode 100644
index 0000000..d60e206
--- /dev/null
+++ b/node_modules/http-proxy-agent/README.md
@@ -0,0 +1,74 @@
+http-proxy-agent
+================
+### An HTTP(s) proxy `http.Agent` implementation for HTTP
+[![Build Status](https://github.com/TooTallNate/node-http-proxy-agent/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-http-proxy-agent/actions?workflow=Node+CI)
+
+This module provides an `http.Agent` implementation that connects to a specified
+HTTP or HTTPS proxy server, and can be used with the built-in `http` module.
+
+__Note:__ For HTTP proxy usage with the `https` module, check out
+[`node-https-proxy-agent`](https://github.com/TooTallNate/node-https-proxy-agent).
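+
+A minimal sketch of typical usage (the proxy address below is a placeholder):
+
+``` js
+var http = require('http');
+var HttpProxyAgent = require('http-proxy-agent');
+
+// the constructor accepts a proxy URL string (an options object works too)
+var agent = new HttpProxyAgent('http://127.0.0.1:3128');
+
+// passing the agent in the request options routes the request through the proxy
+http.get({ host: 'example.com', path: '/', agent: agent }, function (res) {
+  console.log('proxied response status: %d', res.statusCode);
+});
+```
+
+The fuller example below reads the proxy URL from `process.env.http_proxy` instead of hard-coding it.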
+
+Installation
+------------
+
+Install with `npm`:
+
+``` bash
+$ npm install http-proxy-agent
+```
+
+
+Example
+-------
+
+``` js
+var url = require('url');
+var http = require('http');
+var HttpProxyAgent = require('http-proxy-agent');
+
+// HTTP/HTTPS proxy to connect to
+var proxy = process.env.http_proxy || 'http://168.63.76.32:3128';
+console.log('using proxy server %j', proxy);
+
+// HTTP endpoint for the proxy to connect to
+var endpoint = process.argv[2] || 'http://nodejs.org/api/';
+console.log('attempting to GET %j', endpoint);
+var opts = url.parse(endpoint);
+
+// create an instance of the `HttpProxyAgent` class with the proxy server information
+var agent = new HttpProxyAgent(proxy);
+opts.agent = agent;
+
+http.get(opts, function (res) {
+  console.log('"response" event!', res.headers);
+  res.pipe(process.stdout);
+});
+```
+
+
+License
+-------
+
+(The MIT License)
+
+Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net>
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/http-proxy-agent/dist/agent.d.ts b/node_modules/http-proxy-agent/dist/agent.d.ts
new file mode 100644
index 0000000..3f043f7
--- /dev/null
+++ b/node_modules/http-proxy-agent/dist/agent.d.ts
@@ -0,0 +1,32 @@
+/// <reference types="node" />
+import net from 'net';
+import { Agent, ClientRequest, RequestOptions } from 'agent-base';
+import { HttpProxyAgentOptions } from '.';
+interface HttpProxyAgentClientRequest extends ClientRequest {
+    path: string;
+    output?: string[];
+    outputData?: {
+        data: string;
+    }[];
+    _header?: string | null;
+    _implicitHeader(): void;
+}
+/**
+ * The `HttpProxyAgent` implements an HTTP Agent subclass that connects
+ * to the specified "HTTP proxy server" in order to proxy HTTP requests.
+ *
+ * @api public
+ */
+export default class HttpProxyAgent extends Agent {
+    private secureProxy;
+    private proxy;
+    constructor(_opts: string | HttpProxyAgentOptions);
+    /**
+     * Called when the node-core HTTP client library is creating a
+     * new HTTP request.
+     *
+     * @api protected
+     */
+    callback(req: HttpProxyAgentClientRequest, opts: RequestOptions): Promise<net.Socket>;
+}
+export {};
diff --git a/node_modules/http-proxy-agent/dist/agent.js b/node_modules/http-proxy-agent/dist/agent.js
new file mode 100644
index 0000000..aca8280
--- /dev/null
+++ b/node_modules/http-proxy-agent/dist/agent.js
@@ -0,0 +1,145 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ?
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const net_1 = __importDefault(require("net")); +const tls_1 = __importDefault(require("tls")); +const url_1 = __importDefault(require("url")); +const debug_1 = __importDefault(require("debug")); +const once_1 = __importDefault(require("@tootallnate/once")); +const agent_base_1 = require("agent-base"); +const debug = (0, debug_1.default)('http-proxy-agent'); +function isHTTPS(protocol) { + return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + * + * @api public + */ +class HttpProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('Creating new HttpProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; + } + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + parsed.port = ''; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. 
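+            // e.g. a proxy URL of 'http://user:pass@proxy:3128' parses to proxy.auth === 'user:pass',
+            // which is sent as "Proxy-Authorization: Basic dXNlcjpwYXNz" (illustrative credentials)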
+ if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. + if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); + } + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. + yield (0, once_1.default)(socket, 'connect'); + return socket; + }); + } +} +exports.default = HttpProxyAgent; +//# sourceMappingURL=agent.js.map \ No newline at end of file diff --git a/node_modules/http-proxy-agent/dist/agent.js.map b/node_modules/http-proxy-agent/dist/agent.js.map new file mode 100644 index 0000000..bd3b56a --- /dev/null +++ b/node_modules/http-proxy-agent/dist/agent.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,kDAAgC;AAChC,6DAAqC;AACrC,2CAAkE;AAGlE,MAAM,KAAK,GAAG,IAAA,eAAW,EAAC,kBAAkB,CAAC,CAAC;AAY9C,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED;;;;;GAKG;AACH,MAAqB,cAAe,SAAQ,kBAAK;IAIhD,YAAY,KAAqC;QAChD,IAAI,IAA2B,CAAC;QAChC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,0CAA0C,EAAE,IAAI,CAAC,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAA+B,IAAI,CAAE,CAAC;QAEjD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAgC,EAChC,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YACpC,MAAM,MAAM,GAAG,aAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;YAEnC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC;aAC1B;YAED,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC;aACrD;YAED,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,EAAE;gBAC5C,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAChC;YAED,IAAI,MAAM,CAAC,IAAI,KAAK,IAAI,EAAE;gBACzB,yDAAyD;gBACzD,2CAA2C;gBAC3C,MAAM,CAAC,IAAI,GAAG,EAAE,CAAC;aACjB;YAED,0DAA0D;YAC1D,0DAA0D;YAC1D,GAAG,CAAC,IAAI,GAAG,aAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAE9B,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,GAAG,CAAC,SAAS,CACZ,qBAAqB,EACrB,SAAS,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CACrD,CAAC;aACF;YAED,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBACN,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,kEAAkE;YAClE,IAAI,GAAG,CAAC,OAAO,EAAE;gBAChB,IAAI,KAAa,CAAC;gBAClB,IAAI,YAAoB,CAAC;gBACzB,KAAK,CAAC,oDAAoD,CAAC,CAAC;gBAC5D,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC;gBACnB,GAAG,CAAC,eAAe,EAAE,CAAC;gBACtB,IAAI,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;oBACxC,YAAY;oBACZ,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;oBACtB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC5D,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;iBACvC;qBAAM,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;oBACvD,aAAa;oBACb,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;oBAC/B,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI;wBACrB,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC7C,KAAK,CAAC,mBAA
mB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;iBACnD;aACD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,qEAAqE;YACrE,qDAAqD;YACrD,MAAM,IAAA,cAAI,EAAC,MAAM,EAAE,SAAS,CAAC,CAAC;YAE9B,OAAO,MAAM,CAAC;QACf,CAAC;KAAA;CACD;AA1ID,iCA0IC"} \ No newline at end of file diff --git a/node_modules/http-proxy-agent/dist/index.d.ts b/node_modules/http-proxy-agent/dist/index.d.ts new file mode 100644 index 0000000..24bdb52 --- /dev/null +++ b/node_modules/http-proxy-agent/dist/index.d.ts @@ -0,0 +1,21 @@ +/// +import net from 'net'; +import tls from 'tls'; +import { Url } from 'url'; +import { AgentOptions } from 'agent-base'; +import _HttpProxyAgent from './agent'; +declare function createHttpProxyAgent(opts: string | createHttpProxyAgent.HttpProxyAgentOptions): _HttpProxyAgent; +declare namespace createHttpProxyAgent { + interface BaseHttpProxyAgentOptions { + secureProxy?: boolean; + host?: string | null; + path?: string | null; + port?: string | number | null; + } + export interface HttpProxyAgentOptions extends AgentOptions, BaseHttpProxyAgentOptions, Partial> { + } + export type HttpProxyAgent = _HttpProxyAgent; + export const HttpProxyAgent: typeof _HttpProxyAgent; + export {}; +} +export = createHttpProxyAgent; diff --git a/node_modules/http-proxy-agent/dist/index.js b/node_modules/http-proxy-agent/dist/index.js new file mode 100644 index 0000000..0a71180 --- /dev/null +++ b/node_modules/http-proxy-agent/dist/index.js @@ -0,0 +1,14 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const agent_1 = __importDefault(require("./agent")); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/http-proxy-agent/dist/index.js.map b/node_modules/http-proxy-agent/dist/index.js.map new file mode 100644 index 0000000..e07dae5 --- /dev/null +++ b/node_modules/http-proxy-agent/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAIA,oDAAsC;AAEtC,SAAS,oBAAoB,CAC5B,IAAyD;IAEzD,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,CAAC;AAClC,CAAC;AAED,WAAU,oBAAoB;IAmBhB,mCAAc,GAAG,eAAe,CAAC;IAE9C,oBAAoB,CAAC,SAAS,GAAG,eAAe,CAAC,SAAS,CAAC;AAC5D,CAAC,EAtBS,oBAAoB,KAApB,oBAAoB,QAsB7B;AAED,iBAAS,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/http-proxy-agent/package.json b/node_modules/http-proxy-agent/package.json new file mode 100644 index 0000000..659d6e1 --- /dev/null +++ b/node_modules/http-proxy-agent/package.json @@ -0,0 +1,57 @@ +{ + "name": "http-proxy-agent", + "version": "5.0.0", + "description": "An HTTP(s) proxy `http.Agent` implementation for HTTP", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "mocha", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-http-proxy-agent.git" + }, + "keywords": [ + "http", + "proxy", + "endpoint", + "agent" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": 
"https://github.com/TooTallNate/node-http-proxy-agent/issues" + }, + "dependencies": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/node": "^12.19.2", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.2", + "proxy": "1", + "rimraf": "^3.0.0", + "typescript": "^4.4.3" + }, + "engines": { + "node": ">= 6" + } +} diff --git a/node_modules/https-proxy-agent/README.md b/node_modules/https-proxy-agent/README.md new file mode 100644 index 0000000..328656a --- /dev/null +++ b/node_modules/https-proxy-agent/README.md @@ -0,0 +1,137 @@ +https-proxy-agent +================ +### An HTTP(s) proxy `http.Agent` implementation for HTTPS +[![Build Status](https://github.com/TooTallNate/node-https-proxy-agent/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-https-proxy-agent/actions?workflow=Node+CI) + +This module provides an `http.Agent` implementation that connects to a specified +HTTP or HTTPS proxy server, and can be used with the built-in `https` module. + +Specifically, this `Agent` implementation connects to an intermediary "proxy" +server and issues the [CONNECT HTTP method][CONNECT], which tells the proxy to +open a direct TCP connection to the destination server. + +Since this agent implements the CONNECT HTTP method, it also works with other +protocols that use this method when connecting over proxies (i.e. WebSockets). +See the "Examples" section below for more. 
+ + +Installation +------------ + +Install with `npm`: + +``` bash +$ npm install https-proxy-agent +``` + + +Examples +-------- + +#### `https` module example + +``` js +var url = require('url'); +var https = require('https'); +var HttpsProxyAgent = require('https-proxy-agent'); + +// HTTP/HTTPS proxy to connect to +var proxy = process.env.http_proxy || 'http://168.63.76.32:3128'; +console.log('using proxy server %j', proxy); + +// HTTPS endpoint for the proxy to connect to +var endpoint = process.argv[2] || 'https://graph.facebook.com/tootallnate'; +console.log('attempting to GET %j', endpoint); +var options = url.parse(endpoint); + +// create an instance of the `HttpsProxyAgent` class with the proxy server information +var agent = new HttpsProxyAgent(proxy); +options.agent = agent; + +https.get(options, function (res) { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + +#### `ws` WebSocket connection example + +``` js +var url = require('url'); +var WebSocket = require('ws'); +var HttpsProxyAgent = require('https-proxy-agent'); + +// HTTP/HTTPS proxy to connect to +var proxy = process.env.http_proxy || 'http://168.63.76.32:3128'; +console.log('using proxy server %j', proxy); + +// WebSocket endpoint for the proxy to connect to +var endpoint = process.argv[2] || 'ws://echo.websocket.org'; +var parsed = url.parse(endpoint); +console.log('attempting to connect to WebSocket %j', endpoint); + +// create an instance of the `HttpsProxyAgent` class with the proxy server information +var options = url.parse(proxy); + +var agent = new HttpsProxyAgent(options); + +// finally, initiate the WebSocket connection +var socket = new WebSocket(endpoint, { agent: agent }); + +socket.on('open', function () { + console.log('"open" event!'); + socket.send('hello world'); +}); + +socket.on('message', function (data, flags) { + console.log('"message" event! %j %j', data, flags); + socket.close(); +}); +``` + +API +--- + +### new HttpsProxyAgent(Object options) + +The `HttpsProxyAgent` class implements an `http.Agent` subclass that connects +to the specified "HTTP(s) proxy server" in order to proxy HTTPS and/or WebSocket +requests. This is achieved by using the [HTTP `CONNECT` method][CONNECT]. + +The `options` argument may either be a string URI of the proxy server to use, or an +"options" object with more specific properties: + + * `host` - String - Proxy host to connect to (may use `hostname` as well). Required. + * `port` - Number - Proxy port to connect to. Required. + * `protocol` - String - If `https:`, then use TLS to connect to the proxy. + * `headers` - Object - Additional HTTP headers to be sent on the HTTP CONNECT method. + * Any other options given are passed to the `net.connect()`/`tls.connect()` functions. + + +License +------- + +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+[CONNECT]: http://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_Tunneling
diff --git a/node_modules/https-proxy-agent/dist/agent.d.ts b/node_modules/https-proxy-agent/dist/agent.d.ts
new file mode 100644
index 0000000..4f1c636
--- /dev/null
+++ b/node_modules/https-proxy-agent/dist/agent.d.ts
@@ -0,0 +1,30 @@
+/// <reference types="node" />
+import net from 'net';
+import { Agent, ClientRequest, RequestOptions } from 'agent-base';
+import { HttpsProxyAgentOptions } from '.';
+/**
+ * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to
+ * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests.
+ *
+ * Outgoing HTTP requests are first tunneled through the proxy server using the
+ * `CONNECT` HTTP request method to establish a connection to the proxy server,
+ * and then the proxy server connects to the destination target and issues the
+ * HTTP request from the proxy server.
+ *
+ * `https:` requests have their socket connection upgraded to TLS once
+ * the connection to the proxy server has been established.
+ *
+ * @api public
+ */
+export default class HttpsProxyAgent extends Agent {
+    private secureProxy;
+    private proxy;
+    constructor(_opts: string | HttpsProxyAgentOptions);
+    /**
+     * Called when the node-core HTTP client library is creating a
+     * new HTTP request.
+     *
+     * @api protected
+     */
+    callback(req: ClientRequest, opts: RequestOptions): Promise<net.Socket>;
+}
diff --git a/node_modules/https-proxy-agent/dist/agent.js b/node_modules/https-proxy-agent/dist/agent.js
new file mode 100644
index 0000000..75d1136
--- /dev/null
+++ b/node_modules/https-proxy-agent/dist/agent.js
@@ -0,0 +1,177 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ?
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const net_1 = __importDefault(require("net")); +const tls_1 = __importDefault(require("tls")); +const url_1 = __importDefault(require("url")); +const assert_1 = __importDefault(require("assert")); +const debug_1 = __importDefault(require("debug")); +const agent_base_1 = require("agent-base"); +const parse_proxy_response_1 = __importDefault(require("./parse-proxy-response")); +const debug = debug_1.default('https-proxy-agent:agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('creating new HttpsProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + // ALPN is supported by Node.js >= v5. + // attempt to negotiate http/1.1 for proxy servers that support http/2 + if (this.secureProxy && !('ALPNProtocols' in proxy)) { + proxy.ALPNProtocols = ['http 1.1']; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. + * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + const headers = Object.assign({}, proxy.headers); + const hostname = `${opts.host}:${opts.port}`; + let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; + } + // The `Host` header should only include the port + // number when it is not the default port. 
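+            // e.g. Host is "example.com" when tunneling to example.com:443 over TLS,
+            // but "example.com:8443" when a non-default port is used (see isDefaultPort below)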
+ let { host, port, secureEndpoint } = opts; + if (!isDefaultPort(port, secureEndpoint)) { + host += `:${port}`; + } + headers.Host = host; + headers.Connection = 'close'; + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = parse_proxy_response_1.default(socket); + socket.write(`${payload}\r\n`); + const { statusCode, buffered } = yield proxyResponsePromise; + if (statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername })); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. + // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net_1.default.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('replaying proxy buffer for failed request'); + assert_1.default(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + }); + } +} +exports.default = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function isDefaultPort(port, secure) { + return Boolean((!secure && port === 80) || (secure && port === 443)); +} +function isHTTPS(protocol) { + return typeof protocol === 'string' ? 
/^https:?$/i.test(protocol) : false; +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=agent.js.map \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/agent.js.map b/node_modules/https-proxy-agent/dist/agent.js.map new file mode 100644 index 0000000..0af6c17 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/agent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,oDAA4B;AAC5B,kDAAgC;AAEhC,2CAAkE;AAElE,kFAAwD;AAExD,MAAM,KAAK,GAAG,eAAW,CAAC,yBAAyB,CAAC,CAAC;AAErD;;;;;;;;;;;;;GAaG;AACH,MAAqB,eAAgB,SAAQ,kBAAK;IAIjD,YAAY,KAAsC;QACjD,IAAI,IAA4B,CAAC;QACjC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,2CAA2C,EAAE,IAAI,CAAC,CAAC;QACzD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAAgC,IAAI,CAAE,CAAC;QAElD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,sCAAsC;QACtC,sEAAsE;QACtE,IAAI,IAAI,CAAC,WAAW,IAAI,CAAC,CAAC,eAAe,IAAI,KAAK,CAAC,EAAE;YACpD,KAAK,CAAC,aAAa,GAAG,CAAC,UAAU,CAAC,CAAC;SACnC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAkB,EAClB,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YAEpC,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBACN,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,MAAM,OAAO,qBAA6B,KAAK,CAAC,OAAO,CAAE,CAAC;YAC1D,MAAM,QAAQ,GAAG,GAAG,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;YAC7C,IAAI,OAAO,GAAG,WAAW,QAAQ,eAAe,CAAC;YAEjD,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,OAAO,CAAC,qBAAqB,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACpD,KAAK,CAAC,IAAI,CACV,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;aACvB;YAED,iDAAiD;YACjD,0CAA0C;YAC1C,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,IAAI,CAAC;YAC1C,IAAI,CAAC,aAAa,CAAC,IAAI,EAAE,cAAc,CAAC,EAAE;gBACzC,IAAI,IAAI,IAAI,IAAI,EAAE,CAAC;aACnB;YACD,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;YAEpB,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC;YAC7B,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE;gBACxC,OAAO,IAAI,GAAG,IAAI,KAAK,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC;aAC3C;YAED,MAAM,oBAAoB,GAAG,8BAAkB,CAAC,MAAM,CAAC,CAAC;YAExD,MAAM,CAAC,KAAK,CAAC,GAAG,OAAO,MAAM,CAAC,CAAC;YAE/B,MAAM,EACL,UAAU,EACV,QAAQ,EACR,GAAG,MAAM,oBAAoB,CAAC;YAE/B,IAAI,UAAU,KAAK,GAAG,EAAE;gBACvB,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;gBAE3B,IAAI,IAAI,CAAC,cAAc,EAAE;oBACxB,sDAAsD;oBACtD,8CAA8C;oBAC9C,KAAK,CAAC,oCAAoC,CAAC,CAAC;oBAC5C,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,IAAI,CAAC;oBAChD,OAAO,aAAG,CAAC,OAAO,iCACd,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,CAAC,KACjD,MAAM;wBACN,UAAU,IACT
,CAAC;iBACH;gBAED,OAAO,MAAM,CAAC;aACd;YAED,oEAAoE;YACpE,kEAAkE;YAClE,iEAAiE;YACjE,qBAAqB;YAErB,iEAAiE;YACjE,0DAA0D;YAC1D,oEAAoE;YACpE,mBAAmB;YACnB,EAAE;YACF,4CAA4C;YAC5C,MAAM,CAAC,OAAO,EAAE,CAAC;YAEjB,MAAM,UAAU,GAAG,IAAI,aAAG,CAAC,MAAM,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAC;YACvD,UAAU,CAAC,QAAQ,GAAG,IAAI,CAAC;YAE3B,oEAAoE;YACpE,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAa,EAAE,EAAE;gBACpC,KAAK,CAAC,2CAA2C,CAAC,CAAC;gBACnD,gBAAM,CAAC,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;gBAEpC,gEAAgE;gBAChE,8DAA8D;gBAC9D,YAAY;gBACZ,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;gBACjB,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACd,CAAC,CAAC,CAAC;YAEH,OAAO,UAAU,CAAC;QACnB,CAAC;KAAA;CACD;AA3JD,kCA2JC;AAED,SAAS,MAAM,CAAC,MAAkC;IACjD,MAAM,CAAC,MAAM,EAAE,CAAC;AACjB,CAAC;AAED,SAAS,aAAa,CAAC,IAAY,EAAE,MAAe;IACnD,OAAO,OAAO,CAAC,CAAC,CAAC,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,KAAK,GAAG,CAAC,CAAC,CAAC;AACtE,CAAC;AAED,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED,SAAS,IAAI,CACZ,GAAM,EACN,GAAG,IAAO;IAIV,MAAM,GAAG,GAAG,EAEX,CAAC;IACF,IAAI,GAAqB,CAAC;IAC1B,KAAK,GAAG,IAAI,GAAG,EAAE;QAChB,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YACxB,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;SACpB;KACD;IACD,OAAO,GAAG,CAAC;AACZ,CAAC"} \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/index.d.ts b/node_modules/https-proxy-agent/dist/index.d.ts new file mode 100644 index 0000000..0d60062 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/index.d.ts @@ -0,0 +1,23 @@ +/// +import net from 'net'; +import tls from 'tls'; +import { Url } from 'url'; +import { AgentOptions } from 'agent-base'; +import { OutgoingHttpHeaders } from 'http'; +import _HttpsProxyAgent from './agent'; +declare function createHttpsProxyAgent(opts: string | createHttpsProxyAgent.HttpsProxyAgentOptions): _HttpsProxyAgent; +declare namespace createHttpsProxyAgent { + interface BaseHttpsProxyAgentOptions { + headers?: OutgoingHttpHeaders; + secureProxy?: boolean; + host?: string | null; + path?: string | null; + port?: string | number | null; + } + export interface HttpsProxyAgentOptions extends AgentOptions, BaseHttpsProxyAgentOptions, Partial> { + } + export type HttpsProxyAgent = _HttpsProxyAgent; + export const HttpsProxyAgent: typeof _HttpsProxyAgent; + export {}; +} +export = createHttpsProxyAgent; diff --git a/node_modules/https-proxy-agent/dist/index.js b/node_modules/https-proxy-agent/dist/index.js new file mode 100644 index 0000000..b03e763 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/index.js @@ -0,0 +1,14 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(require("./agent")); +function createHttpsProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpsProxyAgent) { + createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; + createHttpsProxyAgent.prototype = agent_1.default.prototype; +})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); +module.exports = createHttpsProxyAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/index.js.map b/node_modules/https-proxy-agent/dist/index.js.map new file mode 100644 index 0000000..f3ce559 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAKA,oDAAuC;AAEvC,SAAS,qBAAqB,CAC7B,IAA2D;IAE3D,OAAO,IAAI,eAAgB,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED,WAAU,qBAAqB;IAoBjB,qCAAe,GAAG,eAAgB,CAAC;IAEhD,qBAAqB,CAAC,SAAS,GAAG,eAAgB,CAAC,SAAS,CAAC;AAC9D,CAAC,EAvBS,qBAAqB,KAArB,qBAAqB,QAuB9B;AAED,iBAAS,qBAAqB,CAAC"} \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts b/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts new file mode 100644 index 0000000..7565674 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts @@ -0,0 +1,7 @@ +/// +import { Readable } from 'stream'; +export interface ProxyResponse { + statusCode: number; + buffered: Buffer; +} +export default function parseProxyResponse(socket: Readable): Promise; diff --git a/node_modules/https-proxy-agent/dist/parse-proxy-response.js b/node_modules/https-proxy-agent/dist/parse-proxy-response.js new file mode 100644 index 0000000..aa5ce3c --- /dev/null +++ b/node_modules/https-proxy-agent/dist/parse-proxy-response.js @@ -0,0 +1,66 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const debug_1 = __importDefault(require("debug")); +const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... 
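+        // (the bytes collected here are resolved as `buffered`; on a non-200 status,
+        // agent.js replays them onto a replacement socket for exactly this purpose)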
+ let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); + } + function onclose(err) { + debug('onclose had error %o', err); + } + function onend() { + debug('onend'); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); + const statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', firstLine); + resolve({ + statusCode, + buffered + }); + } + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + read(); + }); +} +exports.default = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/parse-proxy-response.js.map b/node_modules/https-proxy-agent/dist/parse-proxy-response.js.map new file mode 100644 index 0000000..bacdb84 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/parse-proxy-response.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parse-proxy-response.js","sourceRoot":"","sources":["../src/parse-proxy-response.ts"],"names":[],"mappings":";;;;;AAAA,kDAAgC;AAGhC,MAAM,KAAK,GAAG,eAAW,CAAC,wCAAwC,CAAC,CAAC;AAOpE,SAAwB,kBAAkB,CACzC,MAAgB;IAEhB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,+EAA+E;QAC/E,gFAAgF;QAChF,8EAA8E;QAC9E,8BAA8B;QAC9B,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,MAAM,OAAO,GAAa,EAAE,CAAC;QAE7B,SAAS,IAAI;YACZ,MAAM,CAAC,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YACxB,IAAI,CAAC;gBAAE,MAAM,CAAC,CAAC,CAAC,CAAC;;gBACZ,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;QACpC,CAAC;QAED,SAAS,OAAO;YACf,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;YACpC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YACxC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YACxC,MAAM,CAAC,cAAc,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;QACzC,CAAC;QAED,SAAS,OAAO,CAAC,GAAW;YAC3B,KAAK,CAAC,sBAAsB,EAAE,GAAG,CAAC,CAAC;QACpC,CAAC;QAED,SAAS,KAAK;YACb,KAAK,CAAC,OAAO,CAAC,CAAC;QAChB,CAAC;QAED,SAAS,OAAO,CAAC,GAAU;YAC1B,OAAO,EAAE,CAAC;YACV,KAAK,CAAC,YAAY,EAAE,GAAG,CAAC,CAAC;YACzB,MAAM,CAAC,GAAG,CAAC,CAAC;QACb,CAAC;QAED,SAAS,MAAM,CAAC,CAAS;YACxB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YAChB,aAAa,IAAI,CAAC,CAAC,MAAM,CAAC;YAE1B,MAAM,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;YACvD,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;YAElD,IAAI,YAAY,KAAK,CAAC,CAAC,EAAE;gBACxB,iBAAiB;gBACjB,KAAK,CAAC,8CAA8C,CAAC,CAAC;gBACtD,IAAI,EAAE,CAAC;gBACP,OAAO;aACP;YAED,MAAM,SAAS,GAAG,QAAQ,CAAC,QAAQ,CAClC,OAAO,EACP,CAAC,EACD,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CACxB,CAAC;YACF,MAAM,UAAU,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5C,KAAK,CAAC,+BAA+B,EAAE,SAAS,CAAC,CAAC;YAClD,OAAO,CAAC;gBACP,UAAU;gBACV,QAAQ;aACR,CAAC,CAAC;QACJ,CAAC;QAED,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;QAExB,IAAI,EAAE,CAAC;IACR,CAAC,CAAC,CAAC;AACJ,CAAC;AAvED,qCAuEC"} \ No newline at end of file diff --git 
a/node_modules/https-proxy-agent/package.json b/node_modules/https-proxy-agent/package.json new file mode 100644 index 0000000..fb2aba1 --- /dev/null +++ b/node_modules/https-proxy-agent/package.json @@ -0,0 +1,56 @@ +{ + "name": "https-proxy-agent", + "version": "5.0.1", + "description": "An HTTP(s) proxy `http.Agent` implementation for HTTPS", + "main": "dist/index", + "types": "dist/index", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "mocha --reporter spec", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-https-proxy-agent.git" + }, + "keywords": [ + "https", + "proxy", + "endpoint", + "agent" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/node-https-proxy-agent/issues" + }, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/node": "^12.12.11", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.2", + "proxy": "1", + "rimraf": "^3.0.0", + "typescript": "^3.5.3" + }, + "engines": { + "node": ">= 6" + } +} diff --git a/node_modules/humanize-ms/History.md b/node_modules/humanize-ms/History.md new file mode 100644 index 0000000..b159587 --- /dev/null +++ b/node_modules/humanize-ms/History.md @@ -0,0 +1,25 @@ + +1.2.1 / 2017-05-19 +================== + + * fix: package.json to reduce vulnerabilities (#3) + +1.2.0 / 2016-05-21 +================== + + * feat: warn with stack + +1.1.0 / 2016-04-04 +================== + + * deps: upgrade ms to 0.7.0 + +1.0.1 / 2014-12-31 +================== + + * feat(index.js): warn when result is undefined + +1.0.0 / 2014-08-14 +================== + + * init diff --git a/node_modules/humanize-ms/LICENSE b/node_modules/humanize-ms/LICENSE new file mode 100644 index 0000000..89de354 --- /dev/null +++ b/node_modules/humanize-ms/LICENSE @@ -0,0 +1,17 @@ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
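For orientation, here is a minimal sketch of how the https-proxy-agent build added above is typically consumed; the proxy URL is a made-up placeholder, not something configured anywhere in this change:

```js
// Tunnel an HTTPS request through an HTTP CONNECT proxy.
const https = require('https');
const HttpsProxyAgent = require('https-proxy-agent');

// Hypothetical proxy address -- substitute a real one.
const agent = new HttpsProxyAgent('http://proxy.example.com:3128');

https.get('https://example.com/', { agent }, (res) => {
  // By the time headers arrive here, the CONNECT handshake parsed by
  // dist/parse-proxy-response.js above has already succeeded.
  console.log('status:', res.statusCode);
});
```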
diff --git a/node_modules/humanize-ms/README.md b/node_modules/humanize-ms/README.md new file mode 100644 index 0000000..20a2ca3 --- /dev/null +++ b/node_modules/humanize-ms/README.md @@ -0,0 +1,40 @@ +humanize-ms +--------------- + +[![NPM version][npm-image]][npm-url] +[![build status][travis-image]][travis-url] +[![Test coverage][coveralls-image]][coveralls-url] +[![Gittip][gittip-image]][gittip-url] +[![David deps][david-image]][david-url] + +[npm-image]: https://img.shields.io/npm/v/humanize-ms.svg?style=flat +[npm-url]: https://npmjs.org/package/humanize-ms +[travis-image]: https://img.shields.io/travis/node-modules/humanize-ms.svg?style=flat +[travis-url]: https://travis-ci.org/node-modules/humanize-ms +[coveralls-image]: https://img.shields.io/coveralls/node-modules/humanize-ms.svg?style=flat +[coveralls-url]: https://coveralls.io/r/node-modules/humanize-ms?branch=master +[gittip-image]: https://img.shields.io/gittip/dead-horse.svg?style=flat +[gittip-url]: https://www.gittip.com/dead-horse/ +[david-image]: https://img.shields.io/david/node-modules/humanize-ms.svg?style=flat +[david-url]: https://david-dm.org/node-modules/humanize-ms + +transform humanized time to ms + +## Installation + +```bash +$ npm install humanize-ms +``` + +## Examples + +```js +var ms = require('humanize-ms'); + +ms('1s') // 1000 +ms(1000) // 1000 +``` + +### License + +MIT diff --git a/node_modules/humanize-ms/index.js b/node_modules/humanize-ms/index.js new file mode 100644 index 0000000..660df81 --- /dev/null +++ b/node_modules/humanize-ms/index.js @@ -0,0 +1,24 @@ +/*! + * humanize-ms - index.js + * Copyright(c) 2014 dead_horse + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. + */ + +var util = require('util'); +var ms = require('ms'); + +module.exports = function (t) { + if (typeof t === 'number') return t; + var r = ms(t); + if (r === undefined) { + var err = new Error(util.format('humanize-ms(%j) result undefined', t)); + console.warn(err.stack); + } + return r; +}; diff --git a/node_modules/humanize-ms/node_modules/ms/index.js b/node_modules/humanize-ms/node_modules/ms/index.js new file mode 100644 index 0000000..ea734fb --- /dev/null +++ b/node_modules/humanize-ms/node_modules/ms/index.js @@ -0,0 +1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function (val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds.
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. + */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} diff --git a/node_modules/humanize-ms/node_modules/ms/license.md b/node_modules/humanize-ms/node_modules/ms/license.md new file mode 100644 index 0000000..fa5d39b --- /dev/null +++ b/node_modules/humanize-ms/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Vercel, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
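Worth noting from the files above: humanize-ms returns numbers untouched and only warns when the bundled ms parser yields undefined. A small sketch of the observable behavior, assuming the vendored versions shown here:

```js
const humanizeMs = require('humanize-ms');

humanizeMs(1500);    // 1500 -- numbers pass through as-is
humanizeMs('1.5s');  // 1500 -- strings are delegated to ms()
humanizeMs('oops');  // undefined -- plus a stack-bearing warning
                     // printed via console.warn
```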
diff --git a/node_modules/humanize-ms/node_modules/ms/package.json b/node_modules/humanize-ms/node_modules/ms/package.json new file mode 100644 index 0000000..4997189 --- /dev/null +++ b/node_modules/humanize-ms/node_modules/ms/package.json @@ -0,0 +1,38 @@ +{ + "name": "ms", + "version": "2.1.3", + "description": "Tiny millisecond conversion utility", + "repository": "vercel/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "4.18.2", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1", + "prettier": "2.0.5" + } +} diff --git a/node_modules/humanize-ms/node_modules/ms/readme.md b/node_modules/humanize-ms/node_modules/ms/readme.md new file mode 100644 index 0000000..0fc1abb --- /dev/null +++ b/node_modules/humanize-ms/node_modules/ms/readme.md @@ -0,0 +1,59 @@ +# ms + +![CI](https://github.com/vercel/ms/workflows/CI/badge.svg) + +Use this package to easily convert various time formats to milliseconds. + +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! 
+ +As always, you can run the tests using: `npm test` diff --git a/node_modules/humanize-ms/package.json b/node_modules/humanize-ms/package.json new file mode 100644 index 0000000..da4ab7f --- /dev/null +++ b/node_modules/humanize-ms/package.json @@ -0,0 +1,37 @@ +{ + "name": "humanize-ms", + "version": "1.2.1", + "description": "transform humanize time to ms", + "main": "index.js", + "files": [ + "index.js" + ], + "scripts": { + "test": "make test" + }, + "keywords": [ + "humanize", + "ms" + ], + "author": { + "name": "dead-horse", + "email": "dead_horse@qq.com", + "url": "http://deadhorse.me" + }, + "repository": { + "type": "git", + "url": "https://github.com/node-modules/humanize-ms" + }, + "license": "MIT", + "dependencies": { + "ms": "^2.0.0" + }, + "devDependencies": { + "autod": "*", + "beautify-benchmark": "~0.2.4", + "benchmark": "~1.0.0", + "istanbul": "*", + "mocha": "*", + "should": "*" + } +} diff --git a/node_modules/iconv-lite/.github/dependabot.yml b/node_modules/iconv-lite/.github/dependabot.yml new file mode 100644 index 0000000..e4a0e0a --- /dev/null +++ b/node_modules/iconv-lite/.github/dependabot.yml @@ -0,0 +1,11 @@ +# Please see the documentation for all configuration options: +# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: "npm" + directory: "/" + schedule: + interval: "daily" + allow: + - dependency-type: production [Six IntelliJ .idea/ project files are also added at this point (codeStyles/Project.xml, codeStyles/codeStyleConfig.xml, iconv-lite.iml, inspectionProfiles/Project_Default.xml, modules.xml, vcs.xml); their XML bodies did not survive extraction, so only the file list is kept here.] diff --git a/node_modules/iconv-lite/Changelog.md b/node_modules/iconv-lite/Changelog.md new file mode 100644 index 0000000..464549b --- /dev/null +++ b/node_modules/iconv-lite/Changelog.md @@ -0,0 +1,212 @@ +## 0.6.3 / 2021-05-23 + * Fix HKSCS encoding to prefer Big5 codes if both Big5 and HKSCS codes are
possible (#264) + + +## 0.6.2 / 2020-07-08 + * Support Uint8Array-s decoding without conversion to Buffers, plus fix an edge case. + + +## 0.6.1 / 2020-06-28 + * Support Uint8Array-s directly when decoding (#246, by @gyzerok) + * Unify package.json version ranges to be strictly semver-compatible (#241) + * Fix minor issue in UTF-32 decoder's endianness detection code. + + +## 0.6.0 / 2020-06-08 + * Updated 'gb18030' encoding to :2005 edition (see https://github.com/whatwg/encoding/issues/22). + * Removed `iconv.extendNodeEncodings()` mechanism. It was deprecated 5 years ago and didn't work + in recent Node versions. + * Reworked Streaming API behavior in browser environments to fix #204. Streaming API will be + excluded by default in browser packs, saving ~100Kb bundle size, unless enabled explicitly using + `iconv.enableStreamingAPI(require('stream'))`. + * Updates to development environment & tests: + * Added ./test/webpack private package to test complex new use cases that need custom environment. + It's tested as a separate job in Travis CI. + * Updated generation code for the new EUC-KR index file format from Encoding Standard. + * Removed Buffer() constructor in tests (#197 by @gabrielschulhof). + + +## 0.5.2 / 2020-06-08 + * Added `iconv.getEncoder()` and `iconv.getDecoder()` methods to typescript definitions (#229). + * Fixed semver version to 6.1.2 to support Node 8.x (by @tanandara). + * Capped iconv version to 2.x as 3.x has dropped support for older Node versions. + * Switched from istanbul to c8 for code coverage. + + +## 0.5.1 / 2020-01-18 + + * Added cp720 encoding (#221, by @kr-deps) + * (minor) Changed Changelog.md formatting to use h2. + + +## 0.5.0 / 2019-06-26 + + * Added UTF-32 encoding, both little-endian and big-endian variants (UTF-32LE, UTF-32BE). If endianness + is not provided for decoding, it's deduced automatically from the stream using a heuristic similar to + what we use in UTF-16. (great work in #216 by @kshetline) + * Several minor updates to README (#217 by @oldj, plus some more) + * Added Node versions 10 and 12 to Travis test harness. + + +## 0.4.24 / 2018-08-22 + + * Added MIK encoding (#196, by @Ivan-Kalatchev) + + +## 0.4.23 / 2018-05-07 + + * Fix deprecation warning in Node v10 due to the last usage of `new Buffer` (#185, by @felixbuenemann) + * Switched from NodeBuffer to Buffer in typings (#155 by @felixfbecker, #186 by @larssn) + + +## 0.4.22 / 2018-05-05 + + * Use older semver style for dependencies to be compatible with Node version 0.10 (#182, by @dougwilson) + * Fix tests to accommodate fixes in Node v10 (#182, by @dougwilson) + + +## 0.4.21 / 2018-04-06 + + * Fix encoding canonicalization (#156) + * Fix the paths in the "browser" field in package.json (#174 by @LMLB) + * Removed "contributors" section in package.json - see Git history instead. + + +## 0.4.20 / 2018-04-06 + + * Updated `new Buffer()` usages with recommended replacements as it's being deprecated in Node v10 (#176, #178 by @ChALkeR) + + +## 0.4.19 / 2017-09-09 + + * Fixed iso8859-1 codec regression in handling untranslatable characters (#162, caused by #147) + * Re-generated windows1255 codec, because it was updated in iconv project + * Fixed grammar in error message when iconv-lite is loaded with encoding other than utf8 + + +## 0.4.18 / 2017-06-13 + + * Fixed CESU-8 regression in Node v8.
+ + +## 0.4.17 / 2017-04-22 + + * Updated typescript definition file to support Angular 2 AoT mode (#153 by @larssn) + + +## 0.4.16 / 2017-04-22 + + * Added support for React Native (#150) + * Changed iso8859-1 encoding to use internal 'binary' encoding, as it's the same thing (#147 by @mscdex) + * Fixed typo in Readme (#138 by @jiangzhuo) + * Fixed build for Node v6.10+ by making correct version comparison + * Added a warning if iconv-lite is loaded not as utf-8 (see #142) + + +## 0.4.15 / 2016-11-21 + + * Fixed typescript type definition (#137) + + +## 0.4.14 / 2016-11-20 + + * Preparation for v1.0 + * Added Node v6 and latest Node versions to Travis CI test rig + * Deprecated Node v0.8 support + * Typescript typings (@larssn) + * Fix encoding of Euro character in GB 18030 (inspired by @lygstate) + * Add ms prefix to dbcs windows encodings (@rokoroku) + + +## 0.4.13 / 2015-10-01 + + * Fix silly mistake in deprecation notice. + + +## 0.4.12 / 2015-09-26 + + * Node v4 support: + * Added CESU-8 decoding (#106) + * Added deprecation notice for `extendNodeEncodings` + * Added Travis tests for Node v4 and io.js latest (#105 by @Mithgol) + + +## 0.4.11 / 2015-07-03 + + * Added CESU-8 encoding. + + +## 0.4.10 / 2015-05-26 + + * Changed UTF-16 endianness heuristic to take into account any ASCII chars, not + just spaces. This should minimize the importance of "default" endianness. + + +## 0.4.9 / 2015-05-24 + + * Streamlined BOM handling: strip BOM by default, add BOM when encoding if + addBOM: true. Added docs to Readme. + * UTF16 now uses UTF16-LE by default. + * Fixed minor issue with big5 encoding. + * Added io.js testing on Travis; updated node-iconv version to test against. + Now we just skip testing SBCS encodings that node-iconv doesn't support. + * (internal refactoring) Updated codec interface to use classes. + * Use strict mode in all files. + + +## 0.4.8 / 2015-04-14 + + * added alias UNICODE-1-1-UTF-7 for UTF-7 encoding (#94) + + +## 0.4.7 / 2015-02-05 + + * stop official support of Node.js v0.8. Should still work, but no guarantees. + reason: Packages needed for testing are hard to get on Travis CI. + * work in environment where Object.prototype is monkey patched with enumerable + props (#89).
+ + +## 0.4.6 / 2015-01-12 + + * fix rare aliases of single-byte encodings (thanks @mscdex) + * double the timeout for dbcs tests to make them less flaky on travis + + +## 0.4.5 / 2014-11-20 + + * fix windows-31j and x-sjis encoding support (@nleush) + * minor fix: undefined variable reference when internal error happens + + +## 0.4.4 / 2014-07-16 + + * added encodings UTF-7 (RFC2152) and UTF-7-IMAP (RFC3501 Section 5.1.3) + * fixed streaming base64 encoding + + +## 0.4.3 / 2014-06-14 + + * added encodings UTF-16BE and UTF-16 with BOM + + +## 0.4.2 / 2014-06-12 + + * don't throw exception if `extendNodeEncodings()` is called more than once + + +## 0.4.1 / 2014-06-11 + + * codepage 808 added + + +## 0.4.0 / 2014-06-10 + + * code is rewritten from scratch + * all widespread encodings are supported + * streaming interface added + * browserify compatibility added + * (optional) extend core primitive encodings to make usage even simpler + * moved from vows to mocha as the testing framework + + diff --git a/node_modules/iconv-lite/LICENSE b/node_modules/iconv-lite/LICENSE new file mode 100644 index 0000000..d518d83 --- /dev/null +++ b/node_modules/iconv-lite/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) 2011 Alexander Shtuchkin + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/node_modules/iconv-lite/README.md b/node_modules/iconv-lite/README.md new file mode 100644 index 0000000..3c97f87 --- /dev/null +++ b/node_modules/iconv-lite/README.md @@ -0,0 +1,130 @@ +## iconv-lite: Pure JS character encoding conversion + + * No need for native code compilation. Quick to install, works on Windows and in sandboxed environments like [Cloud9](http://c9.io). + * Used in popular projects like [Express.js (body_parser)](https://github.com/expressjs/body-parser), + [Grunt](http://gruntjs.com/), [Nodemailer](http://www.nodemailer.com/), [Yeoman](http://yeoman.io/) and others. + * Faster than [node-iconv](https://github.com/bnoordhuis/node-iconv) (see below for performance comparison). + * Intuitive encode/decode API, including Streaming support. + * In-browser usage via [browserify](https://github.com/substack/node-browserify) or [webpack](https://webpack.js.org/) (~180kb gzip compressed with Buffer shim included). + * Typescript [type definition file](https://github.com/ashtuchkin/iconv-lite/blob/master/lib/index.d.ts) included. + * React Native is supported (need to install `stream` module to enable Streaming API). + * License: MIT. 
+ +[![NPM Stats](https://nodei.co/npm/iconv-lite.png)](https://npmjs.org/package/iconv-lite/) +[![Build Status](https://travis-ci.org/ashtuchkin/iconv-lite.svg?branch=master)](https://travis-ci.org/ashtuchkin/iconv-lite) +[![npm](https://img.shields.io/npm/v/iconv-lite.svg)](https://npmjs.org/package/iconv-lite/) +[![npm downloads](https://img.shields.io/npm/dm/iconv-lite.svg)](https://npmjs.org/package/iconv-lite/) +[![npm bundle size](https://img.shields.io/bundlephobia/min/iconv-lite.svg)](https://npmjs.org/package/iconv-lite/) + +## Usage +### Basic API +```javascript +var iconv = require('iconv-lite'); + +// Convert from an encoded buffer to a js string. +str = iconv.decode(Buffer.from([0x68, 0x65, 0x6c, 0x6c, 0x6f]), 'win1251'); + +// Convert from a js string to an encoded buffer. +buf = iconv.encode("Sample input string", 'win1251'); + +// Check if encoding is supported +iconv.encodingExists("us-ascii") +``` + +### Streaming API +```javascript + +// Decode stream (from binary data stream to js strings) +http.createServer(function(req, res) { + var converterStream = iconv.decodeStream('win1251'); + req.pipe(converterStream); + + converterStream.on('data', function(str) { + console.log(str); // Do something with decoded strings, chunk-by-chunk. + }); +}); + +// Convert encoding streaming example +fs.createReadStream('file-in-win1251.txt') + .pipe(iconv.decodeStream('win1251')) + .pipe(iconv.encodeStream('ucs2')) + .pipe(fs.createWriteStream('file-in-ucs2.txt')); + +// Sugar: all encode/decode streams have .collect(cb) method to accumulate data. +http.createServer(function(req, res) { + req.pipe(iconv.decodeStream('win1251')).collect(function(err, body) { + assert(typeof body == 'string'); + console.log(body); // full request body string + }); +}); +``` + +## Supported encodings + + * All node.js native encodings: utf8, ucs2 / utf16-le, ascii, binary, base64, hex. + * Additional unicode encodings: utf16, utf16-be, utf-7, utf-7-imap, utf32, utf32-le, and utf32-be. + * All widespread singlebyte encodings: Windows 125x family, ISO-8859 family, + IBM/DOS codepages, Macintosh family, KOI8 family, all others supported by iconv library. + Aliases like 'latin1', 'us-ascii' also supported. + * All widespread multibyte encodings: CP932, CP936, CP949, CP950, GB2312, GBK, GB18030, Big5, Shift_JIS, EUC-JP. + +See [all supported encodings on wiki](https://github.com/ashtuchkin/iconv-lite/wiki/Supported-Encodings). + +Most singlebyte encodings are generated automatically from [node-iconv](https://github.com/bnoordhuis/node-iconv). Thank you Ben Noordhuis and libiconv authors! + +Multibyte encodings are generated from [Unicode.org mappings](http://www.unicode.org/Public/MAPPINGS/) and [WHATWG Encoding Standard mappings](http://encoding.spec.whatwg.org/). Thank you, respective authors! + + +## Encoding/decoding speed + +Comparison with node-iconv module (1000x256kb, on MacBook Pro, Core i5/2.6 GHz, Node v0.12.0). +Note: your results may vary, so please always check on your hardware. + + operation iconv@2.1.4 iconv-lite@0.4.7 + ---------------------------------------------------------- + encode('win1251') ~96 Mb/s ~320 Mb/s + decode('win1251') ~95 Mb/s ~246 Mb/s + +## BOM handling + + * Decoding: BOM is stripped by default, unless overridden by passing `stripBOM: false` in options + (f.ex. `iconv.decode(buf, enc, {stripBOM: false})`). + A callback might also be given as a `stripBOM` parameter - it'll be called if BOM character was actually found. 
+ * If you want to detect UTF-8 BOM when decoding other encodings, use [node-autodetect-decoder-stream](https://github.com/danielgindi/node-autodetect-decoder-stream) module. + * Encoding: No BOM added, unless overridden by `addBOM: true` option. + +## UTF-16 Encodings + +This library supports UTF-16LE, UTF-16BE and UTF-16 encodings. First two are straightforward, but UTF-16 is trying to be +smart about endianness in the following ways: + * Decoding: uses BOM and 'spaces heuristic' to determine input endianness. Default is UTF-16LE, but can be + overridden with `defaultEncoding: 'utf-16be'` option. Strips BOM unless `stripBOM: false`. + * Encoding: uses UTF-16LE and writes BOM by default. Use `addBOM: false` to override. + +## UTF-32 Encodings + +This library supports UTF-32LE, UTF-32BE and UTF-32 encodings. Like the UTF-16 encoding above, UTF-32 defaults to UTF-32LE, but uses BOM and 'spaces heuristics' to determine input endianness. + * The default of UTF-32LE can be overridden with the `defaultEncoding: 'utf-32be'` option. Strips BOM unless `stripBOM: false`. + * Encoding: uses UTF-32LE and writes BOM by default. Use `addBOM: false` to override. (`defaultEncoding: 'utf-32be'` can also be used here to change encoding.) + +## Other notes + +When decoding, be sure to supply a Buffer to decode() method, otherwise [bad things usually happen](https://github.com/ashtuchkin/iconv-lite/wiki/Use-Buffers-when-decoding). +Untranslatable characters are set to � or ?. No transliteration is currently supported. +Node versions 0.10.31 and 0.11.13 are buggy, don't use them (see #65, #77). + +## Testing + +```bash +$ git clone git@github.com:ashtuchkin/iconv-lite.git +$ cd iconv-lite +$ npm install +$ npm test + +$ # To view performance: +$ node test/performance.js + +$ # To view test coverage: +$ npm run coverage +$ open coverage/lcov-report/index.html +``` diff --git a/node_modules/iconv-lite/encodings/dbcs-codec.js b/node_modules/iconv-lite/encodings/dbcs-codec.js new file mode 100644 index 0000000..fa83917 --- /dev/null +++ b/node_modules/iconv-lite/encodings/dbcs-codec.js @@ -0,0 +1,597 @@ +"use strict"; +var Buffer = require("safer-buffer").Buffer; + +// Multibyte codec. In this scheme, a character is represented by 1 or more bytes. +// Our codec supports UTF-16 surrogates, extensions for GB18030 and unicode sequences. +// To save memory and loading time, we read table files only when requested. + +exports._dbcs = DBCSCodec; + +var UNASSIGNED = -1, + GB18030_CODE = -2, + SEQ_START = -10, + NODE_START = -1000, + UNASSIGNED_NODE = new Array(0x100), + DEF_CHAR = -1; + +for (var i = 0; i < 0x100; i++) + UNASSIGNED_NODE[i] = UNASSIGNED; + + +// Class DBCSCodec reads and initializes mapping tables. +function DBCSCodec(codecOptions, iconv) { + this.encodingName = codecOptions.encodingName; + if (!codecOptions) + throw new Error("DBCS codec is called without the data.") + if (!codecOptions.table) + throw new Error("Encoding '" + this.encodingName + "' has no data."); + + // Load tables. + var mappingTable = codecOptions.table(); + + + // Decode tables: MBCS -> Unicode. + + // decodeTables is a trie, encoded as an array of arrays of integers. Internal arrays are trie nodes and all have len = 256. + // Trie root is decodeTables[0]. + // Values: >= 0 -> unicode character code. can be > 0xFFFF + // == UNASSIGNED -> unknown/unassigned sequence. + // == GB18030_CODE -> this is the end of a GB18030 4-byte sequence. + // <= NODE_START -> index of the next node in our trie to process next byte. 
+ // <= SEQ_START -> index of the start of a character code sequence, in decodeTableSeq. + this.decodeTables = []; + this.decodeTables[0] = UNASSIGNED_NODE.slice(0); // Create root node. + + // Sometimes a MBCS char corresponds to a sequence of unicode chars. We store them as arrays of integers here. + this.decodeTableSeq = []; + + // Actual mapping tables consist of chunks. Use them to fill up decode tables. + for (var i = 0; i < mappingTable.length; i++) + this._addDecodeChunk(mappingTable[i]); + + // Load & create GB18030 tables when needed. + if (typeof codecOptions.gb18030 === 'function') { + this.gb18030 = codecOptions.gb18030(); // Load GB18030 ranges. + + // Add GB18030 common decode nodes. + var commonThirdByteNodeIdx = this.decodeTables.length; + this.decodeTables.push(UNASSIGNED_NODE.slice(0)); + + var commonFourthByteNodeIdx = this.decodeTables.length; + this.decodeTables.push(UNASSIGNED_NODE.slice(0)); + + // Fill out the tree + var firstByteNode = this.decodeTables[0]; + for (var i = 0x81; i <= 0xFE; i++) { + var secondByteNode = this.decodeTables[NODE_START - firstByteNode[i]]; + for (var j = 0x30; j <= 0x39; j++) { + if (secondByteNode[j] === UNASSIGNED) { + secondByteNode[j] = NODE_START - commonThirdByteNodeIdx; + } else if (secondByteNode[j] > NODE_START) { + throw new Error("gb18030 decode tables conflict at byte 2"); + } + + var thirdByteNode = this.decodeTables[NODE_START - secondByteNode[j]]; + for (var k = 0x81; k <= 0xFE; k++) { + if (thirdByteNode[k] === UNASSIGNED) { + thirdByteNode[k] = NODE_START - commonFourthByteNodeIdx; + } else if (thirdByteNode[k] === NODE_START - commonFourthByteNodeIdx) { + continue; + } else if (thirdByteNode[k] > NODE_START) { + throw new Error("gb18030 decode tables conflict at byte 3"); + } + + var fourthByteNode = this.decodeTables[NODE_START - thirdByteNode[k]]; + for (var l = 0x30; l <= 0x39; l++) { + if (fourthByteNode[l] === UNASSIGNED) + fourthByteNode[l] = GB18030_CODE; + } + } + } + } + } + + this.defaultCharUnicode = iconv.defaultCharUnicode; + + + // Encode tables: Unicode -> DBCS. + + // `encodeTable` is array mapping from unicode char to encoded char. All its values are integers for performance. + // Because it can be sparse, it is represented as array of buckets by 256 chars each. Bucket can be null. + // Values: >= 0 -> it is a normal char. Write the value (if <=256 then 1 byte, if <=65536 then 2 bytes, etc.). + // == UNASSIGNED -> no conversion found. Output a default char. + // <= SEQ_START -> it's an index in encodeTableSeq, see below. The character starts a sequence. + this.encodeTable = []; + + // `encodeTableSeq` is used when a sequence of unicode characters is encoded as a single code. We use a tree of + // objects where keys correspond to characters in sequence and leafs are the encoded dbcs values. A special DEF_CHAR key + // means end of sequence (needed when one sequence is a strict subsequence of another). + // Objects are kept separately from encodeTable to increase performance. + this.encodeTableSeq = []; + + // Some chars can be decoded, but need not be encoded. + var skipEncodeChars = {}; + if (codecOptions.encodeSkipVals) + for (var i = 0; i < codecOptions.encodeSkipVals.length; i++) { + var val = codecOptions.encodeSkipVals[i]; + if (typeof val === 'number') + skipEncodeChars[val] = true; + else + for (var j = val.from; j <= val.to; j++) + skipEncodeChars[j] = true; + } + + // Use decode trie to recursively fill out encode tables. 
+ this._fillEncodeTable(0, 0, skipEncodeChars); + + // Add more encoding pairs when needed. + if (codecOptions.encodeAdd) { + for (var uChar in codecOptions.encodeAdd) + if (Object.prototype.hasOwnProperty.call(codecOptions.encodeAdd, uChar)) + this._setEncodeChar(uChar.charCodeAt(0), codecOptions.encodeAdd[uChar]); + } + + this.defCharSB = this.encodeTable[0][iconv.defaultCharSingleByte.charCodeAt(0)]; + if (this.defCharSB === UNASSIGNED) this.defCharSB = this.encodeTable[0]['?']; + if (this.defCharSB === UNASSIGNED) this.defCharSB = "?".charCodeAt(0); +} + +DBCSCodec.prototype.encoder = DBCSEncoder; +DBCSCodec.prototype.decoder = DBCSDecoder; + +// Decoder helpers +DBCSCodec.prototype._getDecodeTrieNode = function(addr) { + var bytes = []; + for (; addr > 0; addr >>>= 8) + bytes.push(addr & 0xFF); + if (bytes.length == 0) + bytes.push(0); + + var node = this.decodeTables[0]; + for (var i = bytes.length-1; i > 0; i--) { // Traverse nodes deeper into the trie. + var val = node[bytes[i]]; + + if (val == UNASSIGNED) { // Create new node. + node[bytes[i]] = NODE_START - this.decodeTables.length; + this.decodeTables.push(node = UNASSIGNED_NODE.slice(0)); + } + else if (val <= NODE_START) { // Existing node. + node = this.decodeTables[NODE_START - val]; + } + else + throw new Error("Overwrite byte in " + this.encodingName + ", addr: " + addr.toString(16)); + } + return node; +} + + +DBCSCodec.prototype._addDecodeChunk = function(chunk) { + // First element of chunk is the hex mbcs code where we start. + var curAddr = parseInt(chunk[0], 16); + + // Choose the decoding node where we'll write our chars. + var writeTable = this._getDecodeTrieNode(curAddr); + curAddr = curAddr & 0xFF; + + // Write all other elements of the chunk to the table. + for (var k = 1; k < chunk.length; k++) { + var part = chunk[k]; + if (typeof part === "string") { // String, write as-is. + for (var l = 0; l < part.length;) { + var code = part.charCodeAt(l++); + if (0xD800 <= code && code < 0xDC00) { // Decode surrogate + var codeTrail = part.charCodeAt(l++); + if (0xDC00 <= codeTrail && codeTrail < 0xE000) + writeTable[curAddr++] = 0x10000 + (code - 0xD800) * 0x400 + (codeTrail - 0xDC00); + else + throw new Error("Incorrect surrogate pair in " + this.encodingName + " at chunk " + chunk[0]); + } + else if (0x0FF0 < code && code <= 0x0FFF) { // Character sequence (our own encoding used) + var len = 0xFFF - code + 2; + var seq = []; + for (var m = 0; m < len; m++) + seq.push(part.charCodeAt(l++)); // Simple variation: don't support surrogates or subsequences in seq. + + writeTable[curAddr++] = SEQ_START - this.decodeTableSeq.length; + this.decodeTableSeq.push(seq); + } + else + writeTable[curAddr++] = code; // Basic char + } + } + else if (typeof part === "number") { // Integer, meaning increasing sequence starting with prev character. + var charCode = writeTable[curAddr - 1] + 1; + for (var l = 0; l < part; l++) + writeTable[curAddr++] = charCode++; + } + else + throw new Error("Incorrect type '" + typeof part + "' given in " + this.encodingName + " at chunk " + chunk[0]); + } + if (curAddr > 0xFF) + throw new Error("Incorrect chunk in " + this.encodingName + " at addr " + chunk[0] + ": too long" + curAddr); +} + +// Encoder helpers +DBCSCodec.prototype._getEncodeBucket = function(uCode) { + var high = uCode >> 8; // This could be > 0xFF because of astral characters. + if (this.encodeTable[high] === undefined) + this.encodeTable[high] = UNASSIGNED_NODE.slice(0); // Create bucket on demand. 
+ return this.encodeTable[high]; +} + +DBCSCodec.prototype._setEncodeChar = function(uCode, dbcsCode) { + var bucket = this._getEncodeBucket(uCode); + var low = uCode & 0xFF; + if (bucket[low] <= SEQ_START) + this.encodeTableSeq[SEQ_START-bucket[low]][DEF_CHAR] = dbcsCode; // There's already a sequence, set a single-char subsequence of it. + else if (bucket[low] == UNASSIGNED) + bucket[low] = dbcsCode; +} + +DBCSCodec.prototype._setEncodeSequence = function(seq, dbcsCode) { + + // Get the root of character tree according to first character of the sequence. + var uCode = seq[0]; + var bucket = this._getEncodeBucket(uCode); + var low = uCode & 0xFF; + + var node; + if (bucket[low] <= SEQ_START) { + // There's already a sequence with - use it. + node = this.encodeTableSeq[SEQ_START-bucket[low]]; + } + else { + // There was no sequence object - allocate a new one. + node = {}; + if (bucket[low] !== UNASSIGNED) node[DEF_CHAR] = bucket[low]; // If a char was set before - make it a single-char subsequence. + bucket[low] = SEQ_START - this.encodeTableSeq.length; + this.encodeTableSeq.push(node); + } + + // Traverse the character tree, allocating new nodes as needed. + for (var j = 1; j < seq.length-1; j++) { + var oldVal = node[uCode]; + if (typeof oldVal === 'object') + node = oldVal; + else { + node = node[uCode] = {} + if (oldVal !== undefined) + node[DEF_CHAR] = oldVal + } + } + + // Set the leaf to given dbcsCode. + uCode = seq[seq.length-1]; + node[uCode] = dbcsCode; +} + +DBCSCodec.prototype._fillEncodeTable = function(nodeIdx, prefix, skipEncodeChars) { + var node = this.decodeTables[nodeIdx]; + var hasValues = false; + var subNodeEmpty = {}; + for (var i = 0; i < 0x100; i++) { + var uCode = node[i]; + var mbCode = prefix + i; + if (skipEncodeChars[mbCode]) + continue; + + if (uCode >= 0) { + this._setEncodeChar(uCode, mbCode); + hasValues = true; + } else if (uCode <= NODE_START) { + var subNodeIdx = NODE_START - uCode; + if (!subNodeEmpty[subNodeIdx]) { // Skip empty subtrees (they are too large in gb18030). + var newPrefix = (mbCode << 8) >>> 0; // NOTE: '>>> 0' keeps 32-bit num positive. + if (this._fillEncodeTable(subNodeIdx, newPrefix, skipEncodeChars)) + hasValues = true; + else + subNodeEmpty[subNodeIdx] = true; + } + } else if (uCode <= SEQ_START) { + this._setEncodeSequence(this.decodeTableSeq[SEQ_START - uCode], mbCode); + hasValues = true; + } + } + return hasValues; +} + + + +// == Encoder ================================================================== + +function DBCSEncoder(options, codec) { + // Encoder state + this.leadSurrogate = -1; + this.seqObj = undefined; + + // Static data + this.encodeTable = codec.encodeTable; + this.encodeTableSeq = codec.encodeTableSeq; + this.defaultCharSingleByte = codec.defCharSB; + this.gb18030 = codec.gb18030; +} + +DBCSEncoder.prototype.write = function(str) { + var newBuf = Buffer.alloc(str.length * (this.gb18030 ? 4 : 3)), + leadSurrogate = this.leadSurrogate, + seqObj = this.seqObj, nextChar = -1, + i = 0, j = 0; + + while (true) { + // 0. Get next character. + if (nextChar === -1) { + if (i == str.length) break; + var uCode = str.charCodeAt(i++); + } + else { + var uCode = nextChar; + nextChar = -1; + } + + // 1. Handle surrogates. + if (0xD800 <= uCode && uCode < 0xE000) { // Char is one of surrogates. + if (uCode < 0xDC00) { // We've got lead surrogate. + if (leadSurrogate === -1) { + leadSurrogate = uCode; + continue; + } else { + leadSurrogate = uCode; + // Double lead surrogate found. 
+ uCode = UNASSIGNED; + } + } else { // We've got trail surrogate. + if (leadSurrogate !== -1) { + uCode = 0x10000 + (leadSurrogate - 0xD800) * 0x400 + (uCode - 0xDC00); + leadSurrogate = -1; + } else { + // Incomplete surrogate pair - only trail surrogate found. + uCode = UNASSIGNED; + } + + } + } + else if (leadSurrogate !== -1) { + // Incomplete surrogate pair - only lead surrogate found. + nextChar = uCode; uCode = UNASSIGNED; // Write an error, then current char. + leadSurrogate = -1; + } + + // 2. Convert uCode character. + var dbcsCode = UNASSIGNED; + if (seqObj !== undefined && uCode != UNASSIGNED) { // We are in the middle of the sequence + var resCode = seqObj[uCode]; + if (typeof resCode === 'object') { // Sequence continues. + seqObj = resCode; + continue; + + } else if (typeof resCode == 'number') { // Sequence finished. Write it. + dbcsCode = resCode; + + } else if (resCode == undefined) { // Current character is not part of the sequence. + + // Try default character for this sequence + resCode = seqObj[DEF_CHAR]; + if (resCode !== undefined) { + dbcsCode = resCode; // Found. Write it. + nextChar = uCode; // Current character will be written too in the next iteration. + + } else { + // TODO: What if we have no default? (resCode == undefined) + // Then, we should write first char of the sequence as-is and try the rest recursively. + // Didn't do it for now because no encoding has this situation yet. + // Currently, just skip the sequence and write current char. + } + } + seqObj = undefined; + } + else if (uCode >= 0) { // Regular character + var subtable = this.encodeTable[uCode >> 8]; + if (subtable !== undefined) + dbcsCode = subtable[uCode & 0xFF]; + + if (dbcsCode <= SEQ_START) { // Sequence start + seqObj = this.encodeTableSeq[SEQ_START-dbcsCode]; + continue; + } + + if (dbcsCode == UNASSIGNED && this.gb18030) { + // Use GB18030 algorithm to find character(s) to write. + var idx = findIdx(this.gb18030.uChars, uCode); + if (idx != -1) { + var dbcsCode = this.gb18030.gbChars[idx] + (uCode - this.gb18030.uChars[idx]); + newBuf[j++] = 0x81 + Math.floor(dbcsCode / 12600); dbcsCode = dbcsCode % 12600; + newBuf[j++] = 0x30 + Math.floor(dbcsCode / 1260); dbcsCode = dbcsCode % 1260; + newBuf[j++] = 0x81 + Math.floor(dbcsCode / 10); dbcsCode = dbcsCode % 10; + newBuf[j++] = 0x30 + dbcsCode; + continue; + } + } + } + + // 3. Write dbcsCode character. + if (dbcsCode === UNASSIGNED) + dbcsCode = this.defaultCharSingleByte; + + if (dbcsCode < 0x100) { + newBuf[j++] = dbcsCode; + } + else if (dbcsCode < 0x10000) { + newBuf[j++] = dbcsCode >> 8; // high byte + newBuf[j++] = dbcsCode & 0xFF; // low byte + } + else if (dbcsCode < 0x1000000) { + newBuf[j++] = dbcsCode >> 16; + newBuf[j++] = (dbcsCode >> 8) & 0xFF; + newBuf[j++] = dbcsCode & 0xFF; + } else { + newBuf[j++] = dbcsCode >>> 24; + newBuf[j++] = (dbcsCode >>> 16) & 0xFF; + newBuf[j++] = (dbcsCode >>> 8) & 0xFF; + newBuf[j++] = dbcsCode & 0xFF; + } + } + + this.seqObj = seqObj; + this.leadSurrogate = leadSurrogate; + return newBuf.slice(0, j); +} + +DBCSEncoder.prototype.end = function() { + if (this.leadSurrogate === -1 && this.seqObj === undefined) + return; // All clean. Most often case. + + var newBuf = Buffer.alloc(10), j = 0; + + if (this.seqObj) { // We're in the sequence. + var dbcsCode = this.seqObj[DEF_CHAR]; + if (dbcsCode !== undefined) { // Write beginning of the sequence. 
+ if (dbcsCode < 0x100) { + newBuf[j++] = dbcsCode; + } + else { + newBuf[j++] = dbcsCode >> 8; // high byte + newBuf[j++] = dbcsCode & 0xFF; // low byte + } + } else { + // See todo above. + } + this.seqObj = undefined; + } + + if (this.leadSurrogate !== -1) { + // Incomplete surrogate pair - only lead surrogate found. + newBuf[j++] = this.defaultCharSingleByte; + this.leadSurrogate = -1; + } + + return newBuf.slice(0, j); +} + +// Export for testing +DBCSEncoder.prototype.findIdx = findIdx; + + +// == Decoder ================================================================== + +function DBCSDecoder(options, codec) { + // Decoder state + this.nodeIdx = 0; + this.prevBytes = []; + + // Static data + this.decodeTables = codec.decodeTables; + this.decodeTableSeq = codec.decodeTableSeq; + this.defaultCharUnicode = codec.defaultCharUnicode; + this.gb18030 = codec.gb18030; +} + +DBCSDecoder.prototype.write = function(buf) { + var newBuf = Buffer.alloc(buf.length*2), + nodeIdx = this.nodeIdx, + prevBytes = this.prevBytes, prevOffset = this.prevBytes.length, + seqStart = -this.prevBytes.length, // idx of the start of current parsed sequence. + uCode; + + for (var i = 0, j = 0; i < buf.length; i++) { + var curByte = (i >= 0) ? buf[i] : prevBytes[i + prevOffset]; + + // Lookup in current trie node. + var uCode = this.decodeTables[nodeIdx][curByte]; + + if (uCode >= 0) { + // Normal character, just use it. + } + else if (uCode === UNASSIGNED) { // Unknown char. + // TODO: Callback with seq. + uCode = this.defaultCharUnicode.charCodeAt(0); + i = seqStart; // Skip one byte ('i' will be incremented by the for loop) and try to parse again. + } + else if (uCode === GB18030_CODE) { + if (i >= 3) { + var ptr = (buf[i-3]-0x81)*12600 + (buf[i-2]-0x30)*1260 + (buf[i-1]-0x81)*10 + (curByte-0x30); + } else { + var ptr = (prevBytes[i-3+prevOffset]-0x81)*12600 + + (((i-2 >= 0) ? buf[i-2] : prevBytes[i-2+prevOffset])-0x30)*1260 + + (((i-1 >= 0) ? buf[i-1] : prevBytes[i-1+prevOffset])-0x81)*10 + + (curByte-0x30); + } + var idx = findIdx(this.gb18030.gbChars, ptr); + uCode = this.gb18030.uChars[idx] + ptr - this.gb18030.gbChars[idx]; + } + else if (uCode <= NODE_START) { // Go to next trie node. + nodeIdx = NODE_START - uCode; + continue; + } + else if (uCode <= SEQ_START) { // Output a sequence of chars. + var seq = this.decodeTableSeq[SEQ_START - uCode]; + for (var k = 0; k < seq.length - 1; k++) { + uCode = seq[k]; + newBuf[j++] = uCode & 0xFF; + newBuf[j++] = uCode >> 8; + } + uCode = seq[seq.length-1]; + } + else + throw new Error("iconv-lite internal error: invalid decoding table value " + uCode + " at " + nodeIdx + "/" + curByte); + + // Write the character to buffer, handling higher planes using surrogate pair. + if (uCode >= 0x10000) { + uCode -= 0x10000; + var uCodeLead = 0xD800 | (uCode >> 10); + newBuf[j++] = uCodeLead & 0xFF; + newBuf[j++] = uCodeLead >> 8; + + uCode = 0xDC00 | (uCode & 0x3FF); + } + newBuf[j++] = uCode & 0xFF; + newBuf[j++] = uCode >> 8; + + // Reset trie node. + nodeIdx = 0; seqStart = i+1; + } + + this.nodeIdx = nodeIdx; + this.prevBytes = (seqStart >= 0) + ? Array.prototype.slice.call(buf, seqStart) + : prevBytes.slice(seqStart + prevOffset).concat(Array.prototype.slice.call(buf)); + + return newBuf.slice(0, j).toString('ucs2'); +} + +DBCSDecoder.prototype.end = function() { + var ret = ''; + + // Try to parse all remaining chars. + while (this.prevBytes.length > 0) { + // Skip 1 character in the buffer. 
+ ret += this.defaultCharUnicode; + var bytesArr = this.prevBytes.slice(1); + + // Parse remaining as usual. + this.prevBytes = []; + this.nodeIdx = 0; + if (bytesArr.length > 0) + ret += this.write(bytesArr); + } + + this.prevBytes = []; + this.nodeIdx = 0; + return ret; +} + +// Binary search for GB18030. Returns largest i such that table[i] <= val. +function findIdx(table, val) { + if (table[0] > val) + return -1; + + var l = 0, r = table.length; + while (l < r-1) { // always table[l] <= val < table[r] + var mid = l + ((r-l+1) >> 1); + if (table[mid] <= val) + l = mid; + else + r = mid; + } + return l; +} + diff --git a/node_modules/iconv-lite/encodings/dbcs-data.js b/node_modules/iconv-lite/encodings/dbcs-data.js new file mode 100644 index 0000000..0d17e58 --- /dev/null +++ b/node_modules/iconv-lite/encodings/dbcs-data.js @@ -0,0 +1,188 @@ +"use strict"; + +// Description of supported double byte encodings and aliases. +// Tables are not require()-d until they are needed to speed up library load. +// require()-s are direct to support Browserify. + +module.exports = { + + // == Japanese/ShiftJIS ==================================================== + // All japanese encodings are based on JIS X set of standards: + // JIS X 0201 - Single-byte encoding of ASCII + ¥ + Kana chars at 0xA1-0xDF. + // JIS X 0208 - Main set of 6879 characters, placed in 94x94 plane, to be encoded by 2 bytes. + // Has several variations in 1978, 1983, 1990 and 1997. + // JIS X 0212 - Supplementary plane of 6067 chars in 94x94 plane. 1990. Effectively dead. + // JIS X 0213 - Extension and modern replacement of 0208 and 0212. Total chars: 11233. + // 2 planes, first is superset of 0208, second - revised 0212. + // Introduced in 2000, revised 2004. Some characters are in Unicode Plane 2 (0x2xxxx) + + // Byte encodings are: + // * Shift_JIS: Compatible with 0201, uses not defined chars in top half as lead bytes for double-byte + // encoding of 0208. Lead byte ranges: 0x81-0x9F, 0xE0-0xEF; Trail byte ranges: 0x40-0x7E, 0x80-0x9E, 0x9F-0xFC. + // Windows CP932 is a superset of Shift_JIS. Some companies added more chars, notably KDDI. + // * EUC-JP: Up to 3 bytes per character. Used mostly on *nixes. + // 0x00-0x7F - lower part of 0201 + // 0x8E, 0xA1-0xDF - upper part of 0201 + // (0xA1-0xFE)x2 - 0208 plane (94x94). + // 0x8F, (0xA1-0xFE)x2 - 0212 plane (94x94). + // * JIS X 208: 7-bit, direct encoding of 0208. Byte ranges: 0x21-0x7E (94 values). Uncommon. + // Used as-is in ISO2022 family. + // * ISO2022-JP: Stateful encoding, with escape sequences to switch between ASCII, + // 0201-1976 Roman, 0208-1978, 0208-1983. + // * ISO2022-JP-1: Adds esc seq for 0212-1990. + // * ISO2022-JP-2: Adds esc seq for GB2313-1980, KSX1001-1992, ISO8859-1, ISO8859-7. + // * ISO2022-JP-3: Adds esc seq for 0201-1976 Kana set, 0213-2000 Planes 1, 2. + // * ISO2022-JP-2004: Adds 0213-2004 Plane 1. + // + // After JIS X 0213 appeared, Shift_JIS-2004, EUC-JISX0213 and ISO2022-JP-2004 followed, with just changing the planes. 
+ // + // Overall, it seems that it's a mess :( http://www8.plala.or.jp/tkubota1/unicode-symbols-map2.html + + 'shiftjis': { + type: '_dbcs', + table: function() { return require('./tables/shiftjis.json') }, + encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E}, + encodeSkipVals: [{from: 0xED40, to: 0xF940}], + }, + 'csshiftjis': 'shiftjis', + 'mskanji': 'shiftjis', + 'sjis': 'shiftjis', + 'windows31j': 'shiftjis', + 'ms31j': 'shiftjis', + 'xsjis': 'shiftjis', + 'windows932': 'shiftjis', + 'ms932': 'shiftjis', + '932': 'shiftjis', + 'cp932': 'shiftjis', + + 'eucjp': { + type: '_dbcs', + table: function() { return require('./tables/eucjp.json') }, + encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E}, + }, + + // TODO: KDDI extension to Shift_JIS + // TODO: IBM CCSID 942 = CP932, but F0-F9 custom chars and other char changes. + // TODO: IBM CCSID 943 = Shift_JIS = CP932 with original Shift_JIS lower 128 chars. + + + // == Chinese/GBK ========================================================== + // http://en.wikipedia.org/wiki/GBK + // We mostly implement W3C recommendation: https://www.w3.org/TR/encoding/#gbk-encoder + + // Oldest GB2312 (1981, ~7600 chars) is a subset of CP936 + 'gb2312': 'cp936', + 'gb231280': 'cp936', + 'gb23121980': 'cp936', + 'csgb2312': 'cp936', + 'csiso58gb231280': 'cp936', + 'euccn': 'cp936', + + // Microsoft's CP936 is a subset and approximation of GBK. + 'windows936': 'cp936', + 'ms936': 'cp936', + '936': 'cp936', + 'cp936': { + type: '_dbcs', + table: function() { return require('./tables/cp936.json') }, + }, + + // GBK (~22000 chars) is an extension of CP936 that added user-mapped chars and some other. + 'gbk': { + type: '_dbcs', + table: function() { return require('./tables/cp936.json').concat(require('./tables/gbk-added.json')) }, + }, + 'xgbk': 'gbk', + 'isoir58': 'gbk', + + // GB18030 is an algorithmic extension of GBK. + // Main source: https://www.w3.org/TR/encoding/#gbk-encoder + // http://icu-project.org/docs/papers/gb18030.html + // http://source.icu-project.org/repos/icu/data/trunk/charset/data/xml/gb-18030-2000.xml + // http://www.khngai.com/chinese/charmap/tblgbk.php?page=0 + 'gb18030': { + type: '_dbcs', + table: function() { return require('./tables/cp936.json').concat(require('./tables/gbk-added.json')) }, + gb18030: function() { return require('./tables/gb18030-ranges.json') }, + encodeSkipVals: [0x80], + encodeAdd: {'€': 0xA2E3}, + }, + + 'chinese': 'gb18030', + + + // == Korean =============================================================== + // EUC-KR, KS_C_5601 and KS X 1001 are exactly the same. + 'windows949': 'cp949', + 'ms949': 'cp949', + '949': 'cp949', + 'cp949': { + type: '_dbcs', + table: function() { return require('./tables/cp949.json') }, + }, + + 'cseuckr': 'cp949', + 'csksc56011987': 'cp949', + 'euckr': 'cp949', + 'isoir149': 'cp949', + 'korean': 'cp949', + 'ksc56011987': 'cp949', + 'ksc56011989': 'cp949', + 'ksc5601': 'cp949', + + + // == Big5/Taiwan/Hong Kong ================================================ + // There are lots of tables for Big5 and cp950. Please see the following links for history: + // http://moztw.org/docs/big5/ http://www.haible.de/bruno/charsets/conversion-tables/Big5.html + // Variations, in roughly number of defined chars: + // * Windows CP 950: Microsoft variant of Big5. Canonical: http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT + // * Windows CP 951: Microsoft variant of Big5-HKSCS-2001. Seems to be never public. 
http://me.abelcheung.org/articles/research/what-is-cp951/ + // * Big5-2003 (Taiwan standard) almost superset of cp950. + // * Unicode-at-on (UAO) / Mozilla 1.8. Falling out of use on the Web. Not supported by other browsers. + // * Big5-HKSCS (-2001, -2004, -2008). Hong Kong standard. + // many unicode code points moved from PUA to Supplementary plane (U+2XXXX) over the years. + // Plus, it has 4 combining sequences. + // Seems that Mozilla refused to support it for 10 yrs. https://bugzilla.mozilla.org/show_bug.cgi?id=162431 https://bugzilla.mozilla.org/show_bug.cgi?id=310299 + // because big5-hkscs is the only encoding to include astral characters in non-algorithmic way. + // Implementations are not consistent within browsers; sometimes labeled as just big5. + // MS Internet Explorer switches from big5 to big5-hkscs when a patch applied. + // Great discussion & recap of what's going on https://bugzilla.mozilla.org/show_bug.cgi?id=912470#c31 + // In the encoder, it might make sense to support encoding old PUA mappings to Big5 bytes seq-s. + // Official spec: http://www.ogcio.gov.hk/en/business/tech_promotion/ccli/terms/doc/2003cmp_2008.txt + // http://www.ogcio.gov.hk/tc/business/tech_promotion/ccli/terms/doc/hkscs-2008-big5-iso.txt + // + // Current understanding of how to deal with Big5(-HKSCS) is in the Encoding Standard, http://encoding.spec.whatwg.org/#big5-encoder + // Unicode mapping (http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/OTHER/BIG5.TXT) is said to be wrong. + + 'windows950': 'cp950', + 'ms950': 'cp950', + '950': 'cp950', + 'cp950': { + type: '_dbcs', + table: function() { return require('./tables/cp950.json') }, + }, + + // Big5 has many variations and is an extension of cp950. We use Encoding Standard's as a consensus. + 'big5': 'big5hkscs', + 'big5hkscs': { + type: '_dbcs', + table: function() { return require('./tables/cp950.json').concat(require('./tables/big5-added.json')) }, + encodeSkipVals: [ + // Although Encoding Standard says we should avoid encoding to HKSCS area (See Step 1 of + // https://encoding.spec.whatwg.org/#index-big5-pointer), we still do it to increase compatibility with ICU. + // But if a single unicode point can be encoded both as HKSCS and regular Big5, we prefer the latter. + 0x8e69, 0x8e6f, 0x8e7e, 0x8eab, 0x8eb4, 0x8ecd, 0x8ed0, 0x8f57, 0x8f69, 0x8f6e, 0x8fcb, 0x8ffe, + 0x906d, 0x907a, 0x90c4, 0x90dc, 0x90f1, 0x91bf, 0x92af, 0x92b0, 0x92b1, 0x92b2, 0x92d1, 0x9447, 0x94ca, + 0x95d9, 0x96fc, 0x9975, 0x9b76, 0x9b78, 0x9b7b, 0x9bc6, 0x9bde, 0x9bec, 0x9bf6, 0x9c42, 0x9c53, 0x9c62, + 0x9c68, 0x9c6b, 0x9c77, 0x9cbc, 0x9cbd, 0x9cd0, 0x9d57, 0x9d5a, 0x9dc4, 0x9def, 0x9dfb, 0x9ea9, 0x9eef, + 0x9efd, 0x9f60, 0x9fcb, 0xa077, 0xa0dc, 0xa0df, 0x8fcc, 0x92c8, 0x9644, 0x96ed, + + // Step 2 of https://encoding.spec.whatwg.org/#index-big5-pointer: Use last pointer for U+2550, U+255E, U+2561, U+256A, U+5341, or U+5345 + 0xa2a4, 0xa2a5, 0xa2a7, 0xa2a6, 0xa2cc, 0xa2ce, + ], + }, + + 'cnbig5': 'big5hkscs', + 'csbig5': 'big5hkscs', + 'xxbig5': 'big5hkscs', +}; diff --git a/node_modules/iconv-lite/encodings/index.js b/node_modules/iconv-lite/encodings/index.js new file mode 100644 index 0000000..d95c244 --- /dev/null +++ b/node_modules/iconv-lite/encodings/index.js @@ -0,0 +1,23 @@ +"use strict"; + +// Update this array if you add/rename/remove files in this directory. +// We support Browserify by skipping automatic module discovery and requiring modules directly. 
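+// (For example, a dynamic fs.readdirSync(__dirname) + require() loop cannot +// be statically analyzed by Browserify or webpack, which is why every codec +// module is listed explicitly below.)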
+var modules = [
+    require("./internal"),
+    require("./utf32"),
+    require("./utf16"),
+    require("./utf7"),
+    require("./sbcs-codec"),
+    require("./sbcs-data"),
+    require("./sbcs-data-generated"),
+    require("./dbcs-codec"),
+    require("./dbcs-data"),
+];
+
+// Put all encoding/alias/codec definitions into a single object and export it.
+for (var i = 0; i < modules.length; i++) {
+    var module = modules[i];
+    for (var enc in module)
+        if (Object.prototype.hasOwnProperty.call(module, enc))
+            exports[enc] = module[enc];
+}
diff --git a/node_modules/iconv-lite/encodings/internal.js b/node_modules/iconv-lite/encodings/internal.js
new file mode 100644
index 0000000..dc1074f
--- /dev/null
+++ b/node_modules/iconv-lite/encodings/internal.js
@@ -0,0 +1,198 @@
+"use strict";
+var Buffer = require("safer-buffer").Buffer;
+
+// Export Node.js internal encodings.
+
+module.exports = {
+    // Encodings
+    utf8: { type: "_internal", bomAware: true},
+    cesu8: { type: "_internal", bomAware: true},
+    unicode11utf8: "utf8",
+
+    ucs2: { type: "_internal", bomAware: true},
+    utf16le: "ucs2",
+
+    binary: { type: "_internal" },
+    base64: { type: "_internal" },
+    hex: { type: "_internal" },
+
+    // Codec.
+    _internal: InternalCodec,
+};
+
+//------------------------------------------------------------------------------
+
+function InternalCodec(codecOptions, iconv) {
+    this.enc = codecOptions.encodingName;
+    this.bomAware = codecOptions.bomAware;
+
+    if (this.enc === "base64")
+        this.encoder = InternalEncoderBase64;
+    else if (this.enc === "cesu8") {
+        this.enc = "utf8"; // Use utf8 for decoding.
+        this.encoder = InternalEncoderCesu8;
+
+        // Add decoder for versions of Node not supporting CESU-8
+        if (Buffer.from('eda0bdedb2a9', 'hex').toString() !== '💩') {
+            this.decoder = InternalDecoderCesu8;
+            this.defaultCharUnicode = iconv.defaultCharUnicode;
+        }
+    }
+}
+
+InternalCodec.prototype.encoder = InternalEncoder;
+InternalCodec.prototype.decoder = InternalDecoder;
+
+//------------------------------------------------------------------------------
+
+// We use node.js internal decoder. Its signature is the same as ours.
+var StringDecoder = require('string_decoder').StringDecoder;
+
+if (!StringDecoder.prototype.end) // Node v0.8 doesn't have this method.
+    StringDecoder.prototype.end = function() {};
+
+
+function InternalDecoder(options, codec) {
+    this.decoder = new StringDecoder(codec.enc);
+}
+
+InternalDecoder.prototype.write = function(buf) {
+    if (!Buffer.isBuffer(buf)) {
+        buf = Buffer.from(buf);
+    }
+
+    return this.decoder.write(buf);
+}
+
+InternalDecoder.prototype.end = function() {
+    return this.decoder.end();
+}
+
+
+//------------------------------------------------------------------------------
+// Encoder is mostly trivial
+
+function InternalEncoder(options, codec) {
+    this.enc = codec.enc;
+}
+
+InternalEncoder.prototype.write = function(str) {
+    return Buffer.from(str, this.enc);
+}
+
+InternalEncoder.prototype.end = function() {
+}
+
+
+//------------------------------------------------------------------------------
+// Except base64 encoder, which must keep its state.
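// Illustrative sketch (an editorial note, not part of the package source): base64
// text is only decodable in complete 4-character quads, so the encoder below carries
// an incomplete tail between writes instead of decoding it immediately:
//
//   var enc = new InternalEncoderBase64({}, {});
//   var b1 = enc.write("aGVsb"); // decodes "aGVs" -> <Buffer 68 65 6c>, keeps "b"
//   var b2 = enc.write("G8=");   // "b" + "G8=" = "bG8=" -> <Buffer 6c 6f>
//   var b3 = enc.end();          // flushes the (here empty) remainder
//   Buffer.concat([b1, b2, b3]).toString(); // 'hello'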
+
+function InternalEncoderBase64(options, codec) {
+    this.prevStr = '';
+}
+
+InternalEncoderBase64.prototype.write = function(str) {
+    str = this.prevStr + str;
+    var completeQuads = str.length - (str.length % 4);
+    this.prevStr = str.slice(completeQuads);
+    str = str.slice(0, completeQuads);
+
+    return Buffer.from(str, "base64");
+}
+
+InternalEncoderBase64.prototype.end = function() {
+    return Buffer.from(this.prevStr, "base64");
+}
+
+
+//------------------------------------------------------------------------------
+// CESU-8 encoder is also special.
+
+function InternalEncoderCesu8(options, codec) {
+}
+
+InternalEncoderCesu8.prototype.write = function(str) {
+    var buf = Buffer.alloc(str.length * 3), bufIdx = 0;
+    for (var i = 0; i < str.length; i++) {
+        var charCode = str.charCodeAt(i);
+        // Naive implementation, but it works because CESU-8 is especially easy
+        // to convert from UTF-16 (which all JS strings are encoded in).
+        if (charCode < 0x80)
+            buf[bufIdx++] = charCode;
+        else if (charCode < 0x800) {
+            buf[bufIdx++] = 0xC0 + (charCode >>> 6);
+            buf[bufIdx++] = 0x80 + (charCode & 0x3f);
+        }
+        else { // charCode will always be < 0x10000 in javascript.
+            buf[bufIdx++] = 0xE0 + (charCode >>> 12);
+            buf[bufIdx++] = 0x80 + ((charCode >>> 6) & 0x3f);
+            buf[bufIdx++] = 0x80 + (charCode & 0x3f);
+        }
+    }
+    return buf.slice(0, bufIdx);
+}
+
+InternalEncoderCesu8.prototype.end = function() {
+}
+
+//------------------------------------------------------------------------------
+// CESU-8 decoder is not implemented in Node v4.0+
+
+function InternalDecoderCesu8(options, codec) {
+    this.acc = 0;
+    this.contBytes = 0;
+    this.accBytes = 0;
+    this.defaultCharUnicode = codec.defaultCharUnicode;
+}
+
+InternalDecoderCesu8.prototype.write = function(buf) {
+    var acc = this.acc, contBytes = this.contBytes, accBytes = this.accBytes,
+        res = '';
+    for (var i = 0; i < buf.length; i++) {
+        var curByte = buf[i];
+        if ((curByte & 0xC0) !== 0x80) { // Leading byte
+            if (contBytes > 0) { // Previous code is invalid
+                res += this.defaultCharUnicode;
+                contBytes = 0;
+            }
+
+            if (curByte < 0x80) { // Single-byte code
+                res += String.fromCharCode(curByte);
+            } else if (curByte < 0xE0) { // Two-byte code
+                acc = curByte & 0x1F;
+                contBytes = 1; accBytes = 1;
+            } else if (curByte < 0xF0) { // Three-byte code
+                acc = curByte & 0x0F;
+                contBytes = 2; accBytes = 1;
+            } else { // Four or more are not supported for CESU-8.
+                res += this.defaultCharUnicode;
+            }
+        } else { // Continuation byte
+            if (contBytes > 0) { // We're waiting for it.
+                acc = (acc << 6) | (curByte & 0x3f);
+                contBytes--; accBytes++;
+                if (contBytes === 0) {
+                    // Check for overlong encoding, but support Modified UTF-8 (encoding NULL as C0 80)
+                    if (accBytes === 2 && acc < 0x80 && acc > 0)
+                        res += this.defaultCharUnicode;
+                    else if (accBytes === 3 && acc < 0x800)
+                        res += this.defaultCharUnicode;
+                    else
+                        // Actually add character.
+                        res += String.fromCharCode(acc);
+                }
+            } else { // Unexpected continuation byte
+                res += this.defaultCharUnicode;
+            }
+        }
+    }
+    this.acc = acc; this.contBytes = contBytes; this.accBytes = accBytes;
+    return res;
+}
+
+InternalDecoderCesu8.prototype.end = function() {
+    var res = ''; // Must be a string: appending the replacement char to a numeric 0 would produce "0�".
+    if (this.contBytes > 0)
+        res += this.defaultCharUnicode;
+    return res;
+}
diff --git a/node_modules/iconv-lite/encodings/sbcs-codec.js b/node_modules/iconv-lite/encodings/sbcs-codec.js
new file mode 100644
index 0000000..abac5ff
--- /dev/null
+++ b/node_modules/iconv-lite/encodings/sbcs-codec.js
@@ -0,0 +1,72 @@
+"use strict";
+var Buffer = require("safer-buffer").Buffer;
+
+// Single-byte codec. Needs a 'chars' string parameter that contains 256 or 128 chars that
+// correspond to encoded bytes (if 128 - then lower half is ASCII).
+
+exports._sbcs = SBCSCodec;
+function SBCSCodec(codecOptions, iconv) {
+    if (!codecOptions)
+        throw new Error("SBCS codec is called without the data.");
+
+    // Prepare char buffer for decoding.
+    if (!codecOptions.chars || (codecOptions.chars.length !== 128 && codecOptions.chars.length !== 256))
+        throw new Error("Encoding '"+codecOptions.type+"' has incorrect 'chars' (must be of len 128 or 256)");
+
+    if (codecOptions.chars.length === 128) {
+        var asciiString = "";
+        for (var i = 0; i < 128; i++)
+            asciiString += String.fromCharCode(i);
+        codecOptions.chars = asciiString + codecOptions.chars;
+    }
+
+    this.decodeBuf = Buffer.from(codecOptions.chars, 'ucs2');
+
+    // Encoding buffer.
+    var encodeBuf = Buffer.alloc(65536, iconv.defaultCharSingleByte.charCodeAt(0));
+
+    for (var i = 0; i < codecOptions.chars.length; i++)
+        encodeBuf[codecOptions.chars.charCodeAt(i)] = i;
+
+    this.encodeBuf = encodeBuf;
+}
+
+SBCSCodec.prototype.encoder = SBCSEncoder;
+SBCSCodec.prototype.decoder = SBCSDecoder;
+
+
+function SBCSEncoder(options, codec) {
+    this.encodeBuf = codec.encodeBuf;
+}
+
+SBCSEncoder.prototype.write = function(str) {
+    var buf = Buffer.alloc(str.length);
+    for (var i = 0; i < str.length; i++)
+        buf[i] = this.encodeBuf[str.charCodeAt(i)];
+
+    return buf;
+}
+
+SBCSEncoder.prototype.end = function() {
+}
+
+
+function SBCSDecoder(options, codec) {
+    this.decodeBuf = codec.decodeBuf;
+}
+
+SBCSDecoder.prototype.write = function(buf) {
+    // Strings are immutable in JS -> we use ucs2 buffer to speed up computations.
+    var decodeBuf = this.decodeBuf;
+    var newBuf = Buffer.alloc(buf.length*2);
+    var idx1 = 0, idx2 = 0;
+    for (var i = 0; i < buf.length; i++) {
+        idx1 = buf[i]*2; idx2 = i*2;
+        newBuf[idx2] = decodeBuf[idx1];
+        newBuf[idx2+1] = decodeBuf[idx1+1];
+    }
+    return newBuf.toString('ucs2');
+}
+
+SBCSDecoder.prototype.end = function() {
+}
diff --git a/node_modules/iconv-lite/encodings/sbcs-data-generated.js b/node_modules/iconv-lite/encodings/sbcs-data-generated.js
new file mode 100644
index 0000000..9b48236
--- /dev/null
+++ b/node_modules/iconv-lite/encodings/sbcs-data-generated.js
@@ -0,0 +1,451 @@
+"use strict";
+
+// Generated data for sbcs codec. Don't edit manually. Regenerate using generation/gen-sbcs.js script.
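// Illustrative note (an editorial sketch, not part of the generated file): sbcs-codec.js
// reads each "chars" string below as the Unicode character for successive byte values;
// a 128-char string covers bytes 0x80-0xFF (the lower half stays ASCII) and '�' marks
// unmapped bytes. Through the public API, this data behaves as, for example:
//
//   var iconv = require('iconv-lite');
//   iconv.decode(Buffer.from([0xc0]), 'windows1251'); // 'А' (U+0410, byte 0xC0 is the 64th char of the 128-char string)
//   iconv.encode('А', 'windows1251');                 // <Buffer c0>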
+module.exports = { + "437": "cp437", + "737": "cp737", + "775": "cp775", + "850": "cp850", + "852": "cp852", + "855": "cp855", + "856": "cp856", + "857": "cp857", + "858": "cp858", + "860": "cp860", + "861": "cp861", + "862": "cp862", + "863": "cp863", + "864": "cp864", + "865": "cp865", + "866": "cp866", + "869": "cp869", + "874": "windows874", + "922": "cp922", + "1046": "cp1046", + "1124": "cp1124", + "1125": "cp1125", + "1129": "cp1129", + "1133": "cp1133", + "1161": "cp1161", + "1162": "cp1162", + "1163": "cp1163", + "1250": "windows1250", + "1251": "windows1251", + "1252": "windows1252", + "1253": "windows1253", + "1254": "windows1254", + "1255": "windows1255", + "1256": "windows1256", + "1257": "windows1257", + "1258": "windows1258", + "28591": "iso88591", + "28592": "iso88592", + "28593": "iso88593", + "28594": "iso88594", + "28595": "iso88595", + "28596": "iso88596", + "28597": "iso88597", + "28598": "iso88598", + "28599": "iso88599", + "28600": "iso885910", + "28601": "iso885911", + "28603": "iso885913", + "28604": "iso885914", + "28605": "iso885915", + "28606": "iso885916", + "windows874": { + "type": "_sbcs", + "chars": "€����…�����������‘’“”•–—�������� กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + }, + "win874": "windows874", + "cp874": "windows874", + "windows1250": { + "type": "_sbcs", + "chars": "€�‚�„…†‡�‰Š‹ŚŤŽŹ�‘’“”•–—�™š›śťžź ˇ˘Ł¤Ą¦§¨©Ş«¬­®Ż°±˛ł´µ¶·¸ąş»Ľ˝ľżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙" + }, + "win1250": "windows1250", + "cp1250": "windows1250", + "windows1251": { + "type": "_sbcs", + "chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊЌЋЏђ‘’“”•–—�™љ›њќћџ ЎўЈ¤Ґ¦§Ё©Є«¬­®Ї°±Ііґµ¶·ё№є»јЅѕїАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя" + }, + "win1251": "windows1251", + "cp1251": "windows1251", + "windows1252": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰Š‹Œ�Ž��‘’“”•–—˜™š›œ�žŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "win1252": "windows1252", + "cp1252": "windows1252", + "windows1253": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡�‰�‹�����‘’“”•–—�™�›���� ΅Ά£¤¥¦§¨©�«¬­®―°±²³΄µ¶·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�" + }, + "win1253": "windows1253", + "cp1253": "windows1253", + "windows1254": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰Š‹Œ����‘’“”•–—˜™š›œ��Ÿ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖרÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ" + }, + "win1254": "windows1254", + "cp1254": "windows1254", + "windows1255": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰�‹�����‘’“”•–—˜™�›���� ¡¢£₪¥¦§¨©×«¬­®¯°±²³´µ¶·¸¹÷»¼½¾¿ְֱֲֳִֵֶַָֹֺֻּֽ־ֿ׀ׁׂ׃װױײ׳״�������אבגדהוזחטיךכלםמןנסעףפץצקרשת��‎‏�" + }, + "win1255": "windows1255", + "cp1255": "windows1255", + "windows1256": { + "type": "_sbcs", + "chars": "€پ‚ƒ„…†‡ˆ‰ٹ‹Œچژڈگ‘’“”•–—ک™ڑ›œ‌‍ں ،¢£¤¥¦§¨©ھ«¬­®¯°±²³´µ¶·¸¹؛»¼½¾؟ہءآأؤإئابةتثجحخدذرزسشصض×طظعغـفقكàلâمنهوçèéêëىيîïًٌٍَôُِ÷ّùْûü‎‏ے" + }, + "win1256": "windows1256", + "cp1256": "windows1256", + "windows1257": { + "type": "_sbcs", + "chars": "€�‚�„…†‡�‰�‹�¨ˇ¸�‘’“”•–—�™�›�¯˛� �¢£¤�¦§Ø©Ŗ«¬­®Æ°±²³´µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž˙" + }, + "win1257": "windows1257", + "cp1257": "windows1257", + "windows1258": { + "type": "_sbcs", + "chars": "€�‚ƒ„…†‡ˆ‰�‹Œ����‘’“”•–—˜™�›œ��Ÿ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖרÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ" + }, + "win1258": "windows1258", + "cp1258": "windows1258", + "iso88591": { + "type": "_sbcs", + "chars": 
"€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "cp28591": "iso88591", + "iso88592": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ Ą˘Ł¤ĽŚ§¨ŠŞŤŹ­ŽŻ°ą˛ł´ľśˇ¸šşťź˝žżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙" + }, + "cp28592": "iso88592", + "iso88593": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ Ħ˘£¤�Ĥ§¨İŞĞĴ­�ݰħ²³´µĥ·¸ışğĵ½�żÀÁÂ�ÄĊĈÇÈÉÊËÌÍÎÏ�ÑÒÓÔĠÖ×ĜÙÚÛÜŬŜßàáâ�äċĉçèéêëìíîï�ñòóôġö÷ĝùúûüŭŝ˙" + }, + "cp28593": "iso88593", + "iso88594": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ĄĸŖ¤Ĩϧ¨ŠĒĢŦ­Ž¯°ą˛ŗ´ĩšēģŧŊžŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎĪĐŅŌĶÔÕÖרŲÚÛÜŨŪßāáâãäåæįčéęëėíîīđņōķôõö÷øųúûüũū˙" + }, + "cp28594": "iso88594", + "iso88595": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ЁЂЃЄЅІЇЈЉЊЋЌ­ЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђѓєѕіїјљњћќ§ўџ" + }, + "cp28595": "iso88595", + "iso88596": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ���¤�������،­�������������؛���؟�ءآأؤإئابةتثجحخدذرزسشصضطظعغ�����ـفقكلمنهوىيًٌٍَُِّْ�������������" + }, + "cp28596": "iso88596", + "iso88597": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ‘’£€₯¦§¨©ͺ«¬­�―°±²³΄΅Ά·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�" + }, + "cp28597": "iso88597", + "iso88598": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ �¢£¤¥¦§¨©×«¬­®¯°±²³´µ¶·¸¹÷»¼½¾��������������������������������‗אבגדהוזחטיךכלםמןנסעףפץצקרשת��‎‏�" + }, + "cp28598": "iso88598", + "iso88599": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖרÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ" + }, + "cp28599": "iso88599", + "iso885910": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ĄĒĢĪĨͧĻĐŠŦŽ­ŪŊ°ąēģīĩķ·ļđšŧž―ūŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎÏÐŅŌÓÔÕÖŨØŲÚÛÜÝÞßāáâãäåæįčéęëėíîïðņōóôõöũøųúûüýþĸ" + }, + "cp28600": "iso885910", + "iso885911": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + }, + "cp28601": "iso885911", + "iso885913": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ”¢£¤„¦§Ø©Ŗ«¬­®Æ°±²³“µ¶·ø¹ŗ»¼½¾æĄĮĀĆÄÅĘĒČÉŹĖĢĶĪĻŠŃŅÓŌÕÖ×ŲŁŚŪÜŻŽßąįāćäåęēčéźėģķīļšńņóōõö÷ųłśūüżž’" + }, + "cp28603": "iso885913", + "iso885914": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ Ḃḃ£ĊċḊ§Ẁ©ẂḋỲ­®ŸḞḟĠġṀṁ¶ṖẁṗẃṠỳẄẅṡÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŴÑÒÓÔÕÖṪØÙÚÛÜÝŶßàáâãäåæçèéêëìíîïŵñòóôõöṫøùúûüýŷÿ" + }, + "cp28604": "iso885914", + "iso885915": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£€¥Š§š©ª«¬­®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖרÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "cp28605": "iso885915", + "iso885916": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ĄąŁ€„Чš©Ș«Ź­źŻ°±ČłŽ”¶·žčș»ŒœŸżÀÁÂĂÄĆÆÇÈÉÊËÌÍÎÏĐŃÒÓÔŐÖŚŰÙÚÛÜĘȚßàáâăäćæçèéêëìíîïđńòóôőöśűùúûüęțÿ" + }, + "cp28606": "iso885916", + "cp437": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm437": "cp437", + "csibm437": "cp437", + "cp737": { + "type": "_sbcs", + "chars": "ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩαβγδεζηθικλμνξοπρσςτυφχψ░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀ωάέήϊίόύϋώΆΈΉΊΌΎΏ±≥≤ΪΫ÷≈°∙·√ⁿ²■ " + }, + "ibm737": "cp737", + "csibm737": "cp737", + "cp775": { + "type": "_sbcs", + "chars": 
"ĆüéāäģåćłēŖŗīŹÄÅÉæÆōöĢ¢ŚśÖÜø£Ø×¤ĀĪóŻżź”¦©®¬½¼Ł«»░▒▓│┤ĄČĘĖ╣║╗╝ĮŠ┐└┴┬├─┼ŲŪ╚╔╩╦╠═╬Žąčęėįšųūž┘┌█▄▌▐▀ÓßŌŃõÕµńĶķĻļņĒŅ’­±“¾¶§÷„°∙·¹³²■ " + }, + "ibm775": "cp775", + "csibm775": "cp775", + "cp850": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø×ƒáíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈıÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´­±‗¾¶§÷¸°¨·¹³²■ " + }, + "ibm850": "cp850", + "csibm850": "cp850", + "cp852": { + "type": "_sbcs", + "chars": "ÇüéâäůćçłëŐőîŹÄĆÉĹĺôöĽľŚśÖÜŤťŁ×čáíóúĄąŽžĘ꬟Ⱥ«»░▒▓│┤ÁÂĚŞ╣║╗╝Żż┐└┴┬├─┼Ăă╚╔╩╦╠═╬¤đĐĎËďŇÍÎě┘┌█▄ŢŮ▀ÓßÔŃńňŠšŔÚŕŰýÝţ´­˝˛ˇ˘§÷¸°¨˙űŘř■ " + }, + "ibm852": "cp852", + "csibm852": "cp852", + "cp855": { + "type": "_sbcs", + "chars": "ђЂѓЃёЁєЄѕЅіІїЇјЈљЉњЊћЋќЌўЎџЏюЮъЪаАбБцЦдДеЕфФгГ«»░▒▓│┤хХиИ╣║╗╝йЙ┐└┴┬├─┼кК╚╔╩╦╠═╬¤лЛмМнНоОп┘┌█▄Пя▀ЯрРсСтТуУжЖвВьЬ№­ыЫзЗшШэЭщЩчЧ§■ " + }, + "ibm855": "cp855", + "csibm855": "cp855", + "cp856": { + "type": "_sbcs", + "chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת�£�×����������®¬½¼�«»░▒▓│┤���©╣║╗╝¢¥┐└┴┬├─┼��╚╔╩╦╠═╬¤���������┘┌█▄¦�▀������µ�������¯´­±‗¾¶§÷¸°¨·¹³²■ " + }, + "ibm856": "cp856", + "csibm856": "cp856", + "cp857": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîıÄÅÉæÆôöòûùİÖÜø£ØŞşáíóúñÑĞ𿮬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ºªÊËÈ�ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµ�×ÚÛÙìÿ¯´­±�¾¶§÷¸°¨·¹³²■ " + }, + "ibm857": "cp857", + "csibm857": "cp857", + "cp858": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø×ƒáíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈ€ÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´­±‗¾¶§÷¸°¨·¹³²■ " + }, + "ibm858": "cp858", + "csibm858": "cp858", + "cp860": { + "type": "_sbcs", + "chars": "ÇüéâãàÁçêÊèÍÔìÃÂÉÀÈôõòÚùÌÕÜ¢£Ù₧ÓáíóúñѪº¿Ò¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm860": "cp860", + "csibm860": "cp860", + "cp861": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèÐðÞÄÅÉæÆôöþûÝýÖÜø£Ø₧ƒáíóúÁÍÓÚ¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm861": "cp861", + "csibm861": "cp861", + "cp862": { + "type": "_sbcs", + "chars": "אבגדהוזחטיךכלםמןנסעףפץצקרשת¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm862": "cp862", + "csibm862": "cp862", + "cp863": { + "type": "_sbcs", + "chars": "ÇüéâÂà¶çêëèïî‗À§ÉÈÊôËÏûù¤ÔÜ¢£ÙÛƒ¦´óú¨¸³¯Î⌐¬½¼¾«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm863": "cp863", + "csibm863": "cp863", + "cp864": { + "type": "_sbcs", + "chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$٪&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~°·∙√▒─│┼┤┬├┴┐┌└┘β∞φ±½¼≈«»ﻷﻸ��ﻻﻼ� ­ﺂ£¤ﺄ��ﺎﺏﺕﺙ،ﺝﺡﺥ٠١٢٣٤٥٦٧٨٩ﻑ؛ﺱﺵﺹ؟¢ﺀﺁﺃﺅﻊﺋﺍﺑﺓﺗﺛﺟﺣﺧﺩﺫﺭﺯﺳﺷﺻﺿﻁﻅﻋﻏ¦¬÷×ﻉـﻓﻗﻛﻟﻣﻧﻫﻭﻯﻳﺽﻌﻎﻍﻡﹽّﻥﻩﻬﻰﻲﻐﻕﻵﻶﻝﻙﻱ■�" + }, + "ibm864": "cp864", + "csibm864": "cp864", + "cp865": { + "type": "_sbcs", + "chars": "ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø₧ƒáíóúñѪº¿⌐¬½¼¡«¤░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + "ibm865": "cp865", + "csibm865": "cp865", + "cp866": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№¤■ " + }, + "ibm866": "cp866", + "csibm866": "cp866", + "cp869": { + "type": "_sbcs", + "chars": "������Ά�·¬¦‘’Έ―ΉΊΪΌ��ΎΫ©Ώ²³ά£έήίϊΐόύΑΒΓΔΕΖΗ½ΘΙ«»░▒▓│┤ΚΛΜΝ╣║╗╝ΞΟ┐└┴┬├─┼ΠΡ╚╔╩╦╠═╬ΣΤΥΦΧΨΩαβγ┘┌█▄δε▀ζηθικλμνξοπρσςτ΄­±υφχ§ψ΅°¨ωϋΰώ■ " + }, + "ibm869": "cp869", + "csibm869": 
"cp869", + "cp922": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®‾°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏŠÑÒÓÔÕÖרÙÚÛÜÝŽßàáâãäåæçèéêëìíîïšñòóôõö÷øùúûüýžÿ" + }, + "ibm922": "cp922", + "csibm922": "cp922", + "cp1046": { + "type": "_sbcs", + "chars": "ﺈ×÷ﹱˆ■│─┐┌└┘ﹹﹻﹽﹿﹷﺊﻰﻳﻲﻎﻏﻐﻶﻸﻺﻼ ¤ﺋﺑﺗﺛﺟﺣ،­ﺧﺳ٠١٢٣٤٥٦٧٨٩ﺷ؛ﺻﺿﻊ؟ﻋءآأؤإئابةتثجحخدذرزسشصضطﻇعغﻌﺂﺄﺎﻓـفقكلمنهوىيًٌٍَُِّْﻗﻛﻟﻵﻷﻹﻻﻣﻧﻬﻩ�" + }, + "ibm1046": "cp1046", + "csibm1046": "cp1046", + "cp1124": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ЁЂҐЄЅІЇЈЉЊЋЌ­ЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђґєѕіїјљњћќ§ўџ" + }, + "ibm1124": "cp1124", + "csibm1124": "cp1124", + "cp1125": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёҐґЄєІіЇї·√№¤■ " + }, + "ibm1125": "cp1125", + "csibm1125": "cp1125", + "cp1129": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§œ©ª«¬­®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖרÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ" + }, + "ibm1129": "cp1129", + "csibm1129": "cp1129", + "cp1133": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ກຂຄງຈສຊຍດຕຖທນບປຜຝພຟມຢຣລວຫອຮ���ຯະາຳິີຶືຸູຼັົຽ���ເແໂໃໄ່້໊໋໌ໍໆ�ໜໝ₭����������������໐໑໒໓໔໕໖໗໘໙��¢¬¦�" + }, + "ibm1133": "cp1133", + "csibm1133": "cp1133", + "cp1161": { + "type": "_sbcs", + "chars": "��������������������������������่กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู้๊๋€฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛¢¬¦ " + }, + "ibm1161": "cp1161", + "csibm1161": "cp1161", + "cp1162": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + }, + "ibm1162": "cp1162", + "csibm1162": "cp1162", + "cp1163": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£€¥¦§œ©ª«¬­®¯°±²³Ÿµ¶·Œ¹º»¼½¾¿ÀÁÂĂÄÅÆÇÈÉÊË̀ÍÎÏĐÑ̉ÓÔƠÖרÙÚÛÜỮßàáâăäåæçèéêë́íîïđṇ̃óôơö÷øùúûüư₫ÿ" + }, + "ibm1163": "cp1163", + "csibm1163": "cp1163", + "maccroatian": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®Š™´¨≠ŽØ∞±≤≥∆µ∂∑∏š∫ªºΩžø¿¡¬√ƒ≈ƫȅ ÀÃÕŒœĐ—“”‘’÷◊�©⁄¤‹›Æ»–·‚„‰ÂćÁčÈÍÎÏÌÓÔđÒÚÛÙıˆ˜¯πË˚¸Êæˇ" + }, + "maccyrillic": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°¢£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµ∂ЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤" + }, + "macgreek": { + "type": "_sbcs", + "chars": "Ĺ²É³ÖÜ΅àâä΄¨çéèê룙î‰ôö¦­ùûü†ΓΔΘΛΞΠß®©ΣΪ§≠°·Α±≤≥¥ΒΕΖΗΙΚΜΦΫΨΩάΝ¬ΟΡ≈Τ«»… ΥΧΆΈœ–―“”‘’÷ΉΊΌΎέήίόΏύαβψδεφγηιξκλμνοπώρστθωςχυζϊϋΐΰ�" + }, + "maciceland": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûüݰ¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤ÐðÞþý·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macroman": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macromania": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ĂŞ∞±≤≥¥µ∂∑∏π∫ªºΩăş¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›Ţţ‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macthai": { + "type": "_sbcs", + "chars": "«»…“”�•‘’� กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู​–—฿เแโใไๅๆ็่้๊๋์ํ™๏๐๑๒๓๔๕๖๗๘๙®©����" + }, + "macturkish": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸĞğİıŞş‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙ�ˆ˜¯˘˙˚¸˝˛ˇ" + }, + "macukraine": { + "type": "_sbcs", + 
"chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ†°Ґ£§•¶І®©™Ђђ≠Ѓѓ∞±≤≥іµґЈЄєЇїЉљЊњјЅ¬√ƒ≈∆«»… ЋћЌќѕ–—“”‘’÷„ЎўЏџ№Ёёяабвгдежзийклмнопрстуфхцчшщъыьэю¤" + }, + "koi8r": { + "type": "_sbcs", + "chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ё╓╔╕╖╗╘╙╚╛╜╝╞╟╠╡Ё╢╣╤╥╦╧╨╩╪╫╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "koi8u": { + "type": "_sbcs", + "chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґ╝╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪Ґ╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "koi8ru": { + "type": "_sbcs", + "chars": "─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥ ⌡°²·÷═║╒ёє╔ії╗╘╙╚╛ґў╞╟╠╡ЁЄ╣ІЇ╦╧╨╩╪ҐЎ©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "koi8t": { + "type": "_sbcs", + "chars": "қғ‚Ғ„…†‡�‰ҳ‹ҲҷҶ�Қ‘’“”•–—�™�›�����ӯӮё¤ӣ¦§���«¬­®�°±²Ё�Ӣ¶·�№�»���©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ" + }, + "armscii8": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ �և։)(»«—.՝,-֊…՜՛՞ԱաԲբԳգԴդԵեԶզԷէԸըԹթԺժԻիԼլԽխԾծԿկՀհՁձՂղՃճՄմՅյՆնՇշՈոՉչՊպՋջՌռՍսՎվՏտՐրՑցՒւՓփՔքՕօՖֆ՚�" + }, + "rk1048": { + "type": "_sbcs", + "chars": "ЂЃ‚ѓ„…†‡€‰Љ‹ЊҚҺЏђ‘’“”•–—�™љ›њқһџ ҰұӘ¤Ө¦§Ё©Ғ«¬­®Ү°±Ііөµ¶·ё№ғ»әҢңүАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя" + }, + "tcvn": { + "type": "_sbcs", + "chars": "\u0000ÚỤ\u0003ỪỬỮ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010ỨỰỲỶỸÝỴ\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ÀẢÃÁẠẶẬÈẺẼÉẸỆÌỈĨÍỊÒỎÕÓỌỘỜỞỠỚỢÙỦŨ ĂÂÊÔƠƯĐăâêôơưđẶ̀̀̉̃́àảãáạẲằẳẵắẴẮẦẨẪẤỀặầẩẫấậèỂẻẽéẹềểễếệìỉỄẾỒĩíịòỔỏõóọồổỗốộờởỡớợùỖủũúụừửữứựỳỷỹýỵỐ" + }, + "georgianacademy": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზთიკლმნოპჟრსტუფქღყშჩცძწჭხჯჰჱჲჳჴჵჶçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "georgianps": { + "type": "_sbcs", + "chars": "€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ¡¢£¤¥¦§¨©ª«¬­®¯°±²³´µ¶·¸¹º»¼½¾¿აბგდევზჱთიკლმნჲოპჟრსტჳუფქღყშჩცძწჭხჴჯჰჵæçèéêëìíîïðñòóôõö÷øùúûüýþÿ" + }, + "pt154": { + "type": "_sbcs", + "chars": "ҖҒӮғ„…ҶҮҲүҠӢҢҚҺҸҗ‘’“”•–—ҳҷҡӣңқһҹ ЎўЈӨҘҰ§Ё©Ә«¬ӯ®Ҝ°ұІіҙө¶·ё№ә»јҪҫҝАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя" + }, + "viscii": { + "type": "_sbcs", + "chars": "\u0000\u0001Ẳ\u0003\u0004ẴẪ\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013Ỷ\u0015\u0016\u0017\u0018Ỹ\u001a\u001b\u001c\u001dỴ\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~ẠẮẰẶẤẦẨẬẼẸẾỀỂỄỆỐỒỔỖỘỢỚỜỞỊỎỌỈỦŨỤỲÕắằặấầẩậẽẹếềểễệốồổỗỠƠộờởịỰỨỪỬơớƯÀÁÂÃẢĂẳẵÈÉÊẺÌÍĨỳĐứÒÓÔạỷừửÙÚỹỵÝỡưàáâãảăữẫèéêẻìíĩỉđựòóôõỏọụùúũủýợỮ" + }, + "iso646cn": { + "type": "_sbcs", + "chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#¥%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}‾��������������������������������������������������������������������������������������������������������������������������������" + }, + "iso646jp": { + "type": "_sbcs", + "chars": "\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001a\u001b\u001c\u001d\u001e\u001f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[¥]^_`abcdefghijklmnopqrstuvwxyz{|}‾��������������������������������������������������������������������������������������������������������������������������������" + }, + "hproman8": { + "type": "_sbcs", + "chars": 
"€‚ƒ„…†‡ˆ‰Š‹ŒŽ‘’“”•–—˜™š›œžŸ ÀÂÈÊËÎÏ´ˋˆ¨˜ÙÛ₤¯Ýý°ÇçÑñ¡¿¤£¥§ƒ¢âêôûáéóúàèòùäëöüÅîØÆåíøæÄìÖÜÉïßÔÁÃãÐðÍÌÓÒÕõŠšÚŸÿÞþ·µ¶¾—¼½ªº«■»±�" + }, + "macintosh": { + "type": "_sbcs", + "chars": "ÄÅÇÉÑÖÜáàâäãåçéèêëíìîïñóòôöõúùûü†°¢£§•¶ß®©™´¨≠ÆØ∞±≤≥¥µ∂∑∏π∫ªºΩæø¿¡¬√ƒ≈∆«»… ÀÃÕŒœ–—“”‘’÷◊ÿŸ⁄¤‹›fifl‡·‚„‰ÂÊÁËÈÍÎÏÌÓÔ�ÒÚÛÙıˆ˜¯˘˙˚¸˝˛ˇ" + }, + "ascii": { + "type": "_sbcs", + "chars": "��������������������������������������������������������������������������������������������������������������������������������" + }, + "tis620": { + "type": "_sbcs", + "chars": "���������������������������������กขฃคฅฆงจฉชซฌญฎฏฐฑฒณดตถทธนบปผฝพฟภมยรฤลฦวศษสหฬอฮฯะัาำิีึืฺุู����฿เแโใไๅๆ็่้๊๋์ํ๎๏๐๑๒๓๔๕๖๗๘๙๚๛����" + } +} \ No newline at end of file diff --git a/node_modules/iconv-lite/encodings/sbcs-data.js b/node_modules/iconv-lite/encodings/sbcs-data.js new file mode 100644 index 0000000..066f904 --- /dev/null +++ b/node_modules/iconv-lite/encodings/sbcs-data.js @@ -0,0 +1,179 @@ +"use strict"; + +// Manually added data to be used by sbcs codec in addition to generated one. + +module.exports = { + // Not supported by iconv, not sure why. + "10029": "maccenteuro", + "maccenteuro": { + "type": "_sbcs", + "chars": "ÄĀāÉĄÖÜáąČäčĆć鏟ĎíďĒēĖóėôöõúĚěü†°Ę£§•¶ß®©™ę¨≠ģĮįĪ≤≥īĶ∂∑łĻļĽľĹĺŅņѬ√ńŇ∆«»… ňŐÕőŌ–—“”‘’÷◊ōŔŕŘ‹›řŖŗŠ‚„šŚśÁŤťÍŽžŪÓÔūŮÚůŰűŲųÝýķŻŁżĢˇ" + }, + + "808": "cp808", + "ibm808": "cp808", + "cp808": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀рстуфхцчшщъыьэюяЁёЄєЇїЎў°∙·√№€■ " + }, + + "mik": { + "type": "_sbcs", + "chars": "АБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя└┴┬├─┼╣║╚╔╩╦╠═╬┐░▒▓│┤№§╗╝┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ " + }, + + "cp720": { + "type": "_sbcs", + "chars": "\x80\x81éâ\x84à\x86çêëèïî\x8d\x8e\x8f\x90\u0651\u0652ô¤ـûùءآأؤ£إئابةتثجحخدذرزسشص«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀ضطظعغفµقكلمنهوىي≡\u064b\u064c\u064d\u064e\u064f\u0650≈°∙·√ⁿ²■\u00a0" + }, + + // Aliases of generated encodings. 
+ "ascii8bit": "ascii", + "usascii": "ascii", + "ansix34": "ascii", + "ansix341968": "ascii", + "ansix341986": "ascii", + "csascii": "ascii", + "cp367": "ascii", + "ibm367": "ascii", + "isoir6": "ascii", + "iso646us": "ascii", + "iso646irv": "ascii", + "us": "ascii", + + "latin1": "iso88591", + "latin2": "iso88592", + "latin3": "iso88593", + "latin4": "iso88594", + "latin5": "iso88599", + "latin6": "iso885910", + "latin7": "iso885913", + "latin8": "iso885914", + "latin9": "iso885915", + "latin10": "iso885916", + + "csisolatin1": "iso88591", + "csisolatin2": "iso88592", + "csisolatin3": "iso88593", + "csisolatin4": "iso88594", + "csisolatincyrillic": "iso88595", + "csisolatinarabic": "iso88596", + "csisolatingreek" : "iso88597", + "csisolatinhebrew": "iso88598", + "csisolatin5": "iso88599", + "csisolatin6": "iso885910", + + "l1": "iso88591", + "l2": "iso88592", + "l3": "iso88593", + "l4": "iso88594", + "l5": "iso88599", + "l6": "iso885910", + "l7": "iso885913", + "l8": "iso885914", + "l9": "iso885915", + "l10": "iso885916", + + "isoir14": "iso646jp", + "isoir57": "iso646cn", + "isoir100": "iso88591", + "isoir101": "iso88592", + "isoir109": "iso88593", + "isoir110": "iso88594", + "isoir144": "iso88595", + "isoir127": "iso88596", + "isoir126": "iso88597", + "isoir138": "iso88598", + "isoir148": "iso88599", + "isoir157": "iso885910", + "isoir166": "tis620", + "isoir179": "iso885913", + "isoir199": "iso885914", + "isoir203": "iso885915", + "isoir226": "iso885916", + + "cp819": "iso88591", + "ibm819": "iso88591", + + "cyrillic": "iso88595", + + "arabic": "iso88596", + "arabic8": "iso88596", + "ecma114": "iso88596", + "asmo708": "iso88596", + + "greek" : "iso88597", + "greek8" : "iso88597", + "ecma118" : "iso88597", + "elot928" : "iso88597", + + "hebrew": "iso88598", + "hebrew8": "iso88598", + + "turkish": "iso88599", + "turkish8": "iso88599", + + "thai": "iso885911", + "thai8": "iso885911", + + "celtic": "iso885914", + "celtic8": "iso885914", + "isoceltic": "iso885914", + + "tis6200": "tis620", + "tis62025291": "tis620", + "tis62025330": "tis620", + + "10000": "macroman", + "10006": "macgreek", + "10007": "maccyrillic", + "10079": "maciceland", + "10081": "macturkish", + + "cspc8codepage437": "cp437", + "cspc775baltic": "cp775", + "cspc850multilingual": "cp850", + "cspcp852": "cp852", + "cspc862latinhebrew": "cp862", + "cpgr": "cp869", + + "msee": "cp1250", + "mscyrl": "cp1251", + "msansi": "cp1252", + "msgreek": "cp1253", + "msturk": "cp1254", + "mshebr": "cp1255", + "msarab": "cp1256", + "winbaltrim": "cp1257", + + "cp20866": "koi8r", + "20866": "koi8r", + "ibm878": "koi8r", + "cskoi8r": "koi8r", + + "cp21866": "koi8u", + "21866": "koi8u", + "ibm1168": "koi8u", + + "strk10482002": "rk1048", + + "tcvn5712": "tcvn", + "tcvn57121": "tcvn", + + "gb198880": "iso646cn", + "cn": "iso646cn", + + "csiso14jisc6220ro": "iso646jp", + "jisc62201969ro": "iso646jp", + "jp": "iso646jp", + + "cshproman8": "hproman8", + "r8": "hproman8", + "roman8": "hproman8", + "xroman8": "hproman8", + "ibm1051": "hproman8", + + "mac": "macintosh", + "csmacintosh": "macintosh", +}; + diff --git a/node_modules/iconv-lite/encodings/tables/big5-added.json b/node_modules/iconv-lite/encodings/tables/big5-added.json new file mode 100644 index 0000000..3c3d3c2 --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/big5-added.json @@ -0,0 +1,122 @@ +[ +["8740","䏰䰲䘃䖦䕸𧉧䵷䖳𧲱䳢𧳅㮕䜶䝄䱇䱀𤊿𣘗𧍒𦺋𧃒䱗𪍑䝏䗚䲅𧱬䴇䪤䚡𦬣爥𥩔𡩣𣸆𣽡晍囻"], +["8767","綕夝𨮹㷴霴𧯯寛𡵞媤㘥𩺰嫑宷峼杮薓𩥅瑡璝㡵𡵓𣚞𦀡㻬"], +["87a1","𥣞㫵竼龗𤅡𨤍𣇪𠪊𣉞䌊蒄龖鐯䤰蘓墖靊鈘秐稲晠権袝瑌篅枂稬剏遆㓦珄𥶹瓆鿇垳䤯呌䄱𣚎堘穲𧭥讏䚮𦺈䆁𥶙箮𢒼鿈𢓁𢓉𢓌鿉蔄𣖻䂴鿊䓡𪷿拁灮鿋"], 
+["8840","㇀",4,"𠄌㇅𠃑𠃍㇆㇇𠃋𡿨㇈𠃊㇉㇊㇋㇌𠄎㇍㇎ĀÁǍÀĒÉĚÈŌÓǑÒ࿿Ê̄Ế࿿Ê̌ỀÊāáǎàɑēéěèīíǐìōóǒòūúǔùǖǘǚ"], +["88a1","ǜü࿿ê̄ế࿿ê̌ềêɡ⏚⏛"], +["8940","𪎩𡅅"], +["8943","攊"], +["8946","丽滝鵎釟"], +["894c","𧜵撑会伨侨兖兴农凤务动医华发变团声处备夲头学实実岚庆总斉柾栄桥济炼电纤纬纺织经统缆缷艺苏药视设询车轧轮"], +["89a1","琑糼緍楆竉刧"], +["89ab","醌碸酞肼"], +["89b0","贋胶𠧧"], +["89b5","肟黇䳍鷉鸌䰾𩷶𧀎鸊𪄳㗁"], +["89c1","溚舾甙"], +["89c5","䤑马骏龙禇𨑬𡷊𠗐𢫦两亁亀亇亿仫伷㑌侽㹈倃傈㑽㒓㒥円夅凛凼刅争剹劐匧㗇厩㕑厰㕓参吣㕭㕲㚁咓咣咴咹哐哯唘唣唨㖘唿㖥㖿嗗㗅"], +["8a40","𧶄唥"], +["8a43","𠱂𠴕𥄫喐𢳆㧬𠍁蹆𤶸𩓥䁓𨂾睺𢰸㨴䟕𨅝𦧲𤷪擝𠵼𠾴𠳕𡃴撍蹾𠺖𠰋𠽤𢲩𨉖𤓓"], +["8a64","𠵆𩩍𨃩䟴𤺧𢳂骲㩧𩗴㿭㔆𥋇𩟔𧣈𢵄鵮頕"], +["8a76","䏙𦂥撴哣𢵌𢯊𡁷㧻𡁯"], +["8aa1","𦛚𦜖𧦠擪𥁒𠱃蹨𢆡𨭌𠜱"], +["8aac","䠋𠆩㿺塳𢶍"], +["8ab2","𤗈𠓼𦂗𠽌𠶖啹䂻䎺"], +["8abb","䪴𢩦𡂝膪飵𠶜捹㧾𢝵跀嚡摼㹃"], +["8ac9","𪘁𠸉𢫏𢳉"], +["8ace","𡃈𣧂㦒㨆𨊛㕸𥹉𢃇噒𠼱𢲲𩜠㒼氽𤸻"], +["8adf","𧕴𢺋𢈈𪙛𨳍𠹺𠰴𦠜羓𡃏𢠃𢤹㗻𥇣𠺌𠾍𠺪㾓𠼰𠵇𡅏𠹌"], +["8af6","𠺫𠮩𠵈𡃀𡄽㿹𢚖搲𠾭"], +["8b40","𣏴𧘹𢯎𠵾𠵿𢱑𢱕㨘𠺘𡃇𠼮𪘲𦭐𨳒𨶙𨳊閪哌苄喹"], +["8b55","𩻃鰦骶𧝞𢷮煀腭胬尜𦕲脴㞗卟𨂽醶𠻺𠸏𠹷𠻻㗝𤷫㘉𠳖嚯𢞵𡃉𠸐𠹸𡁸𡅈𨈇𡑕𠹹𤹐𢶤婔𡀝𡀞𡃵𡃶垜𠸑"], +["8ba1","𧚔𨋍𠾵𠹻𥅾㜃𠾶𡆀𥋘𪊽𤧚𡠺𤅷𨉼墙剨㘚𥜽箲孨䠀䬬鼧䧧鰟鮍𥭴𣄽嗻㗲嚉丨夂𡯁屮靑𠂆乛亻㔾尣彑忄㣺扌攵歺氵氺灬爫丬犭𤣩罒礻糹罓𦉪㓁"], +["8bde","𦍋耂肀𦘒𦥑卝衤见𧢲讠贝钅镸长门𨸏韦页风飞饣𩠐鱼鸟黄歯龜丷𠂇阝户钢"], +["8c40","倻淾𩱳龦㷉袏𤅎灷峵䬠𥇍㕙𥴰愢𨨲辧釶熑朙玺𣊁𪄇㲋𡦀䬐磤琂冮𨜏䀉橣𪊺䈣蘏𠩯稪𩥇𨫪靕灍匤𢁾鏴盙𨧣龧矝亣俰傼丯众龨吴綋墒壐𡶶庒庙忂𢜒斋"], +["8ca1","𣏹椙橃𣱣泿"], +["8ca7","爀𤔅玌㻛𤨓嬕璹讃𥲤𥚕窓篬糃繬苸薗龩袐龪躹龫迏蕟駠鈡龬𨶹𡐿䁱䊢娚"], +["8cc9","顨杫䉶圽"], +["8cce","藖𤥻芿𧄍䲁𦵴嵻𦬕𦾾龭龮宖龯曧繛湗秊㶈䓃𣉖𢞖䎚䔶"], +["8ce6","峕𣬚諹屸㴒𣕑嵸龲煗䕘𤃬𡸣䱷㥸㑊𠆤𦱁諌侴𠈹妿腬顖𩣺弻"], +["8d40","𠮟"], +["8d42","𢇁𨥭䄂䚻𩁹㼇龳𪆵䃸㟖䛷𦱆䅼𨚲𧏿䕭㣔𥒚䕡䔛䶉䱻䵶䗪㿈𤬏㙡䓞䒽䇭崾嵈嵖㷼㠏嶤嶹㠠㠸幂庽弥徃㤈㤔㤿㥍惗愽峥㦉憷憹懏㦸戬抐拥挘㧸嚱"], +["8da1","㨃揢揻搇摚㩋擀崕嘡龟㪗斆㪽旿晓㫲暒㬢朖㭂枤栀㭘桊梄㭲㭱㭻椉楃牜楤榟榅㮼槖㯝橥橴橱檂㯬檙㯲檫檵櫔櫶殁毁毪汵沪㳋洂洆洦涁㳯涤涱渕渘温溆𨧀溻滢滚齿滨滩漤漴㵆𣽁澁澾㵪㵵熷岙㶊瀬㶑灐灔灯灿炉𠌥䏁㗱𠻘"], +["8e40","𣻗垾𦻓焾𥟠㙎榢𨯩孴穉𥣡𩓙穥穽𥦬窻窰竂竃燑𦒍䇊竚竝竪䇯咲𥰁笋筕笩𥌎𥳾箢筯莜𥮴𦱿篐萡箒箸𥴠㶭𥱥蒒篺簆簵𥳁籄粃𤢂粦晽𤕸糉糇糦籴糳糵糎"], +["8ea1","繧䔝𦹄絝𦻖璍綉綫焵綳緒𤁗𦀩緤㴓緵𡟹緥𨍭縝𦄡𦅚繮纒䌫鑬縧罀罁罇礶𦋐駡羗𦍑羣𡙡𠁨䕜𣝦䔃𨌺翺𦒉者耈耝耨耯𪂇𦳃耻耼聡𢜔䦉𦘦𣷣𦛨朥肧𨩈脇脚墰𢛶汿𦒘𤾸擧𡒊舘𡡞橓𤩥𤪕䑺舩𠬍𦩒𣵾俹𡓽蓢荢𦬊𤦧𣔰𡝳𣷸芪椛芳䇛"], +["8f40","蕋苐茚𠸖𡞴㛁𣅽𣕚艻苢茘𣺋𦶣𦬅𦮗𣗎㶿茝嗬莅䔋𦶥莬菁菓㑾𦻔橗蕚㒖𦹂𢻯葘𥯤葱㷓䓤檧葊𣲵祘蒨𦮖𦹷𦹃蓞萏莑䒠蒓蓤𥲑䉀𥳀䕃蔴嫲𦺙䔧蕳䔖枿蘖"], +["8fa1","𨘥𨘻藁𧂈蘂𡖂𧃍䕫䕪蘨㙈𡢢号𧎚虾蝱𪃸蟮𢰧螱蟚蠏噡虬桖䘏衅衆𧗠𣶹𧗤衞袜䙛袴袵揁装睷𧜏覇覊覦覩覧覼𨨥觧𧤤𧪽誜瞓釾誐𧩙竩𧬺𣾏䜓𧬸煼謌謟𥐰𥕥謿譌譍誩𤩺讐讛誯𡛟䘕衏貛𧵔𧶏貫㜥𧵓賖𧶘𧶽贒贃𡤐賛灜贑𤳉㻐起"], +["9040","趩𨀂𡀔𤦊㭼𨆼𧄌竧躭躶軃鋔輙輭𨍥𨐒辥錃𪊟𠩐辳䤪𨧞𨔽𣶻廸𣉢迹𪀔𨚼𨔁𢌥㦀𦻗逷𨔼𧪾遡𨕬𨘋邨𨜓郄𨛦邮都酧㫰醩釄粬𨤳𡺉鈎沟鉁鉢𥖹銹𨫆𣲛𨬌𥗛"], +["90a1","𠴱錬鍫𨫡𨯫炏嫃𨫢𨫥䥥鉄𨯬𨰹𨯿鍳鑛躼閅閦鐦閠濶䊹𢙺𨛘𡉼𣸮䧟氜陻隖䅬隣𦻕懚隶磵𨫠隽双䦡𦲸𠉴𦐐𩂯𩃥𤫑𡤕𣌊霱虂霶䨏䔽䖅𤫩灵孁霛靜𩇕靗孊𩇫靟鐥僐𣂷𣂼鞉鞟鞱鞾韀韒韠𥑬韮琜𩐳響韵𩐝𧥺䫑頴頳顋顦㬎𧅵㵑𠘰𤅜"], +["9140","𥜆飊颷飈飇䫿𦴧𡛓喰飡飦飬鍸餹𤨩䭲𩡗𩤅駵騌騻騐驘𥜥㛄𩂱𩯕髠髢𩬅髴䰎鬔鬭𨘀倴鬴𦦨㣃𣁽魐魀𩴾婅𡡣鮎𤉋鰂鯿鰌𩹨鷔𩾷𪆒𪆫𪃡𪄣𪇟鵾鶃𪄴鸎梈"], +["91a1","鷄𢅛𪆓𪈠𡤻𪈳鴹𪂹𪊴麐麕麞麢䴴麪麯𤍤黁㭠㧥㴝伲㞾𨰫鼂鼈䮖鐤𦶢鼗鼖鼹嚟嚊齅馸𩂋韲葿齢齩竜龎爖䮾𤥵𤦻煷𤧸𤍈𤩑玞𨯚𡣺禟𨥾𨸶鍩鏳𨩄鋬鎁鏋𨥬𤒹爗㻫睲穃烐𤑳𤏸煾𡟯炣𡢾𣖙㻇𡢅𥐯𡟸㜢𡛻𡠹㛡𡝴𡣑𥽋㜣𡛀坛𤨥𡏾𡊨"], +["9240","𡏆𡒶蔃𣚦蔃葕𤦔𧅥𣸱𥕜𣻻𧁒䓴𣛮𩦝𦼦柹㜳㰕㷧塬𡤢栐䁗𣜿𤃡𤂋𤄏𦰡哋嚞𦚱嚒𠿟𠮨𠸍鏆𨬓鎜仸儫㠙𤐶亼𠑥𠍿佋侊𥙑婨𠆫𠏋㦙𠌊𠐔㐵伩𠋀𨺳𠉵諚𠈌亘"], +["92a1","働儍侢伃𤨎𣺊佂倮偬傁俌俥偘僼兙兛兝兞湶𣖕𣸹𣺿浲𡢄𣺉冨凃𠗠䓝𠒣𠒒𠒑赺𨪜𠜎剙劤𠡳勡鍮䙺熌𤎌𠰠𤦬𡃤槑𠸝瑹㻞璙琔瑖玘䮎𤪼𤂍叐㖄爏𤃉喴𠍅响𠯆圝鉝雴鍦埝垍坿㘾壋媙𨩆𡛺𡝯𡜐娬妸銏婾嫏娒𥥆𡧳𡡡𤊕㛵洅瑃娡𥺃"], +["9340","媁𨯗𠐓鏠璌𡌃焅䥲鐈𨧻鎽㞠尞岞幞幈𡦖𡥼𣫮廍孏𡤃𡤄㜁𡢠㛝𡛾㛓脪𨩇𡶺𣑲𨦨弌弎𡤧𡞫婫𡜻孄蘔𧗽衠恾𢡠𢘫忛㺸𢖯𢖾𩂈𦽳懀𠀾𠁆𢘛憙憘恵𢲛𢴇𤛔𩅍"], +["93a1","摱𤙥𢭪㨩𢬢𣑐𩣪𢹸挷𪑛撶挱揑𤧣𢵧护𢲡搻敫楲㯴𣂎𣊭𤦉𣊫唍𣋠𡣙𩐿曎𣊉𣆳㫠䆐𥖄𨬢𥖏𡛼𥕛𥐥磮𣄃𡠪𣈴㑤𣈏𣆂𤋉暎𦴤晫䮓昰𧡰𡷫晣𣋒𣋡昞𥡲㣑𣠺𣞼㮙𣞢𣏾瓐㮖枏𤘪梶栞㯄檾㡣𣟕𤒇樳橒櫉欅𡤒攑梘橌㯗橺歗𣿀𣲚鎠鋲𨯪𨫋"], +["9440","銉𨀞𨧜鑧涥漋𤧬浧𣽿㶏渄𤀼娽渊塇洤硂焻𤌚𤉶烱牐犇犔𤞏𤜥兹𤪤𠗫瑺𣻸𣙟𤩊𤤗𥿡㼆㺱𤫟𨰣𣼵悧㻳瓌琼鎇琷䒟𦷪䕑疃㽣𤳙𤴆㽘畕癳𪗆㬙瑨𨫌𤦫𤦎㫻"], +["94a1","㷍𤩎㻿𤧅𤣳釺圲鍂𨫣𡡤僟𥈡𥇧睸𣈲眎眏睻𤚗𣞁㩞𤣰琸璛㺿𤪺𤫇䃈𤪖𦆮錇𥖁砞碍碈磒珐祙𧝁𥛣䄎禛蒖禥樭𣻺稺秴䅮𡛦䄲鈵秱𠵌𤦌𠊙𣶺𡝮㖗啫㕰㚪𠇔𠰍竢婙𢛵𥪯𥪜娍𠉛磰娪𥯆竾䇹籝籭䈑𥮳𥺼𥺦糍𤧹𡞰粎籼粮檲緜縇緓罎𦉡"], +["9540","𦅜𧭈綗𥺂䉪𦭵𠤖柖𠁎𣗏埄𦐒𦏸𤥢翝笧𠠬𥫩𥵃笌𥸎駦虅驣樜𣐿㧢𤧷𦖭騟𦖠蒀𧄧𦳑䓪脷䐂胆脉腂𦞴飃𦩂艢艥𦩑葓𦶧蘐𧈛媆䅿𡡀嬫𡢡嫤𡣘蚠蜨𣶏蠭𧐢娂"], +["95a1","衮佅袇袿裦襥襍𥚃襔𧞅𧞄𨯵𨯙𨮜𨧹㺭蒣䛵䛏㟲訽訜𩑈彍鈫𤊄旔焩烄𡡅鵭貟賩𧷜妚矃姰䍮㛔踪躧𤰉輰轊䋴汘澻𢌡䢛潹溋𡟚鯩㚵𤤯邻邗啱䤆醻鐄𨩋䁢𨫼鐧𨰝𨰻蓥訫閙閧閗閖𨴴瑅㻂𤣿𤩂𤏪㻧𣈥随𨻧𨹦𨹥㻌𤧭𤩸𣿮琒瑫㻼靁𩂰"], +["9640","桇䨝𩂓𥟟靝鍨𨦉𨰦𨬯𦎾銺嬑譩䤼珹𤈛鞛靱餸𠼦巁𨯅𤪲頟𩓚鋶𩗗釥䓀𨭐𤩧𨭤飜𨩅㼀鈪䤥萔餻饍𧬆㷽馛䭯馪驜𨭥𥣈檏騡嫾騯𩣱䮐𩥈馼䮽䮗鍽塲𡌂堢𤦸"], +["96a1","𡓨硄𢜟𣶸棅㵽鑘㤧慐𢞁𢥫愇鱏鱓鱻鰵鰐魿鯏𩸭鮟𪇵𪃾鴡䲮𤄄鸘䲰鴌𪆴𪃭𪃳𩤯鶥蒽𦸒𦿟𦮂藼䔳𦶤𦺄𦷰萠藮𦸀𣟗𦁤秢𣖜𣙀䤭𤧞㵢鏛銾鍈𠊿碹鉷鑍俤㑀遤𥕝砽硔碶硋𡝗𣇉𤥁㚚佲濚濙瀞瀞吔𤆵垻壳垊鴖埗焴㒯𤆬燫𦱀𤾗嬨𡞵𨩉"], +["9740","愌嫎娋䊼𤒈㜬䭻𨧼鎻鎸𡣖𠼝葲𦳀𡐓𤋺𢰦𤏁妔𣶷𦝁綨𦅛𦂤𤦹𤦋𨧺鋥珢㻩璴𨭣𡢟㻡𤪳櫘珳珻㻖𤨾𤪔𡟙𤩦𠎧𡐤𤧥瑈𤤖炥𤥶銄珦鍟𠓾錱𨫎𨨖鎆𨯧𥗕䤵𨪂煫"], +["97a1","𤥃𠳿嚤𠘚𠯫𠲸唂秄𡟺緾𡛂𤩐𡡒䔮鐁㜊𨫀𤦭妰𡢿𡢃𧒄媡㛢𣵛㚰鉟婹𨪁𡡢鍴㳍𠪴䪖㦊僴㵩㵌𡎜煵䋻𨈘渏𩃤䓫浗𧹏灧沯㳖𣿭𣸭渂漌㵯𠏵畑㚼㓈䚀㻚䡱姄鉮䤾轁𨰜𦯀堒埈㛖𡑒烾𤍢𤩱𢿣𡊰𢎽梹楧𡎘𣓥𧯴𣛟𨪃𣟖𣏺𤲟樚𣚭𦲷萾䓟䓎"], +["9840","𦴦𦵑𦲂𦿞漗𧄉茽𡜺菭𦲀𧁓𡟛妉媂𡞳婡婱𡤅𤇼㜭姯𡜼㛇熎鎐暚𤊥婮娫𤊓樫𣻹𧜶𤑛𤋊焝𤉙𨧡侰𦴨峂𤓎𧹍𤎽樌𤉖𡌄炦焳𤏩㶥泟勇𤩏繥姫崯㷳彜𤩝𡟟綤萦"], +["98a1","咅𣫺𣌀𠈔坾𠣕𠘙㿥𡾞𪊶瀃𩅛嵰玏糓𨩙𩐠俈翧狍猐𧫴猸猹𥛶獁獈㺩𧬘遬燵𤣲珡臶㻊県㻑沢国琙琞琟㻢㻰㻴㻺瓓㼎㽓畂畭畲疍㽼痈痜㿀癍㿗癴㿜発𤽜熈嘣覀塩䀝睃䀹条䁅㗛瞘䁪䁯属瞾矋売砘点砜䂨砹硇硑硦葈𥔵礳栃礲䄃"], +["9940","䄉禑禙辻稆込䅧窑䆲窼艹䇄竏竛䇏両筢筬筻簒簛䉠䉺类粜䊌粸䊔糭输烀𠳏総緔緐緽羮羴犟䎗耠耥笹耮耱联㷌垴炠肷胩䏭脌猪脎脒畠脔䐁㬹腖腙腚"], 
+["99a1","䐓堺腼膄䐥膓䐭膥埯臁臤艔䒏芦艶苊苘苿䒰荗险榊萅烵葤惣蒈䔄蒾蓡蓸蔐蔸蕒䔻蕯蕰藠䕷虲蚒蚲蛯际螋䘆䘗袮裿褤襇覑𧥧訩訸誔誴豑賔賲贜䞘塟跃䟭仮踺嗘坔蹱嗵躰䠷軎転軤軭軲辷迁迊迌逳駄䢭飠鈓䤞鈨鉘鉫銱銮銿"], +["9a40","鋣鋫鋳鋴鋽鍃鎄鎭䥅䥑麿鐗匁鐝鐭鐾䥪鑔鑹锭関䦧间阳䧥枠䨤靀䨵鞲韂噔䫤惨颹䬙飱塄餎餙冴餜餷饂饝饢䭰駅䮝騼鬏窃魩鮁鯝鯱鯴䱭鰠㝯𡯂鵉鰺"], +["9aa1","黾噐鶓鶽鷀鷼银辶鹻麬麱麽黆铜黢黱黸竈齄𠂔𠊷𠎠椚铃妬𠓗塀铁㞹𠗕𠘕𠙶𡚺块煳𠫂𠫍𠮿呪吆𠯋咞𠯻𠰻𠱓𠱥𠱼惧𠲍噺𠲵𠳝𠳭𠵯𠶲𠷈楕鰯螥𠸄𠸎𠻗𠾐𠼭𠹳尠𠾼帋𡁜𡁏𡁶朞𡁻𡂈𡂖㙇𡂿𡃓𡄯𡄻卤蒭𡋣𡍵𡌶讁𡕷𡘙𡟃𡟇乸炻𡠭𡥪"], +["9b40","𡨭𡩅𡰪𡱰𡲬𡻈拃𡻕𡼕熘桕𢁅槩㛈𢉼𢏗𢏺𢜪𢡱𢥏苽𢥧𢦓𢫕覥𢫨辠𢬎鞸𢬿顇骽𢱌"], +["9b62","𢲈𢲷𥯨𢴈𢴒𢶷𢶕𢹂𢽴𢿌𣀳𣁦𣌟𣏞徱晈暿𧩹𣕧𣗳爁𤦺矗𣘚𣜖纇𠍆墵朎"], +["9ba1","椘𣪧𧙗𥿢𣸑𣺹𧗾𢂚䣐䪸𤄙𨪚𤋮𤌍𤀻𤌴𤎖𤩅𠗊凒𠘑妟𡺨㮾𣳿𤐄𤓖垈𤙴㦛𤜯𨗨𩧉㝢𢇃譞𨭎駖𤠒𤣻𤨕爉𤫀𠱸奥𤺥𤾆𠝹軚𥀬劏圿煱𥊙𥐙𣽊𤪧喼𥑆𥑮𦭒釔㑳𥔿𧘲𥕞䜘𥕢𥕦𥟇𤤿𥡝偦㓻𣏌惞𥤃䝼𨥈𥪮𥮉𥰆𡶐垡煑澶𦄂𧰒遖𦆲𤾚譢𦐂𦑊"], +["9c40","嵛𦯷輶𦒄𡤜諪𤧶𦒈𣿯𦔒䯀𦖿𦚵𢜛鑥𥟡憕娧晉侻嚹𤔡𦛼乪𤤴陖涏𦲽㘘襷𦞙𦡮𦐑𦡞營𦣇筂𩃀𠨑𦤦鄄𦤹穅鷰𦧺騦𦨭㙟𦑩𠀡禃𦨴𦭛崬𣔙菏𦮝䛐𦲤画补𦶮墶"], +["9ca1","㜜𢖍𧁋𧇍㱔𧊀𧊅銁𢅺𧊋錰𧋦𤧐氹钟𧑐𠻸蠧裵𢤦𨑳𡞱溸𤨪𡠠㦤㚹尐秣䔿暶𩲭𩢤襃𧟌𧡘囖䃟𡘊㦡𣜯𨃨𡏅熭荦𧧝𩆨婧䲷𧂯𨦫𧧽𧨊𧬋𧵦𤅺筃祾𨀉澵𪋟樃𨌘厢𦸇鎿栶靝𨅯𨀣𦦵𡏭𣈯𨁈嶅𨰰𨂃圕頣𨥉嶫𤦈斾槕叒𤪥𣾁㰑朶𨂐𨃴𨄮𡾡𨅏"], +["9d40","𨆉𨆯𨈚𨌆𨌯𨎊㗊𨑨𨚪䣺揦𨥖砈鉕𨦸䏲𨧧䏟𨧨𨭆𨯔姸𨰉輋𨿅𩃬筑𩄐𩄼㷷𩅞𤫊运犏嚋𩓧𩗩𩖰𩖸𩜲𩣑𩥉𩥪𩧃𩨨𩬎𩵚𩶛纟𩻸𩼣䲤镇𪊓熢𪋿䶑递𪗋䶜𠲜达嗁"], +["9da1","辺𢒰边𤪓䔉繿潖檱仪㓤𨬬𧢝㜺躀𡟵𨀤𨭬𨮙𧨾𦚯㷫𧙕𣲷𥘵𥥖亚𥺁𦉘嚿𠹭踎孭𣺈𤲞揞拐𡟶𡡻攰嘭𥱊吚𥌑㷆𩶘䱽嘢嘞罉𥻘奵𣵀蝰东𠿪𠵉𣚺脗鵞贘瘻鱅癎瞹鍅吲腈苷嘥脲萘肽嗪祢噃吖𠺝㗎嘅嗱曱𨋢㘭甴嗰喺咗啲𠱁𠲖廐𥅈𠹶𢱢"], +["9e40","𠺢麫絚嗞𡁵抝靭咔賍燶酶揼掹揾啩𢭃鱲𢺳冚㓟𠶧冧呍唞唓癦踭𦢊疱肶蠄螆裇膶萜𡃁䓬猄𤜆宐茋𦢓噻𢛴𧴯𤆣𧵳𦻐𧊶酰𡇙鈈𣳼𪚩𠺬𠻹牦𡲢䝎𤿂𧿹𠿫䃺"], +["9ea1","鱝攟𢶠䣳𤟠𩵼𠿬𠸊恢𧖣𠿭"], +["9ead","𦁈𡆇熣纎鵐业丄㕷嬍沲卧㚬㧜卽㚥𤘘墚𤭮舭呋垪𥪕𠥹"], +["9ec5","㩒𢑥獴𩺬䴉鯭𣳾𩼰䱛𤾩𩖞𩿞葜𣶶𧊲𦞳𣜠挮紥𣻷𣸬㨪逈勌㹴㙺䗩𠒎癀嫰𠺶硺𧼮墧䂿噼鮋嵴癔𪐴麅䳡痹㟻愙𣃚𤏲"], +["9ef5","噝𡊩垧𤥣𩸆刴𧂮㖭汊鵼"], +["9f40","籖鬹埞𡝬屓擓𩓐𦌵𧅤蚭𠴨𦴢𤫢𠵱"], +["9f4f","凾𡼏嶎霃𡷑麁遌笟鬂峑箣扨挵髿篏鬪籾鬮籂粆鰕篼鬉鼗鰛𤤾齚啳寃俽麘俲剠㸆勑坧偖妷帒韈鶫轜呩鞴饀鞺匬愰"], +["9fa1","椬叚鰊鴂䰻陁榀傦畆𡝭駚剳"], +["9fae","酙隁酜"], +["9fb2","酑𨺗捿𦴣櫊嘑醎畺抅𠏼獏籰𥰡𣳽"], +["9fc1","𤤙盖鮝个𠳔莾衂"], +["9fc9","届槀僭坺刟巵从氱𠇲伹咜哚劚趂㗾弌㗳"], +["9fdb","歒酼龥鮗頮颴骺麨麄煺笔"], +["9fe7","毺蠘罸"], +["9feb","嘠𪙊蹷齓"], +["9ff0","跔蹏鸜踁抂𨍽踨蹵竓𤩷稾磘泪詧瘇"], +["a040","𨩚鼦泎蟖痃𪊲硓咢贌狢獱謭猂瓱賫𤪻蘯徺袠䒷"], +["a055","𡠻𦸅"], +["a058","詾𢔛"], +["a05b","惽癧髗鵄鍮鮏蟵"], +["a063","蠏賷猬霡鮰㗖犲䰇籑饊𦅙慙䰄麖慽"], +["a073","坟慯抦戹拎㩜懢厪𣏵捤栂㗒"], +["a0a1","嵗𨯂迚𨸹"], +["a0a6","僙𡵆礆匲阸𠼻䁥"], +["a0ae","矾"], +["a0b0","糂𥼚糚稭聦聣絍甅瓲覔舚朌聢𧒆聛瓰脃眤覉𦟌畓𦻑螩蟎臈螌詉貭譃眫瓸蓚㘵榲趦"], +["a0d4","覩瑨涹蟁𤀑瓧㷛煶悤憜㳑煢恷"], +["a0e2","罱𨬭牐惩䭾删㰘𣳇𥻗𧙖𥔱𡥄𡋾𩤃𦷜𧂭峁𦆭𨨏𣙷𠃮𦡆𤼎䕢嬟𦍌齐麦𦉫"], +["a3c0","␀",31,"␡"], +["c6a1","①",9,"⑴",9,"ⅰ",9,"丶丿亅亠冂冖冫勹匸卩厶夊宀巛⼳广廴彐彡攴无疒癶辵隶¨ˆヽヾゝゞ〃仝々〆〇ー[]✽ぁ",23], +["c740","す",58,"ァアィイ"], +["c7a1","ゥ",81,"А",5,"ЁЖ",4], +["c840","Л",26,"ёж",25,"⇧↸↹㇏𠃌乚𠂊刂䒑"], +["c8a1","龰冈龱𧘇"], +["c8cd","¬¦'"㈱№℡゛゜⺀⺄⺆⺇⺈⺊⺌⺍⺕⺜⺝⺥⺧⺪⺬⺮⺶⺼⺾⻆⻊⻌⻍⻏⻖⻗⻞⻣"], +["c8f5","ʃɐɛɔɵœøŋʊɪ"], +["f9fe","■"], +["fa40","𠕇鋛𠗟𣿅蕌䊵珯况㙉𤥂𨧤鍄𡧛苮𣳈砼杄拟𤤳𨦪𠊠𦮳𡌅侫𢓭倈𦴩𧪄𣘀𤪱𢔓倩𠍾徤𠎀𠍇滛𠐟偽儁㑺儎顬㝃萖𤦤𠒇兠𣎴兪𠯿𢃼𠋥𢔰𠖎𣈳𡦃宂蝽𠖳𣲙冲冸"], +["faa1","鴴凉减凑㳜凓𤪦决凢卂凭菍椾𣜭彻刋刦刼劵剗劔効勅簕蕂勠蘍𦬓包𨫞啉滙𣾀𠥔𣿬匳卄𠯢泋𡜦栛珕恊㺪㣌𡛨燝䒢卭却𨚫卾卿𡖖𡘓矦厓𨪛厠厫厮玧𥝲㽙玜叁叅汉义埾叙㪫𠮏叠𣿫𢶣叶𠱷吓灹唫晗浛呭𦭓𠵴啝咏咤䞦𡜍𠻝㶴𠵍"], +["fb40","𨦼𢚘啇䳭启琗喆喩嘅𡣗𤀺䕒𤐵暳𡂴嘷曍𣊊暤暭噍噏磱囱鞇叾圀囯园𨭦㘣𡉏坆𤆥汮炋坂㚱𦱾埦𡐖堃𡑔𤍣堦𤯵塜墪㕡壠壜𡈼壻寿坃𪅐𤉸鏓㖡够梦㛃湙"], +["fba1","𡘾娤啓𡚒蔅姉𠵎𦲁𦴪𡟜姙𡟻𡞲𦶦浱𡠨𡛕姹𦹅媫婣㛦𤦩婷㜈媖瑥嫓𦾡𢕔㶅𡤑㜲𡚸広勐孶斈孼𧨎䀄䡝𠈄寕慠𡨴𥧌𠖥寳宝䴐尅𡭄尓珎尔𡲥𦬨屉䣝岅峩峯嶋𡷹𡸷崐崘嵆𡺤岺巗苼㠭𤤁𢁉𢅳芇㠶㯂帮檊幵幺𤒼𠳓厦亷廐厨𡝱帉廴𨒂"], +["fc40","廹廻㢠廼栾鐛弍𠇁弢㫞䢮𡌺强𦢈𢏐彘𢑱彣鞽𦹮彲鍀𨨶徧嶶㵟𥉐𡽪𧃸𢙨釖𠊞𨨩怱暅𡡷㥣㷇㘹垐𢞴祱㹀悞悤悳𤦂𤦏𧩓璤僡媠慤萤慂慈𦻒憁凴𠙖憇宪𣾷"], +["fca1","𢡟懓𨮝𩥝懐㤲𢦀𢣁怣慜攞掋𠄘担𡝰拕𢸍捬𤧟㨗搸揸𡎎𡟼撐澊𢸶頔𤂌𥜝擡擥鑻㩦携㩗敍漖𤨨𤨣斅敭敟𣁾斵𤥀䬷旑䃘𡠩无旣忟𣐀昘𣇷𣇸晄𣆤𣆥晋𠹵晧𥇦晳晴𡸽𣈱𨗴𣇈𥌓矅𢣷馤朂𤎜𤨡㬫槺𣟂杞杧杢𤇍𩃭柗䓩栢湐鈼栁𣏦𦶠桝"], +["fd40","𣑯槡樋𨫟楳棃𣗍椁椀㴲㨁𣘼㮀枬楡𨩊䋼椶榘㮡𠏉荣傐槹𣙙𢄪橅𣜃檝㯳枱櫈𩆜㰍欝𠤣惞欵歴𢟍溵𣫛𠎵𡥘㝀吡𣭚毡𣻼毜氷𢒋𤣱𦭑汚舦汹𣶼䓅𣶽𤆤𤤌𤤀"], +["fda1","𣳉㛥㳫𠴲鮃𣇹𢒑羏样𦴥𦶡𦷫涖浜湼漄𤥿𤂅𦹲蔳𦽴凇沜渝萮𨬡港𣸯瑓𣾂秌湏媑𣁋濸㜍澝𣸰滺𡒗𤀽䕕鏰潄潜㵎潴𩅰㴻澟𤅄濓𤂑𤅕𤀹𣿰𣾴𤄿凟𤅖𤅗𤅀𦇝灋灾炧炁烌烕烖烟䄄㷨熴熖𤉷焫煅媈煊煮岜𤍥煏鍢𤋁焬𤑚𤨧𤨢熺𨯨炽爎"], +["fe40","鑂爕夑鑃爤鍁𥘅爮牀𤥴梽牕牗㹕𣁄栍漽犂猪猫𤠣𨠫䣭𨠄猨献珏玪𠰺𦨮珉瑉𤇢𡛧𤨤昣㛅𤦷𤦍𤧻珷琕椃𤨦琹𠗃㻗瑜𢢭瑠𨺲瑇珤瑶莹瑬㜰瑴鏱樬璂䥓𤪌"], +["fea1","𤅟𤩹𨮏孆𨰃𡢞瓈𡦈甎瓩甞𨻙𡩋寗𨺬鎅畍畊畧畮𤾂㼄𤴓疎瑝疞疴瘂瘬癑癏癯癶𦏵皐臯㟸𦤑𦤎皡皥皷盌𦾟葢𥂝𥅽𡸜眞眦着撯𥈠睘𣊬瞯𨥤𨥨𡛁矴砉𡍶𤨒棊碯磇磓隥礮𥗠磗礴碱𧘌辸袄𨬫𦂃𢘜禆褀椂禀𥡗禝𧬹礼禩渪𧄦㺨秆𩄍秔"] +] diff --git a/node_modules/iconv-lite/encodings/tables/cp936.json b/node_modules/iconv-lite/encodings/tables/cp936.json new file mode 100644 index 0000000..49ddb9a --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/cp936.json @@ -0,0 +1,264 @@ +[ +["0","\u0000",127,"€"], +["8140","丂丄丅丆丏丒丗丟丠両丣並丩丮丯丱丳丵丷丼乀乁乂乄乆乊乑乕乗乚乛乢乣乤乥乧乨乪",5,"乲乴",9,"乿",6,"亇亊"], +["8180","亐亖亗亙亜亝亞亣亪亯亰亱亴亶亷亸亹亼亽亾仈仌仏仐仒仚仛仜仠仢仦仧仩仭仮仯仱仴仸仹仺仼仾伀伂",6,"伋伌伒",4,"伜伝伡伣伨伩伬伭伮伱伳伵伷伹伻伾",4,"佄佅佇",5,"佒佔佖佡佢佦佨佪佫佭佮佱佲併佷佸佹佺佽侀侁侂侅來侇侊侌侎侐侒侓侕侖侘侙侚侜侞侟価侢"], +["8240","侤侫侭侰",4,"侶",8,"俀俁係俆俇俈俉俋俌俍俒",4,"俙俛俠俢俤俥俧俫俬俰俲俴俵俶俷俹俻俼俽俿",11], 
+["8280","個倎倐們倓倕倖倗倛倝倞倠倢倣値倧倫倯",10,"倻倽倿偀偁偂偄偅偆偉偊偋偍偐",4,"偖偗偘偙偛偝",7,"偦",5,"偭",8,"偸偹偺偼偽傁傂傃傄傆傇傉傊傋傌傎",20,"傤傦傪傫傭",4,"傳",6,"傼"], +["8340","傽",17,"僐",5,"僗僘僙僛",10,"僨僩僪僫僯僰僱僲僴僶",4,"僼",9,"儈"], +["8380","儉儊儌",5,"儓",13,"儢",28,"兂兇兊兌兎兏児兒兓兗兘兙兛兝",4,"兣兤兦內兩兪兯兲兺兾兿冃冄円冇冊冋冎冏冐冑冓冔冘冚冝冞冟冡冣冦",4,"冭冮冴冸冹冺冾冿凁凂凃凅凈凊凍凎凐凒",5], +["8440","凘凙凚凜凞凟凢凣凥",5,"凬凮凱凲凴凷凾刄刅刉刋刌刏刐刓刔刕刜刞刟刡刢刣別刦刧刪刬刯刱刲刴刵刼刾剄",5,"剋剎剏剒剓剕剗剘"], +["8480","剙剚剛剝剟剠剢剣剤剦剨剫剬剭剮剰剱剳",9,"剾劀劃",4,"劉",6,"劑劒劔",6,"劜劤劥劦劧劮劯劰労",9,"勀勁勂勄勅勆勈勊勌勍勎勏勑勓勔動勗務",5,"勠勡勢勣勥",10,"勱",7,"勻勼勽匁匂匃匄匇匉匊匋匌匎"], +["8540","匑匒匓匔匘匛匜匞匟匢匤匥匧匨匩匫匬匭匯",9,"匼匽區卂卄卆卋卌卍卐協単卙卛卝卥卨卪卬卭卲卶卹卻卼卽卾厀厁厃厇厈厊厎厏"], +["8580","厐",4,"厖厗厙厛厜厞厠厡厤厧厪厫厬厭厯",6,"厷厸厹厺厼厽厾叀參",4,"収叏叐叒叓叕叚叜叝叞叡叢叧叴叺叾叿吀吂吅吇吋吔吘吙吚吜吢吤吥吪吰吳吶吷吺吽吿呁呂呄呅呇呉呌呍呎呏呑呚呝",4,"呣呥呧呩",7,"呴呹呺呾呿咁咃咅咇咈咉咊咍咑咓咗咘咜咞咟咠咡"], +["8640","咢咥咮咰咲咵咶咷咹咺咼咾哃哅哊哋哖哘哛哠",4,"哫哬哯哰哱哴",5,"哻哾唀唂唃唄唅唈唊",4,"唒唓唕",5,"唜唝唞唟唡唥唦"], +["8680","唨唩唫唭唲唴唵唶唸唹唺唻唽啀啂啅啇啈啋",4,"啑啒啓啔啗",4,"啝啞啟啠啢啣啨啩啫啯",5,"啹啺啽啿喅喆喌喍喎喐喒喓喕喖喗喚喛喞喠",6,"喨",8,"喲喴営喸喺喼喿",4,"嗆嗇嗈嗊嗋嗎嗏嗐嗕嗗",4,"嗞嗠嗢嗧嗩嗭嗮嗰嗱嗴嗶嗸",4,"嗿嘂嘃嘄嘅"], +["8740","嘆嘇嘊嘋嘍嘐",7,"嘙嘚嘜嘝嘠嘡嘢嘥嘦嘨嘩嘪嘫嘮嘯嘰嘳嘵嘷嘸嘺嘼嘽嘾噀",11,"噏",4,"噕噖噚噛噝",4], +["8780","噣噥噦噧噭噮噯噰噲噳噴噵噷噸噹噺噽",7,"嚇",6,"嚐嚑嚒嚔",14,"嚤",10,"嚰",6,"嚸嚹嚺嚻嚽",12,"囋",8,"囕囖囘囙囜団囥",5,"囬囮囯囲図囶囷囸囻囼圀圁圂圅圇國",6], +["8840","園",9,"圝圞圠圡圢圤圥圦圧圫圱圲圴",4,"圼圽圿坁坃坄坅坆坈坉坋坒",4,"坘坙坢坣坥坧坬坮坰坱坲坴坵坸坹坺坽坾坿垀"], +["8880","垁垇垈垉垊垍",4,"垔",6,"垜垝垞垟垥垨垪垬垯垰垱垳垵垶垷垹",8,"埄",6,"埌埍埐埑埓埖埗埛埜埞埡埢埣埥",7,"埮埰埱埲埳埵埶執埻埼埾埿堁堃堄堅堈堉堊堌堎堏堐堒堓堔堖堗堘堚堛堜堝堟堢堣堥",4,"堫",4,"報堲堳場堶",7], +["8940","堾",5,"塅",6,"塎塏塐塒塓塕塖塗塙",4,"塟",5,"塦",4,"塭",16,"塿墂墄墆墇墈墊墋墌"], +["8980","墍",4,"墔",4,"墛墜墝墠",7,"墪",17,"墽墾墿壀壂壃壄壆",10,"壒壓壔壖",13,"壥",5,"壭壯壱売壴壵壷壸壺",7,"夃夅夆夈",4,"夎夐夑夒夓夗夘夛夝夞夠夡夢夣夦夨夬夰夲夳夵夶夻"], +["8a40","夽夾夿奀奃奅奆奊奌奍奐奒奓奙奛",4,"奡奣奤奦",12,"奵奷奺奻奼奾奿妀妅妉妋妌妎妏妐妑妔妕妘妚妛妜妝妟妠妡妢妦"], +["8a80","妧妬妭妰妱妳",5,"妺妼妽妿",6,"姇姈姉姌姍姎姏姕姖姙姛姞",4,"姤姦姧姩姪姫姭",11,"姺姼姽姾娀娂娊娋娍娎娏娐娒娔娕娖娗娙娚娛娝娞娡娢娤娦娧娨娪",6,"娳娵娷",4,"娽娾娿婁",4,"婇婈婋",9,"婖婗婘婙婛",5], +["8b40","婡婣婤婥婦婨婩婫",8,"婸婹婻婼婽婾媀",17,"媓",6,"媜",13,"媫媬"], +["8b80","媭",4,"媴媶媷媹",4,"媿嫀嫃",5,"嫊嫋嫍",4,"嫓嫕嫗嫙嫚嫛嫝嫞嫟嫢嫤嫥嫧嫨嫪嫬",4,"嫲",22,"嬊",11,"嬘",25,"嬳嬵嬶嬸",7,"孁",6], +["8c40","孈",7,"孒孖孞孠孡孧孨孫孭孮孯孲孴孶孷學孹孻孼孾孿宂宆宊宍宎宐宑宒宔宖実宧宨宩宬宭宮宯宱宲宷宺宻宼寀寁寃寈寉寊寋寍寎寏"], +["8c80","寑寔",8,"寠寢寣實寧審",4,"寯寱",6,"寽対尀専尃尅將專尋尌對導尐尒尓尗尙尛尞尟尠尡尣尦尨尩尪尫尭尮尯尰尲尳尵尶尷屃屄屆屇屌屍屒屓屔屖屗屘屚屛屜屝屟屢層屧",6,"屰屲",6,"屻屼屽屾岀岃",4,"岉岊岋岎岏岒岓岕岝",4,"岤",4], +["8d40","岪岮岯岰岲岴岶岹岺岻岼岾峀峂峃峅",5,"峌",5,"峓",5,"峚",6,"峢峣峧峩峫峬峮峯峱",9,"峼",4], +["8d80","崁崄崅崈",5,"崏",4,"崕崗崘崙崚崜崝崟",4,"崥崨崪崫崬崯",4,"崵",7,"崿",7,"嵈嵉嵍",10,"嵙嵚嵜嵞",10,"嵪嵭嵮嵰嵱嵲嵳嵵",12,"嶃",21,"嶚嶛嶜嶞嶟嶠"], +["8e40","嶡",21,"嶸",12,"巆",6,"巎",12,"巜巟巠巣巤巪巬巭"], +["8e80","巰巵巶巸",4,"巿帀帄帇帉帊帋帍帎帒帓帗帞",7,"帨",4,"帯帰帲",4,"帹帺帾帿幀幁幃幆",5,"幍",6,"幖",4,"幜幝幟幠幣",14,"幵幷幹幾庁庂広庅庈庉庌庍庎庒庘庛庝庡庢庣庤庨",4,"庮",4,"庴庺庻庼庽庿",6], +["8f40","廆廇廈廋",5,"廔廕廗廘廙廚廜",11,"廩廫",8,"廵廸廹廻廼廽弅弆弇弉弌弍弎弐弒弔弖弙弚弜弝弞弡弢弣弤"], +["8f80","弨弫弬弮弰弲",6,"弻弽弾弿彁",14,"彑彔彙彚彛彜彞彟彠彣彥彧彨彫彮彯彲彴彵彶彸彺彽彾彿徃徆徍徎徏徑従徔徖徚徛徝從徟徠徢",5,"復徫徬徯",5,"徶徸徹徺徻徾",4,"忇忈忊忋忎忓忔忕忚忛応忞忟忢忣忥忦忨忩忬忯忰忲忳忴忶忷忹忺忼怇"], +["9040","怈怉怋怌怐怑怓怗怘怚怞怟怢怣怤怬怭怮怰",4,"怶",4,"怽怾恀恄",6,"恌恎恏恑恓恔恖恗恘恛恜恞恟恠恡恥恦恮恱恲恴恵恷恾悀"], +["9080","悁悂悅悆悇悈悊悋悎悏悐悑悓悕悗悘悙悜悞悡悢悤悥悧悩悪悮悰悳悵悶悷悹悺悽",7,"惇惈惉惌",4,"惒惓惔惖惗惙惛惞惡",4,"惪惱惲惵惷惸惻",4,"愂愃愄愅愇愊愋愌愐",4,"愖愗愘愙愛愜愝愞愡愢愥愨愩愪愬",18,"慀",6], +["9140","慇慉態慍慏慐慒慓慔慖",6,"慞慟慠慡慣慤慥慦慩",6,"慱慲慳慴慶慸",18,"憌憍憏",4,"憕"], +["9180","憖",6,"憞",8,"憪憫憭",9,"憸",5,"憿懀懁懃",4,"應懌",4,"懓懕",16,"懧",13,"懶",8,"戀",5,"戇戉戓戔戙戜戝戞戠戣戦戧戨戩戫戭戯戰戱戲戵戶戸",4,"扂扄扅扆扊"], +["9240","扏扐払扖扗扙扚扜",6,"扤扥扨扱扲扴扵扷扸扺扻扽抁抂抃抅抆抇抈抋",5,"抔抙抜抝択抣抦抧抩抪抭抮抯抰抲抳抴抶抷抸抺抾拀拁"], +["9280","拃拋拏拑拕拝拞拠拡拤拪拫拰拲拵拸拹拺拻挀挃挄挅挆挊挋挌挍挏挐挒挓挔挕挗挘挙挜挦挧挩挬挭挮挰挱挳",5,"挻挼挾挿捀捁捄捇捈捊捑捒捓捔捖",7,"捠捤捥捦捨捪捫捬捯捰捲捳捴捵捸捹捼捽捾捿掁掃掄掅掆掋掍掑掓掔掕掗掙",6,"採掤掦掫掯掱掲掵掶掹掻掽掿揀"], +["9340","揁揂揃揅揇揈揊揋揌揑揓揔揕揗",6,"揟揢揤",4,"揫揬揮揯揰揱揳揵揷揹揺揻揼揾搃搄搆",4,"損搎搑搒搕",5,"搝搟搢搣搤"], +["9380","搥搧搨搩搫搮",5,"搵",4,"搻搼搾摀摂摃摉摋",6,"摓摕摖摗摙",4,"摟",7,"摨摪摫摬摮",9,"摻",6,"撃撆撈",8,"撓撔撗撘撚撛撜撝撟",4,"撥撦撧撨撪撫撯撱撲撳撴撶撹撻撽撾撿擁擃擄擆",6,"擏擑擓擔擕擖擙據"], +["9440","擛擜擝擟擠擡擣擥擧",24,"攁",7,"攊",7,"攓",4,"攙",8], 
+["9480","攢攣攤攦",4,"攬攭攰攱攲攳攷攺攼攽敀",4,"敆敇敊敋敍敎敐敒敓敔敗敘敚敜敟敠敡敤敥敧敨敩敪敭敮敯敱敳敵敶數",14,"斈斉斊斍斎斏斒斔斕斖斘斚斝斞斠斢斣斦斨斪斬斮斱",7,"斺斻斾斿旀旂旇旈旉旊旍旐旑旓旔旕旘",7,"旡旣旤旪旫"], +["9540","旲旳旴旵旸旹旻",4,"昁昄昅昇昈昉昋昍昐昑昒昖昗昘昚昛昜昞昡昢昣昤昦昩昪昫昬昮昰昲昳昷",4,"昽昿晀時晄",6,"晍晎晐晑晘"], +["9580","晙晛晜晝晞晠晢晣晥晧晩",4,"晱晲晳晵晸晹晻晼晽晿暀暁暃暅暆暈暉暊暋暍暎暏暐暒暓暔暕暘",4,"暞",8,"暩",4,"暯",4,"暵暶暷暸暺暻暼暽暿",25,"曚曞",7,"曧曨曪",5,"曱曵曶書曺曻曽朁朂會"], +["9640","朄朅朆朇朌朎朏朑朒朓朖朘朙朚朜朞朠",5,"朧朩朮朰朲朳朶朷朸朹朻朼朾朿杁杄杅杇杊杋杍杒杔杕杗",4,"杝杢杣杤杦杧杫杬杮東杴杶"], +["9680","杸杹杺杻杽枀枂枃枅枆枈枊枌枍枎枏枑枒枓枔枖枙枛枟枠枡枤枦枩枬枮枱枲枴枹",7,"柂柅",9,"柕柖柗柛柟柡柣柤柦柧柨柪柫柭柮柲柵",7,"柾栁栂栃栄栆栍栐栒栔栕栘",4,"栞栟栠栢",6,"栫",6,"栴栵栶栺栻栿桇桋桍桏桒桖",5], +["9740","桜桝桞桟桪桬",7,"桵桸",8,"梂梄梇",7,"梐梑梒梔梕梖梘",9,"梣梤梥梩梪梫梬梮梱梲梴梶梷梸"], +["9780","梹",6,"棁棃",5,"棊棌棎棏棐棑棓棔棖棗棙棛",4,"棡棢棤",9,"棯棲棳棴棶棷棸棻棽棾棿椀椂椃椄椆",4,"椌椏椑椓",11,"椡椢椣椥",7,"椮椯椱椲椳椵椶椷椸椺椻椼椾楀楁楃",16,"楕楖楘楙楛楜楟"], +["9840","楡楢楤楥楧楨楩楪楬業楯楰楲",4,"楺楻楽楾楿榁榃榅榊榋榌榎",5,"榖榗榙榚榝",9,"榩榪榬榮榯榰榲榳榵榶榸榹榺榼榽"], +["9880","榾榿槀槂",7,"構槍槏槑槒槓槕",5,"槜槝槞槡",11,"槮槯槰槱槳",9,"槾樀",9,"樋",11,"標",5,"樠樢",5,"権樫樬樭樮樰樲樳樴樶",6,"樿",4,"橅橆橈",7,"橑",6,"橚"], +["9940","橜",4,"橢橣橤橦",10,"橲",6,"橺橻橽橾橿檁檂檃檅",8,"檏檒",4,"檘",7,"檡",5], +["9980","檧檨檪檭",114,"欥欦欨",6], +["9a40","欯欰欱欳欴欵欶欸欻欼欽欿歀歁歂歄歅歈歊歋歍",11,"歚",7,"歨歩歫",13,"歺歽歾歿殀殅殈"], +["9a80","殌殎殏殐殑殔殕殗殘殙殜",4,"殢",7,"殫",7,"殶殸",6,"毀毃毄毆",4,"毌毎毐毑毘毚毜",4,"毢",7,"毬毭毮毰毱毲毴毶毷毸毺毻毼毾",6,"氈",4,"氎氒気氜氝氞氠氣氥氫氬氭氱氳氶氷氹氺氻氼氾氿汃汄汅汈汋",4,"汑汒汓汖汘"], +["9b40","汙汚汢汣汥汦汧汫",4,"汱汳汵汷汸決汻汼汿沀沄沇沊沋沍沎沑沒沕沖沗沘沚沜沝沞沠沢沨沬沯沰沴沵沶沷沺泀況泂泃泆泇泈泋泍泎泏泑泒泘"], +["9b80","泙泚泜泝泟泤泦泧泩泬泭泲泴泹泿洀洂洃洅洆洈洉洊洍洏洐洑洓洔洕洖洘洜洝洟",5,"洦洨洩洬洭洯洰洴洶洷洸洺洿浀浂浄浉浌浐浕浖浗浘浛浝浟浡浢浤浥浧浨浫浬浭浰浱浲浳浵浶浹浺浻浽",4,"涃涄涆涇涊涋涍涏涐涒涖",4,"涜涢涥涬涭涰涱涳涴涶涷涹",5,"淁淂淃淈淉淊"], +["9c40","淍淎淏淐淒淓淔淕淗淚淛淜淟淢淣淥淧淨淩淪淭淯淰淲淴淵淶淸淺淽",7,"渆渇済渉渋渏渒渓渕渘渙減渜渞渟渢渦渧渨渪測渮渰渱渳渵"], +["9c80","渶渷渹渻",7,"湅",7,"湏湐湑湒湕湗湙湚湜湝湞湠",10,"湬湭湯",14,"満溁溂溄溇溈溊",4,"溑",6,"溙溚溛溝溞溠溡溣溤溦溨溩溫溬溭溮溰溳溵溸溹溼溾溿滀滃滄滅滆滈滉滊滌滍滎滐滒滖滘滙滛滜滝滣滧滪",5], +["9d40","滰滱滲滳滵滶滷滸滺",7,"漃漄漅漇漈漊",4,"漐漑漒漖",9,"漡漢漣漥漦漧漨漬漮漰漲漴漵漷",6,"漿潀潁潂"], +["9d80","潃潄潅潈潉潊潌潎",9,"潙潚潛潝潟潠潡潣潤潥潧",5,"潯潰潱潳潵潶潷潹潻潽",6,"澅澆澇澊澋澏",12,"澝澞澟澠澢",4,"澨",10,"澴澵澷澸澺",5,"濁濃",5,"濊",6,"濓",10,"濟濢濣濤濥"], +["9e40","濦",7,"濰",32,"瀒",7,"瀜",6,"瀤",6], +["9e80","瀫",9,"瀶瀷瀸瀺",17,"灍灎灐",13,"灟",11,"灮灱灲灳灴灷灹灺灻災炁炂炃炄炆炇炈炋炌炍炏炐炑炓炗炘炚炛炞",12,"炰炲炴炵炶為炾炿烄烅烆烇烉烋",12,"烚"], +["9f40","烜烝烞烠烡烢烣烥烪烮烰",6,"烸烺烻烼烾",10,"焋",4,"焑焒焔焗焛",10,"焧",7,"焲焳焴"], +["9f80","焵焷",13,"煆煇煈煉煋煍煏",12,"煝煟",4,"煥煩",4,"煯煰煱煴煵煶煷煹煻煼煾",5,"熅",4,"熋熌熍熎熐熑熒熓熕熖熗熚",4,"熡",6,"熩熪熫熭",5,"熴熶熷熸熺",8,"燄",9,"燏",4], +["a040","燖",9,"燡燢燣燤燦燨",5,"燯",9,"燺",11,"爇",19], +["a080","爛爜爞",9,"爩爫爭爮爯爲爳爴爺爼爾牀",6,"牉牊牋牎牏牐牑牓牔牕牗牘牚牜牞牠牣牤牥牨牪牫牬牭牰牱牳牴牶牷牸牻牼牽犂犃犅",4,"犌犎犐犑犓",11,"犠",11,"犮犱犲犳犵犺",6,"狅狆狇狉狊狋狌狏狑狓狔狕狖狘狚狛"], +["a1a1"," 、。·ˉˇ¨〃々—~‖…‘’“”〔〕〈",7,"〖〗【】±×÷∶∧∨∑∏∪∩∈∷√⊥∥∠⌒⊙∫∮≡≌≈∽∝≠≮≯≤≥∞∵∴♂♀°′″℃$¤¢£‰§№☆★○●◎◇◆□■△▲※→←↑↓〓"], +["a2a1","ⅰ",9], +["a2b1","⒈",19,"⑴",19,"①",9], +["a2e5","㈠",9], +["a2f1","Ⅰ",11], +["a3a1","!"#¥%",88," ̄"], +["a4a1","ぁ",82], +["a5a1","ァ",85], +["a6a1","Α",16,"Σ",6], +["a6c1","α",16,"σ",6], +["a6e0","︵︶︹︺︿﹀︽︾﹁﹂﹃﹄"], +["a6ee","︻︼︷︸︱"], +["a6f4","︳︴"], +["a7a1","А",5,"ЁЖ",25], +["a7d1","а",5,"ёж",25], +["a840","ˊˋ˙–―‥‵℅℉↖↗↘↙∕∟∣≒≦≧⊿═",35,"▁",6], +["a880","█",7,"▓▔▕▼▽◢◣◤◥☉⊕〒〝〞"], +["a8a1","āáǎàēéěèīíǐìōóǒòūúǔùǖǘǚǜüêɑ"], +["a8bd","ńň"], +["a8c0","ɡ"], +["a8c5","ㄅ",36], +["a940","〡",8,"㊣㎎㎏㎜㎝㎞㎡㏄㏎㏑㏒㏕︰¬¦"], +["a959","℡㈱"], +["a95c","‐"], +["a960","ー゛゜ヽヾ〆ゝゞ﹉",9,"﹔﹕﹖﹗﹙",8], +["a980","﹢",4,"﹨﹩﹪﹫"], +["a996","〇"], +["a9a4","─",75], +["aa40","狜狝狟狢",5,"狪狫狵狶狹狽狾狿猀猂猄",5,"猋猌猍猏猐猑猒猔猘猙猚猟猠猣猤猦猧猨猭猯猰猲猳猵猶猺猻猼猽獀",8], +["aa80","獉獊獋獌獎獏獑獓獔獕獖獘",7,"獡",10,"獮獰獱"], +["ab40","獲",11,"獿",4,"玅玆玈玊玌玍玏玐玒玓玔玕玗玘玙玚玜玝玞玠玡玣",5,"玪玬玭玱玴玵玶玸玹玼玽玾玿珁珃",4], +["ab80","珋珌珎珒",6,"珚珛珜珝珟珡珢珣珤珦珨珪珫珬珮珯珰珱珳",4], +["ac40","珸",10,"琄琇琈琋琌琍琎琑",8,"琜",5,"琣琤琧琩琫琭琯琱琲琷",4,"琽琾琿瑀瑂",11], +["ac80","瑎",6,"瑖瑘瑝瑠",12,"瑮瑯瑱",4,"瑸瑹瑺"], +["ad40","瑻瑼瑽瑿璂璄璅璆璈璉璊璌璍璏璑",10,"璝璟",7,"璪",15,"璻",12], +["ad80","瓈",9,"瓓",8,"瓝瓟瓡瓥瓧",6,"瓰瓱瓲"], 
+["ae40","瓳瓵瓸",6,"甀甁甂甃甅",7,"甎甐甒甔甕甖甗甛甝甞甠",4,"甦甧甪甮甴甶甹甼甽甿畁畂畃畄畆畇畉畊畍畐畑畒畓畕畖畗畘"], +["ae80","畝",7,"畧畨畩畫",6,"畳畵當畷畺",4,"疀疁疂疄疅疇"], +["af40","疈疉疊疌疍疎疐疓疕疘疛疜疞疢疦",4,"疭疶疷疺疻疿痀痁痆痋痌痎痏痐痑痓痗痙痚痜痝痟痠痡痥痩痬痭痮痯痲痳痵痶痷痸痺痻痽痾瘂瘄瘆瘇"], +["af80","瘈瘉瘋瘍瘎瘏瘑瘒瘓瘔瘖瘚瘜瘝瘞瘡瘣瘧瘨瘬瘮瘯瘱瘲瘶瘷瘹瘺瘻瘽癁療癄"], +["b040","癅",6,"癎",5,"癕癗",4,"癝癟癠癡癢癤",6,"癬癭癮癰",7,"癹発發癿皀皁皃皅皉皊皌皍皏皐皒皔皕皗皘皚皛"], +["b080","皜",7,"皥",8,"皯皰皳皵",9,"盀盁盃啊阿埃挨哎唉哀皑癌蔼矮艾碍爱隘鞍氨安俺按暗岸胺案肮昂盎凹敖熬翱袄傲奥懊澳芭捌扒叭吧笆八疤巴拔跋靶把耙坝霸罢爸白柏百摆佰败拜稗斑班搬扳般颁板版扮拌伴瓣半办绊邦帮梆榜膀绑棒磅蚌镑傍谤苞胞包褒剥"], +["b140","盄盇盉盋盌盓盕盙盚盜盝盞盠",4,"盦",7,"盰盳盵盶盷盺盻盽盿眀眂眃眅眆眊県眎",10,"眛眜眝眞眡眣眤眥眧眪眫"], +["b180","眬眮眰",4,"眹眻眽眾眿睂睄睅睆睈",7,"睒",7,"睜薄雹保堡饱宝抱报暴豹鲍爆杯碑悲卑北辈背贝钡倍狈备惫焙被奔苯本笨崩绷甭泵蹦迸逼鼻比鄙笔彼碧蓖蔽毕毙毖币庇痹闭敝弊必辟壁臂避陛鞭边编贬扁便变卞辨辩辫遍标彪膘表鳖憋别瘪彬斌濒滨宾摈兵冰柄丙秉饼炳"], +["b240","睝睞睟睠睤睧睩睪睭",11,"睺睻睼瞁瞂瞃瞆",5,"瞏瞐瞓",11,"瞡瞣瞤瞦瞨瞫瞭瞮瞯瞱瞲瞴瞶",4], +["b280","瞼瞾矀",12,"矎",8,"矘矙矚矝",4,"矤病并玻菠播拨钵波博勃搏铂箔伯帛舶脖膊渤泊驳捕卜哺补埠不布步簿部怖擦猜裁材才财睬踩采彩菜蔡餐参蚕残惭惨灿苍舱仓沧藏操糙槽曹草厕策侧册测层蹭插叉茬茶查碴搽察岔差诧拆柴豺搀掺蝉馋谗缠铲产阐颤昌猖"], +["b340","矦矨矪矯矰矱矲矴矵矷矹矺矻矼砃",5,"砊砋砎砏砐砓砕砙砛砞砠砡砢砤砨砪砫砮砯砱砲砳砵砶砽砿硁硂硃硄硆硈硉硊硋硍硏硑硓硔硘硙硚"], +["b380","硛硜硞",11,"硯",7,"硸硹硺硻硽",6,"场尝常长偿肠厂敞畅唱倡超抄钞朝嘲潮巢吵炒车扯撤掣彻澈郴臣辰尘晨忱沉陈趁衬撑称城橙成呈乘程惩澄诚承逞骋秤吃痴持匙池迟弛驰耻齿侈尺赤翅斥炽充冲虫崇宠抽酬畴踌稠愁筹仇绸瞅丑臭初出橱厨躇锄雏滁除楚"], +["b440","碄碅碆碈碊碋碏碐碒碔碕碖碙碝碞碠碢碤碦碨",7,"碵碶碷碸確碻碼碽碿磀磂磃磄磆磇磈磌磍磎磏磑磒磓磖磗磘磚",9], +["b480","磤磥磦磧磩磪磫磭",4,"磳磵磶磸磹磻",5,"礂礃礄礆",6,"础储矗搐触处揣川穿椽传船喘串疮窗幢床闯创吹炊捶锤垂春椿醇唇淳纯蠢戳绰疵茨磁雌辞慈瓷词此刺赐次聪葱囱匆从丛凑粗醋簇促蹿篡窜摧崔催脆瘁粹淬翠村存寸磋撮搓措挫错搭达答瘩打大呆歹傣戴带殆代贷袋待逮"], +["b540","礍",5,"礔",9,"礟",4,"礥",14,"礵",4,"礽礿祂祃祄祅祇祊",8,"祔祕祘祙祡祣"], +["b580","祤祦祩祪祫祬祮祰",6,"祹祻",4,"禂禃禆禇禈禉禋禌禍禎禐禑禒怠耽担丹单郸掸胆旦氮但惮淡诞弹蛋当挡党荡档刀捣蹈倒岛祷导到稻悼道盗德得的蹬灯登等瞪凳邓堤低滴迪敌笛狄涤翟嫡抵底地蒂第帝弟递缔颠掂滇碘点典靛垫电佃甸店惦奠淀殿碉叼雕凋刁掉吊钓调跌爹碟蝶迭谍叠"], +["b640","禓",6,"禛",11,"禨",10,"禴",4,"禼禿秂秄秅秇秈秊秌秎秏秐秓秔秖秗秙",5,"秠秡秢秥秨秪"], +["b680","秬秮秱",6,"秹秺秼秾秿稁稄稅稇稈稉稊稌稏",4,"稕稖稘稙稛稜丁盯叮钉顶鼎锭定订丢东冬董懂动栋侗恫冻洞兜抖斗陡豆逗痘都督毒犊独读堵睹赌杜镀肚度渡妒端短锻段断缎堆兑队对墩吨蹲敦顿囤钝盾遁掇哆多夺垛躲朵跺舵剁惰堕蛾峨鹅俄额讹娥恶厄扼遏鄂饿恩而儿耳尔饵洱二"], +["b740","稝稟稡稢稤",14,"稴稵稶稸稺稾穀",5,"穇",9,"穒",4,"穘",16], +["b780","穩",6,"穱穲穳穵穻穼穽穾窂窅窇窉窊窋窌窎窏窐窓窔窙窚窛窞窡窢贰发罚筏伐乏阀法珐藩帆番翻樊矾钒繁凡烦反返范贩犯饭泛坊芳方肪房防妨仿访纺放菲非啡飞肥匪诽吠肺废沸费芬酚吩氛分纷坟焚汾粉奋份忿愤粪丰封枫蜂峰锋风疯烽逢冯缝讽奉凤佛否夫敷肤孵扶拂辐幅氟符伏俘服"], +["b840","窣窤窧窩窪窫窮",4,"窴",10,"竀",10,"竌",9,"竗竘竚竛竜竝竡竢竤竧",5,"竮竰竱竲竳"], +["b880","竴",4,"竻竼竾笀笁笂笅笇笉笌笍笎笐笒笓笖笗笘笚笜笝笟笡笢笣笧笩笭浮涪福袱弗甫抚辅俯釜斧脯腑府腐赴副覆赋复傅付阜父腹负富讣附妇缚咐噶嘎该改概钙盖溉干甘杆柑竿肝赶感秆敢赣冈刚钢缸肛纲岗港杠篙皋高膏羔糕搞镐稿告哥歌搁戈鸽胳疙割革葛格蛤阁隔铬个各给根跟耕更庚羹"], +["b940","笯笰笲笴笵笶笷笹笻笽笿",5,"筆筈筊筍筎筓筕筗筙筜筞筟筡筣",10,"筯筰筳筴筶筸筺筼筽筿箁箂箃箄箆",6,"箎箏"], +["b980","箑箒箓箖箘箙箚箛箞箟箠箣箤箥箮箯箰箲箳箵箶箷箹",7,"篂篃範埂耿梗工攻功恭龚供躬公宫弓巩汞拱贡共钩勾沟苟狗垢构购够辜菇咕箍估沽孤姑鼓古蛊骨谷股故顾固雇刮瓜剐寡挂褂乖拐怪棺关官冠观管馆罐惯灌贯光广逛瑰规圭硅归龟闺轨鬼诡癸桂柜跪贵刽辊滚棍锅郭国果裹过哈"], +["ba40","篅篈築篊篋篍篎篏篐篒篔",4,"篛篜篞篟篠篢篣篤篧篨篩篫篬篭篯篰篲",4,"篸篹篺篻篽篿",7,"簈簉簊簍簎簐",5,"簗簘簙"], +["ba80","簚",4,"簠",5,"簨簩簫",12,"簹",5,"籂骸孩海氦亥害骇酣憨邯韩含涵寒函喊罕翰撼捍旱憾悍焊汗汉夯杭航壕嚎豪毫郝好耗号浩呵喝荷菏核禾和何合盒貉阂河涸赫褐鹤贺嘿黑痕很狠恨哼亨横衡恒轰哄烘虹鸿洪宏弘红喉侯猴吼厚候后呼乎忽瑚壶葫胡蝴狐糊湖"], +["bb40","籃",9,"籎",36,"籵",5,"籾",9], +["bb80","粈粊",6,"粓粔粖粙粚粛粠粡粣粦粧粨粩粫粬粭粯粰粴",4,"粺粻弧虎唬护互沪户花哗华猾滑画划化话槐徊怀淮坏欢环桓还缓换患唤痪豢焕涣宦幻荒慌黄磺蝗簧皇凰惶煌晃幌恍谎灰挥辉徽恢蛔回毁悔慧卉惠晦贿秽会烩汇讳诲绘荤昏婚魂浑混豁活伙火获或惑霍货祸击圾基机畸稽积箕"], +["bc40","粿糀糂糃糄糆糉糋糎",6,"糘糚糛糝糞糡",6,"糩",5,"糰",7,"糹糺糼",13,"紋",5], +["bc80","紑",14,"紡紣紤紥紦紨紩紪紬紭紮細",6,"肌饥迹激讥鸡姬绩缉吉极棘辑籍集及急疾汲即嫉级挤几脊己蓟技冀季伎祭剂悸济寄寂计记既忌际妓继纪嘉枷夹佳家加荚颊贾甲钾假稼价架驾嫁歼监坚尖笺间煎兼肩艰奸缄茧检柬碱硷拣捡简俭剪减荐槛鉴践贱见键箭件"], +["bd40","紷",54,"絯",7], +["bd80","絸",32,"健舰剑饯渐溅涧建僵姜将浆江疆蒋桨奖讲匠酱降蕉椒礁焦胶交郊浇骄娇嚼搅铰矫侥脚狡角饺缴绞剿教酵轿较叫窖揭接皆秸街阶截劫节桔杰捷睫竭洁结解姐戒藉芥界借介疥诫届巾筋斤金今津襟紧锦仅谨进靳晋禁近烬浸"], +["be40","継",12,"綧",6,"綯",42], +["be80","線",32,"尽劲荆兢茎睛晶鲸京惊精粳经井警景颈静境敬镜径痉靖竟竞净炯窘揪究纠玖韭久灸九酒厩救旧臼舅咎就疚鞠拘狙疽居驹菊局咀矩举沮聚拒据巨具距踞锯俱句惧炬剧捐鹃娟倦眷卷绢撅攫抉掘倔爵觉决诀绝均菌钧军君峻"], +["bf40","緻",62], +["bf80","縺縼",4,"繂",4,"繈",21,"俊竣浚郡骏喀咖卡咯开揩楷凯慨刊堪勘坎砍看康慷糠扛抗亢炕考拷烤靠坷苛柯棵磕颗科壳咳可渴克刻客课肯啃垦恳坑吭空恐孔控抠口扣寇枯哭窟苦酷库裤夸垮挎跨胯块筷侩快宽款匡筐狂框矿眶旷况亏盔岿窥葵奎魁傀"], +["c040","繞",35,"纃",23,"纜纝纞"], +["c080","纮纴纻纼绖绤绬绹缊缐缞缷缹缻",6,"罃罆",9,"罒罓馈愧溃坤昆捆困括扩廓阔垃拉喇蜡腊辣啦莱来赖蓝婪栏拦篮阑兰澜谰揽览懒缆烂滥琅榔狼廊郎朗浪捞劳牢老佬姥酪烙涝勒乐雷镭蕾磊累儡垒擂肋类泪棱楞冷厘梨犁黎篱狸离漓理李里鲤礼莉荔吏栗丽厉励砾历利傈例俐"], 
+["c140","罖罙罛罜罝罞罠罣",4,"罫罬罭罯罰罳罵罶罷罸罺罻罼罽罿羀羂",7,"羋羍羏",4,"羕",4,"羛羜羠羢羣羥羦羨",6,"羱"], +["c180","羳",4,"羺羻羾翀翂翃翄翆翇翈翉翋翍翏",4,"翖翗翙",5,"翢翣痢立粒沥隶力璃哩俩联莲连镰廉怜涟帘敛脸链恋炼练粮凉梁粱良两辆量晾亮谅撩聊僚疗燎寥辽潦了撂镣廖料列裂烈劣猎琳林磷霖临邻鳞淋凛赁吝拎玲菱零龄铃伶羚凌灵陵岭领另令溜琉榴硫馏留刘瘤流柳六龙聋咙笼窿"], +["c240","翤翧翨翪翫翬翭翯翲翴",6,"翽翾翿耂耇耈耉耊耎耏耑耓耚耛耝耞耟耡耣耤耫",5,"耲耴耹耺耼耾聀聁聄聅聇聈聉聎聏聐聑聓聕聖聗"], +["c280","聙聛",13,"聫",5,"聲",11,"隆垄拢陇楼娄搂篓漏陋芦卢颅庐炉掳卤虏鲁麓碌露路赂鹿潞禄录陆戮驴吕铝侣旅履屡缕虑氯律率滤绿峦挛孪滦卵乱掠略抡轮伦仑沦纶论萝螺罗逻锣箩骡裸落洛骆络妈麻玛码蚂马骂嘛吗埋买麦卖迈脉瞒馒蛮满蔓曼慢漫"], +["c340","聾肁肂肅肈肊肍",5,"肔肕肗肙肞肣肦肧肨肬肰肳肵肶肸肹肻胅胇",4,"胏",6,"胘胟胠胢胣胦胮胵胷胹胻胾胿脀脁脃脄脅脇脈脋"], +["c380","脌脕脗脙脛脜脝脟",12,"脭脮脰脳脴脵脷脹",4,"脿谩芒茫盲氓忙莽猫茅锚毛矛铆卯茂冒帽貌贸么玫枚梅酶霉煤没眉媒镁每美昧寐妹媚门闷们萌蒙檬盟锰猛梦孟眯醚靡糜迷谜弥米秘觅泌蜜密幂棉眠绵冕免勉娩缅面苗描瞄藐秒渺庙妙蔑灭民抿皿敏悯闽明螟鸣铭名命谬摸"], +["c440","腀",5,"腇腉腍腎腏腒腖腗腘腛",4,"腡腢腣腤腦腨腪腫腬腯腲腳腵腶腷腸膁膃",4,"膉膋膌膍膎膐膒",5,"膙膚膞",4,"膤膥"], +["c480","膧膩膫",7,"膴",5,"膼膽膾膿臄臅臇臈臉臋臍",6,"摹蘑模膜磨摩魔抹末莫墨默沫漠寞陌谋牟某拇牡亩姆母墓暮幕募慕木目睦牧穆拿哪呐钠那娜纳氖乃奶耐奈南男难囊挠脑恼闹淖呢馁内嫩能妮霓倪泥尼拟你匿腻逆溺蔫拈年碾撵捻念娘酿鸟尿捏聂孽啮镊镍涅您柠狞凝宁"], +["c540","臔",14,"臤臥臦臨臩臫臮",4,"臵",5,"臽臿舃與",4,"舎舏舑舓舕",5,"舝舠舤舥舦舧舩舮舲舺舼舽舿"], +["c580","艀艁艂艃艅艆艈艊艌艍艎艐",7,"艙艛艜艝艞艠",7,"艩拧泞牛扭钮纽脓浓农弄奴努怒女暖虐疟挪懦糯诺哦欧鸥殴藕呕偶沤啪趴爬帕怕琶拍排牌徘湃派攀潘盘磐盼畔判叛乓庞旁耪胖抛咆刨炮袍跑泡呸胚培裴赔陪配佩沛喷盆砰抨烹澎彭蓬棚硼篷膨朋鹏捧碰坯砒霹批披劈琵毗"], +["c640","艪艫艬艭艱艵艶艷艸艻艼芀芁芃芅芆芇芉芌芐芓芔芕芖芚芛芞芠芢芣芧芲芵芶芺芻芼芿苀苂苃苅苆苉苐苖苙苚苝苢苧苨苩苪苬苭苮苰苲苳苵苶苸"], +["c680","苺苼",4,"茊茋茍茐茒茓茖茘茙茝",9,"茩茪茮茰茲茷茻茽啤脾疲皮匹痞僻屁譬篇偏片骗飘漂瓢票撇瞥拼频贫品聘乒坪苹萍平凭瓶评屏坡泼颇婆破魄迫粕剖扑铺仆莆葡菩蒲埔朴圃普浦谱曝瀑期欺栖戚妻七凄漆柒沏其棋奇歧畦崎脐齐旗祈祁骑起岂乞企启契砌器气迄弃汽泣讫掐"], +["c740","茾茿荁荂荄荅荈荊",4,"荓荕",4,"荝荢荰",6,"荹荺荾",6,"莇莈莊莋莌莍莏莐莑莔莕莖莗莙莚莝莟莡",6,"莬莭莮"], +["c780","莯莵莻莾莿菂菃菄菆菈菉菋菍菎菐菑菒菓菕菗菙菚菛菞菢菣菤菦菧菨菫菬菭恰洽牵扦钎铅千迁签仟谦乾黔钱钳前潜遣浅谴堑嵌欠歉枪呛腔羌墙蔷强抢橇锹敲悄桥瞧乔侨巧鞘撬翘峭俏窍切茄且怯窃钦侵亲秦琴勤芹擒禽寝沁青轻氢倾卿清擎晴氰情顷请庆琼穷秋丘邱球求囚酋泅趋区蛆曲躯屈驱渠"], +["c840","菮華菳",4,"菺菻菼菾菿萀萂萅萇萈萉萊萐萒",5,"萙萚萛萞",5,"萩",7,"萲",5,"萹萺萻萾",7,"葇葈葉"], +["c880","葊",6,"葒",4,"葘葝葞葟葠葢葤",4,"葪葮葯葰葲葴葷葹葻葼取娶龋趣去圈颧权醛泉全痊拳犬券劝缺炔瘸却鹊榷确雀裙群然燃冉染瓤壤攘嚷让饶扰绕惹热壬仁人忍韧任认刃妊纫扔仍日戎茸蓉荣融熔溶容绒冗揉柔肉茹蠕儒孺如辱乳汝入褥软阮蕊瑞锐闰润若弱撒洒萨腮鳃塞赛三叁"], +["c940","葽",4,"蒃蒄蒅蒆蒊蒍蒏",7,"蒘蒚蒛蒝蒞蒟蒠蒢",12,"蒰蒱蒳蒵蒶蒷蒻蒼蒾蓀蓂蓃蓅蓆蓇蓈蓋蓌蓎蓏蓒蓔蓕蓗"], +["c980","蓘",4,"蓞蓡蓢蓤蓧",4,"蓭蓮蓯蓱",10,"蓽蓾蔀蔁蔂伞散桑嗓丧搔骚扫嫂瑟色涩森僧莎砂杀刹沙纱傻啥煞筛晒珊苫杉山删煽衫闪陕擅赡膳善汕扇缮墒伤商赏晌上尚裳梢捎稍烧芍勺韶少哨邵绍奢赊蛇舌舍赦摄射慑涉社设砷申呻伸身深娠绅神沈审婶甚肾慎渗声生甥牲升绳"], +["ca40","蔃",8,"蔍蔎蔏蔐蔒蔔蔕蔖蔘蔙蔛蔜蔝蔞蔠蔢",8,"蔭",9,"蔾",4,"蕄蕅蕆蕇蕋",10], +["ca80","蕗蕘蕚蕛蕜蕝蕟",4,"蕥蕦蕧蕩",8,"蕳蕵蕶蕷蕸蕼蕽蕿薀薁省盛剩胜圣师失狮施湿诗尸虱十石拾时什食蚀实识史矢使屎驶始式示士世柿事拭誓逝势是嗜噬适仕侍释饰氏市恃室视试收手首守寿授售受瘦兽蔬枢梳殊抒输叔舒淑疏书赎孰熟薯暑曙署蜀黍鼠属术述树束戍竖墅庶数漱"], +["cb40","薂薃薆薈",6,"薐",10,"薝",6,"薥薦薧薩薫薬薭薱",5,"薸薺",6,"藂",6,"藊",4,"藑藒"], +["cb80","藔藖",5,"藝",6,"藥藦藧藨藪",14,"恕刷耍摔衰甩帅栓拴霜双爽谁水睡税吮瞬顺舜说硕朔烁斯撕嘶思私司丝死肆寺嗣四伺似饲巳松耸怂颂送宋讼诵搜艘擞嗽苏酥俗素速粟僳塑溯宿诉肃酸蒜算虽隋随绥髓碎岁穗遂隧祟孙损笋蓑梭唆缩琐索锁所塌他它她塔"], +["cc40","藹藺藼藽藾蘀",4,"蘆",10,"蘒蘓蘔蘕蘗",15,"蘨蘪",13,"蘹蘺蘻蘽蘾蘿虀"], +["cc80","虁",11,"虒虓處",4,"虛虜虝號虠虡虣",7,"獭挞蹋踏胎苔抬台泰酞太态汰坍摊贪瘫滩坛檀痰潭谭谈坦毯袒碳探叹炭汤塘搪堂棠膛唐糖倘躺淌趟烫掏涛滔绦萄桃逃淘陶讨套特藤腾疼誊梯剔踢锑提题蹄啼体替嚏惕涕剃屉天添填田甜恬舔腆挑条迢眺跳贴铁帖厅听烃"], +["cd40","虭虯虰虲",6,"蚃",6,"蚎",4,"蚔蚖",5,"蚞",4,"蚥蚦蚫蚭蚮蚲蚳蚷蚸蚹蚻",4,"蛁蛂蛃蛅蛈蛌蛍蛒蛓蛕蛖蛗蛚蛜"], +["cd80","蛝蛠蛡蛢蛣蛥蛦蛧蛨蛪蛫蛬蛯蛵蛶蛷蛺蛻蛼蛽蛿蜁蜄蜅蜆蜋蜌蜎蜏蜐蜑蜔蜖汀廷停亭庭挺艇通桐酮瞳同铜彤童桶捅筒统痛偷投头透凸秃突图徒途涂屠土吐兔湍团推颓腿蜕褪退吞屯臀拖托脱鸵陀驮驼椭妥拓唾挖哇蛙洼娃瓦袜歪外豌弯湾玩顽丸烷完碗挽晚皖惋宛婉万腕汪王亡枉网往旺望忘妄威"], +["ce40","蜙蜛蜝蜟蜠蜤蜦蜧蜨蜪蜫蜬蜭蜯蜰蜲蜳蜵蜶蜸蜹蜺蜼蜽蝀",6,"蝊蝋蝍蝏蝐蝑蝒蝔蝕蝖蝘蝚",5,"蝡蝢蝦",7,"蝯蝱蝲蝳蝵"], +["ce80","蝷蝸蝹蝺蝿螀螁螄螆螇螉螊螌螎",4,"螔螕螖螘",6,"螠",4,"巍微危韦违桅围唯惟为潍维苇萎委伟伪尾纬未蔚味畏胃喂魏位渭谓尉慰卫瘟温蚊文闻纹吻稳紊问嗡翁瓮挝蜗涡窝我斡卧握沃巫呜钨乌污诬屋无芜梧吾吴毋武五捂午舞伍侮坞戊雾晤物勿务悟误昔熙析西硒矽晰嘻吸锡牺"], +["cf40","螥螦螧螩螪螮螰螱螲螴螶螷螸螹螻螼螾螿蟁",4,"蟇蟈蟉蟌",4,"蟔",6,"蟜蟝蟞蟟蟡蟢蟣蟤蟦蟧蟨蟩蟫蟬蟭蟯",9], +["cf80","蟺蟻蟼蟽蟿蠀蠁蠂蠄",5,"蠋",7,"蠔蠗蠘蠙蠚蠜",4,"蠣稀息希悉膝夕惜熄烯溪汐犀檄袭席习媳喜铣洗系隙戏细瞎虾匣霞辖暇峡侠狭下厦夏吓掀锨先仙鲜纤咸贤衔舷闲涎弦嫌显险现献县腺馅羡宪陷限线相厢镶香箱襄湘乡翔祥详想响享项巷橡像向象萧硝霄削哮嚣销消宵淆晓"], +["d040","蠤",13,"蠳",5,"蠺蠻蠽蠾蠿衁衂衃衆",5,"衎",5,"衕衖衘衚",6,"衦衧衪衭衯衱衳衴衵衶衸衹衺"], +["d080","衻衼袀袃袆袇袉袊袌袎袏袐袑袓袔袕袗",4,"袝",4,"袣袥",5,"小孝校肖啸笑效楔些歇蝎鞋协挟携邪斜胁谐写械卸蟹懈泄泻谢屑薪芯锌欣辛新忻心信衅星腥猩惺兴刑型形邢行醒幸杏性姓兄凶胸匈汹雄熊休修羞朽嗅锈秀袖绣墟戌需虚嘘须徐许蓄酗叙旭序畜恤絮婿绪续轩喧宣悬旋玄"], +["d140","袬袮袯袰袲",4,"袸袹袺袻袽袾袿裀裃裄裇裈裊裋裌裍裏裐裑裓裖裗裚",4,"裠裡裦裧裩",6,"裲裵裶裷裺裻製裿褀褁褃",5], 
+["d180","褉褋",4,"褑褔",4,"褜",4,"褢褣褤褦褧褨褩褬褭褮褯褱褲褳褵褷选癣眩绚靴薛学穴雪血勋熏循旬询寻驯巡殉汛训讯逊迅压押鸦鸭呀丫芽牙蚜崖衙涯雅哑亚讶焉咽阉烟淹盐严研蜒岩延言颜阎炎沿奄掩眼衍演艳堰燕厌砚雁唁彦焰宴谚验殃央鸯秧杨扬佯疡羊洋阳氧仰痒养样漾邀腰妖瑶"], +["d240","褸",8,"襂襃襅",24,"襠",5,"襧",19,"襼"], +["d280","襽襾覀覂覄覅覇",26,"摇尧遥窑谣姚咬舀药要耀椰噎耶爷野冶也页掖业叶曳腋夜液一壹医揖铱依伊衣颐夷遗移仪胰疑沂宜姨彝椅蚁倚已乙矣以艺抑易邑屹亿役臆逸肄疫亦裔意毅忆义益溢诣议谊译异翼翌绎茵荫因殷音阴姻吟银淫寅饮尹引隐"], +["d340","覢",30,"觃觍觓觔觕觗觘觙觛觝觟觠觡觢觤觧觨觩觪觬觭觮觰觱觲觴",6], +["d380","觻",4,"訁",5,"計",21,"印英樱婴鹰应缨莹萤营荧蝇迎赢盈影颖硬映哟拥佣臃痈庸雍踊蛹咏泳涌永恿勇用幽优悠忧尤由邮铀犹油游酉有友右佑釉诱又幼迂淤于盂榆虞愚舆余俞逾鱼愉渝渔隅予娱雨与屿禹宇语羽玉域芋郁吁遇喻峪御愈欲狱育誉"], +["d440","訞",31,"訿",8,"詉",21], +["d480","詟",25,"詺",6,"浴寓裕预豫驭鸳渊冤元垣袁原援辕园员圆猿源缘远苑愿怨院曰约越跃钥岳粤月悦阅耘云郧匀陨允运蕴酝晕韵孕匝砸杂栽哉灾宰载再在咱攒暂赞赃脏葬遭糟凿藻枣早澡蚤躁噪造皂灶燥责择则泽贼怎增憎曾赠扎喳渣札轧"], +["d540","誁",7,"誋",7,"誔",46], +["d580","諃",32,"铡闸眨栅榨咋乍炸诈摘斋宅窄债寨瞻毡詹粘沾盏斩辗崭展蘸栈占战站湛绽樟章彰漳张掌涨杖丈帐账仗胀瘴障招昭找沼赵照罩兆肇召遮折哲蛰辙者锗蔗这浙珍斟真甄砧臻贞针侦枕疹诊震振镇阵蒸挣睁征狰争怔整拯正政"], +["d640","諤",34,"謈",27], +["d680","謤謥謧",30,"帧症郑证芝枝支吱蜘知肢脂汁之织职直植殖执值侄址指止趾只旨纸志挚掷至致置帜峙制智秩稚质炙痔滞治窒中盅忠钟衷终种肿重仲众舟周州洲诌粥轴肘帚咒皱宙昼骤珠株蛛朱猪诸诛逐竹烛煮拄瞩嘱主著柱助蛀贮铸筑"], +["d740","譆",31,"譧",4,"譭",25], +["d780","讇",24,"讬讱讻诇诐诪谉谞住注祝驻抓爪拽专砖转撰赚篆桩庄装妆撞壮状椎锥追赘坠缀谆准捉拙卓桌琢茁酌啄着灼浊兹咨资姿滋淄孜紫仔籽滓子自渍字鬃棕踪宗综总纵邹走奏揍租足卒族祖诅阻组钻纂嘴醉最罪尊遵昨左佐柞做作坐座"], +["d840","谸",8,"豂豃豄豅豈豊豋豍",7,"豖豗豘豙豛",5,"豣",6,"豬",6,"豴豵豶豷豻",6,"貃貄貆貇"], +["d880","貈貋貍",6,"貕貖貗貙",20,"亍丌兀丐廿卅丕亘丞鬲孬噩丨禺丿匕乇夭爻卮氐囟胤馗毓睾鼗丶亟鼐乜乩亓芈孛啬嘏仄厍厝厣厥厮靥赝匚叵匦匮匾赜卦卣刂刈刎刭刳刿剀剌剞剡剜蒯剽劂劁劐劓冂罔亻仃仉仂仨仡仫仞伛仳伢佤仵伥伧伉伫佞佧攸佚佝"], +["d940","貮",62], +["d980","賭",32,"佟佗伲伽佶佴侑侉侃侏佾佻侪佼侬侔俦俨俪俅俚俣俜俑俟俸倩偌俳倬倏倮倭俾倜倌倥倨偾偃偕偈偎偬偻傥傧傩傺僖儆僭僬僦僮儇儋仝氽佘佥俎龠汆籴兮巽黉馘冁夔勹匍訇匐凫夙兕亠兖亳衮袤亵脔裒禀嬴蠃羸冫冱冽冼"], +["da40","贎",14,"贠赑赒赗赟赥赨赩赪赬赮赯赱赲赸",8,"趂趃趆趇趈趉趌",4,"趒趓趕",9,"趠趡"], +["da80","趢趤",12,"趲趶趷趹趻趽跀跁跂跅跇跈跉跊跍跐跒跓跔凇冖冢冥讠讦讧讪讴讵讷诂诃诋诏诎诒诓诔诖诘诙诜诟诠诤诨诩诮诰诳诶诹诼诿谀谂谄谇谌谏谑谒谔谕谖谙谛谘谝谟谠谡谥谧谪谫谮谯谲谳谵谶卩卺阝阢阡阱阪阽阼陂陉陔陟陧陬陲陴隈隍隗隰邗邛邝邙邬邡邴邳邶邺"], +["db40","跕跘跙跜跠跡跢跥跦跧跩跭跮跰跱跲跴跶跼跾",6,"踆踇踈踋踍踎踐踑踒踓踕",7,"踠踡踤",4,"踫踭踰踲踳踴踶踷踸踻踼踾"], +["db80","踿蹃蹅蹆蹌",4,"蹓",5,"蹚",11,"蹧蹨蹪蹫蹮蹱邸邰郏郅邾郐郄郇郓郦郢郜郗郛郫郯郾鄄鄢鄞鄣鄱鄯鄹酃酆刍奂劢劬劭劾哿勐勖勰叟燮矍廴凵凼鬯厶弁畚巯坌垩垡塾墼壅壑圩圬圪圳圹圮圯坜圻坂坩垅坫垆坼坻坨坭坶坳垭垤垌垲埏垧垴垓垠埕埘埚埙埒垸埴埯埸埤埝"], +["dc40","蹳蹵蹷",4,"蹽蹾躀躂躃躄躆躈",6,"躑躒躓躕",6,"躝躟",11,"躭躮躰躱躳",6,"躻",7], +["dc80","軃",10,"軏",21,"堋堍埽埭堀堞堙塄堠塥塬墁墉墚墀馨鼙懿艹艽艿芏芊芨芄芎芑芗芙芫芸芾芰苈苊苣芘芷芮苋苌苁芩芴芡芪芟苄苎芤苡茉苷苤茏茇苜苴苒苘茌苻苓茑茚茆茔茕苠苕茜荑荛荜茈莒茼茴茱莛荞茯荏荇荃荟荀茗荠茭茺茳荦荥"], +["dd40","軥",62], +["dd80","輤",32,"荨茛荩荬荪荭荮莰荸莳莴莠莪莓莜莅荼莶莩荽莸荻莘莞莨莺莼菁萁菥菘堇萘萋菝菽菖萜萸萑萆菔菟萏萃菸菹菪菅菀萦菰菡葜葑葚葙葳蒇蒈葺蒉葸萼葆葩葶蒌蒎萱葭蓁蓍蓐蓦蒽蓓蓊蒿蒺蓠蒡蒹蒴蒗蓥蓣蔌甍蔸蓰蔹蔟蔺"], +["de40","轅",32,"轪辀辌辒辝辠辡辢辤辥辦辧辪辬辭辮辯農辳辴辵辷辸辺辻込辿迀迃迆"], +["de80","迉",4,"迏迒迖迗迚迠迡迣迧迬迯迱迲迴迵迶迺迻迼迾迿逇逈逌逎逓逕逘蕖蔻蓿蓼蕙蕈蕨蕤蕞蕺瞢蕃蕲蕻薤薨薇薏蕹薮薜薅薹薷薰藓藁藜藿蘧蘅蘩蘖蘼廾弈夼奁耷奕奚奘匏尢尥尬尴扌扪抟抻拊拚拗拮挢拶挹捋捃掭揶捱捺掎掴捭掬掊捩掮掼揲揸揠揿揄揞揎摒揆掾摅摁搋搛搠搌搦搡摞撄摭撖"], +["df40","這逜連逤逥逧",5,"逰",4,"逷逹逺逽逿遀遃遅遆遈",4,"過達違遖遙遚遜",5,"遤遦遧適遪遫遬遯",4,"遶",6,"遾邁"], +["df80","還邅邆邇邉邊邌",4,"邒邔邖邘邚邜邞邟邠邤邥邧邨邩邫邭邲邷邼邽邿郀摺撷撸撙撺擀擐擗擤擢攉攥攮弋忒甙弑卟叱叽叩叨叻吒吖吆呋呒呓呔呖呃吡呗呙吣吲咂咔呷呱呤咚咛咄呶呦咝哐咭哂咴哒咧咦哓哔呲咣哕咻咿哌哙哚哜咩咪咤哝哏哞唛哧唠哽唔哳唢唣唏唑唧唪啧喏喵啉啭啁啕唿啐唼"], +["e040","郂郃郆郈郉郋郌郍郒郔郕郖郘郙郚郞郟郠郣郤郥郩郪郬郮郰郱郲郳郵郶郷郹郺郻郼郿鄀鄁鄃鄅",19,"鄚鄛鄜"], +["e080","鄝鄟鄠鄡鄤",10,"鄰鄲",6,"鄺",8,"酄唷啖啵啶啷唳唰啜喋嗒喃喱喹喈喁喟啾嗖喑啻嗟喽喾喔喙嗪嗷嗉嘟嗑嗫嗬嗔嗦嗝嗄嗯嗥嗲嗳嗌嗍嗨嗵嗤辔嘞嘈嘌嘁嘤嘣嗾嘀嘧嘭噘嘹噗嘬噍噢噙噜噌噔嚆噤噱噫噻噼嚅嚓嚯囔囗囝囡囵囫囹囿圄圊圉圜帏帙帔帑帱帻帼"], +["e140","酅酇酈酑酓酔酕酖酘酙酛酜酟酠酦酧酨酫酭酳酺酻酼醀",4,"醆醈醊醎醏醓",6,"醜",5,"醤",5,"醫醬醰醱醲醳醶醷醸醹醻"], +["e180","醼",10,"釈釋釐釒",9,"針",8,"帷幄幔幛幞幡岌屺岍岐岖岈岘岙岑岚岜岵岢岽岬岫岱岣峁岷峄峒峤峋峥崂崃崧崦崮崤崞崆崛嵘崾崴崽嵬嵛嵯嵝嵫嵋嵊嵩嵴嶂嶙嶝豳嶷巅彳彷徂徇徉後徕徙徜徨徭徵徼衢彡犭犰犴犷犸狃狁狎狍狒狨狯狩狲狴狷猁狳猃狺"], +["e240","釦",62], +["e280","鈥",32,"狻猗猓猡猊猞猝猕猢猹猥猬猸猱獐獍獗獠獬獯獾舛夥飧夤夂饣饧",5,"饴饷饽馀馄馇馊馍馐馑馓馔馕庀庑庋庖庥庠庹庵庾庳赓廒廑廛廨廪膺忄忉忖忏怃忮怄忡忤忾怅怆忪忭忸怙怵怦怛怏怍怩怫怊怿怡恸恹恻恺恂"], +["e340","鉆",45,"鉵",16], +["e380","銆",7,"銏",24,"恪恽悖悚悭悝悃悒悌悛惬悻悱惝惘惆惚悴愠愦愕愣惴愀愎愫慊慵憬憔憧憷懔懵忝隳闩闫闱闳闵闶闼闾阃阄阆阈阊阋阌阍阏阒阕阖阗阙阚丬爿戕氵汔汜汊沣沅沐沔沌汨汩汴汶沆沩泐泔沭泷泸泱泗沲泠泖泺泫泮沱泓泯泾"], +["e440","銨",5,"銯",24,"鋉",31], +["e480","鋩",32,"洹洧洌浃浈洇洄洙洎洫浍洮洵洚浏浒浔洳涑浯涞涠浞涓涔浜浠浼浣渚淇淅淞渎涿淠渑淦淝淙渖涫渌涮渫湮湎湫溲湟溆湓湔渲渥湄滟溱溘滠漭滢溥溧溽溻溷滗溴滏溏滂溟潢潆潇漤漕滹漯漶潋潴漪漉漩澉澍澌潸潲潼潺濑"], +["e540","錊",51,"錿",10], +["e580","鍊",31,"鍫濉澧澹澶濂濡濮濞濠濯瀚瀣瀛瀹瀵灏灞宀宄宕宓宥宸甯骞搴寤寮褰寰蹇謇辶迓迕迥迮迤迩迦迳迨逅逄逋逦逑逍逖逡逵逶逭逯遄遑遒遐遨遘遢遛暹遴遽邂邈邃邋彐彗彖彘尻咫屐屙孱屣屦羼弪弩弭艴弼鬻屮妁妃妍妩妪妣"], +["e640","鍬",34,"鎐",27], 
+["e680","鎬",29,"鏋鏌鏍妗姊妫妞妤姒妲妯姗妾娅娆姝娈姣姘姹娌娉娲娴娑娣娓婀婧婊婕娼婢婵胬媪媛婷婺媾嫫媲嫒嫔媸嫠嫣嫱嫖嫦嫘嫜嬉嬗嬖嬲嬷孀尕尜孚孥孳孑孓孢驵驷驸驺驿驽骀骁骅骈骊骐骒骓骖骘骛骜骝骟骠骢骣骥骧纟纡纣纥纨纩"], +["e740","鏎",7,"鏗",54], +["e780","鐎",32,"纭纰纾绀绁绂绉绋绌绐绔绗绛绠绡绨绫绮绯绱绲缍绶绺绻绾缁缂缃缇缈缋缌缏缑缒缗缙缜缛缟缡",6,"缪缫缬缭缯",4,"缵幺畿巛甾邕玎玑玮玢玟珏珂珑玷玳珀珉珈珥珙顼琊珩珧珞玺珲琏琪瑛琦琥琨琰琮琬"], +["e840","鐯",14,"鐿",43,"鑬鑭鑮鑯"], +["e880","鑰",20,"钑钖钘铇铏铓铔铚铦铻锜锠琛琚瑁瑜瑗瑕瑙瑷瑭瑾璜璎璀璁璇璋璞璨璩璐璧瓒璺韪韫韬杌杓杞杈杩枥枇杪杳枘枧杵枨枞枭枋杷杼柰栉柘栊柩枰栌柙枵柚枳柝栀柃枸柢栎柁柽栲栳桠桡桎桢桄桤梃栝桕桦桁桧桀栾桊桉栩梵梏桴桷梓桫棂楮棼椟椠棹"], +["e940","锧锳锽镃镈镋镕镚镠镮镴镵長",7,"門",42], +["e980","閫",32,"椤棰椋椁楗棣椐楱椹楠楂楝榄楫榀榘楸椴槌榇榈槎榉楦楣楹榛榧榻榫榭槔榱槁槊槟榕槠榍槿樯槭樗樘橥槲橄樾檠橐橛樵檎橹樽樨橘橼檑檐檩檗檫猷獒殁殂殇殄殒殓殍殚殛殡殪轫轭轱轲轳轵轶轸轷轹轺轼轾辁辂辄辇辋"], +["ea40","闌",27,"闬闿阇阓阘阛阞阠阣",6,"阫阬阭阯阰阷阸阹阺阾陁陃陊陎陏陑陒陓陖陗"], +["ea80","陘陙陚陜陝陞陠陣陥陦陫陭",4,"陳陸",12,"隇隉隊辍辎辏辘辚軎戋戗戛戟戢戡戥戤戬臧瓯瓴瓿甏甑甓攴旮旯旰昊昙杲昃昕昀炅曷昝昴昱昶昵耆晟晔晁晏晖晡晗晷暄暌暧暝暾曛曜曦曩贲贳贶贻贽赀赅赆赈赉赇赍赕赙觇觊觋觌觎觏觐觑牮犟牝牦牯牾牿犄犋犍犏犒挈挲掰"], +["eb40","隌階隑隒隓隕隖隚際隝",9,"隨",7,"隱隲隴隵隷隸隺隻隿雂雃雈雊雋雐雑雓雔雖",9,"雡",6,"雫"], +["eb80","雬雭雮雰雱雲雴雵雸雺電雼雽雿霂霃霅霊霋霌霐霑霒霔霕霗",4,"霝霟霠搿擘耄毪毳毽毵毹氅氇氆氍氕氘氙氚氡氩氤氪氲攵敕敫牍牒牖爰虢刖肟肜肓肼朊肽肱肫肭肴肷胧胨胩胪胛胂胄胙胍胗朐胝胫胱胴胭脍脎胲胼朕脒豚脶脞脬脘脲腈腌腓腴腙腚腱腠腩腼腽腭腧塍媵膈膂膑滕膣膪臌朦臊膻"], +["ec40","霡",8,"霫霬霮霯霱霳",4,"霺霻霼霽霿",18,"靔靕靗靘靚靜靝靟靣靤靦靧靨靪",7], +["ec80","靲靵靷",4,"靽",7,"鞆",4,"鞌鞎鞏鞐鞓鞕鞖鞗鞙",4,"臁膦欤欷欹歃歆歙飑飒飓飕飙飚殳彀毂觳斐齑斓於旆旄旃旌旎旒旖炀炜炖炝炻烀炷炫炱烨烊焐焓焖焯焱煳煜煨煅煲煊煸煺熘熳熵熨熠燠燔燧燹爝爨灬焘煦熹戾戽扃扈扉礻祀祆祉祛祜祓祚祢祗祠祯祧祺禅禊禚禧禳忑忐"], +["ed40","鞞鞟鞡鞢鞤",6,"鞬鞮鞰鞱鞳鞵",46], +["ed80","韤韥韨韮",4,"韴韷",23,"怼恝恚恧恁恙恣悫愆愍慝憩憝懋懑戆肀聿沓泶淼矶矸砀砉砗砘砑斫砭砜砝砹砺砻砟砼砥砬砣砩硎硭硖硗砦硐硇硌硪碛碓碚碇碜碡碣碲碹碥磔磙磉磬磲礅磴礓礤礞礴龛黹黻黼盱眄眍盹眇眈眚眢眙眭眦眵眸睐睑睇睃睚睨"], +["ee40","頏",62], +["ee80","顎",32,"睢睥睿瞍睽瞀瞌瞑瞟瞠瞰瞵瞽町畀畎畋畈畛畲畹疃罘罡罟詈罨罴罱罹羁罾盍盥蠲钅钆钇钋钊钌钍钏钐钔钗钕钚钛钜钣钤钫钪钭钬钯钰钲钴钶",4,"钼钽钿铄铈",6,"铐铑铒铕铖铗铙铘铛铞铟铠铢铤铥铧铨铪"], +["ef40","顯",5,"颋颎颒颕颙颣風",37,"飏飐飔飖飗飛飜飝飠",4], +["ef80","飥飦飩",30,"铩铫铮铯铳铴铵铷铹铼铽铿锃锂锆锇锉锊锍锎锏锒",4,"锘锛锝锞锟锢锪锫锩锬锱锲锴锶锷锸锼锾锿镂锵镄镅镆镉镌镎镏镒镓镔镖镗镘镙镛镞镟镝镡镢镤",8,"镯镱镲镳锺矧矬雉秕秭秣秫稆嵇稃稂稞稔"], +["f040","餈",4,"餎餏餑",28,"餯",26], +["f080","饊",9,"饖",12,"饤饦饳饸饹饻饾馂馃馉稹稷穑黏馥穰皈皎皓皙皤瓞瓠甬鸠鸢鸨",4,"鸲鸱鸶鸸鸷鸹鸺鸾鹁鹂鹄鹆鹇鹈鹉鹋鹌鹎鹑鹕鹗鹚鹛鹜鹞鹣鹦",6,"鹱鹭鹳疒疔疖疠疝疬疣疳疴疸痄疱疰痃痂痖痍痣痨痦痤痫痧瘃痱痼痿瘐瘀瘅瘌瘗瘊瘥瘘瘕瘙"], +["f140","馌馎馚",10,"馦馧馩",47], +["f180","駙",32,"瘛瘼瘢瘠癀瘭瘰瘿瘵癃瘾瘳癍癞癔癜癖癫癯翊竦穸穹窀窆窈窕窦窠窬窨窭窳衤衩衲衽衿袂袢裆袷袼裉裢裎裣裥裱褚裼裨裾裰褡褙褓褛褊褴褫褶襁襦襻疋胥皲皴矜耒耔耖耜耠耢耥耦耧耩耨耱耋耵聃聆聍聒聩聱覃顸颀颃"], +["f240","駺",62], +["f280","騹",32,"颉颌颍颏颔颚颛颞颟颡颢颥颦虍虔虬虮虿虺虼虻蚨蚍蚋蚬蚝蚧蚣蚪蚓蚩蚶蛄蚵蛎蚰蚺蚱蚯蛉蛏蚴蛩蛱蛲蛭蛳蛐蜓蛞蛴蛟蛘蛑蜃蜇蛸蜈蜊蜍蜉蜣蜻蜞蜥蜮蜚蜾蝈蜴蜱蜩蜷蜿螂蜢蝽蝾蝻蝠蝰蝌蝮螋蝓蝣蝼蝤蝙蝥螓螯螨蟒"], +["f340","驚",17,"驲骃骉骍骎骔骕骙骦骩",6,"骲骳骴骵骹骻骽骾骿髃髄髆",4,"髍髎髏髐髒體髕髖髗髙髚髛髜"], +["f380","髝髞髠髢髣髤髥髧髨髩髪髬髮髰",8,"髺髼",6,"鬄鬅鬆蟆螈螅螭螗螃螫蟥螬螵螳蟋蟓螽蟑蟀蟊蟛蟪蟠蟮蠖蠓蟾蠊蠛蠡蠹蠼缶罂罄罅舐竺竽笈笃笄笕笊笫笏筇笸笪笙笮笱笠笥笤笳笾笞筘筚筅筵筌筝筠筮筻筢筲筱箐箦箧箸箬箝箨箅箪箜箢箫箴篑篁篌篝篚篥篦篪簌篾篼簏簖簋"], +["f440","鬇鬉",5,"鬐鬑鬒鬔",10,"鬠鬡鬢鬤",10,"鬰鬱鬳",7,"鬽鬾鬿魀魆魊魋魌魎魐魒魓魕",5], +["f480","魛",32,"簟簪簦簸籁籀臾舁舂舄臬衄舡舢舣舭舯舨舫舸舻舳舴舾艄艉艋艏艚艟艨衾袅袈裘裟襞羝羟羧羯羰羲籼敉粑粝粜粞粢粲粼粽糁糇糌糍糈糅糗糨艮暨羿翎翕翥翡翦翩翮翳糸絷綦綮繇纛麸麴赳趄趔趑趱赧赭豇豉酊酐酎酏酤"], +["f540","魼",62], +["f580","鮻",32,"酢酡酰酩酯酽酾酲酴酹醌醅醐醍醑醢醣醪醭醮醯醵醴醺豕鹾趸跫踅蹙蹩趵趿趼趺跄跖跗跚跞跎跏跛跆跬跷跸跣跹跻跤踉跽踔踝踟踬踮踣踯踺蹀踹踵踽踱蹉蹁蹂蹑蹒蹊蹰蹶蹼蹯蹴躅躏躔躐躜躞豸貂貊貅貘貔斛觖觞觚觜"], +["f640","鯜",62], +["f680","鰛",32,"觥觫觯訾謦靓雩雳雯霆霁霈霏霎霪霭霰霾龀龃龅",5,"龌黾鼋鼍隹隼隽雎雒瞿雠銎銮鋈錾鍪鏊鎏鐾鑫鱿鲂鲅鲆鲇鲈稣鲋鲎鲐鲑鲒鲔鲕鲚鲛鲞",5,"鲥",4,"鲫鲭鲮鲰",7,"鲺鲻鲼鲽鳄鳅鳆鳇鳊鳋"], +["f740","鰼",62], +["f780","鱻鱽鱾鲀鲃鲄鲉鲊鲌鲏鲓鲖鲗鲘鲙鲝鲪鲬鲯鲹鲾",4,"鳈鳉鳑鳒鳚鳛鳠鳡鳌",4,"鳓鳔鳕鳗鳘鳙鳜鳝鳟鳢靼鞅鞑鞒鞔鞯鞫鞣鞲鞴骱骰骷鹘骶骺骼髁髀髅髂髋髌髑魅魃魇魉魈魍魑飨餍餮饕饔髟髡髦髯髫髻髭髹鬈鬏鬓鬟鬣麽麾縻麂麇麈麋麒鏖麝麟黛黜黝黠黟黢黩黧黥黪黯鼢鼬鼯鼹鼷鼽鼾齄"], +["f840","鳣",62], +["f880","鴢",32], +["f940","鵃",62], +["f980","鶂",32], +["fa40","鶣",62], +["fa80","鷢",32], +["fb40","鸃",27,"鸤鸧鸮鸰鸴鸻鸼鹀鹍鹐鹒鹓鹔鹖鹙鹝鹟鹠鹡鹢鹥鹮鹯鹲鹴",9,"麀"], +["fb80","麁麃麄麅麆麉麊麌",5,"麔",8,"麞麠",5,"麧麨麩麪"], +["fc40","麫",8,"麵麶麷麹麺麼麿",4,"黅黆黇黈黊黋黌黐黒黓黕黖黗黙黚點黡黣黤黦黨黫黬黭黮黰",8,"黺黽黿",6], +["fc80","鼆",4,"鼌鼏鼑鼒鼔鼕鼖鼘鼚",5,"鼡鼣",8,"鼭鼮鼰鼱"], +["fd40","鼲",4,"鼸鼺鼼鼿",4,"齅",10,"齒",38], +["fd80","齹",5,"龁龂龍",11,"龜龝龞龡",4,"郎凉秊裏隣"], +["fe40","兀嗀﨎﨏﨑﨓﨔礼﨟蘒﨡﨣﨤﨧﨨﨩"] +] diff --git a/node_modules/iconv-lite/encodings/tables/cp949.json b/node_modules/iconv-lite/encodings/tables/cp949.json new file mode 100644 index 0000000..2022a00 --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/cp949.json @@ -0,0 +1,273 @@ +[ 
+["0","\u0000",127], +["8141","갂갃갅갆갋",4,"갘갞갟갡갢갣갥",6,"갮갲갳갴"], +["8161","갵갶갷갺갻갽갾갿걁",9,"걌걎",5,"걕"], +["8181","걖걗걙걚걛걝",18,"걲걳걵걶걹걻",4,"겂겇겈겍겎겏겑겒겓겕",6,"겞겢",5,"겫겭겮겱",6,"겺겾겿곀곂곃곅곆곇곉곊곋곍",7,"곖곘",7,"곢곣곥곦곩곫곭곮곲곴곷",4,"곾곿괁괂괃괅괇",4,"괎괐괒괓"], +["8241","괔괕괖괗괙괚괛괝괞괟괡",7,"괪괫괮",5], +["8261","괶괷괹괺괻괽",6,"굆굈굊",5,"굑굒굓굕굖굗"], +["8281","굙",7,"굢굤",7,"굮굯굱굲굷굸굹굺굾궀궃",4,"궊궋궍궎궏궑",10,"궞",5,"궥",17,"궸",7,"귂귃귅귆귇귉",6,"귒귔",7,"귝귞귟귡귢귣귥",18], +["8341","귺귻귽귾긂",5,"긊긌긎",5,"긕",7], +["8361","긝",18,"긲긳긵긶긹긻긼"], +["8381","긽긾긿깂깄깇깈깉깋깏깑깒깓깕깗",4,"깞깢깣깤깦깧깪깫깭깮깯깱",6,"깺깾",5,"꺆",5,"꺍",46,"꺿껁껂껃껅",6,"껎껒",5,"껚껛껝",8], +["8441","껦껧껩껪껬껮",5,"껵껶껷껹껺껻껽",8], +["8461","꼆꼉꼊꼋꼌꼎꼏꼑",18], +["8481","꼤",7,"꼮꼯꼱꼳꼵",6,"꼾꽀꽄꽅꽆꽇꽊",5,"꽑",10,"꽞",5,"꽦",18,"꽺",5,"꾁꾂꾃꾅꾆꾇꾉",6,"꾒꾓꾔꾖",5,"꾝",26,"꾺꾻꾽꾾"], +["8541","꾿꿁",5,"꿊꿌꿏",4,"꿕",6,"꿝",4], +["8561","꿢",5,"꿪",5,"꿲꿳꿵꿶꿷꿹",6,"뀂뀃"], +["8581","뀅",6,"뀍뀎뀏뀑뀒뀓뀕",6,"뀞",9,"뀩",26,"끆끇끉끋끍끏끐끑끒끖끘끚끛끜끞",29,"끾끿낁낂낃낅",6,"낎낐낒",5,"낛낝낞낣낤"], +["8641","낥낦낧낪낰낲낶낷낹낺낻낽",6,"냆냊",5,"냒"], +["8661","냓냕냖냗냙",6,"냡냢냣냤냦",10], +["8681","냱",22,"넊넍넎넏넑넔넕넖넗넚넞",4,"넦넧넩넪넫넭",6,"넶넺",5,"녂녃녅녆녇녉",6,"녒녓녖녗녙녚녛녝녞녟녡",22,"녺녻녽녾녿놁놃",4,"놊놌놎놏놐놑놕놖놗놙놚놛놝"], +["8741","놞",9,"놩",15], +["8761","놹",18,"뇍뇎뇏뇑뇒뇓뇕"], +["8781","뇖",5,"뇞뇠",7,"뇪뇫뇭뇮뇯뇱",7,"뇺뇼뇾",5,"눆눇눉눊눍",6,"눖눘눚",5,"눡",18,"눵",6,"눽",26,"뉙뉚뉛뉝뉞뉟뉡",6,"뉪",4], +["8841","뉯",4,"뉶",5,"뉽",6,"늆늇늈늊",4], +["8861","늏늒늓늕늖늗늛",4,"늢늤늧늨늩늫늭늮늯늱늲늳늵늶늷"], +["8881","늸",15,"닊닋닍닎닏닑닓",4,"닚닜닞닟닠닡닣닧닩닪닰닱닲닶닼닽닾댂댃댅댆댇댉",6,"댒댖",5,"댝",54,"덗덙덚덝덠덡덢덣"], +["8941","덦덨덪덬덭덯덲덳덵덶덷덹",6,"뎂뎆",5,"뎍"], +["8961","뎎뎏뎑뎒뎓뎕",10,"뎢",5,"뎩뎪뎫뎭"], +["8981","뎮",21,"돆돇돉돊돍돏돑돒돓돖돘돚돜돞돟돡돢돣돥돦돧돩",18,"돽",18,"됑",6,"됙됚됛됝됞됟됡",6,"됪됬",7,"됵",15], +["8a41","둅",10,"둒둓둕둖둗둙",6,"둢둤둦"], +["8a61","둧",4,"둭",18,"뒁뒂"], +["8a81","뒃",4,"뒉",19,"뒞",5,"뒥뒦뒧뒩뒪뒫뒭",7,"뒶뒸뒺",5,"듁듂듃듅듆듇듉",6,"듑듒듓듔듖",5,"듞듟듡듢듥듧",4,"듮듰듲",5,"듹",26,"딖딗딙딚딝"], +["8b41","딞",5,"딦딫",4,"딲딳딵딶딷딹",6,"땂땆"], +["8b61","땇땈땉땊땎땏땑땒땓땕",6,"땞땢",8], +["8b81","땫",52,"떢떣떥떦떧떩떬떭떮떯떲떶",4,"떾떿뗁뗂뗃뗅",6,"뗎뗒",5,"뗙",18,"뗭",18], +["8c41","똀",15,"똒똓똕똖똗똙",4], +["8c61","똞",6,"똦",5,"똭",6,"똵",5], +["8c81","똻",12,"뙉",26,"뙥뙦뙧뙩",50,"뚞뚟뚡뚢뚣뚥",5,"뚭뚮뚯뚰뚲",16], +["8d41","뛃",16,"뛕",8], +["8d61","뛞",17,"뛱뛲뛳뛵뛶뛷뛹뛺"], +["8d81","뛻",4,"뜂뜃뜄뜆",33,"뜪뜫뜭뜮뜱",6,"뜺뜼",7,"띅띆띇띉띊띋띍",6,"띖",9,"띡띢띣띥띦띧띩",6,"띲띴띶",5,"띾띿랁랂랃랅",6,"랎랓랔랕랚랛랝랞"], +["8e41","랟랡",6,"랪랮",5,"랶랷랹",8], +["8e61","럂",4,"럈럊",19], +["8e81","럞",13,"럮럯럱럲럳럵",6,"럾렂",4,"렊렋렍렎렏렑",6,"렚렜렞",5,"렦렧렩렪렫렭",6,"렶렺",5,"롁롂롃롅",11,"롒롔",7,"롞롟롡롢롣롥",6,"롮롰롲",5,"롹롺롻롽",7], +["8f41","뢅",7,"뢎",17], +["8f61","뢠",7,"뢩",6,"뢱뢲뢳뢵뢶뢷뢹",4], +["8f81","뢾뢿룂룄룆",5,"룍룎룏룑룒룓룕",7,"룞룠룢",5,"룪룫룭룮룯룱",6,"룺룼룾",5,"뤅",18,"뤙",6,"뤡",26,"뤾뤿륁륂륃륅",6,"륍륎륐륒",5], +["9041","륚륛륝륞륟륡",6,"륪륬륮",5,"륶륷륹륺륻륽"], +["9061","륾",5,"릆릈릋릌릏",15], +["9081","릟",12,"릮릯릱릲릳릵",6,"릾맀맂",5,"맊맋맍맓",4,"맚맜맟맠맢맦맧맩맪맫맭",6,"맶맻",4,"먂",5,"먉",11,"먖",33,"먺먻먽먾먿멁멃멄멅멆"], +["9141","멇멊멌멏멐멑멒멖멗멙멚멛멝",6,"멦멪",5], +["9161","멲멳멵멶멷멹",9,"몆몈몉몊몋몍",5], +["9181","몓",20,"몪몭몮몯몱몳",4,"몺몼몾",5,"뫅뫆뫇뫉",14,"뫚",33,"뫽뫾뫿묁묂묃묅",7,"묎묐묒",5,"묙묚묛묝묞묟묡",6], +["9241","묨묪묬",7,"묷묹묺묿",4,"뭆뭈뭊뭋뭌뭎뭑뭒"], +["9261","뭓뭕뭖뭗뭙",7,"뭢뭤",7,"뭭",4], +["9281","뭲",21,"뮉뮊뮋뮍뮎뮏뮑",18,"뮥뮦뮧뮩뮪뮫뮭",6,"뮵뮶뮸",7,"믁믂믃믅믆믇믉",6,"믑믒믔",35,"믺믻믽믾밁"], +["9341","밃",4,"밊밎밐밒밓밙밚밠밡밢밣밦밨밪밫밬밮밯밲밳밵"], +["9361","밶밷밹",6,"뱂뱆뱇뱈뱊뱋뱎뱏뱑",8], +["9381","뱚뱛뱜뱞",37,"벆벇벉벊벍벏",4,"벖벘벛",4,"벢벣벥벦벩",6,"벲벶",5,"벾벿볁볂볃볅",7,"볎볒볓볔볖볗볙볚볛볝",22,"볷볹볺볻볽"], +["9441","볾",5,"봆봈봊",5,"봑봒봓봕",8], +["9461","봞",5,"봥",6,"봭",12], +["9481","봺",5,"뵁",6,"뵊뵋뵍뵎뵏뵑",6,"뵚",9,"뵥뵦뵧뵩",22,"붂붃붅붆붋",4,"붒붔붖붗붘붛붝",6,"붥",10,"붱",6,"붹",24], +["9541","뷒뷓뷖뷗뷙뷚뷛뷝",11,"뷪",5,"뷱"], +["9561","뷲뷳뷵뷶뷷뷹",6,"븁븂븄븆",5,"븎븏븑븒븓"], +["9581","븕",6,"븞븠",35,"빆빇빉빊빋빍빏",4,"빖빘빜빝빞빟빢빣빥빦빧빩빫",4,"빲빶",4,"빾빿뺁뺂뺃뺅",6,"뺎뺒",5,"뺚",13,"뺩",14], +["9641","뺸",23,"뻒뻓"], +["9661","뻕뻖뻙",6,"뻡뻢뻦",5,"뻭",8], +["9681","뻶",10,"뼂",5,"뼊",13,"뼚뼞",33,"뽂뽃뽅뽆뽇뽉",6,"뽒뽓뽔뽖",44], 
+["9741","뾃",16,"뾕",8], +["9761","뾞",17,"뾱",7], +["9781","뾹",11,"뿆",5,"뿎뿏뿑뿒뿓뿕",6,"뿝뿞뿠뿢",89,"쀽쀾쀿"], +["9841","쁀",16,"쁒",5,"쁙쁚쁛"], +["9861","쁝쁞쁟쁡",6,"쁪",15], +["9881","쁺",21,"삒삓삕삖삗삙",6,"삢삤삦",5,"삮삱삲삷",4,"삾샂샃샄샆샇샊샋샍샎샏샑",6,"샚샞",5,"샦샧샩샪샫샭",6,"샶샸샺",5,"섁섂섃섅섆섇섉",6,"섑섒섓섔섖",5,"섡섢섥섨섩섪섫섮"], +["9941","섲섳섴섵섷섺섻섽섾섿셁",6,"셊셎",5,"셖셗"], +["9961","셙셚셛셝",6,"셦셪",5,"셱셲셳셵셶셷셹셺셻"], +["9981","셼",8,"솆",5,"솏솑솒솓솕솗",4,"솞솠솢솣솤솦솧솪솫솭솮솯솱",11,"솾",5,"쇅쇆쇇쇉쇊쇋쇍",6,"쇕쇖쇙",6,"쇡쇢쇣쇥쇦쇧쇩",6,"쇲쇴",7,"쇾쇿숁숂숃숅",6,"숎숐숒",5,"숚숛숝숞숡숢숣"], +["9a41","숤숥숦숧숪숬숮숰숳숵",16], +["9a61","쉆쉇쉉",6,"쉒쉓쉕쉖쉗쉙",6,"쉡쉢쉣쉤쉦"], +["9a81","쉧",4,"쉮쉯쉱쉲쉳쉵",6,"쉾슀슂",5,"슊",5,"슑",6,"슙슚슜슞",5,"슦슧슩슪슫슮",5,"슶슸슺",33,"싞싟싡싢싥",5,"싮싰싲싳싴싵싷싺싽싾싿쌁",6,"쌊쌋쌎쌏"], +["9b41","쌐쌑쌒쌖쌗쌙쌚쌛쌝",6,"쌦쌧쌪",8], +["9b61","쌳",17,"썆",7], +["9b81","썎",25,"썪썫썭썮썯썱썳",4,"썺썻썾",5,"쎅쎆쎇쎉쎊쎋쎍",50,"쏁",22,"쏚"], +["9c41","쏛쏝쏞쏡쏣",4,"쏪쏫쏬쏮",5,"쏶쏷쏹",5], +["9c61","쏿",8,"쐉",6,"쐑",9], +["9c81","쐛",8,"쐥",6,"쐭쐮쐯쐱쐲쐳쐵",6,"쐾",9,"쑉",26,"쑦쑧쑩쑪쑫쑭",6,"쑶쑷쑸쑺",5,"쒁",18,"쒕",6,"쒝",12], +["9d41","쒪",13,"쒹쒺쒻쒽",8], +["9d61","쓆",25], +["9d81","쓠",8,"쓪",5,"쓲쓳쓵쓶쓷쓹쓻쓼쓽쓾씂",9,"씍씎씏씑씒씓씕",6,"씝",10,"씪씫씭씮씯씱",6,"씺씼씾",5,"앆앇앋앏앐앑앒앖앚앛앜앟앢앣앥앦앧앩",6,"앲앶",5,"앾앿얁얂얃얅얆얈얉얊얋얎얐얒얓얔"], +["9e41","얖얙얚얛얝얞얟얡",7,"얪",9,"얶"], +["9e61","얷얺얿",4,"엋엍엏엒엓엕엖엗엙",6,"엢엤엦엧"], +["9e81","엨엩엪엫엯엱엲엳엵엸엹엺엻옂옃옄옉옊옋옍옎옏옑",6,"옚옝",6,"옦옧옩옪옫옯옱옲옶옸옺옼옽옾옿왂왃왅왆왇왉",6,"왒왖",5,"왞왟왡",10,"왭왮왰왲",5,"왺왻왽왾왿욁",6,"욊욌욎",5,"욖욗욙욚욛욝",6,"욦"], +["9f41","욨욪",5,"욲욳욵욶욷욻",4,"웂웄웆",5,"웎"], +["9f61","웏웑웒웓웕",6,"웞웟웢",5,"웪웫웭웮웯웱웲"], +["9f81","웳",4,"웺웻웼웾",5,"윆윇윉윊윋윍",6,"윖윘윚",5,"윢윣윥윦윧윩",6,"윲윴윶윸윹윺윻윾윿읁읂읃읅",4,"읋읎읐읙읚읛읝읞읟읡",6,"읩읪읬",7,"읶읷읹읺읻읿잀잁잂잆잋잌잍잏잒잓잕잙잛",4,"잢잧",4,"잮잯잱잲잳잵잶잷"], +["a041","잸잹잺잻잾쟂",5,"쟊쟋쟍쟏쟑",6,"쟙쟚쟛쟜"], +["a061","쟞",5,"쟥쟦쟧쟩쟪쟫쟭",13], +["a081","쟻",4,"젂젃젅젆젇젉젋",4,"젒젔젗",4,"젞젟젡젢젣젥",6,"젮젰젲",5,"젹젺젻젽젾젿졁",6,"졊졋졎",5,"졕",26,"졲졳졵졶졷졹졻",4,"좂좄좈좉좊좎",5,"좕",7,"좞좠좢좣좤"], +["a141","좥좦좧좩",18,"좾좿죀죁"], +["a161","죂죃죅죆죇죉죊죋죍",6,"죖죘죚",5,"죢죣죥"], +["a181","죦",14,"죶",5,"죾죿줁줂줃줇",4,"줎 、。·‥…¨〃­―∥\∼‘’“”〔〕〈",9,"±×÷≠≤≥∞∴°′″℃Å¢£¥♂♀∠⊥⌒∂∇≡≒§※☆★○●◎◇◆□■△▲▽▼→←↑↓↔〓≪≫√∽∝∵∫∬∈∋⊆⊇⊂⊃∪∩∧∨¬"], +["a241","줐줒",5,"줙",18], +["a261","줭",6,"줵",18], +["a281","쥈",7,"쥒쥓쥕쥖쥗쥙",6,"쥢쥤",7,"쥭쥮쥯⇒⇔∀∃´~ˇ˘˝˚˙¸˛¡¿ː∮∑∏¤℉‰◁◀▷▶♤♠♡♥♧♣⊙◈▣◐◑▒▤▥▨▧▦▩♨☏☎☜☞¶†‡↕↗↙↖↘♭♩♪♬㉿㈜№㏇™㏂㏘℡€®"], +["a341","쥱쥲쥳쥵",6,"쥽",10,"즊즋즍즎즏"], +["a361","즑",6,"즚즜즞",16], +["a381","즯",16,"짂짃짅짆짉짋",4,"짒짔짗짘짛!",58,"₩]",32," ̄"], +["a441","짞짟짡짣짥짦짨짩짪짫짮짲",5,"짺짻짽짾짿쨁쨂쨃쨄"], +["a461","쨅쨆쨇쨊쨎",5,"쨕쨖쨗쨙",12], +["a481","쨦쨧쨨쨪",28,"ㄱ",93], +["a541","쩇",4,"쩎쩏쩑쩒쩓쩕",6,"쩞쩢",5,"쩩쩪"], +["a561","쩫",17,"쩾",5,"쪅쪆"], +["a581","쪇",16,"쪙",14,"ⅰ",9], +["a5b0","Ⅰ",9], +["a5c1","Α",16,"Σ",6], +["a5e1","α",16,"σ",6], +["a641","쪨",19,"쪾쪿쫁쫂쫃쫅"], +["a661","쫆",5,"쫎쫐쫒쫔쫕쫖쫗쫚",5,"쫡",6], +["a681","쫨쫩쫪쫫쫭",6,"쫵",18,"쬉쬊─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂┒┑┚┙┖┕┎┍┞┟┡┢┦┧┩┪┭┮┱┲┵┶┹┺┽┾╀╁╃",7], +["a741","쬋",4,"쬑쬒쬓쬕쬖쬗쬙",6,"쬢",7], +["a761","쬪",22,"쭂쭃쭄"], +["a781","쭅쭆쭇쭊쭋쭍쭎쭏쭑",6,"쭚쭛쭜쭞",5,"쭥",7,"㎕㎖㎗ℓ㎘㏄㎣㎤㎥㎦㎙",9,"㏊㎍㎎㎏㏏㎈㎉㏈㎧㎨㎰",9,"㎀",4,"㎺",5,"㎐",4,"Ω㏀㏁㎊㎋㎌㏖㏅㎭㎮㎯㏛㎩㎪㎫㎬㏝㏐㏓㏃㏉㏜㏆"], +["a841","쭭",10,"쭺",14], +["a861","쮉",18,"쮝",6], +["a881","쮤",19,"쮹",11,"ÆÐªĦ"], +["a8a6","IJ"], +["a8a8","ĿŁØŒºÞŦŊ"], +["a8b1","㉠",27,"ⓐ",25,"①",14,"½⅓⅔¼¾⅛⅜⅝⅞"], +["a941","쯅",14,"쯕",10], +["a961","쯠쯡쯢쯣쯥쯦쯨쯪",18], +["a981","쯽",14,"찎찏찑찒찓찕",6,"찞찟찠찣찤æđðħıijĸŀłøœßþŧŋʼn㈀",27,"⒜",25,"⑴",14,"¹²³⁴ⁿ₁₂₃₄"], +["aa41","찥찦찪찫찭찯찱",6,"찺찿",4,"챆챇챉챊챋챍챎"], +["aa61","챏",4,"챖챚",5,"챡챢챣챥챧챩",6,"챱챲"], +["aa81","챳챴챶",29,"ぁ",82], +["ab41","첔첕첖첗첚첛첝첞첟첡",6,"첪첮",5,"첶첷첹"], +["ab61","첺첻첽",6,"쳆쳈쳊",5,"쳑쳒쳓쳕",5], +["ab81","쳛",8,"쳥",6,"쳭쳮쳯쳱",12,"ァ",85], +["ac41","쳾쳿촀촂",5,"촊촋촍촎촏촑",6,"촚촜촞촟촠"], +["ac61","촡촢촣촥촦촧촩촪촫촭",11,"촺",4], +["ac81","촿",28,"쵝쵞쵟А",5,"ЁЖ",25], +["acd1","а",5,"ёж",25], +["ad41","쵡쵢쵣쵥",6,"쵮쵰쵲",5,"쵹",7], +["ad61","춁",6,"춉",10,"춖춗춙춚춛춝춞춟"], +["ad81","춠춡춢춣춦춨춪",5,"춱",18,"췅"], 
+["ae41","췆",5,"췍췎췏췑",16], +["ae61","췢",5,"췩췪췫췭췮췯췱",6,"췺췼췾",4], +["ae81","츃츅츆츇츉츊츋츍",6,"츕츖츗츘츚",5,"츢츣츥츦츧츩츪츫"], +["af41","츬츭츮츯츲츴츶",19], +["af61","칊",13,"칚칛칝칞칢",5,"칪칬"], +["af81","칮",5,"칶칷칹칺칻칽",6,"캆캈캊",5,"캒캓캕캖캗캙"], +["b041","캚",5,"캢캦",5,"캮",12], +["b061","캻",5,"컂",19], +["b081","컖",13,"컦컧컩컪컭",6,"컶컺",5,"가각간갇갈갉갊감",7,"같",4,"갠갤갬갭갯갰갱갸갹갼걀걋걍걔걘걜거걱건걷걸걺검겁것겄겅겆겉겊겋게겐겔겜겝겟겠겡겨격겪견겯결겸겹겻겼경곁계곈곌곕곗고곡곤곧골곪곬곯곰곱곳공곶과곽관괄괆"], +["b141","켂켃켅켆켇켉",6,"켒켔켖",5,"켝켞켟켡켢켣"], +["b161","켥",6,"켮켲",5,"켹",11], +["b181","콅",14,"콖콗콙콚콛콝",6,"콦콨콪콫콬괌괍괏광괘괜괠괩괬괭괴괵괸괼굄굅굇굉교굔굘굡굣구국군굳굴굵굶굻굼굽굿궁궂궈궉권궐궜궝궤궷귀귁귄귈귐귑귓규균귤그극근귿글긁금급긋긍긔기긱긴긷길긺김깁깃깅깆깊까깍깎깐깔깖깜깝깟깠깡깥깨깩깬깰깸"], +["b241","콭콮콯콲콳콵콶콷콹",6,"쾁쾂쾃쾄쾆",5,"쾍"], +["b261","쾎",18,"쾢",5,"쾩"], +["b281","쾪",5,"쾱",18,"쿅",6,"깹깻깼깽꺄꺅꺌꺼꺽꺾껀껄껌껍껏껐껑께껙껜껨껫껭껴껸껼꼇꼈꼍꼐꼬꼭꼰꼲꼴꼼꼽꼿꽁꽂꽃꽈꽉꽐꽜꽝꽤꽥꽹꾀꾄꾈꾐꾑꾕꾜꾸꾹꾼꿀꿇꿈꿉꿋꿍꿎꿔꿜꿨꿩꿰꿱꿴꿸뀀뀁뀄뀌뀐뀔뀜뀝뀨끄끅끈끊끌끎끓끔끕끗끙"], +["b341","쿌",19,"쿢쿣쿥쿦쿧쿩"], +["b361","쿪",5,"쿲쿴쿶",5,"쿽쿾쿿퀁퀂퀃퀅",5], +["b381","퀋",5,"퀒",5,"퀙",19,"끝끼끽낀낄낌낍낏낑나낙낚난낟날낡낢남납낫",4,"낱낳내낵낸낼냄냅냇냈냉냐냑냔냘냠냥너넉넋넌널넒넓넘넙넛넜넝넣네넥넨넬넴넵넷넸넹녀녁년녈념녑녔녕녘녜녠노녹논놀놂놈놉놋농높놓놔놘놜놨뇌뇐뇔뇜뇝"], +["b441","퀮",5,"퀶퀷퀹퀺퀻퀽",6,"큆큈큊",5], +["b461","큑큒큓큕큖큗큙",6,"큡",10,"큮큯"], +["b481","큱큲큳큵",6,"큾큿킀킂",18,"뇟뇨뇩뇬뇰뇹뇻뇽누눅눈눋눌눔눕눗눙눠눴눼뉘뉜뉠뉨뉩뉴뉵뉼늄늅늉느늑는늘늙늚늠늡늣능늦늪늬늰늴니닉닌닐닒님닙닛닝닢다닥닦단닫",4,"닳담답닷",4,"닿대댁댄댈댐댑댓댔댕댜더덕덖던덛덜덞덟덤덥"], +["b541","킕",14,"킦킧킩킪킫킭",5], +["b561","킳킶킸킺",5,"탂탃탅탆탇탊",5,"탒탖",4], +["b581","탛탞탟탡탢탣탥",6,"탮탲",5,"탹",11,"덧덩덫덮데덱덴델뎀뎁뎃뎄뎅뎌뎐뎔뎠뎡뎨뎬도독돈돋돌돎돐돔돕돗동돛돝돠돤돨돼됐되된될됨됩됫됴두둑둔둘둠둡둣둥둬뒀뒈뒝뒤뒨뒬뒵뒷뒹듀듄듈듐듕드득든듣들듦듬듭듯등듸디딕딘딛딜딤딥딧딨딩딪따딱딴딸"], +["b641","턅",7,"턎",17], +["b661","턠",15,"턲턳턵턶턷턹턻턼턽턾"], +["b681","턿텂텆",5,"텎텏텑텒텓텕",6,"텞텠텢",5,"텩텪텫텭땀땁땃땄땅땋때땍땐땔땜땝땟땠땡떠떡떤떨떪떫떰떱떳떴떵떻떼떽뗀뗄뗌뗍뗏뗐뗑뗘뗬또똑똔똘똥똬똴뙈뙤뙨뚜뚝뚠뚤뚫뚬뚱뛔뛰뛴뛸뜀뜁뜅뜨뜩뜬뜯뜰뜸뜹뜻띄띈띌띔띕띠띤띨띰띱띳띵라락란랄람랍랏랐랑랒랖랗"], +["b741","텮",13,"텽",6,"톅톆톇톉톊"], +["b761","톋",20,"톢톣톥톦톧"], +["b781","톩",6,"톲톴톶톷톸톹톻톽톾톿퇁",14,"래랙랜랠램랩랫랬랭랴략랸럇량러럭런럴럼럽럿렀렁렇레렉렌렐렘렙렛렝려력련렬렴렵렷렸령례롄롑롓로록론롤롬롭롯롱롸롼뢍뢨뢰뢴뢸룀룁룃룅료룐룔룝룟룡루룩룬룰룸룹룻룽뤄뤘뤠뤼뤽륀륄륌륏륑류륙륜률륨륩"], +["b841","퇐",7,"퇙",17], +["b861","퇫",8,"퇵퇶퇷퇹",13], +["b881","툈툊",5,"툑",24,"륫륭르륵른를름릅릇릉릊릍릎리릭린릴림립릿링마막만많",4,"맘맙맛망맞맡맣매맥맨맬맴맵맷맸맹맺먀먁먈먕머먹먼멀멂멈멉멋멍멎멓메멕멘멜멤멥멧멨멩며멱면멸몃몄명몇몌모목몫몬몰몲몸몹못몽뫄뫈뫘뫙뫼"], +["b941","툪툫툮툯툱툲툳툵",6,"툾퉀퉂",5,"퉉퉊퉋퉌"], +["b961","퉍",14,"퉝",6,"퉥퉦퉧퉨"], +["b981","퉩",22,"튂튃튅튆튇튉튊튋튌묀묄묍묏묑묘묜묠묩묫무묵묶문묻물묽묾뭄뭅뭇뭉뭍뭏뭐뭔뭘뭡뭣뭬뮈뮌뮐뮤뮨뮬뮴뮷므믄믈믐믓미믹민믿밀밂밈밉밋밌밍및밑바",4,"받",4,"밤밥밧방밭배백밴밸뱀뱁뱃뱄뱅뱉뱌뱍뱐뱝버벅번벋벌벎범법벗"], +["ba41","튍튎튏튒튓튔튖",5,"튝튞튟튡튢튣튥",6,"튭"], +["ba61","튮튯튰튲",5,"튺튻튽튾틁틃",4,"틊틌",5], +["ba81","틒틓틕틖틗틙틚틛틝",6,"틦",9,"틲틳틵틶틷틹틺벙벚베벡벤벧벨벰벱벳벴벵벼벽변별볍볏볐병볕볘볜보복볶본볼봄봅봇봉봐봔봤봬뵀뵈뵉뵌뵐뵘뵙뵤뵨부북분붇불붉붊붐붑붓붕붙붚붜붤붰붸뷔뷕뷘뷜뷩뷰뷴뷸븀븃븅브븍븐블븜븝븟비빅빈빌빎빔빕빗빙빚빛빠빡빤"], +["bb41","틻",4,"팂팄팆",5,"팏팑팒팓팕팗",4,"팞팢팣"], +["bb61","팤팦팧팪팫팭팮팯팱",6,"팺팾",5,"퍆퍇퍈퍉"], +["bb81","퍊",31,"빨빪빰빱빳빴빵빻빼빽뺀뺄뺌뺍뺏뺐뺑뺘뺙뺨뻐뻑뻔뻗뻘뻠뻣뻤뻥뻬뼁뼈뼉뼘뼙뼛뼜뼝뽀뽁뽄뽈뽐뽑뽕뾔뾰뿅뿌뿍뿐뿔뿜뿟뿡쀼쁑쁘쁜쁠쁨쁩삐삑삔삘삠삡삣삥사삭삯산삳살삵삶삼삽삿샀상샅새색샌샐샘샙샛샜생샤"], +["bc41","퍪",17,"퍾퍿펁펂펃펅펆펇"], +["bc61","펈펉펊펋펎펒",5,"펚펛펝펞펟펡",6,"펪펬펮"], +["bc81","펯",4,"펵펶펷펹펺펻펽",6,"폆폇폊",5,"폑",5,"샥샨샬샴샵샷샹섀섄섈섐섕서",4,"섣설섦섧섬섭섯섰성섶세섹센셀셈셉셋셌셍셔셕션셜셤셥셧셨셩셰셴셸솅소속솎손솔솖솜솝솟송솥솨솩솬솰솽쇄쇈쇌쇔쇗쇘쇠쇤쇨쇰쇱쇳쇼쇽숀숄숌숍숏숑수숙순숟술숨숩숫숭"], +["bd41","폗폙",7,"폢폤",7,"폮폯폱폲폳폵폶폷"], +["bd61","폸폹폺폻폾퐀퐂",5,"퐉",13], +["bd81","퐗",5,"퐞",25,"숯숱숲숴쉈쉐쉑쉔쉘쉠쉥쉬쉭쉰쉴쉼쉽쉿슁슈슉슐슘슛슝스슥슨슬슭슴습슷승시식신싣실싫심십싯싱싶싸싹싻싼쌀쌈쌉쌌쌍쌓쌔쌕쌘쌜쌤쌥쌨쌩썅써썩썬썰썲썸썹썼썽쎄쎈쎌쏀쏘쏙쏜쏟쏠쏢쏨쏩쏭쏴쏵쏸쐈쐐쐤쐬쐰"], +["be41","퐸",7,"푁푂푃푅",14], +["be61","푔",7,"푝푞푟푡푢푣푥",7,"푮푰푱푲"], +["be81","푳",4,"푺푻푽푾풁풃",4,"풊풌풎",5,"풕",8,"쐴쐼쐽쑈쑤쑥쑨쑬쑴쑵쑹쒀쒔쒜쒸쒼쓩쓰쓱쓴쓸쓺쓿씀씁씌씐씔씜씨씩씬씰씸씹씻씽아악안앉않알앍앎앓암압앗았앙앝앞애액앤앨앰앱앳앴앵야약얀얄얇얌얍얏양얕얗얘얜얠얩어억언얹얻얼얽얾엄",6,"엌엎"], +["bf41","풞",10,"풪",14], +["bf61","풹",18,"퓍퓎퓏퓑퓒퓓퓕"], +["bf81","퓖",5,"퓝퓞퓠",7,"퓩퓪퓫퓭퓮퓯퓱",6,"퓹퓺퓼에엑엔엘엠엡엣엥여역엮연열엶엷염",5,"옅옆옇예옌옐옘옙옛옜오옥온올옭옮옰옳옴옵옷옹옻와왁완왈왐왑왓왔왕왜왝왠왬왯왱외왹왼욀욈욉욋욍요욕욘욜욤욥욧용우욱운울욹욺움웁웃웅워웍원월웜웝웠웡웨"], +["c041","퓾",5,"픅픆픇픉픊픋픍",6,"픖픘",5], +["c061","픞",25], +["c081","픸픹픺픻픾픿핁핂핃핅",6,"핎핐핒",5,"핚핛핝핞핟핡핢핣웩웬웰웸웹웽위윅윈윌윔윕윗윙유육윤율윰윱윳융윷으윽은을읊음읍읏응",7,"읜읠읨읫이익인일읽읾잃임입잇있잉잊잎자작잔잖잗잘잚잠잡잣잤장잦재잭잰잴잼잽잿쟀쟁쟈쟉쟌쟎쟐쟘쟝쟤쟨쟬저적전절젊"], 
+["c141","핤핦핧핪핬핮",5,"핶핷핹핺핻핽",6,"햆햊햋"], +["c161","햌햍햎햏햑",19,"햦햧"], +["c181","햨",31,"점접젓정젖제젝젠젤젬젭젯젱져젼졀졈졉졌졍졔조족존졸졺좀좁좃종좆좇좋좌좍좔좝좟좡좨좼좽죄죈죌죔죕죗죙죠죡죤죵주죽준줄줅줆줌줍줏중줘줬줴쥐쥑쥔쥘쥠쥡쥣쥬쥰쥴쥼즈즉즌즐즘즙즛증지직진짇질짊짐집짓"], +["c241","헊헋헍헎헏헑헓",4,"헚헜헞",5,"헦헧헩헪헫헭헮"], +["c261","헯",4,"헶헸헺",5,"혂혃혅혆혇혉",6,"혒"], +["c281","혖",5,"혝혞혟혡혢혣혥",7,"혮",9,"혺혻징짖짙짚짜짝짠짢짤짧짬짭짯짰짱째짹짼쨀쨈쨉쨋쨌쨍쨔쨘쨩쩌쩍쩐쩔쩜쩝쩟쩠쩡쩨쩽쪄쪘쪼쪽쫀쫄쫌쫍쫏쫑쫓쫘쫙쫠쫬쫴쬈쬐쬔쬘쬠쬡쭁쭈쭉쭌쭐쭘쭙쭝쭤쭸쭹쮜쮸쯔쯤쯧쯩찌찍찐찔찜찝찡찢찧차착찬찮찰참찹찻"], +["c341","혽혾혿홁홂홃홄홆홇홊홌홎홏홐홒홓홖홗홙홚홛홝",4], +["c361","홢",4,"홨홪",5,"홲홳홵",11], +["c381","횁횂횄횆",5,"횎횏횑횒횓횕",7,"횞횠횢",5,"횩횪찼창찾채책챈챌챔챕챗챘챙챠챤챦챨챰챵처척천철첨첩첫첬청체첵첸첼쳄쳅쳇쳉쳐쳔쳤쳬쳰촁초촉촌촐촘촙촛총촤촨촬촹최쵠쵤쵬쵭쵯쵱쵸춈추축춘출춤춥춧충춰췄췌췐취췬췰췸췹췻췽츄츈츌츔츙츠측츤츨츰츱츳층"], +["c441","횫횭횮횯횱",7,"횺횼",7,"훆훇훉훊훋"], +["c461","훍훎훏훐훒훓훕훖훘훚",5,"훡훢훣훥훦훧훩",4], +["c481","훮훯훱훲훳훴훶",5,"훾훿휁휂휃휅",11,"휒휓휔치칙친칟칠칡침칩칫칭카칵칸칼캄캅캇캉캐캑캔캘캠캡캣캤캥캬캭컁커컥컨컫컬컴컵컷컸컹케켁켄켈켐켑켓켕켜켠켤켬켭켯켰켱켸코콕콘콜콤콥콧콩콰콱콴콸쾀쾅쾌쾡쾨쾰쿄쿠쿡쿤쿨쿰쿱쿳쿵쿼퀀퀄퀑퀘퀭퀴퀵퀸퀼"], +["c541","휕휖휗휚휛휝휞휟휡",6,"휪휬휮",5,"휶휷휹"], +["c561","휺휻휽",6,"흅흆흈흊",5,"흒흓흕흚",4], +["c581","흟흢흤흦흧흨흪흫흭흮흯흱흲흳흵",6,"흾흿힀힂",5,"힊힋큄큅큇큉큐큔큘큠크큭큰클큼큽킁키킥킨킬킴킵킷킹타탁탄탈탉탐탑탓탔탕태택탠탤탬탭탯탰탱탸턍터턱턴털턺텀텁텃텄텅테텍텐텔템텝텟텡텨텬텼톄톈토톡톤톨톰톱톳통톺톼퇀퇘퇴퇸툇툉툐투툭툰툴툼툽툿퉁퉈퉜"], +["c641","힍힎힏힑",6,"힚힜힞",5], +["c6a1","퉤튀튁튄튈튐튑튕튜튠튤튬튱트특튼튿틀틂틈틉틋틔틘틜틤틥티틱틴틸팀팁팃팅파팍팎판팔팖팜팝팟팠팡팥패팩팬팰팸팹팻팼팽퍄퍅퍼퍽펀펄펌펍펏펐펑페펙펜펠펨펩펫펭펴편펼폄폅폈평폐폘폡폣포폭폰폴폼폽폿퐁"], +["c7a1","퐈퐝푀푄표푠푤푭푯푸푹푼푿풀풂품풉풋풍풔풩퓌퓐퓔퓜퓟퓨퓬퓰퓸퓻퓽프픈플픔픕픗피픽핀필핌핍핏핑하학한할핥함합핫항해핵핸핼햄햅햇했행햐향허헉헌헐헒험헙헛헝헤헥헨헬헴헵헷헹혀혁현혈혐협혓혔형혜혠"], +["c8a1","혤혭호혹혼홀홅홈홉홋홍홑화확환활홧황홰홱홴횃횅회획횐횔횝횟횡효횬횰횹횻후훅훈훌훑훔훗훙훠훤훨훰훵훼훽휀휄휑휘휙휜휠휨휩휫휭휴휵휸휼흄흇흉흐흑흔흖흗흘흙흠흡흣흥흩희흰흴흼흽힁히힉힌힐힘힙힛힝"], +["caa1","伽佳假價加可呵哥嘉嫁家暇架枷柯歌珂痂稼苛茄街袈訶賈跏軻迦駕刻却各恪慤殼珏脚覺角閣侃刊墾奸姦干幹懇揀杆柬桿澗癎看磵稈竿簡肝艮艱諫間乫喝曷渴碣竭葛褐蝎鞨勘坎堪嵌感憾戡敢柑橄減甘疳監瞰紺邯鑑鑒龕"], +["cba1","匣岬甲胛鉀閘剛堈姜岡崗康强彊慷江畺疆糠絳綱羌腔舡薑襁講鋼降鱇介价個凱塏愷愾慨改槪漑疥皆盖箇芥蓋豈鎧開喀客坑更粳羹醵倨去居巨拒据據擧渠炬祛距踞車遽鉅鋸乾件健巾建愆楗腱虔蹇鍵騫乞傑杰桀儉劍劒檢"], +["cca1","瞼鈐黔劫怯迲偈憩揭擊格檄激膈覡隔堅牽犬甄絹繭肩見譴遣鵑抉決潔結缺訣兼慊箝謙鉗鎌京俓倞傾儆勁勍卿坰境庚徑慶憬擎敬景暻更梗涇炅烱璟璥瓊痙硬磬竟競絅經耕耿脛莖警輕逕鏡頃頸驚鯨係啓堺契季屆悸戒桂械"], +["cda1","棨溪界癸磎稽系繫繼計誡谿階鷄古叩告呱固姑孤尻庫拷攷故敲暠枯槁沽痼皐睾稿羔考股膏苦苽菰藁蠱袴誥賈辜錮雇顧高鼓哭斛曲梏穀谷鵠困坤崑昆梱棍滾琨袞鯤汨滑骨供公共功孔工恐恭拱控攻珙空蚣貢鞏串寡戈果瓜"], +["cea1","科菓誇課跨過鍋顆廓槨藿郭串冠官寬慣棺款灌琯瓘管罐菅觀貫關館刮恝括适侊光匡壙廣曠洸炚狂珖筐胱鑛卦掛罫乖傀塊壞怪愧拐槐魁宏紘肱轟交僑咬喬嬌嶠巧攪敎校橋狡皎矯絞翹膠蕎蛟較轎郊餃驕鮫丘久九仇俱具勾"], +["cfa1","區口句咎嘔坵垢寇嶇廐懼拘救枸柩構歐毆毬求溝灸狗玖球瞿矩究絿耉臼舅舊苟衢謳購軀逑邱鉤銶駒驅鳩鷗龜國局菊鞠鞫麴君窘群裙軍郡堀屈掘窟宮弓穹窮芎躬倦券勸卷圈拳捲權淃眷厥獗蕨蹶闕机櫃潰詭軌饋句晷歸貴"], +["d0a1","鬼龜叫圭奎揆槻珪硅窺竅糾葵規赳逵閨勻均畇筠菌鈞龜橘克剋劇戟棘極隙僅劤勤懃斤根槿瑾筋芹菫覲謹近饉契今妗擒昑檎琴禁禽芩衾衿襟金錦伋及急扱汲級給亘兢矜肯企伎其冀嗜器圻基埼夔奇妓寄岐崎己幾忌技旗旣"], +["d1a1","朞期杞棋棄機欺氣汽沂淇玘琦琪璂璣畸畿碁磯祁祇祈祺箕紀綺羈耆耭肌記譏豈起錡錤飢饑騎騏驥麒緊佶吉拮桔金喫儺喇奈娜懦懶拏拿癩",5,"那樂",4,"諾酪駱亂卵暖欄煖爛蘭難鸞捏捺南嵐枏楠湳濫男藍襤拉"], +["d2a1","納臘蠟衲囊娘廊",4,"乃來內奈柰耐冷女年撚秊念恬拈捻寧寗努勞奴弩怒擄櫓爐瑙盧",5,"駑魯",10,"濃籠聾膿農惱牢磊腦賂雷尿壘",7,"嫩訥杻紐勒",5,"能菱陵尼泥匿溺多茶"], +["d3a1","丹亶但單團壇彖斷旦檀段湍短端簞緞蛋袒鄲鍛撻澾獺疸達啖坍憺擔曇淡湛潭澹痰聃膽蕁覃談譚錟沓畓答踏遝唐堂塘幢戇撞棠當糖螳黨代垈坮大對岱帶待戴擡玳臺袋貸隊黛宅德悳倒刀到圖堵塗導屠島嶋度徒悼挑掉搗桃"], +["d4a1","棹櫂淘渡滔濤燾盜睹禱稻萄覩賭跳蹈逃途道都鍍陶韜毒瀆牘犢獨督禿篤纛讀墩惇敦旽暾沌焞燉豚頓乭突仝冬凍動同憧東桐棟洞潼疼瞳童胴董銅兜斗杜枓痘竇荳讀豆逗頭屯臀芚遁遯鈍得嶝橙燈登等藤謄鄧騰喇懶拏癩羅"], +["d5a1","蘿螺裸邏樂洛烙珞絡落諾酪駱丹亂卵欄欒瀾爛蘭鸞剌辣嵐擥攬欖濫籃纜藍襤覽拉臘蠟廊朗浪狼琅瑯螂郞來崍徠萊冷掠略亮倆兩凉梁樑粮粱糧良諒輛量侶儷勵呂廬慮戾旅櫚濾礪藜蠣閭驢驪麗黎力曆歷瀝礫轢靂憐戀攣漣"], +["d6a1","煉璉練聯蓮輦連鍊冽列劣洌烈裂廉斂殮濂簾獵令伶囹寧岺嶺怜玲笭羚翎聆逞鈴零靈領齡例澧禮醴隷勞怒撈擄櫓潞瀘爐盧老蘆虜路輅露魯鷺鹵碌祿綠菉錄鹿麓論壟弄朧瀧瓏籠聾儡瀨牢磊賂賚賴雷了僚寮廖料燎療瞭聊蓼"], +["d7a1","遼鬧龍壘婁屢樓淚漏瘻累縷蔞褸鏤陋劉旒柳榴流溜瀏琉瑠留瘤硫謬類六戮陸侖倫崙淪綸輪律慄栗率隆勒肋凜凌楞稜綾菱陵俚利厘吏唎履悧李梨浬犁狸理璃異痢籬罹羸莉裏裡里釐離鯉吝潾燐璘藺躪隣鱗麟林淋琳臨霖砬"], +["d8a1","立笠粒摩瑪痲碼磨馬魔麻寞幕漠膜莫邈万卍娩巒彎慢挽晩曼滿漫灣瞞萬蔓蠻輓饅鰻唜抹末沫茉襪靺亡妄忘忙望網罔芒茫莽輞邙埋妹媒寐昧枚梅每煤罵買賣邁魅脈貊陌驀麥孟氓猛盲盟萌冪覓免冕勉棉沔眄眠綿緬面麵滅"], +["d9a1","蔑冥名命明暝椧溟皿瞑茗蓂螟酩銘鳴袂侮冒募姆帽慕摸摹暮某模母毛牟牡瑁眸矛耗芼茅謀謨貌木沐牧目睦穆鶩歿沒夢朦蒙卯墓妙廟描昴杳渺猫竗苗錨務巫憮懋戊拇撫无楙武毋無珷畝繆舞茂蕪誣貿霧鵡墨默們刎吻問文"], +["daa1","汶紊紋聞蚊門雯勿沕物味媚尾嵋彌微未梶楣渼湄眉米美薇謎迷靡黴岷悶愍憫敏旻旼民泯玟珉緡閔密蜜謐剝博拍搏撲朴樸泊珀璞箔粕縛膊舶薄迫雹駁伴半反叛拌搬攀斑槃泮潘班畔瘢盤盼磐磻礬絆般蟠返頒飯勃拔撥渤潑"], +["dba1","發跋醱鉢髮魃倣傍坊妨尨幇彷房放方旁昉枋榜滂磅紡肪膀舫芳蒡蚌訪謗邦防龐倍俳北培徘拜排杯湃焙盃背胚裴裵褙賠輩配陪伯佰帛柏栢白百魄幡樊煩燔番磻繁蕃藩飜伐筏罰閥凡帆梵氾汎泛犯範范法琺僻劈壁擘檗璧癖"], +["dca1","碧蘗闢霹便卞弁變辨辯邊別瞥鱉鼈丙倂兵屛幷昞昺柄棅炳甁病秉竝輧餠騈保堡報寶普步洑湺潽珤甫菩補褓譜輔伏僕匐卜宓復服福腹茯蔔複覆輹輻馥鰒本乶俸奉封峯峰捧棒烽熢琫縫蓬蜂逢鋒鳳不付俯傅剖副否咐埠夫婦"], +["dda1","孚孵富府復扶敷斧浮溥父符簿缶腐腑膚艀芙莩訃負賦賻赴趺部釜阜附駙鳧北分吩噴墳奔奮忿憤扮昐汾焚盆粉糞紛芬賁雰不佛弗彿拂崩朋棚硼繃鵬丕備匕匪卑妃婢庇悲憊扉批斐枇榧比毖毗毘沸泌琵痺砒碑秕秘粃緋翡肥"], 
+["dea1","脾臂菲蜚裨誹譬費鄙非飛鼻嚬嬪彬斌檳殯浜濱瀕牝玭貧賓頻憑氷聘騁乍事些仕伺似使俟僿史司唆嗣四士奢娑寫寺射巳師徙思捨斜斯柶査梭死沙泗渣瀉獅砂社祀祠私篩紗絲肆舍莎蓑蛇裟詐詞謝賜赦辭邪飼駟麝削數朔索"], +["dfa1","傘刪山散汕珊産疝算蒜酸霰乷撒殺煞薩三參杉森渗芟蔘衫揷澁鈒颯上傷像償商喪嘗孀尙峠常床庠廂想桑橡湘爽牀狀相祥箱翔裳觴詳象賞霜塞璽賽嗇塞穡索色牲生甥省笙墅壻嶼序庶徐恕抒捿敍暑曙書栖棲犀瑞筮絮緖署"], +["e0a1","胥舒薯西誓逝鋤黍鼠夕奭席惜昔晳析汐淅潟石碩蓆釋錫仙僊先善嬋宣扇敾旋渲煽琁瑄璇璿癬禪線繕羨腺膳船蘚蟬詵跣選銑鐥饍鮮卨屑楔泄洩渫舌薛褻設說雪齧剡暹殲纖蟾贍閃陝攝涉燮葉城姓宬性惺成星晟猩珹盛省筬"], +["e1a1","聖聲腥誠醒世勢歲洗稅笹細說貰召嘯塑宵小少巢所掃搔昭梳沼消溯瀟炤燒甦疏疎瘙笑篠簫素紹蔬蕭蘇訴逍遡邵銷韶騷俗屬束涑粟續謖贖速孫巽損蓀遜飡率宋悚松淞訟誦送頌刷殺灑碎鎖衰釗修受嗽囚垂壽嫂守岫峀帥愁"], +["e2a1","戍手授搜收數樹殊水洙漱燧狩獸琇璲瘦睡秀穗竪粹綏綬繡羞脩茱蒐蓚藪袖誰讐輸遂邃酬銖銹隋隧隨雖需須首髓鬚叔塾夙孰宿淑潚熟琡璹肅菽巡徇循恂旬栒楯橓殉洵淳珣盾瞬筍純脣舜荀蓴蕣詢諄醇錞順馴戌術述鉥崇崧"], +["e3a1","嵩瑟膝蝨濕拾習褶襲丞乘僧勝升承昇繩蠅陞侍匙嘶始媤尸屎屍市弑恃施是時枾柴猜矢示翅蒔蓍視試詩諡豕豺埴寔式息拭植殖湜熄篒蝕識軾食飾伸侁信呻娠宸愼新晨燼申神紳腎臣莘薪藎蜃訊身辛辰迅失室實悉審尋心沁"], +["e4a1","沈深瀋甚芯諶什十拾雙氏亞俄兒啞娥峨我牙芽莪蛾衙訝阿雅餓鴉鵝堊岳嶽幄惡愕握樂渥鄂鍔顎鰐齷安岸按晏案眼雁鞍顔鮟斡謁軋閼唵岩巖庵暗癌菴闇壓押狎鴨仰央怏昻殃秧鴦厓哀埃崖愛曖涯碍艾隘靄厄扼掖液縊腋額"], +["e5a1","櫻罌鶯鸚也倻冶夜惹揶椰爺耶若野弱掠略約若葯蒻藥躍亮佯兩凉壤孃恙揚攘敭暘梁楊樣洋瀁煬痒瘍禳穰糧羊良襄諒讓釀陽量養圄御於漁瘀禦語馭魚齬億憶抑檍臆偃堰彦焉言諺孼蘖俺儼嚴奄掩淹嶪業円予余勵呂女如廬"], +["e6a1","旅歟汝濾璵礖礪與艅茹輿轝閭餘驪麗黎亦力域役易曆歷疫繹譯轢逆驛嚥堧姸娟宴年延憐戀捐挻撚椽沇沿涎涓淵演漣烟然煙煉燃燕璉硏硯秊筵緣練縯聯衍軟輦蓮連鉛鍊鳶列劣咽悅涅烈熱裂說閱厭廉念捻染殮炎焰琰艶苒"], +["e7a1","簾閻髥鹽曄獵燁葉令囹塋寧嶺嶸影怜映暎楹榮永泳渶潁濚瀛瀯煐營獰玲瑛瑩瓔盈穎纓羚聆英詠迎鈴鍈零霙靈領乂倪例刈叡曳汭濊猊睿穢芮藝蘂禮裔詣譽豫醴銳隸霓預五伍俉傲午吾吳嗚塢墺奧娛寤悟惡懊敖旿晤梧汚澳"], +["e8a1","烏熬獒筽蜈誤鰲鼇屋沃獄玉鈺溫瑥瘟穩縕蘊兀壅擁瓮甕癰翁邕雍饔渦瓦窩窪臥蛙蝸訛婉完宛梡椀浣玩琓琬碗緩翫脘腕莞豌阮頑曰往旺枉汪王倭娃歪矮外嵬巍猥畏了僚僥凹堯夭妖姚寥寮尿嶢拗搖撓擾料曜樂橈燎燿瑤療"], +["e9a1","窈窯繇繞耀腰蓼蟯要謠遙遼邀饒慾欲浴縟褥辱俑傭冗勇埇墉容庸慂榕涌湧溶熔瑢用甬聳茸蓉踊鎔鏞龍于佑偶優又友右宇寓尤愚憂旴牛玗瑀盂祐禑禹紆羽芋藕虞迂遇郵釪隅雨雩勖彧旭昱栯煜稶郁頊云暈橒殞澐熉耘芸蕓"], +["eaa1","運隕雲韻蔚鬱亐熊雄元原員圓園垣媛嫄寃怨愿援沅洹湲源爰猿瑗苑袁轅遠阮院願鴛月越鉞位偉僞危圍委威尉慰暐渭爲瑋緯胃萎葦蔿蝟衛褘謂違韋魏乳侑儒兪劉唯喩孺宥幼幽庾悠惟愈愉揄攸有杻柔柚柳楡楢油洧流游溜"], +["eba1","濡猶猷琉瑜由留癒硫紐維臾萸裕誘諛諭踰蹂遊逾遺酉釉鍮類六堉戮毓肉育陸倫允奫尹崙淪潤玧胤贇輪鈗閏律慄栗率聿戎瀜絨融隆垠恩慇殷誾銀隱乙吟淫蔭陰音飮揖泣邑凝應膺鷹依倚儀宜意懿擬椅毅疑矣義艤薏蟻衣誼"], +["eca1","議醫二以伊利吏夷姨履已弛彛怡易李梨泥爾珥理異痍痢移罹而耳肄苡荑裏裡貽貳邇里離飴餌匿溺瀷益翊翌翼謚人仁刃印吝咽因姻寅引忍湮燐璘絪茵藺蚓認隣靭靷鱗麟一佚佾壹日溢逸鎰馹任壬妊姙恁林淋稔臨荏賃入卄"], +["eda1","立笠粒仍剩孕芿仔刺咨姉姿子字孜恣慈滋炙煮玆瓷疵磁紫者自茨蔗藉諮資雌作勺嚼斫昨灼炸爵綽芍酌雀鵲孱棧殘潺盞岑暫潛箴簪蠶雜丈仗匠場墻壯奬將帳庄張掌暲杖樟檣欌漿牆狀獐璋章粧腸臟臧莊葬蔣薔藏裝贓醬長"], +["eea1","障再哉在宰才材栽梓渽滓災縡裁財載齋齎爭箏諍錚佇低儲咀姐底抵杵楮樗沮渚狙猪疽箸紵苧菹著藷詛貯躇這邸雎齟勣吊嫡寂摘敵滴狄炙的積笛籍績翟荻謫賊赤跡蹟迪迹適鏑佃佺傳全典前剪塡塼奠專展廛悛戰栓殿氈澱"], +["efa1","煎琠田甸畑癲筌箋箭篆纏詮輾轉鈿銓錢鐫電顚顫餞切截折浙癤竊節絶占岾店漸点粘霑鮎點接摺蝶丁井亭停偵呈姃定幀庭廷征情挺政整旌晶晸柾楨檉正汀淀淨渟湞瀞炡玎珽町睛碇禎程穽精綎艇訂諪貞鄭酊釘鉦鋌錠霆靖"], +["f0a1","靜頂鼎制劑啼堤帝弟悌提梯濟祭第臍薺製諸蹄醍除際霽題齊俎兆凋助嘲弔彫措操早晁曺曹朝條棗槽漕潮照燥爪璪眺祖祚租稠窕粗糟組繰肇藻蚤詔調趙躁造遭釣阻雕鳥族簇足鏃存尊卒拙猝倧宗從悰慫棕淙琮種終綜縱腫"], +["f1a1","踪踵鍾鐘佐坐左座挫罪主住侏做姝胄呪周嗾奏宙州廚晝朱柱株注洲湊澍炷珠疇籌紂紬綢舟蛛註誅走躊輳週酎酒鑄駐竹粥俊儁准埈寯峻晙樽浚準濬焌畯竣蠢逡遵雋駿茁中仲衆重卽櫛楫汁葺增憎曾拯烝甑症繒蒸證贈之只"], +["f2a1","咫地址志持指摯支旨智枝枳止池沚漬知砥祉祗紙肢脂至芝芷蜘誌識贄趾遲直稙稷織職唇嗔塵振搢晉晋桭榛殄津溱珍瑨璡畛疹盡眞瞋秦縉縝臻蔯袗診賑軫辰進鎭陣陳震侄叱姪嫉帙桎瓆疾秩窒膣蛭質跌迭斟朕什執潗緝輯"], +["f3a1","鏶集徵懲澄且侘借叉嗟嵯差次此磋箚茶蹉車遮捉搾着窄錯鑿齪撰澯燦璨瓚竄簒纂粲纘讚贊鑽餐饌刹察擦札紮僭參塹慘慙懺斬站讒讖倉倡創唱娼廠彰愴敞昌昶暢槍滄漲猖瘡窓脹艙菖蒼債埰寀寨彩採砦綵菜蔡采釵冊柵策"], +["f4a1","責凄妻悽處倜刺剔尺慽戚拓擲斥滌瘠脊蹠陟隻仟千喘天川擅泉淺玔穿舛薦賤踐遷釧闡阡韆凸哲喆徹撤澈綴輟轍鐵僉尖沾添甛瞻簽籤詹諂堞妾帖捷牒疊睫諜貼輒廳晴淸聽菁請靑鯖切剃替涕滯締諦逮遞體初剿哨憔抄招梢"], +["f5a1","椒楚樵炒焦硝礁礎秒稍肖艸苕草蕉貂超酢醋醮促囑燭矗蜀觸寸忖村邨叢塚寵悤憁摠總聰蔥銃撮催崔最墜抽推椎楸樞湫皺秋芻萩諏趨追鄒酋醜錐錘鎚雛騶鰍丑畜祝竺筑築縮蓄蹙蹴軸逐春椿瑃出朮黜充忠沖蟲衝衷悴膵萃"], +["f6a1","贅取吹嘴娶就炊翠聚脆臭趣醉驟鷲側仄厠惻測層侈値嗤峙幟恥梔治淄熾痔痴癡稚穉緇緻置致蚩輜雉馳齒則勅飭親七柒漆侵寢枕沈浸琛砧針鍼蟄秤稱快他咤唾墮妥惰打拖朶楕舵陀馱駝倬卓啄坼度托拓擢晫柝濁濯琢琸託"], +["f7a1","鐸呑嘆坦彈憚歎灘炭綻誕奪脫探眈耽貪塔搭榻宕帑湯糖蕩兌台太怠態殆汰泰笞胎苔跆邰颱宅擇澤撑攄兎吐土討慟桶洞痛筒統通堆槌腿褪退頹偸套妬投透鬪慝特闖坡婆巴把播擺杷波派爬琶破罷芭跛頗判坂板版瓣販辦鈑"], +["f8a1","阪八叭捌佩唄悖敗沛浿牌狽稗覇貝彭澎烹膨愎便偏扁片篇編翩遍鞭騙貶坪平枰萍評吠嬖幣廢弊斃肺蔽閉陛佈包匍匏咆哺圃布怖抛抱捕暴泡浦疱砲胞脯苞葡蒲袍褒逋鋪飽鮑幅暴曝瀑爆輻俵剽彪慓杓標漂瓢票表豹飇飄驃"], +["f9a1","品稟楓諷豊風馮彼披疲皮被避陂匹弼必泌珌畢疋筆苾馝乏逼下何厦夏廈昰河瑕荷蝦賀遐霞鰕壑學虐謔鶴寒恨悍旱汗漢澣瀚罕翰閑閒限韓割轄函含咸啣喊檻涵緘艦銜陷鹹合哈盒蛤閤闔陜亢伉姮嫦巷恒抗杭桁沆港缸肛航"], +["faa1","行降項亥偕咳垓奚孩害懈楷海瀣蟹解該諧邂駭骸劾核倖幸杏荇行享向嚮珦鄕響餉饗香噓墟虛許憲櫶獻軒歇險驗奕爀赫革俔峴弦懸晛泫炫玄玹現眩睍絃絢縣舷衒見賢鉉顯孑穴血頁嫌俠協夾峽挾浹狹脅脇莢鋏頰亨兄刑型"], +["fba1","形泂滎瀅灐炯熒珩瑩荊螢衡逈邢鎣馨兮彗惠慧暳蕙蹊醯鞋乎互呼壕壺好岵弧戶扈昊晧毫浩淏湖滸澔濠濩灝狐琥瑚瓠皓祜糊縞胡芦葫蒿虎號蝴護豪鎬頀顥惑或酷婚昏混渾琿魂忽惚笏哄弘汞泓洪烘紅虹訌鴻化和嬅樺火畵"], +["fca1","禍禾花華話譁貨靴廓擴攫確碻穫丸喚奐宦幻患換歡晥桓渙煥環紈還驩鰥活滑猾豁闊凰幌徨恍惶愰慌晃晄榥況湟滉潢煌璜皇篁簧荒蝗遑隍黃匯回廻徊恢悔懷晦會檜淮澮灰獪繪膾茴蛔誨賄劃獲宖橫鐄哮嚆孝效斅曉梟涍淆"], +["fda1","爻肴酵驍侯候厚后吼喉嗅帿後朽煦珝逅勛勳塤壎焄熏燻薰訓暈薨喧暄煊萱卉喙毁彙徽揮暉煇諱輝麾休携烋畦虧恤譎鷸兇凶匈洶胸黑昕欣炘痕吃屹紇訖欠欽歆吸恰洽翕興僖凞喜噫囍姬嬉希憙憘戱晞曦熙熹熺犧禧稀羲詰"] +] diff --git a/node_modules/iconv-lite/encodings/tables/cp950.json 
b/node_modules/iconv-lite/encodings/tables/cp950.json new file mode 100644 index 0000000..d8bc871 --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/cp950.json @@ -0,0 +1,177 @@ +[ +["0","\u0000",127], +["a140"," ,、。.‧;:?!︰…‥﹐﹑﹒·﹔﹕﹖﹗|–︱—︳╴︴﹏()︵︶{}︷︸〔〕︹︺【】︻︼《》︽︾〈〉︿﹀「」﹁﹂『』﹃﹄﹙﹚"], +["a1a1","﹛﹜﹝﹞‘’“”〝〞‵′#&*※§〃○●△▲◎☆★◇◆□■▽▼㊣℅¯ ̄_ˍ﹉﹊﹍﹎﹋﹌﹟﹠﹡+-×÷±√<>=≦≧≠∞≒≡﹢",4,"~∩∪⊥∠∟⊿㏒㏑∫∮∵∴♀♂⊕⊙↑↓←→↖↗↙↘∥∣/"], +["a240","\∕﹨$¥〒¢£%@℃℉﹩﹪﹫㏕㎜㎝㎞㏎㎡㎎㎏㏄°兙兛兞兝兡兣嗧瓩糎▁",7,"▏▎▍▌▋▊▉┼┴┬┤├▔─│▕┌┐└┘╭"], +["a2a1","╮╰╯═╞╪╡◢◣◥◤╱╲╳0",9,"Ⅰ",9,"〡",8,"十卄卅A",25,"a",21], +["a340","wxyzΑ",16,"Σ",6,"α",16,"σ",6,"ㄅ",10], +["a3a1","ㄐ",25,"˙ˉˊˇˋ"], +["a3e1","€"], +["a440","一乙丁七乃九了二人儿入八几刀刁力匕十卜又三下丈上丫丸凡久么也乞于亡兀刃勺千叉口土士夕大女子孑孓寸小尢尸山川工己已巳巾干廾弋弓才"], +["a4a1","丑丐不中丰丹之尹予云井互五亢仁什仃仆仇仍今介仄元允內六兮公冗凶分切刈勻勾勿化匹午升卅卞厄友及反壬天夫太夭孔少尤尺屯巴幻廿弔引心戈戶手扎支文斗斤方日曰月木欠止歹毋比毛氏水火爪父爻片牙牛犬王丙"], +["a540","世丕且丘主乍乏乎以付仔仕他仗代令仙仞充兄冉冊冬凹出凸刊加功包匆北匝仟半卉卡占卯卮去可古右召叮叩叨叼司叵叫另只史叱台句叭叻四囚外"], +["a5a1","央失奴奶孕它尼巨巧左市布平幼弁弘弗必戊打扔扒扑斥旦朮本未末札正母民氐永汁汀氾犯玄玉瓜瓦甘生用甩田由甲申疋白皮皿目矛矢石示禾穴立丞丟乒乓乩亙交亦亥仿伉伙伊伕伍伐休伏仲件任仰仳份企伋光兇兆先全"], +["a640","共再冰列刑划刎刖劣匈匡匠印危吉吏同吊吐吁吋各向名合吃后吆吒因回囝圳地在圭圬圯圩夙多夷夸妄奸妃好她如妁字存宇守宅安寺尖屹州帆并年"], +["a6a1","式弛忙忖戎戌戍成扣扛托收早旨旬旭曲曳有朽朴朱朵次此死氖汝汗汙江池汐汕污汛汍汎灰牟牝百竹米糸缶羊羽老考而耒耳聿肉肋肌臣自至臼舌舛舟艮色艾虫血行衣西阡串亨位住佇佗佞伴佛何估佐佑伽伺伸佃佔似但佣"], +["a740","作你伯低伶余佝佈佚兌克免兵冶冷別判利刪刨劫助努劬匣即卵吝吭吞吾否呎吧呆呃吳呈呂君吩告吹吻吸吮吵吶吠吼呀吱含吟听囪困囤囫坊坑址坍"], +["a7a1","均坎圾坐坏圻壯夾妝妒妨妞妣妙妖妍妤妓妊妥孝孜孚孛完宋宏尬局屁尿尾岐岑岔岌巫希序庇床廷弄弟彤形彷役忘忌志忍忱快忸忪戒我抄抗抖技扶抉扭把扼找批扳抒扯折扮投抓抑抆改攻攸旱更束李杏材村杜杖杞杉杆杠"], +["a840","杓杗步每求汞沙沁沈沉沅沛汪決沐汰沌汨沖沒汽沃汲汾汴沆汶沍沔沘沂灶灼災灸牢牡牠狄狂玖甬甫男甸皂盯矣私秀禿究系罕肖肓肝肘肛肚育良芒"], +["a8a1","芋芍見角言谷豆豕貝赤走足身車辛辰迂迆迅迄巡邑邢邪邦那酉釆里防阮阱阪阬並乖乳事些亞享京佯依侍佳使佬供例來侃佰併侈佩佻侖佾侏侑佺兔兒兕兩具其典冽函刻券刷刺到刮制剁劾劻卒協卓卑卦卷卸卹取叔受味呵"], +["a940","咖呸咕咀呻呷咄咒咆呼咐呱呶和咚呢周咋命咎固垃坷坪坩坡坦坤坼夜奉奇奈奄奔妾妻委妹妮姑姆姐姍始姓姊妯妳姒姅孟孤季宗定官宜宙宛尚屈居"], +["a9a1","屆岷岡岸岩岫岱岳帘帚帖帕帛帑幸庚店府底庖延弦弧弩往征彿彼忝忠忽念忿怏怔怯怵怖怪怕怡性怩怫怛或戕房戾所承拉拌拄抿拂抹拒招披拓拔拋拈抨抽押拐拙拇拍抵拚抱拘拖拗拆抬拎放斧於旺昔易昌昆昂明昀昏昕昊"], +["aa40","昇服朋杭枋枕東果杳杷枇枝林杯杰板枉松析杵枚枓杼杪杲欣武歧歿氓氛泣注泳沱泌泥河沽沾沼波沫法泓沸泄油況沮泗泅泱沿治泡泛泊沬泯泜泖泠"], +["aaa1","炕炎炒炊炙爬爭爸版牧物狀狎狙狗狐玩玨玟玫玥甽疝疙疚的盂盲直知矽社祀祁秉秈空穹竺糾罔羌羋者肺肥肢肱股肫肩肴肪肯臥臾舍芳芝芙芭芽芟芹花芬芥芯芸芣芰芾芷虎虱初表軋迎返近邵邸邱邶采金長門阜陀阿阻附"], +["ab40","陂隹雨青非亟亭亮信侵侯便俠俑俏保促侶俘俟俊俗侮俐俄係俚俎俞侷兗冒冑冠剎剃削前剌剋則勇勉勃勁匍南卻厚叛咬哀咨哎哉咸咦咳哇哂咽咪品"], +["aba1","哄哈咯咫咱咻咩咧咿囿垂型垠垣垢城垮垓奕契奏奎奐姜姘姿姣姨娃姥姪姚姦威姻孩宣宦室客宥封屎屏屍屋峙峒巷帝帥帟幽庠度建弈弭彥很待徊律徇後徉怒思怠急怎怨恍恰恨恢恆恃恬恫恪恤扁拜挖按拼拭持拮拽指拱拷"], +["ac40","拯括拾拴挑挂政故斫施既春昭映昧是星昨昱昤曷柿染柱柔某柬架枯柵柩柯柄柑枴柚查枸柏柞柳枰柙柢柝柒歪殃殆段毒毗氟泉洋洲洪流津洌洱洞洗"], +["aca1","活洽派洶洛泵洹洧洸洩洮洵洎洫炫為炳炬炯炭炸炮炤爰牲牯牴狩狠狡玷珊玻玲珍珀玳甚甭畏界畎畋疫疤疥疢疣癸皆皇皈盈盆盃盅省盹相眉看盾盼眇矜砂研砌砍祆祉祈祇禹禺科秒秋穿突竿竽籽紂紅紀紉紇約紆缸美羿耄"], +["ad40","耐耍耑耶胖胥胚胃胄背胡胛胎胞胤胝致舢苧范茅苣苛苦茄若茂茉苒苗英茁苜苔苑苞苓苟苯茆虐虹虻虺衍衫要觔計訂訃貞負赴赳趴軍軌述迦迢迪迥"], +["ada1","迭迫迤迨郊郎郁郃酋酊重閂限陋陌降面革韋韭音頁風飛食首香乘亳倌倍倣俯倦倥俸倩倖倆值借倚倒們俺倀倔倨俱倡個候倘俳修倭倪俾倫倉兼冤冥冢凍凌准凋剖剜剔剛剝匪卿原厝叟哨唐唁唷哼哥哲唆哺唔哩哭員唉哮哪"], +["ae40","哦唧唇哽唏圃圄埂埔埋埃堉夏套奘奚娑娘娜娟娛娓姬娠娣娩娥娌娉孫屘宰害家宴宮宵容宸射屑展屐峭峽峻峪峨峰島崁峴差席師庫庭座弱徒徑徐恙"], +["aea1","恣恥恐恕恭恩息悄悟悚悍悔悌悅悖扇拳挈拿捎挾振捕捂捆捏捉挺捐挽挪挫挨捍捌效敉料旁旅時晉晏晃晒晌晅晁書朔朕朗校核案框桓根桂桔栩梳栗桌桑栽柴桐桀格桃株桅栓栘桁殊殉殷氣氧氨氦氤泰浪涕消涇浦浸海浙涓"], +["af40","浬涉浮浚浴浩涌涊浹涅浥涔烊烘烤烙烈烏爹特狼狹狽狸狷玆班琉珮珠珪珞畔畝畜畚留疾病症疲疳疽疼疹痂疸皋皰益盍盎眩真眠眨矩砰砧砸砝破砷"], +["afa1","砥砭砠砟砲祕祐祠祟祖神祝祗祚秤秣秧租秦秩秘窄窈站笆笑粉紡紗紋紊素索純紐紕級紜納紙紛缺罟羔翅翁耆耘耕耙耗耽耿胱脂胰脅胭胴脆胸胳脈能脊胼胯臭臬舀舐航舫舨般芻茫荒荔荊茸荐草茵茴荏茲茹茶茗荀茱茨荃"], +["b040","虔蚊蚪蚓蚤蚩蚌蚣蚜衰衷袁袂衽衹記訐討訌訕訊託訓訖訏訑豈豺豹財貢起躬軒軔軏辱送逆迷退迺迴逃追逅迸邕郡郝郢酒配酌釘針釗釜釙閃院陣陡"], +["b0a1","陛陝除陘陞隻飢馬骨高鬥鬲鬼乾偺偽停假偃偌做偉健偶偎偕偵側偷偏倏偯偭兜冕凰剪副勒務勘動匐匏匙匿區匾參曼商啪啦啄啞啡啃啊唱啖問啕唯啤唸售啜唬啣唳啁啗圈國圉域堅堊堆埠埤基堂堵執培夠奢娶婁婉婦婪婀"], +["b140","娼婢婚婆婊孰寇寅寄寂宿密尉專將屠屜屝崇崆崎崛崖崢崑崩崔崙崤崧崗巢常帶帳帷康庸庶庵庾張強彗彬彩彫得徙從徘御徠徜恿患悉悠您惋悴惦悽"], +["b1a1","情悻悵惜悼惘惕惆惟悸惚惇戚戛扈掠控捲掖探接捷捧掘措捱掩掉掃掛捫推掄授掙採掬排掏掀捻捩捨捺敝敖救教敗啟敏敘敕敔斜斛斬族旋旌旎晝晚晤晨晦晞曹勗望梁梯梢梓梵桿桶梱梧梗械梃棄梭梆梅梔條梨梟梡梂欲殺"], +["b240","毫毬氫涎涼淳淙液淡淌淤添淺清淇淋涯淑涮淞淹涸混淵淅淒渚涵淚淫淘淪深淮淨淆淄涪淬涿淦烹焉焊烽烯爽牽犁猜猛猖猓猙率琅琊球理現琍瓠瓶"], +["b2a1","瓷甜產略畦畢異疏痔痕疵痊痍皎盔盒盛眷眾眼眶眸眺硫硃硎祥票祭移窒窕笠笨笛第符笙笞笮粒粗粕絆絃統紮紹紼絀細紳組累終紲紱缽羞羚翌翎習耜聊聆脯脖脣脫脩脰脤舂舵舷舶船莎莞莘荸莢莖莽莫莒莊莓莉莠荷荻荼"], +["b340","莆莧處彪蛇蛀蚶蛄蚵蛆蛋蚱蚯蛉術袞袈被袒袖袍袋覓規訪訝訣訥許設訟訛訢豉豚販責貫貨貪貧赧赦趾趺軛軟這逍通逗連速逝逐逕逞造透逢逖逛途"], 
+["b3a1","部郭都酗野釵釦釣釧釭釩閉陪陵陳陸陰陴陶陷陬雀雪雩章竟頂頃魚鳥鹵鹿麥麻傢傍傅備傑傀傖傘傚最凱割剴創剩勞勝勛博厥啻喀喧啼喊喝喘喂喜喪喔喇喋喃喳單喟唾喲喚喻喬喱啾喉喫喙圍堯堪場堤堰報堡堝堠壹壺奠"], +["b440","婷媚婿媒媛媧孳孱寒富寓寐尊尋就嵌嵐崴嵇巽幅帽幀幃幾廊廁廂廄弼彭復循徨惑惡悲悶惠愜愣惺愕惰惻惴慨惱愎惶愉愀愒戟扉掣掌描揀揩揉揆揍"], +["b4a1","插揣提握揖揭揮捶援揪換摒揚揹敞敦敢散斑斐斯普晰晴晶景暑智晾晷曾替期朝棺棕棠棘棗椅棟棵森棧棹棒棲棣棋棍植椒椎棉棚楮棻款欺欽殘殖殼毯氮氯氬港游湔渡渲湧湊渠渥渣減湛湘渤湖湮渭渦湯渴湍渺測湃渝渾滋"], +["b540","溉渙湎湣湄湲湩湟焙焚焦焰無然煮焜牌犄犀猶猥猴猩琺琪琳琢琥琵琶琴琯琛琦琨甥甦畫番痢痛痣痙痘痞痠登發皖皓皴盜睏短硝硬硯稍稈程稅稀窘"], +["b5a1","窗窖童竣等策筆筐筒答筍筋筏筑粟粥絞結絨絕紫絮絲絡給絢絰絳善翔翕耋聒肅腕腔腋腑腎脹腆脾腌腓腴舒舜菩萃菸萍菠菅萋菁華菱菴著萊菰萌菌菽菲菊萸萎萄菜萇菔菟虛蛟蛙蛭蛔蛛蛤蛐蛞街裁裂袱覃視註詠評詞証詁"], +["b640","詔詛詐詆訴診訶詖象貂貯貼貳貽賁費賀貴買貶貿貸越超趁跎距跋跚跑跌跛跆軻軸軼辜逮逵週逸進逶鄂郵鄉郾酣酥量鈔鈕鈣鈉鈞鈍鈐鈇鈑閔閏開閑"], +["b6a1","間閒閎隊階隋陽隅隆隍陲隄雁雅雄集雇雯雲韌項順須飧飪飯飩飲飭馮馭黃黍黑亂傭債傲傳僅傾催傷傻傯僇剿剷剽募勦勤勢勣匯嗟嗨嗓嗦嗎嗜嗇嗑嗣嗤嗯嗚嗡嗅嗆嗥嗉園圓塞塑塘塗塚塔填塌塭塊塢塒塋奧嫁嫉嫌媾媽媼"], +["b740","媳嫂媲嵩嵯幌幹廉廈弒彙徬微愚意慈感想愛惹愁愈慎慌慄慍愾愴愧愍愆愷戡戢搓搾搞搪搭搽搬搏搜搔損搶搖搗搆敬斟新暗暉暇暈暖暄暘暍會榔業"], +["b7a1","楚楷楠楔極椰概楊楨楫楞楓楹榆楝楣楛歇歲毀殿毓毽溢溯滓溶滂源溝滇滅溥溘溼溺溫滑準溜滄滔溪溧溴煎煙煩煤煉照煜煬煦煌煥煞煆煨煖爺牒猷獅猿猾瑯瑚瑕瑟瑞瑁琿瑙瑛瑜當畸瘀痰瘁痲痱痺痿痴痳盞盟睛睫睦睞督"], +["b840","睹睪睬睜睥睨睢矮碎碰碗碘碌碉硼碑碓硿祺祿禁萬禽稜稚稠稔稟稞窟窠筷節筠筮筧粱粳粵經絹綑綁綏絛置罩罪署義羨群聖聘肆肄腱腰腸腥腮腳腫"], +["b8a1","腹腺腦舅艇蒂葷落萱葵葦葫葉葬葛萼萵葡董葩葭葆虞虜號蛹蜓蜈蜇蜀蛾蛻蜂蜃蜆蜊衙裟裔裙補裘裝裡裊裕裒覜解詫該詳試詩詰誇詼詣誠話誅詭詢詮詬詹詻訾詨豢貊貉賊資賈賄貲賃賂賅跡跟跨路跳跺跪跤跦躲較載軾輊"], +["b940","辟農運遊道遂達逼違遐遇遏過遍遑逾遁鄒鄗酬酪酩釉鈷鉗鈸鈽鉀鈾鉛鉋鉤鉑鈴鉉鉍鉅鈹鈿鉚閘隘隔隕雍雋雉雊雷電雹零靖靴靶預頑頓頊頒頌飼飴"], +["b9a1","飽飾馳馱馴髡鳩麂鼎鼓鼠僧僮僥僖僭僚僕像僑僱僎僩兢凳劃劂匱厭嗾嘀嘛嘗嗽嘔嘆嘉嘍嘎嗷嘖嘟嘈嘐嗶團圖塵塾境墓墊塹墅塽壽夥夢夤奪奩嫡嫦嫩嫗嫖嫘嫣孵寞寧寡寥實寨寢寤察對屢嶄嶇幛幣幕幗幔廓廖弊彆彰徹慇"], +["ba40","愿態慷慢慣慟慚慘慵截撇摘摔撤摸摟摺摑摧搴摭摻敲斡旗旖暢暨暝榜榨榕槁榮槓構榛榷榻榫榴槐槍榭槌榦槃榣歉歌氳漳演滾漓滴漩漾漠漬漏漂漢"], +["baa1","滿滯漆漱漸漲漣漕漫漯澈漪滬漁滲滌滷熔熙煽熊熄熒爾犒犖獄獐瑤瑣瑪瑰瑭甄疑瘧瘍瘋瘉瘓盡監瞄睽睿睡磁碟碧碳碩碣禎福禍種稱窪窩竭端管箕箋筵算箝箔箏箸箇箄粹粽精綻綰綜綽綾綠緊綴網綱綺綢綿綵綸維緒緇綬"], +["bb40","罰翠翡翟聞聚肇腐膀膏膈膊腿膂臧臺與舔舞艋蓉蒿蓆蓄蒙蒞蒲蒜蓋蒸蓀蓓蒐蒼蓑蓊蜿蜜蜻蜢蜥蜴蜘蝕蜷蜩裳褂裴裹裸製裨褚裯誦誌語誣認誡誓誤"], +["bba1","說誥誨誘誑誚誧豪貍貌賓賑賒赫趙趕跼輔輒輕輓辣遠遘遜遣遙遞遢遝遛鄙鄘鄞酵酸酷酴鉸銀銅銘銖鉻銓銜銨鉼銑閡閨閩閣閥閤隙障際雌雒需靼鞅韶頗領颯颱餃餅餌餉駁骯骰髦魁魂鳴鳶鳳麼鼻齊億儀僻僵價儂儈儉儅凜"], +["bc40","劇劈劉劍劊勰厲嘮嘻嘹嘲嘿嘴嘩噓噎噗噴嘶嘯嘰墀墟增墳墜墮墩墦奭嬉嫻嬋嫵嬌嬈寮寬審寫層履嶝嶔幢幟幡廢廚廟廝廣廠彈影德徵慶慧慮慝慕憂"], +["bca1","慼慰慫慾憧憐憫憎憬憚憤憔憮戮摩摯摹撞撲撈撐撰撥撓撕撩撒撮播撫撚撬撙撢撳敵敷數暮暫暴暱樣樟槨樁樞標槽模樓樊槳樂樅槭樑歐歎殤毅毆漿潼澄潑潦潔澆潭潛潸潮澎潺潰潤澗潘滕潯潠潟熟熬熱熨牖犛獎獗瑩璋璃"], +["bd40","瑾璀畿瘠瘩瘟瘤瘦瘡瘢皚皺盤瞎瞇瞌瞑瞋磋磅確磊碾磕碼磐稿稼穀稽稷稻窯窮箭箱範箴篆篇篁箠篌糊締練緯緻緘緬緝編緣線緞緩綞緙緲緹罵罷羯"], +["bda1","翩耦膛膜膝膠膚膘蔗蔽蔚蓮蔬蔭蔓蔑蔣蔡蔔蓬蔥蓿蔆螂蝴蝶蝠蝦蝸蝨蝙蝗蝌蝓衛衝褐複褒褓褕褊誼諒談諄誕請諸課諉諂調誰論諍誶誹諛豌豎豬賠賞賦賤賬賭賢賣賜質賡赭趟趣踫踐踝踢踏踩踟踡踞躺輝輛輟輩輦輪輜輞"], +["be40","輥適遮遨遭遷鄰鄭鄧鄱醇醉醋醃鋅銻銷鋪銬鋤鋁銳銼鋒鋇鋰銲閭閱霄霆震霉靠鞍鞋鞏頡頫頜颳養餓餒餘駝駐駟駛駑駕駒駙骷髮髯鬧魅魄魷魯鴆鴉"], +["bea1","鴃麩麾黎墨齒儒儘儔儐儕冀冪凝劑劓勳噙噫噹噩噤噸噪器噥噱噯噬噢噶壁墾壇壅奮嬝嬴學寰導彊憲憑憩憊懍憶憾懊懈戰擅擁擋撻撼據擄擇擂操撿擒擔撾整曆曉暹曄曇暸樽樸樺橙橫橘樹橄橢橡橋橇樵機橈歙歷氅濂澱澡"], +["bf40","濃澤濁澧澳激澹澶澦澠澴熾燉燐燒燈燕熹燎燙燜燃燄獨璜璣璘璟璞瓢甌甍瘴瘸瘺盧盥瞠瞞瞟瞥磨磚磬磧禦積穎穆穌穋窺篙簑築篤篛篡篩篦糕糖縊"], +["bfa1","縑縈縛縣縞縝縉縐罹羲翰翱翮耨膳膩膨臻興艘艙蕊蕙蕈蕨蕩蕃蕉蕭蕪蕞螃螟螞螢融衡褪褲褥褫褡親覦諦諺諫諱謀諜諧諮諾謁謂諷諭諳諶諼豫豭貓賴蹄踱踴蹂踹踵輻輯輸輳辨辦遵遴選遲遼遺鄴醒錠錶鋸錳錯錢鋼錫錄錚"], +["c040","錐錦錡錕錮錙閻隧隨險雕霎霑霖霍霓霏靛靜靦鞘頰頸頻頷頭頹頤餐館餞餛餡餚駭駢駱骸骼髻髭鬨鮑鴕鴣鴦鴨鴒鴛默黔龍龜優償儡儲勵嚎嚀嚐嚅嚇"], +["c0a1","嚏壕壓壑壎嬰嬪嬤孺尷屨嶼嶺嶽嶸幫彌徽應懂懇懦懋戲戴擎擊擘擠擰擦擬擱擢擭斂斃曙曖檀檔檄檢檜櫛檣橾檗檐檠歜殮毚氈濘濱濟濠濛濤濫濯澀濬濡濩濕濮濰燧營燮燦燥燭燬燴燠爵牆獰獲璩環璦璨癆療癌盪瞳瞪瞰瞬"], +["c140","瞧瞭矯磷磺磴磯礁禧禪穗窿簇簍篾篷簌篠糠糜糞糢糟糙糝縮績繆縷縲繃縫總縱繅繁縴縹繈縵縿縯罄翳翼聱聲聰聯聳臆臃膺臂臀膿膽臉膾臨舉艱薪"], +["c1a1","薄蕾薜薑薔薯薛薇薨薊虧蟀蟑螳蟒蟆螫螻螺蟈蟋褻褶襄褸褽覬謎謗謙講謊謠謝謄謐豁谿豳賺賽購賸賻趨蹉蹋蹈蹊轄輾轂轅輿避遽還邁邂邀鄹醣醞醜鍍鎂錨鍵鍊鍥鍋錘鍾鍬鍛鍰鍚鍔闊闋闌闈闆隱隸雖霜霞鞠韓顆颶餵騁"], +["c240","駿鮮鮫鮪鮭鴻鴿麋黏點黜黝黛鼾齋叢嚕嚮壙壘嬸彝懣戳擴擲擾攆擺擻擷斷曜朦檳檬櫃檻檸櫂檮檯歟歸殯瀉瀋濾瀆濺瀑瀏燻燼燾燸獷獵璧璿甕癖癘"], +["c2a1","癒瞽瞿瞻瞼礎禮穡穢穠竄竅簫簧簪簞簣簡糧織繕繞繚繡繒繙罈翹翻職聶臍臏舊藏薩藍藐藉薰薺薹薦蟯蟬蟲蟠覆覲觴謨謹謬謫豐贅蹙蹣蹦蹤蹟蹕軀轉轍邇邃邈醫醬釐鎔鎊鎖鎢鎳鎮鎬鎰鎘鎚鎗闔闖闐闕離雜雙雛雞霤鞣鞦"], +["c340","鞭韹額顏題顎顓颺餾餿餽餮馥騎髁鬃鬆魏魎魍鯊鯉鯽鯈鯀鵑鵝鵠黠鼕鼬儳嚥壞壟壢寵龐廬懲懷懶懵攀攏曠曝櫥櫝櫚櫓瀛瀟瀨瀚瀝瀕瀘爆爍牘犢獸"], +["c3a1","獺璽瓊瓣疇疆癟癡矇礙禱穫穩簾簿簸簽簷籀繫繭繹繩繪羅繳羶羹羸臘藩藝藪藕藤藥藷蟻蠅蠍蟹蟾襠襟襖襞譁譜識證譚譎譏譆譙贈贊蹼蹲躇蹶蹬蹺蹴轔轎辭邊邋醱醮鏡鏑鏟鏃鏈鏜鏝鏖鏢鏍鏘鏤鏗鏨關隴難霪霧靡韜韻類"], +["c440","願顛颼饅饉騖騙鬍鯨鯧鯖鯛鶉鵡鵲鵪鵬麒麗麓麴勸嚨嚷嚶嚴嚼壤孀孃孽寶巉懸懺攘攔攙曦朧櫬瀾瀰瀲爐獻瓏癢癥礦礪礬礫竇競籌籃籍糯糰辮繽繼"], +["c4a1","纂罌耀臚艦藻藹蘑藺蘆蘋蘇蘊蠔蠕襤覺觸議譬警譯譟譫贏贍躉躁躅躂醴釋鐘鐃鏽闡霰飄饒饑馨騫騰騷騵鰓鰍鹹麵黨鼯齟齣齡儷儸囁囀囂夔屬巍懼懾攝攜斕曩櫻欄櫺殲灌爛犧瓖瓔癩矓籐纏續羼蘗蘭蘚蠣蠢蠡蠟襪襬覽譴"], +["c540","護譽贓躊躍躋轟辯醺鐮鐳鐵鐺鐸鐲鐫闢霸霹露響顧顥饗驅驃驀騾髏魔魑鰭鰥鶯鶴鷂鶸麝黯鼙齜齦齧儼儻囈囊囉孿巔巒彎懿攤權歡灑灘玀瓤疊癮癬"], +["c5a1","禳籠籟聾聽臟襲襯觼讀贖贗躑躓轡酈鑄鑑鑒霽霾韃韁顫饕驕驍髒鬚鱉鰱鰾鰻鷓鷗鼴齬齪龔囌巖戀攣攫攪曬欐瓚竊籤籣籥纓纖纔臢蘸蘿蠱變邐邏鑣鑠鑤靨顯饜驚驛驗髓體髑鱔鱗鱖鷥麟黴囑壩攬灞癱癲矗罐羈蠶蠹衢讓讒"], +["c640","讖艷贛釀鑪靂靈靄韆顰驟鬢魘鱟鷹鷺鹼鹽鼇齷齲廳欖灣籬籮蠻觀躡釁鑲鑰顱饞髖鬣黌灤矚讚鑷韉驢驥纜讜躪釅鑽鑾鑼鱷鱸黷豔鑿鸚爨驪鬱鸛鸞籲"], 
+["c940","乂乜凵匚厂万丌乇亍囗兀屮彳丏冇与丮亓仂仉仈冘勼卬厹圠夃夬尐巿旡殳毌气爿丱丼仨仜仩仡仝仚刌匜卌圢圣夗夯宁宄尒尻屴屳帄庀庂忉戉扐氕"], +["c9a1","氶汃氿氻犮犰玊禸肊阞伎优伬仵伔仱伀价伈伝伂伅伢伓伄仴伒冱刓刉刐劦匢匟卍厊吇囡囟圮圪圴夼妀奼妅奻奾奷奿孖尕尥屼屺屻屾巟幵庄异弚彴忕忔忏扜扞扤扡扦扢扙扠扚扥旯旮朾朹朸朻机朿朼朳氘汆汒汜汏汊汔汋"], +["ca40","汌灱牞犴犵玎甪癿穵网艸艼芀艽艿虍襾邙邗邘邛邔阢阤阠阣佖伻佢佉体佤伾佧佒佟佁佘伭伳伿佡冏冹刜刞刡劭劮匉卣卲厎厏吰吷吪呔呅吙吜吥吘"], +["caa1","吽呏呁吨吤呇囮囧囥坁坅坌坉坋坒夆奀妦妘妠妗妎妢妐妏妧妡宎宒尨尪岍岏岈岋岉岒岊岆岓岕巠帊帎庋庉庌庈庍弅弝彸彶忒忑忐忭忨忮忳忡忤忣忺忯忷忻怀忴戺抃抌抎抏抔抇扱扻扺扰抁抈扷扽扲扴攷旰旴旳旲旵杅杇"], +["cb40","杙杕杌杈杝杍杚杋毐氙氚汸汧汫沄沋沏汱汯汩沚汭沇沕沜汦汳汥汻沎灴灺牣犿犽狃狆狁犺狅玕玗玓玔玒町甹疔疕皁礽耴肕肙肐肒肜芐芏芅芎芑芓"], +["cba1","芊芃芄豸迉辿邟邡邥邞邧邠阰阨阯阭丳侘佼侅佽侀侇佶佴侉侄佷佌侗佪侚佹侁佸侐侜侔侞侒侂侕佫佮冞冼冾刵刲刳剆刱劼匊匋匼厒厔咇呿咁咑咂咈呫呺呾呥呬呴呦咍呯呡呠咘呣呧呤囷囹坯坲坭坫坱坰坶垀坵坻坳坴坢"], +["cc40","坨坽夌奅妵妺姏姎妲姌姁妶妼姃姖妱妽姀姈妴姇孢孥宓宕屄屇岮岤岠岵岯岨岬岟岣岭岢岪岧岝岥岶岰岦帗帔帙弨弢弣弤彔徂彾彽忞忥怭怦怙怲怋"], +["cca1","怴怊怗怳怚怞怬怢怍怐怮怓怑怌怉怜戔戽抭抴拑抾抪抶拊抮抳抯抻抩抰抸攽斨斻昉旼昄昒昈旻昃昋昍昅旽昑昐曶朊枅杬枎枒杶杻枘枆构杴枍枌杺枟枑枙枃杽极杸杹枔欥殀歾毞氝沓泬泫泮泙沶泔沭泧沷泐泂沺泃泆泭泲"], +["cd40","泒泝沴沊沝沀泞泀洰泍泇沰泹泏泩泑炔炘炅炓炆炄炑炖炂炚炃牪狖狋狘狉狜狒狔狚狌狑玤玡玭玦玢玠玬玝瓝瓨甿畀甾疌疘皯盳盱盰盵矸矼矹矻矺"], +["cda1","矷祂礿秅穸穻竻籵糽耵肏肮肣肸肵肭舠芠苀芫芚芘芛芵芧芮芼芞芺芴芨芡芩苂芤苃芶芢虰虯虭虮豖迒迋迓迍迖迕迗邲邴邯邳邰阹阽阼阺陃俍俅俓侲俉俋俁俔俜俙侻侳俛俇俖侺俀侹俬剄剉勀勂匽卼厗厖厙厘咺咡咭咥哏"], +["ce40","哃茍咷咮哖咶哅哆咠呰咼咢咾呲哞咰垵垞垟垤垌垗垝垛垔垘垏垙垥垚垕壴复奓姡姞姮娀姱姝姺姽姼姶姤姲姷姛姩姳姵姠姾姴姭宨屌峐峘峌峗峋峛"], +["cea1","峞峚峉峇峊峖峓峔峏峈峆峎峟峸巹帡帢帣帠帤庰庤庢庛庣庥弇弮彖徆怷怹恔恲恞恅恓恇恉恛恌恀恂恟怤恄恘恦恮扂扃拏挍挋拵挎挃拫拹挏挌拸拶挀挓挔拺挕拻拰敁敃斪斿昶昡昲昵昜昦昢昳昫昺昝昴昹昮朏朐柁柲柈枺"], +["cf40","柜枻柸柘柀枷柅柫柤柟枵柍枳柷柶柮柣柂枹柎柧柰枲柼柆柭柌枮柦柛柺柉柊柃柪柋欨殂殄殶毖毘毠氠氡洨洴洭洟洼洿洒洊泚洳洄洙洺洚洑洀洝浂"], +["cfa1","洁洘洷洃洏浀洇洠洬洈洢洉洐炷炟炾炱炰炡炴炵炩牁牉牊牬牰牳牮狊狤狨狫狟狪狦狣玅珌珂珈珅玹玶玵玴珫玿珇玾珃珆玸珋瓬瓮甮畇畈疧疪癹盄眈眃眄眅眊盷盻盺矧矨砆砑砒砅砐砏砎砉砃砓祊祌祋祅祄秕种秏秖秎窀"], +["d040","穾竑笀笁籺籸籹籿粀粁紃紈紁罘羑羍羾耇耎耏耔耷胘胇胠胑胈胂胐胅胣胙胜胊胕胉胏胗胦胍臿舡芔苙苾苹茇苨茀苕茺苫苖苴苬苡苲苵茌苻苶苰苪"], +["d0a1","苤苠苺苳苭虷虴虼虳衁衎衧衪衩觓訄訇赲迣迡迮迠郱邽邿郕郅邾郇郋郈釔釓陔陏陑陓陊陎倞倅倇倓倢倰倛俵俴倳倷倬俶俷倗倜倠倧倵倯倱倎党冔冓凊凄凅凈凎剡剚剒剞剟剕剢勍匎厞唦哢唗唒哧哳哤唚哿唄唈哫唑唅哱"], +["d140","唊哻哷哸哠唎唃唋圁圂埌堲埕埒垺埆垽垼垸垶垿埇埐垹埁夎奊娙娖娭娮娕娏娗娊娞娳孬宧宭宬尃屖屔峬峿峮峱峷崀峹帩帨庨庮庪庬弳弰彧恝恚恧"], +["d1a1","恁悢悈悀悒悁悝悃悕悛悗悇悜悎戙扆拲挐捖挬捄捅挶捃揤挹捋捊挼挩捁挴捘捔捙挭捇挳捚捑挸捗捀捈敊敆旆旃旄旂晊晟晇晑朒朓栟栚桉栲栳栻桋桏栖栱栜栵栫栭栯桎桄栴栝栒栔栦栨栮桍栺栥栠欬欯欭欱欴歭肂殈毦毤"], +["d240","毨毣毢毧氥浺浣浤浶洍浡涒浘浢浭浯涑涍淯浿涆浞浧浠涗浰浼浟涂涘洯浨涋浾涀涄洖涃浻浽浵涐烜烓烑烝烋缹烢烗烒烞烠烔烍烅烆烇烚烎烡牂牸"], +["d2a1","牷牶猀狺狴狾狶狳狻猁珓珙珥珖玼珧珣珩珜珒珛珔珝珚珗珘珨瓞瓟瓴瓵甡畛畟疰痁疻痄痀疿疶疺皊盉眝眛眐眓眒眣眑眕眙眚眢眧砣砬砢砵砯砨砮砫砡砩砳砪砱祔祛祏祜祓祒祑秫秬秠秮秭秪秜秞秝窆窉窅窋窌窊窇竘笐"], +["d340","笄笓笅笏笈笊笎笉笒粄粑粊粌粈粍粅紞紝紑紎紘紖紓紟紒紏紌罜罡罞罠罝罛羖羒翃翂翀耖耾耹胺胲胹胵脁胻脀舁舯舥茳茭荄茙荑茥荖茿荁茦茜茢"], +["d3a1","荂荎茛茪茈茼荍茖茤茠茷茯茩荇荅荌荓茞茬荋茧荈虓虒蚢蚨蚖蚍蚑蚞蚇蚗蚆蚋蚚蚅蚥蚙蚡蚧蚕蚘蚎蚝蚐蚔衃衄衭衵衶衲袀衱衿衯袃衾衴衼訒豇豗豻貤貣赶赸趵趷趶軑軓迾迵适迿迻逄迼迶郖郠郙郚郣郟郥郘郛郗郜郤酐"], +["d440","酎酏釕釢釚陜陟隼飣髟鬯乿偰偪偡偞偠偓偋偝偲偈偍偁偛偊偢倕偅偟偩偫偣偤偆偀偮偳偗偑凐剫剭剬剮勖勓匭厜啵啶唼啍啐唴唪啑啢唶唵唰啒啅"], +["d4a1","唌唲啥啎唹啈唭唻啀啋圊圇埻堔埢埶埜埴堀埭埽堈埸堋埳埏堇埮埣埲埥埬埡堎埼堐埧堁堌埱埩埰堍堄奜婠婘婕婧婞娸娵婭婐婟婥婬婓婤婗婃婝婒婄婛婈媎娾婍娹婌婰婩婇婑婖婂婜孲孮寁寀屙崞崋崝崚崠崌崨崍崦崥崏"], +["d540","崰崒崣崟崮帾帴庱庴庹庲庳弶弸徛徖徟悊悐悆悾悰悺惓惔惏惤惙惝惈悱惛悷惊悿惃惍惀挲捥掊掂捽掽掞掭掝掗掫掎捯掇掐据掯捵掜捭掮捼掤挻掟"], +["d5a1","捸掅掁掑掍捰敓旍晥晡晛晙晜晢朘桹梇梐梜桭桮梮梫楖桯梣梬梩桵桴梲梏桷梒桼桫桲梪梀桱桾梛梖梋梠梉梤桸桻梑梌梊桽欶欳欷欸殑殏殍殎殌氪淀涫涴涳湴涬淩淢涷淶淔渀淈淠淟淖涾淥淜淝淛淴淊涽淭淰涺淕淂淏淉"], +["d640","淐淲淓淽淗淍淣涻烺焍烷焗烴焌烰焄烳焐烼烿焆焓焀烸烶焋焂焎牾牻牼牿猝猗猇猑猘猊猈狿猏猞玈珶珸珵琄琁珽琇琀珺珼珿琌琋珴琈畤畣痎痒痏"], +["d6a1","痋痌痑痐皏皉盓眹眯眭眱眲眴眳眽眥眻眵硈硒硉硍硊硌砦硅硐祤祧祩祪祣祫祡离秺秸秶秷窏窔窐笵筇笴笥笰笢笤笳笘笪笝笱笫笭笯笲笸笚笣粔粘粖粣紵紽紸紶紺絅紬紩絁絇紾紿絊紻紨罣羕羜羝羛翊翋翍翐翑翇翏翉耟"], +["d740","耞耛聇聃聈脘脥脙脛脭脟脬脞脡脕脧脝脢舑舸舳舺舴舲艴莐莣莨莍荺荳莤荴莏莁莕莙荵莔莩荽莃莌莝莛莪莋荾莥莯莈莗莰荿莦莇莮荶莚虙虖蚿蚷"], +["d7a1","蛂蛁蛅蚺蚰蛈蚹蚳蚸蛌蚴蚻蚼蛃蚽蚾衒袉袕袨袢袪袚袑袡袟袘袧袙袛袗袤袬袌袓袎覂觖觙觕訰訧訬訞谹谻豜豝豽貥赽赻赹趼跂趹趿跁軘軞軝軜軗軠軡逤逋逑逜逌逡郯郪郰郴郲郳郔郫郬郩酖酘酚酓酕釬釴釱釳釸釤釹釪"], +["d840","釫釷釨釮镺閆閈陼陭陫陱陯隿靪頄飥馗傛傕傔傞傋傣傃傌傎傝偨傜傒傂傇兟凔匒匑厤厧喑喨喥喭啷噅喢喓喈喏喵喁喣喒喤啽喌喦啿喕喡喎圌堩堷"], +["d8a1","堙堞堧堣堨埵塈堥堜堛堳堿堶堮堹堸堭堬堻奡媯媔媟婺媢媞婸媦婼媥媬媕媮娷媄媊媗媃媋媩婻婽媌媜媏媓媝寪寍寋寔寑寊寎尌尰崷嵃嵫嵁嵋崿崵嵑嵎嵕崳崺嵒崽崱嵙嵂崹嵉崸崼崲崶嵀嵅幄幁彘徦徥徫惉悹惌惢惎惄愔"], +["d940","惲愊愖愅惵愓惸惼惾惁愃愘愝愐惿愄愋扊掔掱掰揎揥揨揯揃撝揳揊揠揶揕揲揵摡揟掾揝揜揄揘揓揂揇揌揋揈揰揗揙攲敧敪敤敜敨敥斌斝斞斮旐旒"], +["d9a1","晼晬晻暀晱晹晪晲朁椌棓椄棜椪棬棪棱椏棖棷棫棤棶椓椐棳棡椇棌椈楰梴椑棯棆椔棸棐棽棼棨椋椊椗棎棈棝棞棦棴棑椆棔棩椕椥棇欹欻欿欼殔殗殙殕殽毰毲毳氰淼湆湇渟湉溈渼渽湅湢渫渿湁湝湳渜渳湋湀湑渻渃渮湞"], +["da40","湨湜湡渱渨湠湱湫渹渢渰湓湥渧湸湤湷湕湹湒湦渵渶湚焠焞焯烻焮焱焣焥焢焲焟焨焺焛牋牚犈犉犆犅犋猒猋猰猢猱猳猧猲猭猦猣猵猌琮琬琰琫琖"], +["daa1","琚琡琭琱琤琣琝琩琠琲瓻甯畯畬痧痚痡痦痝痟痤痗皕皒盚睆睇睄睍睅睊睎睋睌矞矬硠硤硥硜硭硱硪确硰硩硨硞硢祴祳祲祰稂稊稃稌稄窙竦竤筊笻筄筈筌筎筀筘筅粢粞粨粡絘絯絣絓絖絧絪絏絭絜絫絒絔絩絑絟絎缾缿罥"], +["db40","罦羢羠羡翗聑聏聐胾胔腃腊腒腏腇脽腍脺臦臮臷臸臹舄舼舽舿艵茻菏菹萣菀菨萒菧菤菼菶萐菆菈菫菣莿萁菝菥菘菿菡菋菎菖菵菉萉萏菞萑萆菂菳"], +["dba1","菕菺菇菑菪萓菃菬菮菄菻菗菢萛菛菾蛘蛢蛦蛓蛣蛚蛪蛝蛫蛜蛬蛩蛗蛨蛑衈衖衕袺裗袹袸裀袾袶袼袷袽袲褁裉覕覘覗觝觚觛詎詍訹詙詀詗詘詄詅詒詈詑詊詌詏豟貁貀貺貾貰貹貵趄趀趉跘跓跍跇跖跜跏跕跙跈跗跅軯軷軺"], 
+["dc40","軹軦軮軥軵軧軨軶軫軱軬軴軩逭逴逯鄆鄬鄄郿郼鄈郹郻鄁鄀鄇鄅鄃酡酤酟酢酠鈁鈊鈥鈃鈚鈦鈏鈌鈀鈒釿釽鈆鈄鈧鈂鈜鈤鈙鈗鈅鈖镻閍閌閐隇陾隈"], +["dca1","隉隃隀雂雈雃雱雰靬靰靮頇颩飫鳦黹亃亄亶傽傿僆傮僄僊傴僈僂傰僁傺傱僋僉傶傸凗剺剸剻剼嗃嗛嗌嗐嗋嗊嗝嗀嗔嗄嗩喿嗒喍嗏嗕嗢嗖嗈嗲嗍嗙嗂圔塓塨塤塏塍塉塯塕塎塝塙塥塛堽塣塱壼嫇嫄嫋媺媸媱媵媰媿嫈媻嫆"], +["dd40","媷嫀嫊媴媶嫍媹媐寖寘寙尟尳嵱嵣嵊嵥嵲嵬嵞嵨嵧嵢巰幏幎幊幍幋廅廌廆廋廇彀徯徭惷慉慊愫慅愶愲愮慆愯慏愩慀戠酨戣戥戤揅揱揫搐搒搉搠搤"], +["dda1","搳摃搟搕搘搹搷搢搣搌搦搰搨摁搵搯搊搚摀搥搧搋揧搛搮搡搎敯斒旓暆暌暕暐暋暊暙暔晸朠楦楟椸楎楢楱椿楅楪椹楂楗楙楺楈楉椵楬椳椽楥棰楸椴楩楀楯楄楶楘楁楴楌椻楋椷楜楏楑椲楒椯楻椼歆歅歃歂歈歁殛嗀毻毼"], +["de40","毹毷毸溛滖滈溏滀溟溓溔溠溱溹滆滒溽滁溞滉溷溰滍溦滏溲溾滃滜滘溙溒溎溍溤溡溿溳滐滊溗溮溣煇煔煒煣煠煁煝煢煲煸煪煡煂煘煃煋煰煟煐煓"], +["dea1","煄煍煚牏犍犌犑犐犎猼獂猻猺獀獊獉瑄瑊瑋瑒瑑瑗瑀瑏瑐瑎瑂瑆瑍瑔瓡瓿瓾瓽甝畹畷榃痯瘏瘃痷痾痼痹痸瘐痻痶痭痵痽皙皵盝睕睟睠睒睖睚睩睧睔睙睭矠碇碚碔碏碄碕碅碆碡碃硹碙碀碖硻祼禂祽祹稑稘稙稒稗稕稢稓"], +["df40","稛稐窣窢窞竫筦筤筭筴筩筲筥筳筱筰筡筸筶筣粲粴粯綈綆綀綍絿綅絺綎絻綃絼綌綔綄絽綒罭罫罧罨罬羦羥羧翛翜耡腤腠腷腜腩腛腢腲朡腞腶腧腯"], +["dfa1","腄腡舝艉艄艀艂艅蓱萿葖葶葹蒏蒍葥葑葀蒆葧萰葍葽葚葙葴葳葝蔇葞萷萺萴葺葃葸萲葅萩菙葋萯葂萭葟葰萹葎葌葒葯蓅蒎萻葇萶萳葨葾葄萫葠葔葮葐蜋蜄蛷蜌蛺蛖蛵蝍蛸蜎蜉蜁蛶蜍蜅裖裋裍裎裞裛裚裌裐覅覛觟觥觤"], +["e040","觡觠觢觜触詶誆詿詡訿詷誂誄詵誃誁詴詺谼豋豊豥豤豦貆貄貅賌赨赩趑趌趎趏趍趓趔趐趒跰跠跬跱跮跐跩跣跢跧跲跫跴輆軿輁輀輅輇輈輂輋遒逿"], +["e0a1","遄遉逽鄐鄍鄏鄑鄖鄔鄋鄎酮酯鉈鉒鈰鈺鉦鈳鉥鉞銃鈮鉊鉆鉭鉬鉏鉠鉧鉯鈶鉡鉰鈱鉔鉣鉐鉲鉎鉓鉌鉖鈲閟閜閞閛隒隓隑隗雎雺雽雸雵靳靷靸靲頏頍頎颬飶飹馯馲馰馵骭骫魛鳪鳭鳧麀黽僦僔僗僨僳僛僪僝僤僓僬僰僯僣僠"], +["e140","凘劀劁勩勫匰厬嘧嘕嘌嘒嗼嘏嘜嘁嘓嘂嗺嘝嘄嗿嗹墉塼墐墘墆墁塿塴墋塺墇墑墎塶墂墈塻墔墏壾奫嫜嫮嫥嫕嫪嫚嫭嫫嫳嫢嫠嫛嫬嫞嫝嫙嫨嫟孷寠"], +["e1a1","寣屣嶂嶀嵽嶆嵺嶁嵷嶊嶉嶈嵾嵼嶍嵹嵿幘幙幓廘廑廗廎廜廕廙廒廔彄彃彯徶愬愨慁慞慱慳慒慓慲慬憀慴慔慺慛慥愻慪慡慖戩戧戫搫摍摛摝摴摶摲摳摽摵摦撦摎撂摞摜摋摓摠摐摿搿摬摫摙摥摷敳斠暡暠暟朅朄朢榱榶槉"], +["e240","榠槎榖榰榬榼榑榙榎榧榍榩榾榯榿槄榽榤槔榹槊榚槏榳榓榪榡榞槙榗榐槂榵榥槆歊歍歋殞殟殠毃毄毾滎滵滱漃漥滸漷滻漮漉潎漙漚漧漘漻漒滭漊"], +["e2a1","漶潳滹滮漭潀漰漼漵滫漇漎潃漅滽滶漹漜滼漺漟漍漞漈漡熇熐熉熀熅熂熏煻熆熁熗牄牓犗犕犓獃獍獑獌瑢瑳瑱瑵瑲瑧瑮甀甂甃畽疐瘖瘈瘌瘕瘑瘊瘔皸瞁睼瞅瞂睮瞀睯睾瞃碲碪碴碭碨硾碫碞碥碠碬碢碤禘禊禋禖禕禔禓"], +["e340","禗禈禒禐稫穊稰稯稨稦窨窫窬竮箈箜箊箑箐箖箍箌箛箎箅箘劄箙箤箂粻粿粼粺綧綷緂綣綪緁緀緅綝緎緄緆緋緌綯綹綖綼綟綦綮綩綡緉罳翢翣翥翞"], +["e3a1","耤聝聜膉膆膃膇膍膌膋舕蒗蒤蒡蒟蒺蓎蓂蒬蒮蒫蒹蒴蓁蓍蒪蒚蒱蓐蒝蒧蒻蒢蒔蓇蓌蒛蒩蒯蒨蓖蒘蒶蓏蒠蓗蓔蓒蓛蒰蒑虡蜳蜣蜨蝫蝀蜮蜞蜡蜙蜛蝃蜬蝁蜾蝆蜠蜲蜪蜭蜼蜒蜺蜱蜵蝂蜦蜧蜸蜤蜚蜰蜑裷裧裱裲裺裾裮裼裶裻"], +["e440","裰裬裫覝覡覟覞觩觫觨誫誙誋誒誏誖谽豨豩賕賏賗趖踉踂跿踍跽踊踃踇踆踅跾踀踄輐輑輎輍鄣鄜鄠鄢鄟鄝鄚鄤鄡鄛酺酲酹酳銥銤鉶銛鉺銠銔銪銍"], +["e4a1","銦銚銫鉹銗鉿銣鋮銎銂銕銢鉽銈銡銊銆銌銙銧鉾銇銩銝銋鈭隞隡雿靘靽靺靾鞃鞀鞂靻鞄鞁靿韎韍頖颭颮餂餀餇馝馜駃馹馻馺駂馽駇骱髣髧鬾鬿魠魡魟鳱鳲鳵麧僿儃儰僸儆儇僶僾儋儌僽儊劋劌勱勯噈噂噌嘵噁噊噉噆噘"], +["e540","噚噀嘳嘽嘬嘾嘸嘪嘺圚墫墝墱墠墣墯墬墥墡壿嫿嫴嫽嫷嫶嬃嫸嬂嫹嬁嬇嬅嬏屧嶙嶗嶟嶒嶢嶓嶕嶠嶜嶡嶚嶞幩幝幠幜緳廛廞廡彉徲憋憃慹憱憰憢憉"], +["e5a1","憛憓憯憭憟憒憪憡憍慦憳戭摮摰撖撠撅撗撜撏撋撊撌撣撟摨撱撘敶敺敹敻斲斳暵暰暩暲暷暪暯樀樆樗槥槸樕槱槤樠槿槬槢樛樝槾樧槲槮樔槷槧橀樈槦槻樍槼槫樉樄樘樥樏槶樦樇槴樖歑殥殣殢殦氁氀毿氂潁漦潾澇濆澒"], +["e640","澍澉澌潢潏澅潚澖潶潬澂潕潲潒潐潗澔澓潝漀潡潫潽潧澐潓澋潩潿澕潣潷潪潻熲熯熛熰熠熚熩熵熝熥熞熤熡熪熜熧熳犘犚獘獒獞獟獠獝獛獡獚獙"], +["e6a1","獢璇璉璊璆璁瑽璅璈瑼瑹甈甇畾瘥瘞瘙瘝瘜瘣瘚瘨瘛皜皝皞皛瞍瞏瞉瞈磍碻磏磌磑磎磔磈磃磄磉禚禡禠禜禢禛歶稹窲窴窳箷篋箾箬篎箯箹篊箵糅糈糌糋緷緛緪緧緗緡縃緺緦緶緱緰緮緟罶羬羰羭翭翫翪翬翦翨聤聧膣膟"], +["e740","膞膕膢膙膗舖艏艓艒艐艎艑蔤蔻蔏蔀蔩蔎蔉蔍蔟蔊蔧蔜蓻蔫蓺蔈蔌蓴蔪蓲蔕蓷蓫蓳蓼蔒蓪蓩蔖蓾蔨蔝蔮蔂蓽蔞蓶蔱蔦蓧蓨蓰蓯蓹蔘蔠蔰蔋蔙蔯虢"], +["e7a1","蝖蝣蝤蝷蟡蝳蝘蝔蝛蝒蝡蝚蝑蝞蝭蝪蝐蝎蝟蝝蝯蝬蝺蝮蝜蝥蝏蝻蝵蝢蝧蝩衚褅褌褔褋褗褘褙褆褖褑褎褉覢覤覣觭觰觬諏諆誸諓諑諔諕誻諗誾諀諅諘諃誺誽諙谾豍貏賥賟賙賨賚賝賧趠趜趡趛踠踣踥踤踮踕踛踖踑踙踦踧"], +["e840","踔踒踘踓踜踗踚輬輤輘輚輠輣輖輗遳遰遯遧遫鄯鄫鄩鄪鄲鄦鄮醅醆醊醁醂醄醀鋐鋃鋄鋀鋙銶鋏鋱鋟鋘鋩鋗鋝鋌鋯鋂鋨鋊鋈鋎鋦鋍鋕鋉鋠鋞鋧鋑鋓"], +["e8a1","銵鋡鋆銴镼閬閫閮閰隤隢雓霅霈霂靚鞊鞎鞈韐韏頞頝頦頩頨頠頛頧颲餈飺餑餔餖餗餕駜駍駏駓駔駎駉駖駘駋駗駌骳髬髫髳髲髱魆魃魧魴魱魦魶魵魰魨魤魬鳼鳺鳽鳿鳷鴇鴀鳹鳻鴈鴅鴄麃黓鼏鼐儜儓儗儚儑凞匴叡噰噠噮"], +["e940","噳噦噣噭噲噞噷圜圛壈墽壉墿墺壂墼壆嬗嬙嬛嬡嬔嬓嬐嬖嬨嬚嬠嬞寯嶬嶱嶩嶧嶵嶰嶮嶪嶨嶲嶭嶯嶴幧幨幦幯廩廧廦廨廥彋徼憝憨憖懅憴懆懁懌憺"], +["e9a1","憿憸憌擗擖擐擏擉撽撉擃擛擳擙攳敿敼斢曈暾曀曊曋曏暽暻暺曌朣樴橦橉橧樲橨樾橝橭橶橛橑樨橚樻樿橁橪橤橐橏橔橯橩橠樼橞橖橕橍橎橆歕歔歖殧殪殫毈毇氄氃氆澭濋澣濇澼濎濈潞濄澽澞濊澨瀄澥澮澺澬澪濏澿澸"], +["ea40","澢濉澫濍澯澲澰燅燂熿熸燖燀燁燋燔燊燇燏熽燘熼燆燚燛犝犞獩獦獧獬獥獫獪瑿璚璠璔璒璕璡甋疀瘯瘭瘱瘽瘳瘼瘵瘲瘰皻盦瞚瞝瞡瞜瞛瞢瞣瞕瞙"], +["eaa1","瞗磝磩磥磪磞磣磛磡磢磭磟磠禤穄穈穇窶窸窵窱窷篞篣篧篝篕篥篚篨篹篔篪篢篜篫篘篟糒糔糗糐糑縒縡縗縌縟縠縓縎縜縕縚縢縋縏縖縍縔縥縤罃罻罼罺羱翯耪耩聬膱膦膮膹膵膫膰膬膴膲膷膧臲艕艖艗蕖蕅蕫蕍蕓蕡蕘"], +["eb40","蕀蕆蕤蕁蕢蕄蕑蕇蕣蔾蕛蕱蕎蕮蕵蕕蕧蕠薌蕦蕝蕔蕥蕬虣虥虤螛螏螗螓螒螈螁螖螘蝹螇螣螅螐螑螝螄螔螜螚螉褞褦褰褭褮褧褱褢褩褣褯褬褟觱諠"], +["eba1","諢諲諴諵諝謔諤諟諰諈諞諡諨諿諯諻貑貒貐賵賮賱賰賳赬赮趥趧踳踾踸蹀蹅踶踼踽蹁踰踿躽輶輮輵輲輹輷輴遶遹遻邆郺鄳鄵鄶醓醐醑醍醏錧錞錈錟錆錏鍺錸錼錛錣錒錁鍆錭錎錍鋋錝鋺錥錓鋹鋷錴錂錤鋿錩錹錵錪錔錌"], +["ec40","錋鋾錉錀鋻錖閼闍閾閹閺閶閿閵閽隩雔霋霒霐鞙鞗鞔韰韸頵頯頲餤餟餧餩馞駮駬駥駤駰駣駪駩駧骹骿骴骻髶髺髹髷鬳鮀鮅鮇魼魾魻鮂鮓鮒鮐魺鮕"], +["eca1","魽鮈鴥鴗鴠鴞鴔鴩鴝鴘鴢鴐鴙鴟麈麆麇麮麭黕黖黺鼒鼽儦儥儢儤儠儩勴嚓嚌嚍嚆嚄嚃噾嚂噿嚁壖壔壏壒嬭嬥嬲嬣嬬嬧嬦嬯嬮孻寱寲嶷幬幪徾徻懃憵憼懧懠懥懤懨懞擯擩擣擫擤擨斁斀斶旚曒檍檖檁檥檉檟檛檡檞檇檓檎"], +["ed40","檕檃檨檤檑橿檦檚檅檌檒歛殭氉濌澩濴濔濣濜濭濧濦濞濲濝濢濨燡燱燨燲燤燰燢獳獮獯璗璲璫璐璪璭璱璥璯甐甑甒甏疄癃癈癉癇皤盩瞵瞫瞲瞷瞶"], +["eda1","瞴瞱瞨矰磳磽礂磻磼磲礅磹磾礄禫禨穜穛穖穘穔穚窾竀竁簅簏篲簀篿篻簎篴簋篳簂簉簃簁篸篽簆篰篱簐簊糨縭縼繂縳顈縸縪繉繀繇縩繌縰縻縶繄縺罅罿罾罽翴翲耬膻臄臌臊臅臇膼臩艛艚艜薃薀薏薧薕薠薋薣蕻薤薚薞"], +["ee40","蕷蕼薉薡蕺蕸蕗薎薖薆薍薙薝薁薢薂薈薅蕹蕶薘薐薟虨螾螪螭蟅螰螬螹螵螼螮蟉蟃蟂蟌螷螯蟄蟊螴螶螿螸螽蟞螲褵褳褼褾襁襒褷襂覭覯覮觲觳謞"], +["eea1","謘謖謑謅謋謢謏謒謕謇謍謈謆謜謓謚豏豰豲豱豯貕貔賹赯蹎蹍蹓蹐蹌蹇轃轀邅遾鄸醚醢醛醙醟醡醝醠鎡鎃鎯鍤鍖鍇鍼鍘鍜鍶鍉鍐鍑鍠鍭鎏鍌鍪鍹鍗鍕鍒鍏鍱鍷鍻鍡鍞鍣鍧鎀鍎鍙闇闀闉闃闅閷隮隰隬霠霟霘霝霙鞚鞡鞜"], 
+["ef40","鞞鞝韕韔韱顁顄顊顉顅顃餥餫餬餪餳餲餯餭餱餰馘馣馡騂駺駴駷駹駸駶駻駽駾駼騃骾髾髽鬁髼魈鮚鮨鮞鮛鮦鮡鮥鮤鮆鮢鮠鮯鴳鵁鵧鴶鴮鴯鴱鴸鴰"], +["efa1","鵅鵂鵃鴾鴷鵀鴽翵鴭麊麉麍麰黈黚黻黿鼤鼣鼢齔龠儱儭儮嚘嚜嚗嚚嚝嚙奰嬼屩屪巀幭幮懘懟懭懮懱懪懰懫懖懩擿攄擽擸攁攃擼斔旛曚曛曘櫅檹檽櫡櫆檺檶檷櫇檴檭歞毉氋瀇瀌瀍瀁瀅瀔瀎濿瀀濻瀦濼濷瀊爁燿燹爃燽獶"], +["f040","璸瓀璵瓁璾璶璻瓂甔甓癜癤癙癐癓癗癚皦皽盬矂瞺磿礌礓礔礉礐礒礑禭禬穟簜簩簙簠簟簭簝簦簨簢簥簰繜繐繖繣繘繢繟繑繠繗繓羵羳翷翸聵臑臒"], +["f0a1","臐艟艞薴藆藀藃藂薳薵薽藇藄薿藋藎藈藅薱薶藒蘤薸薷薾虩蟧蟦蟢蟛蟫蟪蟥蟟蟳蟤蟔蟜蟓蟭蟘蟣螤蟗蟙蠁蟴蟨蟝襓襋襏襌襆襐襑襉謪謧謣謳謰謵譇謯謼謾謱謥謷謦謶謮謤謻謽謺豂豵貙貘貗賾贄贂贀蹜蹢蹠蹗蹖蹞蹥蹧"], +["f140","蹛蹚蹡蹝蹩蹔轆轇轈轋鄨鄺鄻鄾醨醥醧醯醪鎵鎌鎒鎷鎛鎝鎉鎧鎎鎪鎞鎦鎕鎈鎙鎟鎍鎱鎑鎲鎤鎨鎴鎣鎥闒闓闑隳雗雚巂雟雘雝霣霢霥鞬鞮鞨鞫鞤鞪"], +["f1a1","鞢鞥韗韙韖韘韺顐顑顒颸饁餼餺騏騋騉騍騄騑騊騅騇騆髀髜鬈鬄鬅鬩鬵魊魌魋鯇鯆鯃鮿鯁鮵鮸鯓鮶鯄鮹鮽鵜鵓鵏鵊鵛鵋鵙鵖鵌鵗鵒鵔鵟鵘鵚麎麌黟鼁鼀鼖鼥鼫鼪鼩鼨齌齕儴儵劖勷厴嚫嚭嚦嚧嚪嚬壚壝壛夒嬽嬾嬿巃幰"], +["f240","徿懻攇攐攍攉攌攎斄旞旝曞櫧櫠櫌櫑櫙櫋櫟櫜櫐櫫櫏櫍櫞歠殰氌瀙瀧瀠瀖瀫瀡瀢瀣瀩瀗瀤瀜瀪爌爊爇爂爅犥犦犤犣犡瓋瓅璷瓃甖癠矉矊矄矱礝礛"], +["f2a1","礡礜礗礞禰穧穨簳簼簹簬簻糬糪繶繵繸繰繷繯繺繲繴繨罋罊羃羆羷翽翾聸臗臕艤艡艣藫藱藭藙藡藨藚藗藬藲藸藘藟藣藜藑藰藦藯藞藢蠀蟺蠃蟶蟷蠉蠌蠋蠆蟼蠈蟿蠊蠂襢襚襛襗襡襜襘襝襙覈覷覶觶譐譈譊譀譓譖譔譋譕"], +["f340","譑譂譒譗豃豷豶貚贆贇贉趬趪趭趫蹭蹸蹳蹪蹯蹻軂轒轑轏轐轓辴酀鄿醰醭鏞鏇鏏鏂鏚鏐鏹鏬鏌鏙鎩鏦鏊鏔鏮鏣鏕鏄鏎鏀鏒鏧镽闚闛雡霩霫霬霨霦"], +["f3a1","鞳鞷鞶韝韞韟顜顙顝顗颿颽颻颾饈饇饃馦馧騚騕騥騝騤騛騢騠騧騣騞騜騔髂鬋鬊鬎鬌鬷鯪鯫鯠鯞鯤鯦鯢鯰鯔鯗鯬鯜鯙鯥鯕鯡鯚鵷鶁鶊鶄鶈鵱鶀鵸鶆鶋鶌鵽鵫鵴鵵鵰鵩鶅鵳鵻鶂鵯鵹鵿鶇鵨麔麑黀黼鼭齀齁齍齖齗齘匷嚲"], +["f440","嚵嚳壣孅巆巇廮廯忀忁懹攗攖攕攓旟曨曣曤櫳櫰櫪櫨櫹櫱櫮櫯瀼瀵瀯瀷瀴瀱灂瀸瀿瀺瀹灀瀻瀳灁爓爔犨獽獼璺皫皪皾盭矌矎矏矍矲礥礣礧礨礤礩"], +["f4a1","禲穮穬穭竷籉籈籊籇籅糮繻繾纁纀羺翿聹臛臙舋艨艩蘢藿蘁藾蘛蘀藶蘄蘉蘅蘌藽蠙蠐蠑蠗蠓蠖襣襦覹觷譠譪譝譨譣譥譧譭趮躆躈躄轙轖轗轕轘轚邍酃酁醷醵醲醳鐋鐓鏻鐠鐏鐔鏾鐕鐐鐨鐙鐍鏵鐀鏷鐇鐎鐖鐒鏺鐉鏸鐊鏿"], +["f540","鏼鐌鏶鐑鐆闞闠闟霮霯鞹鞻韽韾顠顢顣顟飁飂饐饎饙饌饋饓騲騴騱騬騪騶騩騮騸騭髇髊髆鬐鬒鬑鰋鰈鯷鰅鰒鯸鱀鰇鰎鰆鰗鰔鰉鶟鶙鶤鶝鶒鶘鶐鶛"], +["f5a1","鶠鶔鶜鶪鶗鶡鶚鶢鶨鶞鶣鶿鶩鶖鶦鶧麙麛麚黥黤黧黦鼰鼮齛齠齞齝齙龑儺儹劘劗囃嚽嚾孈孇巋巏廱懽攛欂櫼欃櫸欀灃灄灊灈灉灅灆爝爚爙獾甗癪矐礭礱礯籔籓糲纊纇纈纋纆纍罍羻耰臝蘘蘪蘦蘟蘣蘜蘙蘧蘮蘡蘠蘩蘞蘥"], +["f640","蠩蠝蠛蠠蠤蠜蠫衊襭襩襮襫觺譹譸譅譺譻贐贔趯躎躌轞轛轝酆酄酅醹鐿鐻鐶鐩鐽鐼鐰鐹鐪鐷鐬鑀鐱闥闤闣霵霺鞿韡顤飉飆飀饘饖騹騽驆驄驂驁騺"], +["f6a1","騿髍鬕鬗鬘鬖鬺魒鰫鰝鰜鰬鰣鰨鰩鰤鰡鶷鶶鶼鷁鷇鷊鷏鶾鷅鷃鶻鶵鷎鶹鶺鶬鷈鶱鶭鷌鶳鷍鶲鹺麜黫黮黭鼛鼘鼚鼱齎齥齤龒亹囆囅囋奱孋孌巕巑廲攡攠攦攢欋欈欉氍灕灖灗灒爞爟犩獿瓘瓕瓙瓗癭皭礵禴穰穱籗籜籙籛籚"], +["f740","糴糱纑罏羇臞艫蘴蘵蘳蘬蘲蘶蠬蠨蠦蠪蠥襱覿覾觻譾讄讂讆讅譿贕躕躔躚躒躐躖躗轠轢酇鑌鑐鑊鑋鑏鑇鑅鑈鑉鑆霿韣顪顩飋饔饛驎驓驔驌驏驈驊"], +["f7a1","驉驒驐髐鬙鬫鬻魖魕鱆鱈鰿鱄鰹鰳鱁鰼鰷鰴鰲鰽鰶鷛鷒鷞鷚鷋鷐鷜鷑鷟鷩鷙鷘鷖鷵鷕鷝麶黰鼵鼳鼲齂齫龕龢儽劙壨壧奲孍巘蠯彏戁戃戄攩攥斖曫欑欒欏毊灛灚爢玂玁玃癰矔籧籦纕艬蘺虀蘹蘼蘱蘻蘾蠰蠲蠮蠳襶襴襳觾"], +["f840","讌讎讋讈豅贙躘轤轣醼鑢鑕鑝鑗鑞韄韅頀驖驙鬞鬟鬠鱒鱘鱐鱊鱍鱋鱕鱙鱌鱎鷻鷷鷯鷣鷫鷸鷤鷶鷡鷮鷦鷲鷰鷢鷬鷴鷳鷨鷭黂黐黲黳鼆鼜鼸鼷鼶齃齏"], +["f8a1","齱齰齮齯囓囍孎屭攭曭曮欓灟灡灝灠爣瓛瓥矕礸禷禶籪纗羉艭虃蠸蠷蠵衋讔讕躞躟躠躝醾醽釂鑫鑨鑩雥靆靃靇韇韥驞髕魙鱣鱧鱦鱢鱞鱠鸂鷾鸇鸃鸆鸅鸀鸁鸉鷿鷽鸄麠鼞齆齴齵齶囔攮斸欘欙欗欚灢爦犪矘矙礹籩籫糶纚"], +["f940","纘纛纙臠臡虆虇虈襹襺襼襻觿讘讙躥躤躣鑮鑭鑯鑱鑳靉顲饟鱨鱮鱭鸋鸍鸐鸏鸒鸑麡黵鼉齇齸齻齺齹圞灦籯蠼趲躦釃鑴鑸鑶鑵驠鱴鱳鱱鱵鸔鸓黶鼊"], +["f9a1","龤灨灥糷虪蠾蠽蠿讞貜躩軉靋顳顴飌饡馫驤驦驧鬤鸕鸗齈戇欞爧虌躨钂钀钁驩驨鬮鸙爩虋讟钃鱹麷癵驫鱺鸝灩灪麤齾齉龘碁銹裏墻恒粧嫺╔╦╗╠╬╣╚╩╝╒╤╕╞╪╡╘╧╛╓╥╖╟╫╢╙╨╜║═╭╮╰╯▓"] +] diff --git a/node_modules/iconv-lite/encodings/tables/eucjp.json b/node_modules/iconv-lite/encodings/tables/eucjp.json new file mode 100644 index 0000000..4fa61ca --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/eucjp.json @@ -0,0 +1,182 @@ +[ +["0","\u0000",127], +["8ea1","。",62], +["a1a1"," 、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈",9,"+-±×÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇"], +["a2a1","◆□■△▲▽▼※〒→←↑↓〓"], +["a2ba","∈∋⊆⊇⊂⊃∪∩"], +["a2ca","∧∨¬⇒⇔∀∃"], +["a2dc","∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬"], +["a2f2","ʼn♯♭♪†‡¶"], +["a2fe","◯"], +["a3b0","0",9], +["a3c1","A",25], +["a3e1","a",25], +["a4a1","ぁ",82], +["a5a1","ァ",85], +["a6a1","Α",16,"Σ",6], +["a6c1","α",16,"σ",6], +["a7a1","А",5,"ЁЖ",25], +["a7d1","а",5,"ёж",25], +["a8a1","─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂"], +["ada1","①",19,"Ⅰ",9], +["adc0","㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡"], +["addf","㍻〝〟№㏍℡㊤",4,"㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪"], +["b0a1","亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭"], +["b1a1","院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応"], +["b2a1","押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改"], +["b3a1","魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱"], +["b4a1","粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄"], 
+["b5a1","機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京"], +["b6a1","供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈"], +["b7a1","掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲"], +["b8a1","検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向"], +["b9a1","后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降項香高鴻剛劫号合壕拷濠豪轟麹克刻告国穀酷鵠黒獄漉腰甑忽惚骨狛込"], +["baa1","此頃今困坤墾婚恨懇昏昆根梱混痕紺艮魂些佐叉唆嵯左差査沙瑳砂詐鎖裟坐座挫債催再最哉塞妻宰彩才採栽歳済災采犀砕砦祭斎細菜裁載際剤在材罪財冴坂阪堺榊肴咲崎埼碕鷺作削咋搾昨朔柵窄策索錯桜鮭笹匙冊刷"], +["bba1","察拶撮擦札殺薩雑皐鯖捌錆鮫皿晒三傘参山惨撒散桟燦珊産算纂蚕讃賛酸餐斬暫残仕仔伺使刺司史嗣四士始姉姿子屍市師志思指支孜斯施旨枝止死氏獅祉私糸紙紫肢脂至視詞詩試誌諮資賜雌飼歯事似侍児字寺慈持時"], +["bca1","次滋治爾璽痔磁示而耳自蒔辞汐鹿式識鴫竺軸宍雫七叱執失嫉室悉湿漆疾質実蔀篠偲柴芝屡蕊縞舎写射捨赦斜煮社紗者謝車遮蛇邪借勺尺杓灼爵酌釈錫若寂弱惹主取守手朱殊狩珠種腫趣酒首儒受呪寿授樹綬需囚収周"], +["bda1","宗就州修愁拾洲秀秋終繍習臭舟蒐衆襲讐蹴輯週酋酬集醜什住充十従戎柔汁渋獣縦重銃叔夙宿淑祝縮粛塾熟出術述俊峻春瞬竣舜駿准循旬楯殉淳準潤盾純巡遵醇順処初所暑曙渚庶緒署書薯藷諸助叙女序徐恕鋤除傷償"], +["bea1","勝匠升召哨商唱嘗奨妾娼宵将小少尚庄床廠彰承抄招掌捷昇昌昭晶松梢樟樵沼消渉湘焼焦照症省硝礁祥称章笑粧紹肖菖蒋蕉衝裳訟証詔詳象賞醤鉦鍾鐘障鞘上丈丞乗冗剰城場壌嬢常情擾条杖浄状畳穣蒸譲醸錠嘱埴飾"], +["bfa1","拭植殖燭織職色触食蝕辱尻伸信侵唇娠寝審心慎振新晋森榛浸深申疹真神秦紳臣芯薪親診身辛進針震人仁刃塵壬尋甚尽腎訊迅陣靭笥諏須酢図厨逗吹垂帥推水炊睡粋翠衰遂酔錐錘随瑞髄崇嵩数枢趨雛据杉椙菅頗雀裾"], +["c0a1","澄摺寸世瀬畝是凄制勢姓征性成政整星晴棲栖正清牲生盛精聖声製西誠誓請逝醒青静斉税脆隻席惜戚斥昔析石積籍績脊責赤跡蹟碩切拙接摂折設窃節説雪絶舌蝉仙先千占宣専尖川戦扇撰栓栴泉浅洗染潜煎煽旋穿箭線"], +["c1a1","繊羨腺舛船薦詮賎践選遷銭銑閃鮮前善漸然全禅繕膳糎噌塑岨措曾曽楚狙疏疎礎祖租粗素組蘇訴阻遡鼠僧創双叢倉喪壮奏爽宋層匝惣想捜掃挿掻操早曹巣槍槽漕燥争痩相窓糟総綜聡草荘葬蒼藻装走送遭鎗霜騒像増憎"], +["c2a1","臓蔵贈造促側則即息捉束測足速俗属賊族続卒袖其揃存孫尊損村遜他多太汰詑唾堕妥惰打柁舵楕陀駄騨体堆対耐岱帯待怠態戴替泰滞胎腿苔袋貸退逮隊黛鯛代台大第醍題鷹滝瀧卓啄宅托択拓沢濯琢託鐸濁諾茸凧蛸只"], +["c3a1","叩但達辰奪脱巽竪辿棚谷狸鱈樽誰丹単嘆坦担探旦歎淡湛炭短端箪綻耽胆蛋誕鍛団壇弾断暖檀段男談値知地弛恥智池痴稚置致蜘遅馳築畜竹筑蓄逐秩窒茶嫡着中仲宙忠抽昼柱注虫衷註酎鋳駐樗瀦猪苧著貯丁兆凋喋寵"], +["c4a1","帖帳庁弔張彫徴懲挑暢朝潮牒町眺聴脹腸蝶調諜超跳銚長頂鳥勅捗直朕沈珍賃鎮陳津墜椎槌追鎚痛通塚栂掴槻佃漬柘辻蔦綴鍔椿潰坪壷嬬紬爪吊釣鶴亭低停偵剃貞呈堤定帝底庭廷弟悌抵挺提梯汀碇禎程締艇訂諦蹄逓"], +["c5a1","邸鄭釘鼎泥摘擢敵滴的笛適鏑溺哲徹撤轍迭鉄典填天展店添纏甜貼転顛点伝殿澱田電兎吐堵塗妬屠徒斗杜渡登菟賭途都鍍砥砺努度土奴怒倒党冬凍刀唐塔塘套宕島嶋悼投搭東桃梼棟盗淘湯涛灯燈当痘祷等答筒糖統到"], +["c6a1","董蕩藤討謄豆踏逃透鐙陶頭騰闘働動同堂導憧撞洞瞳童胴萄道銅峠鴇匿得徳涜特督禿篤毒独読栃橡凸突椴届鳶苫寅酉瀞噸屯惇敦沌豚遁頓呑曇鈍奈那内乍凪薙謎灘捺鍋楢馴縄畷南楠軟難汝二尼弐迩匂賑肉虹廿日乳入"], +["c7a1","如尿韮任妊忍認濡禰祢寧葱猫熱年念捻撚燃粘乃廼之埜嚢悩濃納能脳膿農覗蚤巴把播覇杷波派琶破婆罵芭馬俳廃拝排敗杯盃牌背肺輩配倍培媒梅楳煤狽買売賠陪這蝿秤矧萩伯剥博拍柏泊白箔粕舶薄迫曝漠爆縛莫駁麦"], +["c8a1","函箱硲箸肇筈櫨幡肌畑畠八鉢溌発醗髪伐罰抜筏閥鳩噺塙蛤隼伴判半反叛帆搬斑板氾汎版犯班畔繁般藩販範釆煩頒飯挽晩番盤磐蕃蛮匪卑否妃庇彼悲扉批披斐比泌疲皮碑秘緋罷肥被誹費避非飛樋簸備尾微枇毘琵眉美"], +["c9a1","鼻柊稗匹疋髭彦膝菱肘弼必畢筆逼桧姫媛紐百謬俵彪標氷漂瓢票表評豹廟描病秒苗錨鋲蒜蛭鰭品彬斌浜瀕貧賓頻敏瓶不付埠夫婦富冨布府怖扶敷斧普浮父符腐膚芙譜負賦赴阜附侮撫武舞葡蕪部封楓風葺蕗伏副復幅服"], +["caa1","福腹複覆淵弗払沸仏物鮒分吻噴墳憤扮焚奮粉糞紛雰文聞丙併兵塀幣平弊柄並蔽閉陛米頁僻壁癖碧別瞥蔑箆偏変片篇編辺返遍便勉娩弁鞭保舗鋪圃捕歩甫補輔穂募墓慕戊暮母簿菩倣俸包呆報奉宝峰峯崩庖抱捧放方朋"], +["cba1","法泡烹砲縫胞芳萌蓬蜂褒訪豊邦鋒飽鳳鵬乏亡傍剖坊妨帽忘忙房暴望某棒冒紡肪膨謀貌貿鉾防吠頬北僕卜墨撲朴牧睦穆釦勃没殆堀幌奔本翻凡盆摩磨魔麻埋妹昧枚毎哩槙幕膜枕鮪柾鱒桝亦俣又抹末沫迄侭繭麿万慢満"], +["cca1","漫蔓味未魅巳箕岬密蜜湊蓑稔脈妙粍民眠務夢無牟矛霧鵡椋婿娘冥名命明盟迷銘鳴姪牝滅免棉綿緬面麺摸模茂妄孟毛猛盲網耗蒙儲木黙目杢勿餅尤戻籾貰問悶紋門匁也冶夜爺耶野弥矢厄役約薬訳躍靖柳薮鑓愉愈油癒"], +["cda1","諭輸唯佑優勇友宥幽悠憂揖有柚湧涌猶猷由祐裕誘遊邑郵雄融夕予余与誉輿預傭幼妖容庸揚揺擁曜楊様洋溶熔用窯羊耀葉蓉要謡踊遥陽養慾抑欲沃浴翌翼淀羅螺裸来莱頼雷洛絡落酪乱卵嵐欄濫藍蘭覧利吏履李梨理璃"], +["cea1","痢裏裡里離陸律率立葎掠略劉流溜琉留硫粒隆竜龍侶慮旅虜了亮僚両凌寮料梁涼猟療瞭稜糧良諒遼量陵領力緑倫厘林淋燐琳臨輪隣鱗麟瑠塁涙累類令伶例冷励嶺怜玲礼苓鈴隷零霊麗齢暦歴列劣烈裂廉恋憐漣煉簾練聯"], +["cfa1","蓮連錬呂魯櫓炉賂路露労婁廊弄朗楼榔浪漏牢狼篭老聾蝋郎六麓禄肋録論倭和話歪賄脇惑枠鷲亙亘鰐詫藁蕨椀湾碗腕"], +["d0a1","弌丐丕个丱丶丼丿乂乖乘亂亅豫亊舒弍于亞亟亠亢亰亳亶从仍仄仆仂仗仞仭仟价伉佚估佛佝佗佇佶侈侏侘佻佩佰侑佯來侖儘俔俟俎俘俛俑俚俐俤俥倚倨倔倪倥倅伜俶倡倩倬俾俯們倆偃假會偕偐偈做偖偬偸傀傚傅傴傲"], +["d1a1","僉僊傳僂僖僞僥僭僣僮價僵儉儁儂儖儕儔儚儡儺儷儼儻儿兀兒兌兔兢竸兩兪兮冀冂囘册冉冏冑冓冕冖冤冦冢冩冪冫决冱冲冰况冽凅凉凛几處凩凭凰凵凾刄刋刔刎刧刪刮刳刹剏剄剋剌剞剔剪剴剩剳剿剽劍劔劒剱劈劑辨"], +["d2a1","辧劬劭劼劵勁勍勗勞勣勦飭勠勳勵勸勹匆匈甸匍匐匏匕匚匣匯匱匳匸區卆卅丗卉卍凖卞卩卮夘卻卷厂厖厠厦厥厮厰厶參簒雙叟曼燮叮叨叭叺吁吽呀听吭吼吮吶吩吝呎咏呵咎呟呱呷呰咒呻咀呶咄咐咆哇咢咸咥咬哄哈咨"], +["d3a1","咫哂咤咾咼哘哥哦唏唔哽哮哭哺哢唹啀啣啌售啜啅啖啗唸唳啝喙喀咯喊喟啻啾喘喞單啼喃喩喇喨嗚嗅嗟嗄嗜嗤嗔嘔嗷嘖嗾嗽嘛嗹噎噐營嘴嘶嘲嘸噫噤嘯噬噪嚆嚀嚊嚠嚔嚏嚥嚮嚶嚴囂嚼囁囃囀囈囎囑囓囗囮囹圀囿圄圉"], +["d4a1","圈國圍圓團圖嗇圜圦圷圸坎圻址坏坩埀垈坡坿垉垓垠垳垤垪垰埃埆埔埒埓堊埖埣堋堙堝塲堡塢塋塰毀塒堽塹墅墹墟墫墺壞墻墸墮壅壓壑壗壙壘壥壜壤壟壯壺壹壻壼壽夂夊夐夛梦夥夬夭夲夸夾竒奕奐奎奚奘奢奠奧奬奩"], +["d5a1","奸妁妝佞侫妣妲姆姨姜妍姙姚娥娟娑娜娉娚婀婬婉娵娶婢婪媚媼媾嫋嫂媽嫣嫗嫦嫩嫖嫺嫻嬌嬋嬖嬲嫐嬪嬶嬾孃孅孀孑孕孚孛孥孩孰孳孵學斈孺宀它宦宸寃寇寉寔寐寤實寢寞寥寫寰寶寳尅將專對尓尠尢尨尸尹屁屆屎屓"], 
+["d6a1","屐屏孱屬屮乢屶屹岌岑岔妛岫岻岶岼岷峅岾峇峙峩峽峺峭嶌峪崋崕崗嵜崟崛崑崔崢崚崙崘嵌嵒嵎嵋嵬嵳嵶嶇嶄嶂嶢嶝嶬嶮嶽嶐嶷嶼巉巍巓巒巖巛巫已巵帋帚帙帑帛帶帷幄幃幀幎幗幔幟幢幤幇幵并幺麼广庠廁廂廈廐廏"], +["d7a1","廖廣廝廚廛廢廡廨廩廬廱廳廰廴廸廾弃弉彝彜弋弑弖弩弭弸彁彈彌彎弯彑彖彗彙彡彭彳彷徃徂彿徊很徑徇從徙徘徠徨徭徼忖忻忤忸忱忝悳忿怡恠怙怐怩怎怱怛怕怫怦怏怺恚恁恪恷恟恊恆恍恣恃恤恂恬恫恙悁悍惧悃悚"], +["d8a1","悄悛悖悗悒悧悋惡悸惠惓悴忰悽惆悵惘慍愕愆惶惷愀惴惺愃愡惻惱愍愎慇愾愨愧慊愿愼愬愴愽慂慄慳慷慘慙慚慫慴慯慥慱慟慝慓慵憙憖憇憬憔憚憊憑憫憮懌懊應懷懈懃懆憺懋罹懍懦懣懶懺懴懿懽懼懾戀戈戉戍戌戔戛"], +["d9a1","戞戡截戮戰戲戳扁扎扞扣扛扠扨扼抂抉找抒抓抖拔抃抔拗拑抻拏拿拆擔拈拜拌拊拂拇抛拉挌拮拱挧挂挈拯拵捐挾捍搜捏掖掎掀掫捶掣掏掉掟掵捫捩掾揩揀揆揣揉插揶揄搖搴搆搓搦搶攝搗搨搏摧摯摶摎攪撕撓撥撩撈撼"], +["daa1","據擒擅擇撻擘擂擱擧舉擠擡抬擣擯攬擶擴擲擺攀擽攘攜攅攤攣攫攴攵攷收攸畋效敖敕敍敘敞敝敲數斂斃變斛斟斫斷旃旆旁旄旌旒旛旙无旡旱杲昊昃旻杳昵昶昴昜晏晄晉晁晞晝晤晧晨晟晢晰暃暈暎暉暄暘暝曁暹曉暾暼"], +["dba1","曄暸曖曚曠昿曦曩曰曵曷朏朖朞朦朧霸朮朿朶杁朸朷杆杞杠杙杣杤枉杰枩杼杪枌枋枦枡枅枷柯枴柬枳柩枸柤柞柝柢柮枹柎柆柧檜栞框栩桀桍栲桎梳栫桙档桷桿梟梏梭梔條梛梃檮梹桴梵梠梺椏梍桾椁棊椈棘椢椦棡椌棍"], +["dca1","棔棧棕椶椒椄棗棣椥棹棠棯椨椪椚椣椡棆楹楷楜楸楫楔楾楮椹楴椽楙椰楡楞楝榁楪榲榮槐榿槁槓榾槎寨槊槝榻槃榧樮榑榠榜榕榴槞槨樂樛槿權槹槲槧樅榱樞槭樔槫樊樒櫁樣樓橄樌橲樶橸橇橢橙橦橈樸樢檐檍檠檄檢檣"], +["dda1","檗蘗檻櫃櫂檸檳檬櫞櫑櫟檪櫚櫪櫻欅蘖櫺欒欖鬱欟欸欷盜欹飮歇歃歉歐歙歔歛歟歡歸歹歿殀殄殃殍殘殕殞殤殪殫殯殲殱殳殷殼毆毋毓毟毬毫毳毯麾氈氓气氛氤氣汞汕汢汪沂沍沚沁沛汾汨汳沒沐泄泱泓沽泗泅泝沮沱沾"], +["dea1","沺泛泯泙泪洟衍洶洫洽洸洙洵洳洒洌浣涓浤浚浹浙涎涕濤涅淹渕渊涵淇淦涸淆淬淞淌淨淒淅淺淙淤淕淪淮渭湮渮渙湲湟渾渣湫渫湶湍渟湃渺湎渤滿渝游溂溪溘滉溷滓溽溯滄溲滔滕溏溥滂溟潁漑灌滬滸滾漿滲漱滯漲滌"], +["dfa1","漾漓滷澆潺潸澁澀潯潛濳潭澂潼潘澎澑濂潦澳澣澡澤澹濆澪濟濕濬濔濘濱濮濛瀉瀋濺瀑瀁瀏濾瀛瀚潴瀝瀘瀟瀰瀾瀲灑灣炙炒炯烱炬炸炳炮烟烋烝烙焉烽焜焙煥煕熈煦煢煌煖煬熏燻熄熕熨熬燗熹熾燒燉燔燎燠燬燧燵燼"], +["e0a1","燹燿爍爐爛爨爭爬爰爲爻爼爿牀牆牋牘牴牾犂犁犇犒犖犢犧犹犲狃狆狄狎狒狢狠狡狹狷倏猗猊猜猖猝猴猯猩猥猾獎獏默獗獪獨獰獸獵獻獺珈玳珎玻珀珥珮珞璢琅瑯琥珸琲琺瑕琿瑟瑙瑁瑜瑩瑰瑣瑪瑶瑾璋璞璧瓊瓏瓔珱"], +["e1a1","瓠瓣瓧瓩瓮瓲瓰瓱瓸瓷甄甃甅甌甎甍甕甓甞甦甬甼畄畍畊畉畛畆畚畩畤畧畫畭畸當疆疇畴疊疉疂疔疚疝疥疣痂疳痃疵疽疸疼疱痍痊痒痙痣痞痾痿痼瘁痰痺痲痳瘋瘍瘉瘟瘧瘠瘡瘢瘤瘴瘰瘻癇癈癆癜癘癡癢癨癩癪癧癬癰"], +["e2a1","癲癶癸發皀皃皈皋皎皖皓皙皚皰皴皸皹皺盂盍盖盒盞盡盥盧盪蘯盻眈眇眄眩眤眞眥眦眛眷眸睇睚睨睫睛睥睿睾睹瞎瞋瞑瞠瞞瞰瞶瞹瞿瞼瞽瞻矇矍矗矚矜矣矮矼砌砒礦砠礪硅碎硴碆硼碚碌碣碵碪碯磑磆磋磔碾碼磅磊磬"], +["e3a1","磧磚磽磴礇礒礑礙礬礫祀祠祗祟祚祕祓祺祿禊禝禧齋禪禮禳禹禺秉秕秧秬秡秣稈稍稘稙稠稟禀稱稻稾稷穃穗穉穡穢穩龝穰穹穽窈窗窕窘窖窩竈窰窶竅竄窿邃竇竊竍竏竕竓站竚竝竡竢竦竭竰笂笏笊笆笳笘笙笞笵笨笶筐"], +["e4a1","筺笄筍笋筌筅筵筥筴筧筰筱筬筮箝箘箟箍箜箚箋箒箏筝箙篋篁篌篏箴篆篝篩簑簔篦篥籠簀簇簓篳篷簗簍篶簣簧簪簟簷簫簽籌籃籔籏籀籐籘籟籤籖籥籬籵粃粐粤粭粢粫粡粨粳粲粱粮粹粽糀糅糂糘糒糜糢鬻糯糲糴糶糺紆"], +["e5a1","紂紜紕紊絅絋紮紲紿紵絆絳絖絎絲絨絮絏絣經綉絛綏絽綛綺綮綣綵緇綽綫總綢綯緜綸綟綰緘緝緤緞緻緲緡縅縊縣縡縒縱縟縉縋縢繆繦縻縵縹繃縷縲縺繧繝繖繞繙繚繹繪繩繼繻纃緕繽辮繿纈纉續纒纐纓纔纖纎纛纜缸缺"], +["e6a1","罅罌罍罎罐网罕罔罘罟罠罨罩罧罸羂羆羃羈羇羌羔羞羝羚羣羯羲羹羮羶羸譱翅翆翊翕翔翡翦翩翳翹飜耆耄耋耒耘耙耜耡耨耿耻聊聆聒聘聚聟聢聨聳聲聰聶聹聽聿肄肆肅肛肓肚肭冐肬胛胥胙胝胄胚胖脉胯胱脛脩脣脯腋"], +["e7a1","隋腆脾腓腑胼腱腮腥腦腴膃膈膊膀膂膠膕膤膣腟膓膩膰膵膾膸膽臀臂膺臉臍臑臙臘臈臚臟臠臧臺臻臾舁舂舅與舊舍舐舖舩舫舸舳艀艙艘艝艚艟艤艢艨艪艫舮艱艷艸艾芍芒芫芟芻芬苡苣苟苒苴苳苺莓范苻苹苞茆苜茉苙"], +["e8a1","茵茴茖茲茱荀茹荐荅茯茫茗茘莅莚莪莟莢莖茣莎莇莊荼莵荳荵莠莉莨菴萓菫菎菽萃菘萋菁菷萇菠菲萍萢萠莽萸蔆菻葭萪萼蕚蒄葷葫蒭葮蒂葩葆萬葯葹萵蓊葢蒹蒿蒟蓙蓍蒻蓚蓐蓁蓆蓖蒡蔡蓿蓴蔗蔘蔬蔟蔕蔔蓼蕀蕣蕘蕈"], +["e9a1","蕁蘂蕋蕕薀薤薈薑薊薨蕭薔薛藪薇薜蕷蕾薐藉薺藏薹藐藕藝藥藜藹蘊蘓蘋藾藺蘆蘢蘚蘰蘿虍乕虔號虧虱蚓蚣蚩蚪蚋蚌蚶蚯蛄蛆蚰蛉蠣蚫蛔蛞蛩蛬蛟蛛蛯蜒蜆蜈蜀蜃蛻蜑蜉蜍蛹蜊蜴蜿蜷蜻蜥蜩蜚蝠蝟蝸蝌蝎蝴蝗蝨蝮蝙"], +["eaa1","蝓蝣蝪蠅螢螟螂螯蟋螽蟀蟐雖螫蟄螳蟇蟆螻蟯蟲蟠蠏蠍蟾蟶蟷蠎蟒蠑蠖蠕蠢蠡蠱蠶蠹蠧蠻衄衂衒衙衞衢衫袁衾袞衵衽袵衲袂袗袒袮袙袢袍袤袰袿袱裃裄裔裘裙裝裹褂裼裴裨裲褄褌褊褓襃褞褥褪褫襁襄褻褶褸襌褝襠襞"], +["eba1","襦襤襭襪襯襴襷襾覃覈覊覓覘覡覩覦覬覯覲覺覽覿觀觚觜觝觧觴觸訃訖訐訌訛訝訥訶詁詛詒詆詈詼詭詬詢誅誂誄誨誡誑誥誦誚誣諄諍諂諚諫諳諧諤諱謔諠諢諷諞諛謌謇謚諡謖謐謗謠謳鞫謦謫謾謨譁譌譏譎證譖譛譚譫"], +["eca1","譟譬譯譴譽讀讌讎讒讓讖讙讚谺豁谿豈豌豎豐豕豢豬豸豺貂貉貅貊貍貎貔豼貘戝貭貪貽貲貳貮貶賈賁賤賣賚賽賺賻贄贅贊贇贏贍贐齎贓賍贔贖赧赭赱赳趁趙跂趾趺跏跚跖跌跛跋跪跫跟跣跼踈踉跿踝踞踐踟蹂踵踰踴蹊"], +["eda1","蹇蹉蹌蹐蹈蹙蹤蹠踪蹣蹕蹶蹲蹼躁躇躅躄躋躊躓躑躔躙躪躡躬躰軆躱躾軅軈軋軛軣軼軻軫軾輊輅輕輒輙輓輜輟輛輌輦輳輻輹轅轂輾轌轉轆轎轗轜轢轣轤辜辟辣辭辯辷迚迥迢迪迯邇迴逅迹迺逑逕逡逍逞逖逋逧逶逵逹迸"], +["eea1","遏遐遑遒逎遉逾遖遘遞遨遯遶隨遲邂遽邁邀邊邉邏邨邯邱邵郢郤扈郛鄂鄒鄙鄲鄰酊酖酘酣酥酩酳酲醋醉醂醢醫醯醪醵醴醺釀釁釉釋釐釖釟釡釛釼釵釶鈞釿鈔鈬鈕鈑鉞鉗鉅鉉鉤鉈銕鈿鉋鉐銜銖銓銛鉚鋏銹銷鋩錏鋺鍄錮"], +["efa1","錙錢錚錣錺錵錻鍜鍠鍼鍮鍖鎰鎬鎭鎔鎹鏖鏗鏨鏥鏘鏃鏝鏐鏈鏤鐚鐔鐓鐃鐇鐐鐶鐫鐵鐡鐺鑁鑒鑄鑛鑠鑢鑞鑪鈩鑰鑵鑷鑽鑚鑼鑾钁鑿閂閇閊閔閖閘閙閠閨閧閭閼閻閹閾闊濶闃闍闌闕闔闖關闡闥闢阡阨阮阯陂陌陏陋陷陜陞"], +["f0a1","陝陟陦陲陬隍隘隕隗險隧隱隲隰隴隶隸隹雎雋雉雍襍雜霍雕雹霄霆霈霓霎霑霏霖霙霤霪霰霹霽霾靄靆靈靂靉靜靠靤靦靨勒靫靱靹鞅靼鞁靺鞆鞋鞏鞐鞜鞨鞦鞣鞳鞴韃韆韈韋韜韭齏韲竟韶韵頏頌頸頤頡頷頽顆顏顋顫顯顰"], +["f1a1","顱顴顳颪颯颱颶飄飃飆飩飫餃餉餒餔餘餡餝餞餤餠餬餮餽餾饂饉饅饐饋饑饒饌饕馗馘馥馭馮馼駟駛駝駘駑駭駮駱駲駻駸騁騏騅駢騙騫騷驅驂驀驃騾驕驍驛驗驟驢驥驤驩驫驪骭骰骼髀髏髑髓體髞髟髢髣髦髯髫髮髴髱髷"], +["f2a1","髻鬆鬘鬚鬟鬢鬣鬥鬧鬨鬩鬪鬮鬯鬲魄魃魏魍魎魑魘魴鮓鮃鮑鮖鮗鮟鮠鮨鮴鯀鯊鮹鯆鯏鯑鯒鯣鯢鯤鯔鯡鰺鯲鯱鯰鰕鰔鰉鰓鰌鰆鰈鰒鰊鰄鰮鰛鰥鰤鰡鰰鱇鰲鱆鰾鱚鱠鱧鱶鱸鳧鳬鳰鴉鴈鳫鴃鴆鴪鴦鶯鴣鴟鵄鴕鴒鵁鴿鴾鵆鵈"], +["f3a1","鵝鵞鵤鵑鵐鵙鵲鶉鶇鶫鵯鵺鶚鶤鶩鶲鷄鷁鶻鶸鶺鷆鷏鷂鷙鷓鷸鷦鷭鷯鷽鸚鸛鸞鹵鹹鹽麁麈麋麌麒麕麑麝麥麩麸麪麭靡黌黎黏黐黔黜點黝黠黥黨黯黴黶黷黹黻黼黽鼇鼈皷鼕鼡鼬鼾齊齒齔齣齟齠齡齦齧齬齪齷齲齶龕龜龠"], +["f4a1","堯槇遙瑤凜熙"], +["f9a1","纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德"], +["faa1","忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱"], 
+["fba1","犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚"], +["fca1","釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑"], +["fcf1","ⅰ",9,"¬¦'""], +["8fa2af","˘ˇ¸˙˝¯˛˚~΄΅"], +["8fa2c2","¡¦¿"], +["8fa2eb","ºª©®™¤№"], +["8fa6e1","ΆΈΉΊΪ"], +["8fa6e7","Ό"], +["8fa6e9","ΎΫ"], +["8fa6ec","Ώ"], +["8fa6f1","άέήίϊΐόςύϋΰώ"], +["8fa7c2","Ђ",10,"ЎЏ"], +["8fa7f2","ђ",10,"ўџ"], +["8fa9a1","ÆĐ"], +["8fa9a4","Ħ"], +["8fa9a6","IJ"], +["8fa9a8","ŁĿ"], +["8fa9ab","ŊØŒ"], +["8fa9af","ŦÞ"], +["8fa9c1","æđðħıijĸłŀʼnŋøœßŧþ"], +["8faaa1","ÁÀÄÂĂǍĀĄÅÃĆĈČÇĊĎÉÈËÊĚĖĒĘ"], +["8faaba","ĜĞĢĠĤÍÌÏÎǏİĪĮĨĴĶĹĽĻŃŇŅÑÓÒÖÔǑŐŌÕŔŘŖŚŜŠŞŤŢÚÙÜÛŬǓŰŪŲŮŨǗǛǙǕŴÝŸŶŹŽŻ"], +["8faba1","áàäâăǎāąåãćĉčçċďéèëêěėēęǵĝğ"], +["8fabbd","ġĥíìïîǐ"], +["8fabc5","īįĩĵķĺľļńňņñóòöôǒőōõŕřŗśŝšşťţúùüûŭǔűūųůũǘǜǚǖŵýÿŷźžż"], +["8fb0a1","丂丄丅丌丒丟丣两丨丫丮丯丰丵乀乁乄乇乑乚乜乣乨乩乴乵乹乿亍亖亗亝亯亹仃仐仚仛仠仡仢仨仯仱仳仵份仾仿伀伂伃伈伋伌伒伕伖众伙伮伱你伳伵伷伹伻伾佀佂佈佉佋佌佒佔佖佘佟佣佪佬佮佱佷佸佹佺佽佾侁侂侄"], +["8fb1a1","侅侉侊侌侎侐侒侓侔侗侙侚侞侟侲侷侹侻侼侽侾俀俁俅俆俈俉俋俌俍俏俒俜俠俢俰俲俼俽俿倀倁倄倇倊倌倎倐倓倗倘倛倜倝倞倢倧倮倰倲倳倵偀偁偂偅偆偊偌偎偑偒偓偗偙偟偠偢偣偦偧偪偭偰偱倻傁傃傄傆傊傎傏傐"], +["8fb2a1","傒傓傔傖傛傜傞",4,"傪傯傰傹傺傽僀僃僄僇僌僎僐僓僔僘僜僝僟僢僤僦僨僩僯僱僶僺僾儃儆儇儈儋儌儍儎僲儐儗儙儛儜儝儞儣儧儨儬儭儯儱儳儴儵儸儹兂兊兏兓兕兗兘兟兤兦兾冃冄冋冎冘冝冡冣冭冸冺冼冾冿凂"], +["8fb3a1","凈减凑凒凓凕凘凞凢凥凮凲凳凴凷刁刂刅划刓刕刖刘刢刨刱刲刵刼剅剉剕剗剘剚剜剟剠剡剦剮剷剸剹劀劂劅劊劌劓劕劖劗劘劚劜劤劥劦劧劯劰劶劷劸劺劻劽勀勄勆勈勌勏勑勔勖勛勜勡勥勨勩勪勬勰勱勴勶勷匀匃匊匋"], +["8fb4a1","匌匑匓匘匛匜匞匟匥匧匨匩匫匬匭匰匲匵匼匽匾卂卌卋卙卛卡卣卥卬卭卲卹卾厃厇厈厎厓厔厙厝厡厤厪厫厯厲厴厵厷厸厺厽叀叅叏叒叓叕叚叝叞叠另叧叵吂吓吚吡吧吨吪启吱吴吵呃呄呇呍呏呞呢呤呦呧呩呫呭呮呴呿"], +["8fb5a1","咁咃咅咈咉咍咑咕咖咜咟咡咦咧咩咪咭咮咱咷咹咺咻咿哆哊响哎哠哪哬哯哶哼哾哿唀唁唅唈唉唌唍唎唕唪唫唲唵唶唻唼唽啁啇啉啊啍啐啑啘啚啛啞啠啡啤啦啿喁喂喆喈喎喏喑喒喓喔喗喣喤喭喲喿嗁嗃嗆嗉嗋嗌嗎嗑嗒"], +["8fb6a1","嗓嗗嗘嗛嗞嗢嗩嗶嗿嘅嘈嘊嘍",5,"嘙嘬嘰嘳嘵嘷嘹嘻嘼嘽嘿噀噁噃噄噆噉噋噍噏噔噞噠噡噢噣噦噩噭噯噱噲噵嚄嚅嚈嚋嚌嚕嚙嚚嚝嚞嚟嚦嚧嚨嚩嚫嚬嚭嚱嚳嚷嚾囅囉囊囋囏囐囌囍囙囜囝囟囡囤",4,"囱囫园"], +["8fb7a1","囶囷圁圂圇圊圌圑圕圚圛圝圠圢圣圤圥圩圪圬圮圯圳圴圽圾圿坅坆坌坍坒坢坥坧坨坫坭",4,"坳坴坵坷坹坺坻坼坾垁垃垌垔垗垙垚垜垝垞垟垡垕垧垨垩垬垸垽埇埈埌埏埕埝埞埤埦埧埩埭埰埵埶埸埽埾埿堃堄堈堉埡"], +["8fb8a1","堌堍堛堞堟堠堦堧堭堲堹堿塉塌塍塏塐塕塟塡塤塧塨塸塼塿墀墁墇墈墉墊墌墍墏墐墔墖墝墠墡墢墦墩墱墲壄墼壂壈壍壎壐壒壔壖壚壝壡壢壩壳夅夆夋夌夒夓夔虁夝夡夣夤夨夯夰夳夵夶夿奃奆奒奓奙奛奝奞奟奡奣奫奭"], +["8fb9a1","奯奲奵奶她奻奼妋妌妎妒妕妗妟妤妧妭妮妯妰妳妷妺妼姁姃姄姈姊姍姒姝姞姟姣姤姧姮姯姱姲姴姷娀娄娌娍娎娒娓娞娣娤娧娨娪娭娰婄婅婇婈婌婐婕婞婣婥婧婭婷婺婻婾媋媐媓媖媙媜媞媟媠媢媧媬媱媲媳媵媸媺媻媿"], +["8fbaa1","嫄嫆嫈嫏嫚嫜嫠嫥嫪嫮嫵嫶嫽嬀嬁嬈嬗嬴嬙嬛嬝嬡嬥嬭嬸孁孋孌孒孖孞孨孮孯孼孽孾孿宁宄宆宊宎宐宑宓宔宖宨宩宬宭宯宱宲宷宺宼寀寁寍寏寖",4,"寠寯寱寴寽尌尗尞尟尣尦尩尫尬尮尰尲尵尶屙屚屜屢屣屧屨屩"], +["8fbba1","屭屰屴屵屺屻屼屽岇岈岊岏岒岝岟岠岢岣岦岪岲岴岵岺峉峋峒峝峗峮峱峲峴崁崆崍崒崫崣崤崦崧崱崴崹崽崿嵂嵃嵆嵈嵕嵑嵙嵊嵟嵠嵡嵢嵤嵪嵭嵰嵹嵺嵾嵿嶁嶃嶈嶊嶒嶓嶔嶕嶙嶛嶟嶠嶧嶫嶰嶴嶸嶹巃巇巋巐巎巘巙巠巤"], +["8fbca1","巩巸巹帀帇帍帒帔帕帘帟帠帮帨帲帵帾幋幐幉幑幖幘幛幜幞幨幪",4,"幰庀庋庎庢庤庥庨庪庬庱庳庽庾庿廆廌廋廎廑廒廔廕廜廞廥廫异弆弇弈弎弙弜弝弡弢弣弤弨弫弬弮弰弴弶弻弽弿彀彄彅彇彍彐彔彘彛彠彣彤彧"], +["8fbda1","彯彲彴彵彸彺彽彾徉徍徏徖徜徝徢徧徫徤徬徯徰徱徸忄忇忈忉忋忐",4,"忞忡忢忨忩忪忬忭忮忯忲忳忶忺忼怇怊怍怓怔怗怘怚怟怤怭怳怵恀恇恈恉恌恑恔恖恗恝恡恧恱恾恿悂悆悈悊悎悑悓悕悘悝悞悢悤悥您悰悱悷"], +["8fbea1","悻悾惂惄惈惉惊惋惎惏惔惕惙惛惝惞惢惥惲惵惸惼惽愂愇愊愌愐",4,"愖愗愙愜愞愢愪愫愰愱愵愶愷愹慁慅慆慉慞慠慬慲慸慻慼慿憀憁憃憄憋憍憒憓憗憘憜憝憟憠憥憨憪憭憸憹憼懀懁懂懎懏懕懜懝懞懟懡懢懧懩懥"], +["8fbfa1","懬懭懯戁戃戄戇戓戕戜戠戢戣戧戩戫戹戽扂扃扄扆扌扐扑扒扔扖扚扜扤扭扯扳扺扽抍抎抏抐抦抨抳抶抷抺抾抿拄拎拕拖拚拪拲拴拼拽挃挄挊挋挍挐挓挖挘挩挪挭挵挶挹挼捁捂捃捄捆捊捋捎捒捓捔捘捛捥捦捬捭捱捴捵"], +["8fc0a1","捸捼捽捿掂掄掇掊掐掔掕掙掚掞掤掦掭掮掯掽揁揅揈揎揑揓揔揕揜揠揥揪揬揲揳揵揸揹搉搊搐搒搔搘搞搠搢搤搥搩搪搯搰搵搽搿摋摏摑摒摓摔摚摛摜摝摟摠摡摣摭摳摴摻摽撅撇撏撐撑撘撙撛撝撟撡撣撦撨撬撳撽撾撿"], +["8fc1a1","擄擉擊擋擌擎擐擑擕擗擤擥擩擪擭擰擵擷擻擿攁攄攈攉攊攏攓攔攖攙攛攞攟攢攦攩攮攱攺攼攽敃敇敉敐敒敔敟敠敧敫敺敽斁斅斊斒斕斘斝斠斣斦斮斲斳斴斿旂旈旉旎旐旔旖旘旟旰旲旴旵旹旾旿昀昄昈昉昍昑昒昕昖昝"], +["8fc2a1","昞昡昢昣昤昦昩昪昫昬昮昰昱昳昹昷晀晅晆晊晌晑晎晗晘晙晛晜晠晡曻晪晫晬晾晳晵晿晷晸晹晻暀晼暋暌暍暐暒暙暚暛暜暟暠暤暭暱暲暵暻暿曀曂曃曈曌曎曏曔曛曟曨曫曬曮曺朅朇朎朓朙朜朠朢朳朾杅杇杈杌杔杕杝"], +["8fc3a1","杦杬杮杴杶杻极构枎枏枑枓枖枘枙枛枰枱枲枵枻枼枽柹柀柂柃柅柈柉柒柗柙柜柡柦柰柲柶柷桒栔栙栝栟栨栧栬栭栯栰栱栳栻栿桄桅桊桌桕桗桘桛桫桮",4,"桵桹桺桻桼梂梄梆梈梖梘梚梜梡梣梥梩梪梮梲梻棅棈棌棏"], +["8fc4a1","棐棑棓棖棙棜棝棥棨棪棫棬棭棰棱棵棶棻棼棽椆椉椊椐椑椓椖椗椱椳椵椸椻楂楅楉楎楗楛楣楤楥楦楨楩楬楰楱楲楺楻楿榀榍榒榖榘榡榥榦榨榫榭榯榷榸榺榼槅槈槑槖槗槢槥槮槯槱槳槵槾樀樁樃樏樑樕樚樝樠樤樨樰樲"], +["8fc5a1","樴樷樻樾樿橅橆橉橊橎橐橑橒橕橖橛橤橧橪橱橳橾檁檃檆檇檉檋檑檛檝檞檟檥檫檯檰檱檴檽檾檿櫆櫉櫈櫌櫐櫔櫕櫖櫜櫝櫤櫧櫬櫰櫱櫲櫼櫽欂欃欆欇欉欏欐欑欗欛欞欤欨欫欬欯欵欶欻欿歆歊歍歒歖歘歝歠歧歫歮歰歵歽"], +["8fc6a1","歾殂殅殗殛殟殠殢殣殨殩殬殭殮殰殸殹殽殾毃毄毉毌毖毚毡毣毦毧毮毱毷毹毿氂氄氅氉氍氎氐氒氙氟氦氧氨氬氮氳氵氶氺氻氿汊汋汍汏汒汔汙汛汜汫汭汯汴汶汸汹汻沅沆沇沉沔沕沗沘沜沟沰沲沴泂泆泍泏泐泑泒泔泖"], +["8fc7a1","泚泜泠泧泩泫泬泮泲泴洄洇洊洎洏洑洓洚洦洧洨汧洮洯洱洹洼洿浗浞浟浡浥浧浯浰浼涂涇涑涒涔涖涗涘涪涬涴涷涹涽涿淄淈淊淎淏淖淛淝淟淠淢淥淩淯淰淴淶淼渀渄渞渢渧渲渶渹渻渼湄湅湈湉湋湏湑湒湓湔湗湜湝湞"], 
+["8fc8a1","湢湣湨湳湻湽溍溓溙溠溧溭溮溱溳溻溿滀滁滃滇滈滊滍滎滏滫滭滮滹滻滽漄漈漊漌漍漖漘漚漛漦漩漪漯漰漳漶漻漼漭潏潑潒潓潗潙潚潝潞潡潢潨潬潽潾澃澇澈澋澌澍澐澒澓澔澖澚澟澠澥澦澧澨澮澯澰澵澶澼濅濇濈濊"], +["8fc9a1","濚濞濨濩濰濵濹濼濽瀀瀅瀆瀇瀍瀗瀠瀣瀯瀴瀷瀹瀼灃灄灈灉灊灋灔灕灝灞灎灤灥灬灮灵灶灾炁炅炆炔",4,"炛炤炫炰炱炴炷烊烑烓烔烕烖烘烜烤烺焃",4,"焋焌焏焞焠焫焭焯焰焱焸煁煅煆煇煊煋煐煒煗煚煜煞煠"], +["8fcaa1","煨煹熀熅熇熌熒熚熛熠熢熯熰熲熳熺熿燀燁燄燋燌燓燖燙燚燜燸燾爀爇爈爉爓爗爚爝爟爤爫爯爴爸爹牁牂牃牅牎牏牐牓牕牖牚牜牞牠牣牨牫牮牯牱牷牸牻牼牿犄犉犍犎犓犛犨犭犮犱犴犾狁狇狉狌狕狖狘狟狥狳狴狺狻"], +["8fcba1","狾猂猄猅猇猋猍猒猓猘猙猞猢猤猧猨猬猱猲猵猺猻猽獃獍獐獒獖獘獝獞獟獠獦獧獩獫獬獮獯獱獷獹獼玀玁玃玅玆玎玐玓玕玗玘玜玞玟玠玢玥玦玪玫玭玵玷玹玼玽玿珅珆珉珋珌珏珒珓珖珙珝珡珣珦珧珩珴珵珷珹珺珻珽"], +["8fcca1","珿琀琁琄琇琊琑琚琛琤琦琨",9,"琹瑀瑃瑄瑆瑇瑋瑍瑑瑒瑗瑝瑢瑦瑧瑨瑫瑭瑮瑱瑲璀璁璅璆璇璉璏璐璑璒璘璙璚璜璟璠璡璣璦璨璩璪璫璮璯璱璲璵璹璻璿瓈瓉瓌瓐瓓瓘瓚瓛瓞瓟瓤瓨瓪瓫瓯瓴瓺瓻瓼瓿甆"], +["8fcda1","甒甖甗甠甡甤甧甩甪甯甶甹甽甾甿畀畃畇畈畎畐畒畗畞畟畡畯畱畹",5,"疁疅疐疒疓疕疙疜疢疤疴疺疿痀痁痄痆痌痎痏痗痜痟痠痡痤痧痬痮痯痱痹瘀瘂瘃瘄瘇瘈瘊瘌瘏瘒瘓瘕瘖瘙瘛瘜瘝瘞瘣瘥瘦瘩瘭瘲瘳瘵瘸瘹"], +["8fcea1","瘺瘼癊癀癁癃癄癅癉癋癕癙癟癤癥癭癮癯癱癴皁皅皌皍皕皛皜皝皟皠皢",6,"皪皭皽盁盅盉盋盌盎盔盙盠盦盨盬盰盱盶盹盼眀眆眊眎眒眔眕眗眙眚眜眢眨眭眮眯眴眵眶眹眽眾睂睅睆睊睍睎睏睒睖睗睜睞睟睠睢"], +["8fcfa1","睤睧睪睬睰睲睳睴睺睽瞀瞄瞌瞍瞔瞕瞖瞚瞟瞢瞧瞪瞮瞯瞱瞵瞾矃矉矑矒矕矙矞矟矠矤矦矪矬矰矱矴矸矻砅砆砉砍砎砑砝砡砢砣砭砮砰砵砷硃硄硇硈硌硎硒硜硞硠硡硣硤硨硪确硺硾碊碏碔碘碡碝碞碟碤碨碬碭碰碱碲碳"], +["8fd0a1","碻碽碿磇磈磉磌磎磒磓磕磖磤磛磟磠磡磦磪磲磳礀磶磷磺磻磿礆礌礐礚礜礞礟礠礥礧礩礭礱礴礵礻礽礿祄祅祆祊祋祏祑祔祘祛祜祧祩祫祲祹祻祼祾禋禌禑禓禔禕禖禘禛禜禡禨禩禫禯禱禴禸离秂秄秇秈秊秏秔秖秚秝秞"], +["8fd1a1","秠秢秥秪秫秭秱秸秼稂稃稇稉稊稌稑稕稛稞稡稧稫稭稯稰稴稵稸稹稺穄穅穇穈穌穕穖穙穜穝穟穠穥穧穪穭穵穸穾窀窂窅窆窊窋窐窑窔窞窠窣窬窳窵窹窻窼竆竉竌竎竑竛竨竩竫竬竱竴竻竽竾笇笔笟笣笧笩笪笫笭笮笯笰"], +["8fd2a1","笱笴笽笿筀筁筇筎筕筠筤筦筩筪筭筯筲筳筷箄箉箎箐箑箖箛箞箠箥箬箯箰箲箵箶箺箻箼箽篂篅篈篊篔篖篗篙篚篛篨篪篲篴篵篸篹篺篼篾簁簂簃簄簆簉簋簌簎簏簙簛簠簥簦簨簬簱簳簴簶簹簺籆籊籕籑籒籓籙",5], +["8fd3a1","籡籣籧籩籭籮籰籲籹籼籽粆粇粏粔粞粠粦粰粶粷粺粻粼粿糄糇糈糉糍糏糓糔糕糗糙糚糝糦糩糫糵紃紇紈紉紏紑紒紓紖紝紞紣紦紪紭紱紼紽紾絀絁絇絈絍絑絓絗絙絚絜絝絥絧絪絰絸絺絻絿綁綂綃綅綆綈綋綌綍綑綖綗綝"], +["8fd4a1","綞綦綧綪綳綶綷綹緂",4,"緌緍緎緗緙縀緢緥緦緪緫緭緱緵緶緹緺縈縐縑縕縗縜縝縠縧縨縬縭縯縳縶縿繄繅繇繎繐繒繘繟繡繢繥繫繮繯繳繸繾纁纆纇纊纍纑纕纘纚纝纞缼缻缽缾缿罃罄罇罏罒罓罛罜罝罡罣罤罥罦罭"], +["8fd5a1","罱罽罾罿羀羋羍羏羐羑羖羗羜羡羢羦羪羭羴羼羿翀翃翈翎翏翛翟翣翥翨翬翮翯翲翺翽翾翿耇耈耊耍耎耏耑耓耔耖耝耞耟耠耤耦耬耮耰耴耵耷耹耺耼耾聀聄聠聤聦聭聱聵肁肈肎肜肞肦肧肫肸肹胈胍胏胒胔胕胗胘胠胭胮"], +["8fd6a1","胰胲胳胶胹胺胾脃脋脖脗脘脜脞脠脤脧脬脰脵脺脼腅腇腊腌腒腗腠腡腧腨腩腭腯腷膁膐膄膅膆膋膎膖膘膛膞膢膮膲膴膻臋臃臅臊臎臏臕臗臛臝臞臡臤臫臬臰臱臲臵臶臸臹臽臿舀舃舏舓舔舙舚舝舡舢舨舲舴舺艃艄艅艆"], +["8fd7a1","艋艎艏艑艖艜艠艣艧艭艴艻艽艿芀芁芃芄芇芉芊芎芑芔芖芘芚芛芠芡芣芤芧芨芩芪芮芰芲芴芷芺芼芾芿苆苐苕苚苠苢苤苨苪苭苯苶苷苽苾茀茁茇茈茊茋荔茛茝茞茟茡茢茬茭茮茰茳茷茺茼茽荂荃荄荇荍荎荑荕荖荗荰荸"], +["8fd8a1","荽荿莀莂莄莆莍莒莔莕莘莙莛莜莝莦莧莩莬莾莿菀菇菉菏菐菑菔菝荓菨菪菶菸菹菼萁萆萊萏萑萕萙莭萯萹葅葇葈葊葍葏葑葒葖葘葙葚葜葠葤葥葧葪葰葳葴葶葸葼葽蒁蒅蒒蒓蒕蒞蒦蒨蒩蒪蒯蒱蒴蒺蒽蒾蓀蓂蓇蓈蓌蓏蓓"], +["8fd9a1","蓜蓧蓪蓯蓰蓱蓲蓷蔲蓺蓻蓽蔂蔃蔇蔌蔎蔐蔜蔞蔢蔣蔤蔥蔧蔪蔫蔯蔳蔴蔶蔿蕆蕏",4,"蕖蕙蕜",6,"蕤蕫蕯蕹蕺蕻蕽蕿薁薅薆薉薋薌薏薓薘薝薟薠薢薥薧薴薶薷薸薼薽薾薿藂藇藊藋藎薭藘藚藟藠藦藨藭藳藶藼"], +["8fdaa1","藿蘀蘄蘅蘍蘎蘐蘑蘒蘘蘙蘛蘞蘡蘧蘩蘶蘸蘺蘼蘽虀虂虆虒虓虖虗虘虙虝虠",4,"虩虬虯虵虶虷虺蚍蚑蚖蚘蚚蚜蚡蚦蚧蚨蚭蚱蚳蚴蚵蚷蚸蚹蚿蛀蛁蛃蛅蛑蛒蛕蛗蛚蛜蛠蛣蛥蛧蚈蛺蛼蛽蜄蜅蜇蜋蜎蜏蜐蜓蜔蜙蜞蜟蜡蜣"], +["8fdba1","蜨蜮蜯蜱蜲蜹蜺蜼蜽蜾蝀蝃蝅蝍蝘蝝蝡蝤蝥蝯蝱蝲蝻螃",6,"螋螌螐螓螕螗螘螙螞螠螣螧螬螭螮螱螵螾螿蟁蟈蟉蟊蟎蟕蟖蟙蟚蟜蟟蟢蟣蟤蟪蟫蟭蟱蟳蟸蟺蟿蠁蠃蠆蠉蠊蠋蠐蠙蠒蠓蠔蠘蠚蠛蠜蠞蠟蠨蠭蠮蠰蠲蠵"], +["8fdca1","蠺蠼衁衃衅衈衉衊衋衎衑衕衖衘衚衜衟衠衤衩衱衹衻袀袘袚袛袜袟袠袨袪袺袽袾裀裊",4,"裑裒裓裛裞裧裯裰裱裵裷褁褆褍褎褏褕褖褘褙褚褜褠褦褧褨褰褱褲褵褹褺褾襀襂襅襆襉襏襒襗襚襛襜襡襢襣襫襮襰襳襵襺"], +["8fdda1","襻襼襽覉覍覐覔覕覛覜覟覠覥覰覴覵覶覷覼觔",4,"觥觩觫觭觱觳觶觹觽觿訄訅訇訏訑訒訔訕訞訠訢訤訦訫訬訯訵訷訽訾詀詃詅詇詉詍詎詓詖詗詘詜詝詡詥詧詵詶詷詹詺詻詾詿誀誃誆誋誏誐誒誖誗誙誟誧誩誮誯誳"], +["8fdea1","誶誷誻誾諃諆諈諉諊諑諓諔諕諗諝諟諬諰諴諵諶諼諿謅謆謋謑謜謞謟謊謭謰謷謼譂",4,"譈譒譓譔譙譍譞譣譭譶譸譹譼譾讁讄讅讋讍讏讔讕讜讞讟谸谹谽谾豅豇豉豋豏豑豓豔豗豘豛豝豙豣豤豦豨豩豭豳豵豶豻豾貆"], +["8fdfa1","貇貋貐貒貓貙貛貜貤貹貺賅賆賉賋賏賖賕賙賝賡賨賬賯賰賲賵賷賸賾賿贁贃贉贒贗贛赥赩赬赮赿趂趄趈趍趐趑趕趞趟趠趦趫趬趯趲趵趷趹趻跀跅跆跇跈跊跎跑跔跕跗跙跤跥跧跬跰趼跱跲跴跽踁踄踅踆踋踑踔踖踠踡踢"], +["8fe0a1","踣踦踧踱踳踶踷踸踹踽蹀蹁蹋蹍蹎蹏蹔蹛蹜蹝蹞蹡蹢蹩蹬蹭蹯蹰蹱蹹蹺蹻躂躃躉躐躒躕躚躛躝躞躢躧躩躭躮躳躵躺躻軀軁軃軄軇軏軑軔軜軨軮軰軱軷軹軺軭輀輂輇輈輏輐輖輗輘輞輠輡輣輥輧輨輬輭輮輴輵輶輷輺轀轁"], +["8fe1a1","轃轇轏轑",4,"轘轝轞轥辝辠辡辤辥辦辵辶辸达迀迁迆迊迋迍运迒迓迕迠迣迤迨迮迱迵迶迻迾适逄逈逌逘逛逨逩逯逪逬逭逳逴逷逿遃遄遌遛遝遢遦遧遬遰遴遹邅邈邋邌邎邐邕邗邘邙邛邠邡邢邥邰邲邳邴邶邽郌邾郃"], +["8fe2a1","郄郅郇郈郕郗郘郙郜郝郟郥郒郶郫郯郰郴郾郿鄀鄄鄅鄆鄈鄍鄐鄔鄖鄗鄘鄚鄜鄞鄠鄥鄢鄣鄧鄩鄮鄯鄱鄴鄶鄷鄹鄺鄼鄽酃酇酈酏酓酗酙酚酛酡酤酧酭酴酹酺酻醁醃醅醆醊醎醑醓醔醕醘醞醡醦醨醬醭醮醰醱醲醳醶醻醼醽醿"], +["8fe3a1","釂釃釅釓釔釗釙釚釞釤釥釩釪釬",5,"釷釹釻釽鈀鈁鈄鈅鈆鈇鈉鈊鈌鈐鈒鈓鈖鈘鈜鈝鈣鈤鈥鈦鈨鈮鈯鈰鈳鈵鈶鈸鈹鈺鈼鈾鉀鉂鉃鉆鉇鉊鉍鉎鉏鉑鉘鉙鉜鉝鉠鉡鉥鉧鉨鉩鉮鉯鉰鉵",4,"鉻鉼鉽鉿銈銉銊銍銎銒銗"], +["8fe4a1","銙銟銠銤銥銧銨銫銯銲銶銸銺銻銼銽銿",4,"鋅鋆鋇鋈鋋鋌鋍鋎鋐鋓鋕鋗鋘鋙鋜鋝鋟鋠鋡鋣鋥鋧鋨鋬鋮鋰鋹鋻鋿錀錂錈錍錑錔錕錜錝錞錟錡錤錥錧錩錪錳錴錶錷鍇鍈鍉鍐鍑鍒鍕鍗鍘鍚鍞鍤鍥鍧鍩鍪鍭鍯鍰鍱鍳鍴鍶"], +["8fe5a1","鍺鍽鍿鎀鎁鎂鎈鎊鎋鎍鎏鎒鎕鎘鎛鎞鎡鎣鎤鎦鎨鎫鎴鎵鎶鎺鎩鏁鏄鏅鏆鏇鏉",4,"鏓鏙鏜鏞鏟鏢鏦鏧鏹鏷鏸鏺鏻鏽鐁鐂鐄鐈鐉鐍鐎鐏鐕鐖鐗鐟鐮鐯鐱鐲鐳鐴鐻鐿鐽鑃鑅鑈鑊鑌鑕鑙鑜鑟鑡鑣鑨鑫鑭鑮鑯鑱鑲钄钃镸镹"], +["8fe6a1","镾閄閈閌閍閎閝閞閟閡閦閩閫閬閴閶閺閽閿闆闈闉闋闐闑闒闓闙闚闝闞闟闠闤闦阝阞阢阤阥阦阬阱阳阷阸阹阺阼阽陁陒陔陖陗陘陡陮陴陻陼陾陿隁隂隃隄隉隑隖隚隝隟隤隥隦隩隮隯隳隺雊雒嶲雘雚雝雞雟雩雯雱雺霂"], +["8fe7a1","霃霅霉霚霛霝霡霢霣霨霱霳靁靃靊靎靏靕靗靘靚靛靣靧靪靮靳靶靷靸靻靽靿鞀鞉鞕鞖鞗鞙鞚鞞鞟鞢鞬鞮鞱鞲鞵鞶鞸鞹鞺鞼鞾鞿韁韄韅韇韉韊韌韍韎韐韑韔韗韘韙韝韞韠韛韡韤韯韱韴韷韸韺頇頊頙頍頎頔頖頜頞頠頣頦"], 
+["8fe8a1","頫頮頯頰頲頳頵頥頾顄顇顊顑顒顓顖顗顙顚顢顣顥顦顪顬颫颭颮颰颴颷颸颺颻颿飂飅飈飌飡飣飥飦飧飪飳飶餂餇餈餑餕餖餗餚餛餜餟餢餦餧餫餱",4,"餹餺餻餼饀饁饆饇饈饍饎饔饘饙饛饜饞饟饠馛馝馟馦馰馱馲馵"], +["8fe9a1","馹馺馽馿駃駉駓駔駙駚駜駞駧駪駫駬駰駴駵駹駽駾騂騃騄騋騌騐騑騖騞騠騢騣騤騧騭騮騳騵騶騸驇驁驄驊驋驌驎驑驔驖驝骪骬骮骯骲骴骵骶骹骻骾骿髁髃髆髈髎髐髒髕髖髗髛髜髠髤髥髧髩髬髲髳髵髹髺髽髿",4], +["8feaa1","鬄鬅鬈鬉鬋鬌鬍鬎鬐鬒鬖鬙鬛鬜鬠鬦鬫鬭鬳鬴鬵鬷鬹鬺鬽魈魋魌魕魖魗魛魞魡魣魥魦魨魪",4,"魳魵魷魸魹魿鮀鮄鮅鮆鮇鮉鮊鮋鮍鮏鮐鮔鮚鮝鮞鮦鮧鮩鮬鮰鮱鮲鮷鮸鮻鮼鮾鮿鯁鯇鯈鯎鯐鯗鯘鯝鯟鯥鯧鯪鯫鯯鯳鯷鯸"], +["8feba1","鯹鯺鯽鯿鰀鰂鰋鰏鰑鰖鰘鰙鰚鰜鰞鰢鰣鰦",4,"鰱鰵鰶鰷鰽鱁鱃鱄鱅鱉鱊鱎鱏鱐鱓鱔鱖鱘鱛鱝鱞鱟鱣鱩鱪鱜鱫鱨鱮鱰鱲鱵鱷鱻鳦鳲鳷鳹鴋鴂鴑鴗鴘鴜鴝鴞鴯鴰鴲鴳鴴鴺鴼鵅鴽鵂鵃鵇鵊鵓鵔鵟鵣鵢鵥鵩鵪鵫鵰鵶鵷鵻"], +["8feca1","鵼鵾鶃鶄鶆鶊鶍鶎鶒鶓鶕鶖鶗鶘鶡鶪鶬鶮鶱鶵鶹鶼鶿鷃鷇鷉鷊鷔鷕鷖鷗鷚鷞鷟鷠鷥鷧鷩鷫鷮鷰鷳鷴鷾鸊鸂鸇鸎鸐鸑鸒鸕鸖鸙鸜鸝鹺鹻鹼麀麂麃麄麅麇麎麏麖麘麛麞麤麨麬麮麯麰麳麴麵黆黈黋黕黟黤黧黬黭黮黰黱黲黵"], +["8feda1","黸黿鼂鼃鼉鼏鼐鼑鼒鼔鼖鼗鼙鼚鼛鼟鼢鼦鼪鼫鼯鼱鼲鼴鼷鼹鼺鼼鼽鼿齁齃",4,"齓齕齖齗齘齚齝齞齨齩齭",4,"齳齵齺齽龏龐龑龒龔龖龗龞龡龢龣龥"] +] diff --git a/node_modules/iconv-lite/encodings/tables/gb18030-ranges.json b/node_modules/iconv-lite/encodings/tables/gb18030-ranges.json new file mode 100644 index 0000000..85c6934 --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/gb18030-ranges.json @@ -0,0 +1 @@ +{"uChars":[128,165,169,178,184,216,226,235,238,244,248,251,253,258,276,284,300,325,329,334,364,463,465,467,469,471,473,475,477,506,594,610,712,716,730,930,938,962,970,1026,1104,1106,8209,8215,8218,8222,8231,8241,8244,8246,8252,8365,8452,8454,8458,8471,8482,8556,8570,8596,8602,8713,8720,8722,8726,8731,8737,8740,8742,8748,8751,8760,8766,8777,8781,8787,8802,8808,8816,8854,8858,8870,8896,8979,9322,9372,9548,9588,9616,9622,9634,9652,9662,9672,9676,9680,9702,9735,9738,9793,9795,11906,11909,11913,11917,11928,11944,11947,11951,11956,11960,11964,11979,12284,12292,12312,12319,12330,12351,12436,12447,12535,12543,12586,12842,12850,12964,13200,13215,13218,13253,13263,13267,13270,13384,13428,13727,13839,13851,14617,14703,14801,14816,14964,15183,15471,15585,16471,16736,17208,17325,17330,17374,17623,17997,18018,18212,18218,18301,18318,18760,18811,18814,18820,18823,18844,18848,18872,19576,19620,19738,19887,40870,59244,59336,59367,59413,59417,59423,59431,59437,59443,59452,59460,59478,59493,63789,63866,63894,63976,63986,64016,64018,64021,64025,64034,64037,64042,65074,65093,65107,65112,65127,65132,65375,65510,65536],"gbChars":[0,36,38,45,50,81,89,95,96,100,103,104,105,109,126,133,148,172,175,179,208,306,307,308,309,310,311,312,313,341,428,443,544,545,558,741,742,749,750,805,819,820,7922,7924,7925,7927,7934,7943,7944,7945,7950,8062,8148,8149,8152,8164,8174,8236,8240,8262,8264,8374,8380,8381,8384,8388,8390,8392,8393,8394,8396,8401,8406,8416,8419,8424,8437,8439,8445,8482,8485,8496,8521,8603,8936,8946,9046,9050,9063,9066,9076,9092,9100,9108,9111,9113,9131,9162,9164,9218,9219,11329,11331,11334,11336,11346,11361,11363,11366,11370,11372,11375,11389,11682,11686,11687,11692,11694,11714,11716,11723,11725,11730,11736,11982,11989,12102,12336,12348,12350,12384,12393,12395,12397,12510,12553,12851,12962,12973,13738,13823,13919,13933,14080,14298,14585,14698,15583,15847,16318,16434,16438,16481,16729,17102,17122,17315,17320,17402,17418,17859,17909,17911,17915,17916,17936,17939,17961,18664,18703,18814,18962,19043,33469,33470,33471,33484,33485,33490,33497,33501,33505,33513,33520,33536,33550,37845,37921,37948,38029,38038,38064,38065,38066,38069,38075,38076,38078,39108,39109,39113,39114,39115,39116,39265,39394,189000]} \ No newline at end of file diff --git a/node_modules/iconv-lite/encodings/tables/gbk-added.json b/node_modules/iconv-lite/encodings/tables/gbk-added.json new file mode 100644 index 0000000..b742e36 --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/gbk-added.json @@ -0,0 +1,56 @@ +[ +["a140","",62], +["a180","",32], +["a240","",62], +["a280","",32], +["a2ab","",5], +["a2e3","€"], +["a2ef",""], 
+["a2fd",""], +["a340","",62], +["a380","",31," "], +["a440","",62], +["a480","",32], +["a4f4","",10], +["a540","",62], +["a580","",32], +["a5f7","",7], +["a640","",62], +["a680","",32], +["a6b9","",7], +["a6d9","",6], +["a6ec",""], +["a6f3",""], +["a6f6","",8], +["a740","",62], +["a780","",32], +["a7c2","",14], +["a7f2","",12], +["a896","",10], +["a8bc","ḿ"], +["a8bf","ǹ"], +["a8c1",""], +["a8ea","",20], +["a958",""], +["a95b",""], +["a95d",""], +["a989","〾⿰",11], +["a997","",12], +["a9f0","",14], +["aaa1","",93], +["aba1","",93], +["aca1","",93], +["ada1","",93], +["aea1","",93], +["afa1","",93], +["d7fa","",4], +["f8a1","",93], +["f9a1","",93], +["faa1","",93], +["fba1","",93], +["fca1","",93], +["fda1","",93], +["fe50","⺁⺄㑳㑇⺈⺋㖞㘚㘎⺌⺗㥮㤘㧏㧟㩳㧐㭎㱮㳠⺧⺪䁖䅟⺮䌷⺳⺶⺷䎱䎬⺻䏝䓖䙡䙌"], +["fe80","䜣䜩䝼䞍⻊䥇䥺䥽䦂䦃䦅䦆䦟䦛䦷䦶䲣䲟䲠䲡䱷䲢䴓",6,"䶮",93], +["8135f437",""] +] diff --git a/node_modules/iconv-lite/encodings/tables/shiftjis.json b/node_modules/iconv-lite/encodings/tables/shiftjis.json new file mode 100644 index 0000000..5a3a43c --- /dev/null +++ b/node_modules/iconv-lite/encodings/tables/shiftjis.json @@ -0,0 +1,125 @@ +[ +["0","\u0000",128], +["a1","。",62], +["8140"," 、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈",9,"+-±×"], +["8180","÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇◆□■△▲▽▼※〒→←↑↓〓"], +["81b8","∈∋⊆⊇⊂⊃∪∩"], +["81c8","∧∨¬⇒⇔∀∃"], +["81da","∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬"], +["81f0","ʼn♯♭♪†‡¶"], +["81fc","◯"], +["824f","0",9], +["8260","A",25], +["8281","a",25], +["829f","ぁ",82], +["8340","ァ",62], +["8380","ム",22], +["839f","Α",16,"Σ",6], +["83bf","α",16,"σ",6], +["8440","А",5,"ЁЖ",25], +["8470","а",5,"ёж",7], +["8480","о",17], +["849f","─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂"], +["8740","①",19,"Ⅰ",9], +["875f","㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡"], +["877e","㍻"], +["8780","〝〟№㏍℡㊤",4,"㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪"], +["889f","亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭"], +["8940","院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円"], +["8980","園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改"], +["8a40","魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫"], +["8a80","橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄"], +["8b40","機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救"], +["8b80","朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈"], +["8c40","掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨"], +["8c80","劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向"], +["8d40","后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降"], +["8d80","項香高鴻剛劫号合壕拷濠豪轟麹克刻告国穀酷鵠黒獄漉腰甑忽惚骨狛込此頃今困坤墾婚恨懇昏昆根梱混痕紺艮魂些佐叉唆嵯左差査沙瑳砂詐鎖裟坐座挫債催再最哉塞妻宰彩才採栽歳済災采犀砕砦祭斎細菜裁載際剤在材罪財冴坂阪堺榊肴咲崎埼碕鷺作削咋搾昨朔柵窄策索錯桜鮭笹匙冊刷"], +["8e40","察拶撮擦札殺薩雑皐鯖捌錆鮫皿晒三傘参山惨撒散桟燦珊産算纂蚕讃賛酸餐斬暫残仕仔伺使刺司史嗣四士始姉姿子屍市師志思指支孜斯施旨枝止"], +["8e80","死氏獅祉私糸紙紫肢脂至視詞詩試誌諮資賜雌飼歯事似侍児字寺慈持時次滋治爾璽痔磁示而耳自蒔辞汐鹿式識鴫竺軸宍雫七叱執失嫉室悉湿漆疾質実蔀篠偲柴芝屡蕊縞舎写射捨赦斜煮社紗者謝車遮蛇邪借勺尺杓灼爵酌釈錫若寂弱惹主取守手朱殊狩珠種腫趣酒首儒受呪寿授樹綬需囚収周"], +["8f40","宗就州修愁拾洲秀秋終繍習臭舟蒐衆襲讐蹴輯週酋酬集醜什住充十従戎柔汁渋獣縦重銃叔夙宿淑祝縮粛塾熟出術述俊峻春瞬竣舜駿准循旬楯殉淳"], +["8f80","準潤盾純巡遵醇順処初所暑曙渚庶緒署書薯藷諸助叙女序徐恕鋤除傷償勝匠升召哨商唱嘗奨妾娼宵将小少尚庄床廠彰承抄招掌捷昇昌昭晶松梢樟樵沼消渉湘焼焦照症省硝礁祥称章笑粧紹肖菖蒋蕉衝裳訟証詔詳象賞醤鉦鍾鐘障鞘上丈丞乗冗剰城場壌嬢常情擾条杖浄状畳穣蒸譲醸錠嘱埴飾"], +["9040","拭植殖燭織職色触食蝕辱尻伸信侵唇娠寝審心慎振新晋森榛浸深申疹真神秦紳臣芯薪親診身辛進針震人仁刃塵壬尋甚尽腎訊迅陣靭笥諏須酢図厨"], 
+["9080","逗吹垂帥推水炊睡粋翠衰遂酔錐錘随瑞髄崇嵩数枢趨雛据杉椙菅頗雀裾澄摺寸世瀬畝是凄制勢姓征性成政整星晴棲栖正清牲生盛精聖声製西誠誓請逝醒青静斉税脆隻席惜戚斥昔析石積籍績脊責赤跡蹟碩切拙接摂折設窃節説雪絶舌蝉仙先千占宣専尖川戦扇撰栓栴泉浅洗染潜煎煽旋穿箭線"], +["9140","繊羨腺舛船薦詮賎践選遷銭銑閃鮮前善漸然全禅繕膳糎噌塑岨措曾曽楚狙疏疎礎祖租粗素組蘇訴阻遡鼠僧創双叢倉喪壮奏爽宋層匝惣想捜掃挿掻"], +["9180","操早曹巣槍槽漕燥争痩相窓糟総綜聡草荘葬蒼藻装走送遭鎗霜騒像増憎臓蔵贈造促側則即息捉束測足速俗属賊族続卒袖其揃存孫尊損村遜他多太汰詑唾堕妥惰打柁舵楕陀駄騨体堆対耐岱帯待怠態戴替泰滞胎腿苔袋貸退逮隊黛鯛代台大第醍題鷹滝瀧卓啄宅托択拓沢濯琢託鐸濁諾茸凧蛸只"], +["9240","叩但達辰奪脱巽竪辿棚谷狸鱈樽誰丹単嘆坦担探旦歎淡湛炭短端箪綻耽胆蛋誕鍛団壇弾断暖檀段男談値知地弛恥智池痴稚置致蜘遅馳築畜竹筑蓄"], +["9280","逐秩窒茶嫡着中仲宙忠抽昼柱注虫衷註酎鋳駐樗瀦猪苧著貯丁兆凋喋寵帖帳庁弔張彫徴懲挑暢朝潮牒町眺聴脹腸蝶調諜超跳銚長頂鳥勅捗直朕沈珍賃鎮陳津墜椎槌追鎚痛通塚栂掴槻佃漬柘辻蔦綴鍔椿潰坪壷嬬紬爪吊釣鶴亭低停偵剃貞呈堤定帝底庭廷弟悌抵挺提梯汀碇禎程締艇訂諦蹄逓"], +["9340","邸鄭釘鼎泥摘擢敵滴的笛適鏑溺哲徹撤轍迭鉄典填天展店添纏甜貼転顛点伝殿澱田電兎吐堵塗妬屠徒斗杜渡登菟賭途都鍍砥砺努度土奴怒倒党冬"], +["9380","凍刀唐塔塘套宕島嶋悼投搭東桃梼棟盗淘湯涛灯燈当痘祷等答筒糖統到董蕩藤討謄豆踏逃透鐙陶頭騰闘働動同堂導憧撞洞瞳童胴萄道銅峠鴇匿得徳涜特督禿篤毒独読栃橡凸突椴届鳶苫寅酉瀞噸屯惇敦沌豚遁頓呑曇鈍奈那内乍凪薙謎灘捺鍋楢馴縄畷南楠軟難汝二尼弐迩匂賑肉虹廿日乳入"], +["9440","如尿韮任妊忍認濡禰祢寧葱猫熱年念捻撚燃粘乃廼之埜嚢悩濃納能脳膿農覗蚤巴把播覇杷波派琶破婆罵芭馬俳廃拝排敗杯盃牌背肺輩配倍培媒梅"], +["9480","楳煤狽買売賠陪這蝿秤矧萩伯剥博拍柏泊白箔粕舶薄迫曝漠爆縛莫駁麦函箱硲箸肇筈櫨幡肌畑畠八鉢溌発醗髪伐罰抜筏閥鳩噺塙蛤隼伴判半反叛帆搬斑板氾汎版犯班畔繁般藩販範釆煩頒飯挽晩番盤磐蕃蛮匪卑否妃庇彼悲扉批披斐比泌疲皮碑秘緋罷肥被誹費避非飛樋簸備尾微枇毘琵眉美"], +["9540","鼻柊稗匹疋髭彦膝菱肘弼必畢筆逼桧姫媛紐百謬俵彪標氷漂瓢票表評豹廟描病秒苗錨鋲蒜蛭鰭品彬斌浜瀕貧賓頻敏瓶不付埠夫婦富冨布府怖扶敷"], +["9580","斧普浮父符腐膚芙譜負賦赴阜附侮撫武舞葡蕪部封楓風葺蕗伏副復幅服福腹複覆淵弗払沸仏物鮒分吻噴墳憤扮焚奮粉糞紛雰文聞丙併兵塀幣平弊柄並蔽閉陛米頁僻壁癖碧別瞥蔑箆偏変片篇編辺返遍便勉娩弁鞭保舗鋪圃捕歩甫補輔穂募墓慕戊暮母簿菩倣俸包呆報奉宝峰峯崩庖抱捧放方朋"], +["9640","法泡烹砲縫胞芳萌蓬蜂褒訪豊邦鋒飽鳳鵬乏亡傍剖坊妨帽忘忙房暴望某棒冒紡肪膨謀貌貿鉾防吠頬北僕卜墨撲朴牧睦穆釦勃没殆堀幌奔本翻凡盆"], +["9680","摩磨魔麻埋妹昧枚毎哩槙幕膜枕鮪柾鱒桝亦俣又抹末沫迄侭繭麿万慢満漫蔓味未魅巳箕岬密蜜湊蓑稔脈妙粍民眠務夢無牟矛霧鵡椋婿娘冥名命明盟迷銘鳴姪牝滅免棉綿緬面麺摸模茂妄孟毛猛盲網耗蒙儲木黙目杢勿餅尤戻籾貰問悶紋門匁也冶夜爺耶野弥矢厄役約薬訳躍靖柳薮鑓愉愈油癒"], +["9740","諭輸唯佑優勇友宥幽悠憂揖有柚湧涌猶猷由祐裕誘遊邑郵雄融夕予余与誉輿預傭幼妖容庸揚揺擁曜楊様洋溶熔用窯羊耀葉蓉要謡踊遥陽養慾抑欲"], +["9780","沃浴翌翼淀羅螺裸来莱頼雷洛絡落酪乱卵嵐欄濫藍蘭覧利吏履李梨理璃痢裏裡里離陸律率立葎掠略劉流溜琉留硫粒隆竜龍侶慮旅虜了亮僚両凌寮料梁涼猟療瞭稜糧良諒遼量陵領力緑倫厘林淋燐琳臨輪隣鱗麟瑠塁涙累類令伶例冷励嶺怜玲礼苓鈴隷零霊麗齢暦歴列劣烈裂廉恋憐漣煉簾練聯"], +["9840","蓮連錬呂魯櫓炉賂路露労婁廊弄朗楼榔浪漏牢狼篭老聾蝋郎六麓禄肋録論倭和話歪賄脇惑枠鷲亙亘鰐詫藁蕨椀湾碗腕"], +["989f","弌丐丕个丱丶丼丿乂乖乘亂亅豫亊舒弍于亞亟亠亢亰亳亶从仍仄仆仂仗仞仭仟价伉佚估佛佝佗佇佶侈侏侘佻佩佰侑佯來侖儘俔俟俎俘俛俑俚俐俤俥倚倨倔倪倥倅伜俶倡倩倬俾俯們倆偃假會偕偐偈做偖偬偸傀傚傅傴傲"], +["9940","僉僊傳僂僖僞僥僭僣僮價僵儉儁儂儖儕儔儚儡儺儷儼儻儿兀兒兌兔兢竸兩兪兮冀冂囘册冉冏冑冓冕冖冤冦冢冩冪冫决冱冲冰况冽凅凉凛几處凩凭"], +["9980","凰凵凾刄刋刔刎刧刪刮刳刹剏剄剋剌剞剔剪剴剩剳剿剽劍劔劒剱劈劑辨辧劬劭劼劵勁勍勗勞勣勦飭勠勳勵勸勹匆匈甸匍匐匏匕匚匣匯匱匳匸區卆卅丗卉卍凖卞卩卮夘卻卷厂厖厠厦厥厮厰厶參簒雙叟曼燮叮叨叭叺吁吽呀听吭吼吮吶吩吝呎咏呵咎呟呱呷呰咒呻咀呶咄咐咆哇咢咸咥咬哄哈咨"], +["9a40","咫哂咤咾咼哘哥哦唏唔哽哮哭哺哢唹啀啣啌售啜啅啖啗唸唳啝喙喀咯喊喟啻啾喘喞單啼喃喩喇喨嗚嗅嗟嗄嗜嗤嗔嘔嗷嘖嗾嗽嘛嗹噎噐營嘴嘶嘲嘸"], +["9a80","噫噤嘯噬噪嚆嚀嚊嚠嚔嚏嚥嚮嚶嚴囂嚼囁囃囀囈囎囑囓囗囮囹圀囿圄圉圈國圍圓團圖嗇圜圦圷圸坎圻址坏坩埀垈坡坿垉垓垠垳垤垪垰埃埆埔埒埓堊埖埣堋堙堝塲堡塢塋塰毀塒堽塹墅墹墟墫墺壞墻墸墮壅壓壑壗壙壘壥壜壤壟壯壺壹壻壼壽夂夊夐夛梦夥夬夭夲夸夾竒奕奐奎奚奘奢奠奧奬奩"], +["9b40","奸妁妝佞侫妣妲姆姨姜妍姙姚娥娟娑娜娉娚婀婬婉娵娶婢婪媚媼媾嫋嫂媽嫣嫗嫦嫩嫖嫺嫻嬌嬋嬖嬲嫐嬪嬶嬾孃孅孀孑孕孚孛孥孩孰孳孵學斈孺宀"], +["9b80","它宦宸寃寇寉寔寐寤實寢寞寥寫寰寶寳尅將專對尓尠尢尨尸尹屁屆屎屓屐屏孱屬屮乢屶屹岌岑岔妛岫岻岶岼岷峅岾峇峙峩峽峺峭嶌峪崋崕崗嵜崟崛崑崔崢崚崙崘嵌嵒嵎嵋嵬嵳嵶嶇嶄嶂嶢嶝嶬嶮嶽嶐嶷嶼巉巍巓巒巖巛巫已巵帋帚帙帑帛帶帷幄幃幀幎幗幔幟幢幤幇幵并幺麼广庠廁廂廈廐廏"], +["9c40","廖廣廝廚廛廢廡廨廩廬廱廳廰廴廸廾弃弉彝彜弋弑弖弩弭弸彁彈彌彎弯彑彖彗彙彡彭彳彷徃徂彿徊很徑徇從徙徘徠徨徭徼忖忻忤忸忱忝悳忿怡恠"], +["9c80","怙怐怩怎怱怛怕怫怦怏怺恚恁恪恷恟恊恆恍恣恃恤恂恬恫恙悁悍惧悃悚悄悛悖悗悒悧悋惡悸惠惓悴忰悽惆悵惘慍愕愆惶惷愀惴惺愃愡惻惱愍愎慇愾愨愧慊愿愼愬愴愽慂慄慳慷慘慙慚慫慴慯慥慱慟慝慓慵憙憖憇憬憔憚憊憑憫憮懌懊應懷懈懃懆憺懋罹懍懦懣懶懺懴懿懽懼懾戀戈戉戍戌戔戛"], +["9d40","戞戡截戮戰戲戳扁扎扞扣扛扠扨扼抂抉找抒抓抖拔抃抔拗拑抻拏拿拆擔拈拜拌拊拂拇抛拉挌拮拱挧挂挈拯拵捐挾捍搜捏掖掎掀掫捶掣掏掉掟掵捫"], +["9d80","捩掾揩揀揆揣揉插揶揄搖搴搆搓搦搶攝搗搨搏摧摯摶摎攪撕撓撥撩撈撼據擒擅擇撻擘擂擱擧舉擠擡抬擣擯攬擶擴擲擺攀擽攘攜攅攤攣攫攴攵攷收攸畋效敖敕敍敘敞敝敲數斂斃變斛斟斫斷旃旆旁旄旌旒旛旙无旡旱杲昊昃旻杳昵昶昴昜晏晄晉晁晞晝晤晧晨晟晢晰暃暈暎暉暄暘暝曁暹曉暾暼"], +["9e40","曄暸曖曚曠昿曦曩曰曵曷朏朖朞朦朧霸朮朿朶杁朸朷杆杞杠杙杣杤枉杰枩杼杪枌枋枦枡枅枷柯枴柬枳柩枸柤柞柝柢柮枹柎柆柧檜栞框栩桀桍栲桎"], +["9e80","梳栫桙档桷桿梟梏梭梔條梛梃檮梹桴梵梠梺椏梍桾椁棊椈棘椢椦棡椌棍棔棧棕椶椒椄棗棣椥棹棠棯椨椪椚椣椡棆楹楷楜楸楫楔楾楮椹楴椽楙椰楡楞楝榁楪榲榮槐榿槁槓榾槎寨槊槝榻槃榧樮榑榠榜榕榴槞槨樂樛槿權槹槲槧樅榱樞槭樔槫樊樒櫁樣樓橄樌橲樶橸橇橢橙橦橈樸樢檐檍檠檄檢檣"], +["9f40","檗蘗檻櫃櫂檸檳檬櫞櫑櫟檪櫚櫪櫻欅蘖櫺欒欖鬱欟欸欷盜欹飮歇歃歉歐歙歔歛歟歡歸歹歿殀殄殃殍殘殕殞殤殪殫殯殲殱殳殷殼毆毋毓毟毬毫毳毯"], +["9f80","麾氈氓气氛氤氣汞汕汢汪沂沍沚沁沛汾汨汳沒沐泄泱泓沽泗泅泝沮沱沾沺泛泯泙泪洟衍洶洫洽洸洙洵洳洒洌浣涓浤浚浹浙涎涕濤涅淹渕渊涵淇淦涸淆淬淞淌淨淒淅淺淙淤淕淪淮渭湮渮渙湲湟渾渣湫渫湶湍渟湃渺湎渤滿渝游溂溪溘滉溷滓溽溯滄溲滔滕溏溥滂溟潁漑灌滬滸滾漿滲漱滯漲滌"], +["e040","漾漓滷澆潺潸澁澀潯潛濳潭澂潼潘澎澑濂潦澳澣澡澤澹濆澪濟濕濬濔濘濱濮濛瀉瀋濺瀑瀁瀏濾瀛瀚潴瀝瀘瀟瀰瀾瀲灑灣炙炒炯烱炬炸炳炮烟烋烝"], +["e080","烙焉烽焜焙煥煕熈煦煢煌煖煬熏燻熄熕熨熬燗熹熾燒燉燔燎燠燬燧燵燼燹燿爍爐爛爨爭爬爰爲爻爼爿牀牆牋牘牴牾犂犁犇犒犖犢犧犹犲狃狆狄狎狒狢狠狡狹狷倏猗猊猜猖猝猴猯猩猥猾獎獏默獗獪獨獰獸獵獻獺珈玳珎玻珀珥珮珞璢琅瑯琥珸琲琺瑕琿瑟瑙瑁瑜瑩瑰瑣瑪瑶瑾璋璞璧瓊瓏瓔珱"], 
+["e140","瓠瓣瓧瓩瓮瓲瓰瓱瓸瓷甄甃甅甌甎甍甕甓甞甦甬甼畄畍畊畉畛畆畚畩畤畧畫畭畸當疆疇畴疊疉疂疔疚疝疥疣痂疳痃疵疽疸疼疱痍痊痒痙痣痞痾痿"], +["e180","痼瘁痰痺痲痳瘋瘍瘉瘟瘧瘠瘡瘢瘤瘴瘰瘻癇癈癆癜癘癡癢癨癩癪癧癬癰癲癶癸發皀皃皈皋皎皖皓皙皚皰皴皸皹皺盂盍盖盒盞盡盥盧盪蘯盻眈眇眄眩眤眞眥眦眛眷眸睇睚睨睫睛睥睿睾睹瞎瞋瞑瞠瞞瞰瞶瞹瞿瞼瞽瞻矇矍矗矚矜矣矮矼砌砒礦砠礪硅碎硴碆硼碚碌碣碵碪碯磑磆磋磔碾碼磅磊磬"], +["e240","磧磚磽磴礇礒礑礙礬礫祀祠祗祟祚祕祓祺祿禊禝禧齋禪禮禳禹禺秉秕秧秬秡秣稈稍稘稙稠稟禀稱稻稾稷穃穗穉穡穢穩龝穰穹穽窈窗窕窘窖窩竈窰"], +["e280","窶竅竄窿邃竇竊竍竏竕竓站竚竝竡竢竦竭竰笂笏笊笆笳笘笙笞笵笨笶筐筺笄筍笋筌筅筵筥筴筧筰筱筬筮箝箘箟箍箜箚箋箒箏筝箙篋篁篌篏箴篆篝篩簑簔篦篥籠簀簇簓篳篷簗簍篶簣簧簪簟簷簫簽籌籃籔籏籀籐籘籟籤籖籥籬籵粃粐粤粭粢粫粡粨粳粲粱粮粹粽糀糅糂糘糒糜糢鬻糯糲糴糶糺紆"], +["e340","紂紜紕紊絅絋紮紲紿紵絆絳絖絎絲絨絮絏絣經綉絛綏絽綛綺綮綣綵緇綽綫總綢綯緜綸綟綰緘緝緤緞緻緲緡縅縊縣縡縒縱縟縉縋縢繆繦縻縵縹繃縷"], +["e380","縲縺繧繝繖繞繙繚繹繪繩繼繻纃緕繽辮繿纈纉續纒纐纓纔纖纎纛纜缸缺罅罌罍罎罐网罕罔罘罟罠罨罩罧罸羂羆羃羈羇羌羔羞羝羚羣羯羲羹羮羶羸譱翅翆翊翕翔翡翦翩翳翹飜耆耄耋耒耘耙耜耡耨耿耻聊聆聒聘聚聟聢聨聳聲聰聶聹聽聿肄肆肅肛肓肚肭冐肬胛胥胙胝胄胚胖脉胯胱脛脩脣脯腋"], +["e440","隋腆脾腓腑胼腱腮腥腦腴膃膈膊膀膂膠膕膤膣腟膓膩膰膵膾膸膽臀臂膺臉臍臑臙臘臈臚臟臠臧臺臻臾舁舂舅與舊舍舐舖舩舫舸舳艀艙艘艝艚艟艤"], +["e480","艢艨艪艫舮艱艷艸艾芍芒芫芟芻芬苡苣苟苒苴苳苺莓范苻苹苞茆苜茉苙茵茴茖茲茱荀茹荐荅茯茫茗茘莅莚莪莟莢莖茣莎莇莊荼莵荳荵莠莉莨菴萓菫菎菽萃菘萋菁菷萇菠菲萍萢萠莽萸蔆菻葭萪萼蕚蒄葷葫蒭葮蒂葩葆萬葯葹萵蓊葢蒹蒿蒟蓙蓍蒻蓚蓐蓁蓆蓖蒡蔡蓿蓴蔗蔘蔬蔟蔕蔔蓼蕀蕣蕘蕈"], +["e540","蕁蘂蕋蕕薀薤薈薑薊薨蕭薔薛藪薇薜蕷蕾薐藉薺藏薹藐藕藝藥藜藹蘊蘓蘋藾藺蘆蘢蘚蘰蘿虍乕虔號虧虱蚓蚣蚩蚪蚋蚌蚶蚯蛄蛆蚰蛉蠣蚫蛔蛞蛩蛬"], +["e580","蛟蛛蛯蜒蜆蜈蜀蜃蛻蜑蜉蜍蛹蜊蜴蜿蜷蜻蜥蜩蜚蝠蝟蝸蝌蝎蝴蝗蝨蝮蝙蝓蝣蝪蠅螢螟螂螯蟋螽蟀蟐雖螫蟄螳蟇蟆螻蟯蟲蟠蠏蠍蟾蟶蟷蠎蟒蠑蠖蠕蠢蠡蠱蠶蠹蠧蠻衄衂衒衙衞衢衫袁衾袞衵衽袵衲袂袗袒袮袙袢袍袤袰袿袱裃裄裔裘裙裝裹褂裼裴裨裲褄褌褊褓襃褞褥褪褫襁襄褻褶褸襌褝襠襞"], +["e640","襦襤襭襪襯襴襷襾覃覈覊覓覘覡覩覦覬覯覲覺覽覿觀觚觜觝觧觴觸訃訖訐訌訛訝訥訶詁詛詒詆詈詼詭詬詢誅誂誄誨誡誑誥誦誚誣諄諍諂諚諫諳諧"], +["e680","諤諱謔諠諢諷諞諛謌謇謚諡謖謐謗謠謳鞫謦謫謾謨譁譌譏譎證譖譛譚譫譟譬譯譴譽讀讌讎讒讓讖讙讚谺豁谿豈豌豎豐豕豢豬豸豺貂貉貅貊貍貎貔豼貘戝貭貪貽貲貳貮貶賈賁賤賣賚賽賺賻贄贅贊贇贏贍贐齎贓賍贔贖赧赭赱赳趁趙跂趾趺跏跚跖跌跛跋跪跫跟跣跼踈踉跿踝踞踐踟蹂踵踰踴蹊"], +["e740","蹇蹉蹌蹐蹈蹙蹤蹠踪蹣蹕蹶蹲蹼躁躇躅躄躋躊躓躑躔躙躪躡躬躰軆躱躾軅軈軋軛軣軼軻軫軾輊輅輕輒輙輓輜輟輛輌輦輳輻輹轅轂輾轌轉轆轎轗轜"], +["e780","轢轣轤辜辟辣辭辯辷迚迥迢迪迯邇迴逅迹迺逑逕逡逍逞逖逋逧逶逵逹迸遏遐遑遒逎遉逾遖遘遞遨遯遶隨遲邂遽邁邀邊邉邏邨邯邱邵郢郤扈郛鄂鄒鄙鄲鄰酊酖酘酣酥酩酳酲醋醉醂醢醫醯醪醵醴醺釀釁釉釋釐釖釟釡釛釼釵釶鈞釿鈔鈬鈕鈑鉞鉗鉅鉉鉤鉈銕鈿鉋鉐銜銖銓銛鉚鋏銹銷鋩錏鋺鍄錮"], +["e840","錙錢錚錣錺錵錻鍜鍠鍼鍮鍖鎰鎬鎭鎔鎹鏖鏗鏨鏥鏘鏃鏝鏐鏈鏤鐚鐔鐓鐃鐇鐐鐶鐫鐵鐡鐺鑁鑒鑄鑛鑠鑢鑞鑪鈩鑰鑵鑷鑽鑚鑼鑾钁鑿閂閇閊閔閖閘閙"], +["e880","閠閨閧閭閼閻閹閾闊濶闃闍闌闕闔闖關闡闥闢阡阨阮阯陂陌陏陋陷陜陞陝陟陦陲陬隍隘隕隗險隧隱隲隰隴隶隸隹雎雋雉雍襍雜霍雕雹霄霆霈霓霎霑霏霖霙霤霪霰霹霽霾靄靆靈靂靉靜靠靤靦靨勒靫靱靹鞅靼鞁靺鞆鞋鞏鞐鞜鞨鞦鞣鞳鞴韃韆韈韋韜韭齏韲竟韶韵頏頌頸頤頡頷頽顆顏顋顫顯顰"], +["e940","顱顴顳颪颯颱颶飄飃飆飩飫餃餉餒餔餘餡餝餞餤餠餬餮餽餾饂饉饅饐饋饑饒饌饕馗馘馥馭馮馼駟駛駝駘駑駭駮駱駲駻駸騁騏騅駢騙騫騷驅驂驀驃"], +["e980","騾驕驍驛驗驟驢驥驤驩驫驪骭骰骼髀髏髑髓體髞髟髢髣髦髯髫髮髴髱髷髻鬆鬘鬚鬟鬢鬣鬥鬧鬨鬩鬪鬮鬯鬲魄魃魏魍魎魑魘魴鮓鮃鮑鮖鮗鮟鮠鮨鮴鯀鯊鮹鯆鯏鯑鯒鯣鯢鯤鯔鯡鰺鯲鯱鯰鰕鰔鰉鰓鰌鰆鰈鰒鰊鰄鰮鰛鰥鰤鰡鰰鱇鰲鱆鰾鱚鱠鱧鱶鱸鳧鳬鳰鴉鴈鳫鴃鴆鴪鴦鶯鴣鴟鵄鴕鴒鵁鴿鴾鵆鵈"], +["ea40","鵝鵞鵤鵑鵐鵙鵲鶉鶇鶫鵯鵺鶚鶤鶩鶲鷄鷁鶻鶸鶺鷆鷏鷂鷙鷓鷸鷦鷭鷯鷽鸚鸛鸞鹵鹹鹽麁麈麋麌麒麕麑麝麥麩麸麪麭靡黌黎黏黐黔黜點黝黠黥黨黯"], +["ea80","黴黶黷黹黻黼黽鼇鼈皷鼕鼡鼬鼾齊齒齔齣齟齠齡齦齧齬齪齷齲齶龕龜龠堯槇遙瑤凜熙"], +["ed40","纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏"], +["ed80","塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱"], +["ee40","犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙"], +["ee80","蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑"], +["eeef","ⅰ",9,"¬¦'""], +["f040","",62], +["f080","",124], +["f140","",62], +["f180","",124], +["f240","",62], +["f280","",124], +["f340","",62], +["f380","",124], +["f440","",62], +["f480","",124], +["f540","",62], +["f580","",124], +["f640","",62], +["f680","",124], +["f740","",62], +["f780","",124], +["f840","",62], +["f880","",124], +["f940",""], +["fa40","ⅰ",9,"Ⅰ",9,"¬¦'"㈱№℡∵纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊"], +["fa80","兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯"], +["fb40","涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神"], +["fb80","祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙"], +["fc40","髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑"] +] diff --git a/node_modules/iconv-lite/encodings/utf16.js b/node_modules/iconv-lite/encodings/utf16.js new file mode 100644 index 0000000..97d0669 --- /dev/null +++ b/node_modules/iconv-lite/encodings/utf16.js @@ -0,0 +1,197 @@ +"use strict"; 
+var Buffer = require("safer-buffer").Buffer; + +// Note: UTF16-LE (or UCS2) codec is Node.js native. See encodings/internal.js + +// == UTF16-BE codec. ========================================================== + +exports.utf16be = Utf16BECodec; +function Utf16BECodec() { +} + +Utf16BECodec.prototype.encoder = Utf16BEEncoder; +Utf16BECodec.prototype.decoder = Utf16BEDecoder; +Utf16BECodec.prototype.bomAware = true; + + +// -- Encoding + +function Utf16BEEncoder() { +} + +Utf16BEEncoder.prototype.write = function(str) { + var buf = Buffer.from(str, 'ucs2'); + for (var i = 0; i < buf.length; i += 2) { + var tmp = buf[i]; buf[i] = buf[i+1]; buf[i+1] = tmp; + } + return buf; +} + +Utf16BEEncoder.prototype.end = function() { +} + + +// -- Decoding + +function Utf16BEDecoder() { + this.overflowByte = -1; +} + +Utf16BEDecoder.prototype.write = function(buf) { + if (buf.length == 0) + return ''; + + var buf2 = Buffer.alloc(buf.length + 1), + i = 0, j = 0; + + if (this.overflowByte !== -1) { + buf2[0] = buf[0]; + buf2[1] = this.overflowByte; + i = 1; j = 2; + } + + for (; i < buf.length-1; i += 2, j+= 2) { + buf2[j] = buf[i+1]; + buf2[j+1] = buf[i]; + } + + this.overflowByte = (i == buf.length-1) ? buf[buf.length-1] : -1; + + return buf2.slice(0, j).toString('ucs2'); +} + +Utf16BEDecoder.prototype.end = function() { + this.overflowByte = -1; +} + + +// == UTF-16 codec ============================================================= +// Decoder chooses automatically from UTF-16LE and UTF-16BE using BOM and space-based heuristic. +// Defaults to UTF-16LE, as it's prevalent and default in Node. +// http://en.wikipedia.org/wiki/UTF-16 and http://encoding.spec.whatwg.org/#utf-16le +// Decoder default can be changed: iconv.decode(buf, 'utf16', {defaultEncoding: 'utf-16be'}); + +// Encoder uses UTF-16LE and prepends BOM (which can be overridden with addBOM: false). + +exports.utf16 = Utf16Codec; +function Utf16Codec(codecOptions, iconv) { + this.iconv = iconv; +} + +Utf16Codec.prototype.encoder = Utf16Encoder; +Utf16Codec.prototype.decoder = Utf16Decoder; + + +// -- Encoding (pass-through) + +function Utf16Encoder(options, codec) { + options = options || {}; + if (options.addBOM === undefined) + options.addBOM = true; + this.encoder = codec.iconv.getEncoder('utf-16le', options); +} + +Utf16Encoder.prototype.write = function(str) { + return this.encoder.write(str); +} + +Utf16Encoder.prototype.end = function() { + return this.encoder.end(); +} + + +// -- Decoding + +function Utf16Decoder(options, codec) { + this.decoder = null; + this.initialBufs = []; + this.initialBufsLen = 0; + + this.options = options || {}; + this.iconv = codec.iconv; +} + +Utf16Decoder.prototype.write = function(buf) { + if (!this.decoder) { + // Codec is not chosen yet. Accumulate initial bytes. + this.initialBufs.push(buf); + this.initialBufsLen += buf.length; + + if (this.initialBufsLen < 16) // We need more bytes to use space heuristic (see below) + return ''; + + // We have enough bytes -> detect endianness. 
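+            // Sketch of what happens next: given the BOM-less bytes 00 48 00 69
+            // ("Hi" as UTF-16BE), every 2-byte unit has a zero high byte only under
+            // the BE reading, so asciiCharsBE outscores asciiCharsLE in
+            // detectEncoding() and 'utf-16be' is chosen.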
+ var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding); + this.decoder = this.iconv.getDecoder(encoding, this.options); + + var resStr = ''; + for (var i = 0; i < this.initialBufs.length; i++) + resStr += this.decoder.write(this.initialBufs[i]); + + this.initialBufs.length = this.initialBufsLen = 0; + return resStr; + } + + return this.decoder.write(buf); +} + +Utf16Decoder.prototype.end = function() { + if (!this.decoder) { + var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding); + this.decoder = this.iconv.getDecoder(encoding, this.options); + + var resStr = ''; + for (var i = 0; i < this.initialBufs.length; i++) + resStr += this.decoder.write(this.initialBufs[i]); + + var trail = this.decoder.end(); + if (trail) + resStr += trail; + + this.initialBufs.length = this.initialBufsLen = 0; + return resStr; + } + return this.decoder.end(); +} + +function detectEncoding(bufs, defaultEncoding) { + var b = []; + var charsProcessed = 0; + var asciiCharsLE = 0, asciiCharsBE = 0; // Number of ASCII chars when decoded as LE or BE. + + outer_loop: + for (var i = 0; i < bufs.length; i++) { + var buf = bufs[i]; + for (var j = 0; j < buf.length; j++) { + b.push(buf[j]); + if (b.length === 2) { + if (charsProcessed === 0) { + // Check BOM first. + if (b[0] === 0xFF && b[1] === 0xFE) return 'utf-16le'; + if (b[0] === 0xFE && b[1] === 0xFF) return 'utf-16be'; + } + + if (b[0] === 0 && b[1] !== 0) asciiCharsBE++; + if (b[0] !== 0 && b[1] === 0) asciiCharsLE++; + + b.length = 0; + charsProcessed++; + + if (charsProcessed >= 100) { + break outer_loop; + } + } + } + } + + // Make decisions. + // Most of the time, the content has ASCII chars (U+00**), but the opposite (U+**00) is uncommon. + // So, we count ASCII as if it was LE or BE, and decide from that. + if (asciiCharsBE > asciiCharsLE) return 'utf-16be'; + if (asciiCharsBE < asciiCharsLE) return 'utf-16le'; + + // Couldn't decide (likely all zeros or not enough data). + return defaultEncoding || 'utf-16le'; +} + + diff --git a/node_modules/iconv-lite/encodings/utf32.js b/node_modules/iconv-lite/encodings/utf32.js new file mode 100644 index 0000000..2fa900a --- /dev/null +++ b/node_modules/iconv-lite/encodings/utf32.js @@ -0,0 +1,319 @@ +'use strict'; + +var Buffer = require('safer-buffer').Buffer; + +// == UTF32-LE/BE codec. ========================================================== + +exports._utf32 = Utf32Codec; + +function Utf32Codec(codecOptions, iconv) { + this.iconv = iconv; + this.bomAware = true; + this.isLE = codecOptions.isLE; +} + +exports.utf32le = { type: '_utf32', isLE: true }; +exports.utf32be = { type: '_utf32', isLE: false }; + +// Aliases +exports.ucs4le = 'utf32le'; +exports.ucs4be = 'utf32be'; + +Utf32Codec.prototype.encoder = Utf32Encoder; +Utf32Codec.prototype.decoder = Utf32Decoder; + +// -- Encoding + +function Utf32Encoder(options, codec) { + this.isLE = codec.isLE; + this.highSurrogate = 0; +} + +Utf32Encoder.prototype.write = function(str) { + var src = Buffer.from(str, 'ucs2'); + var dst = Buffer.alloc(src.length * 2); + var write32 = this.isLE ? 
dst.writeUInt32LE : dst.writeUInt32BE; + var offset = 0; + + for (var i = 0; i < src.length; i += 2) { + var code = src.readUInt16LE(i); + var isHighSurrogate = (0xD800 <= code && code < 0xDC00); + var isLowSurrogate = (0xDC00 <= code && code < 0xE000); + + if (this.highSurrogate) { + if (isHighSurrogate || !isLowSurrogate) { + // There shouldn't be two high surrogates in a row, nor a high surrogate which isn't followed by a low + // surrogate. If this happens, keep the pending high surrogate as a stand-alone semi-invalid character + // (technically wrong, but expected by some applications, like Windows file names). + write32.call(dst, this.highSurrogate, offset); + offset += 4; + } + else { + // Create 32-bit value from high and low surrogates; + var codepoint = (((this.highSurrogate - 0xD800) << 10) | (code - 0xDC00)) + 0x10000; + + write32.call(dst, codepoint, offset); + offset += 4; + this.highSurrogate = 0; + + continue; + } + } + + if (isHighSurrogate) + this.highSurrogate = code; + else { + // Even if the current character is a low surrogate, with no previous high surrogate, we'll + // encode it as a semi-invalid stand-alone character for the same reasons expressed above for + // unpaired high surrogates. + write32.call(dst, code, offset); + offset += 4; + this.highSurrogate = 0; + } + } + + if (offset < dst.length) + dst = dst.slice(0, offset); + + return dst; +}; + +Utf32Encoder.prototype.end = function() { + // Treat any leftover high surrogate as a semi-valid independent character. + if (!this.highSurrogate) + return; + + var buf = Buffer.alloc(4); + + if (this.isLE) + buf.writeUInt32LE(this.highSurrogate, 0); + else + buf.writeUInt32BE(this.highSurrogate, 0); + + this.highSurrogate = 0; + + return buf; +}; + +// -- Decoding + +function Utf32Decoder(options, codec) { + this.isLE = codec.isLE; + this.badChar = codec.iconv.defaultCharUnicode.charCodeAt(0); + this.overflow = []; +} + +Utf32Decoder.prototype.write = function(src) { + if (src.length === 0) + return ''; + + var i = 0; + var codepoint = 0; + var dst = Buffer.alloc(src.length + 4); + var offset = 0; + var isLE = this.isLE; + var overflow = this.overflow; + var badChar = this.badChar; + + if (overflow.length > 0) { + for (; i < src.length && overflow.length < 4; i++) + overflow.push(src[i]); + + if (overflow.length === 4) { + // NOTE: codepoint is a signed int32 and can be negative. + // NOTE: We copied this block from below to help V8 optimize it (it works with array, not buffer). + if (isLE) { + codepoint = overflow[i] | (overflow[i+1] << 8) | (overflow[i+2] << 16) | (overflow[i+3] << 24); + } else { + codepoint = overflow[i+3] | (overflow[i+2] << 8) | (overflow[i+1] << 16) | (overflow[i] << 24); + } + overflow.length = 0; + + offset = _writeCodepoint(dst, offset, codepoint, badChar); + } + } + + // Main loop. Should be as optimized as possible. + for (; i < src.length - 3; i += 4) { + // NOTE: codepoint is a signed int32 and can be negative. + if (isLE) { + codepoint = src[i] | (src[i+1] << 8) | (src[i+2] << 16) | (src[i+3] << 24); + } else { + codepoint = src[i+3] | (src[i+2] << 8) | (src[i+1] << 16) | (src[i] << 24); + } + offset = _writeCodepoint(dst, offset, codepoint, badChar); + } + + // Keep overflowing bytes. + for (; i < src.length; i++) { + overflow.push(src[i]); + } + + return dst.slice(0, offset).toString('ucs2'); +}; + +function _writeCodepoint(dst, offset, codepoint, badChar) { + // NOTE: codepoint is signed int32 and can be negative. We keep it that way to help V8 with optimizations. 
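+    // Worked example for the surrogate branch below: U+1F600 gives
+    //   codepoint - 0x10000              = 0xF600
+    //   high = 0xD800 | (0xF600 >> 10)   = 0xD83D
+    //   low  = 0xDC00 | (0xF600 & 0x3FF) = 0xDE00
+    // so the astral char is emitted as the UTF-16LE byte sequence 3D D8 00 DE.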
+ if (codepoint < 0 || codepoint > 0x10FFFF) { + // Not a valid Unicode codepoint + codepoint = badChar; + } + + // Ephemeral Planes: Write high surrogate. + if (codepoint >= 0x10000) { + codepoint -= 0x10000; + + var high = 0xD800 | (codepoint >> 10); + dst[offset++] = high & 0xff; + dst[offset++] = high >> 8; + + // Low surrogate is written below. + var codepoint = 0xDC00 | (codepoint & 0x3FF); + } + + // Write BMP char or low surrogate. + dst[offset++] = codepoint & 0xff; + dst[offset++] = codepoint >> 8; + + return offset; +}; + +Utf32Decoder.prototype.end = function() { + this.overflow.length = 0; +}; + +// == UTF-32 Auto codec ============================================================= +// Decoder chooses automatically from UTF-32LE and UTF-32BE using BOM and space-based heuristic. +// Defaults to UTF-32LE. http://en.wikipedia.org/wiki/UTF-32 +// Encoder/decoder default can be changed: iconv.decode(buf, 'utf32', {defaultEncoding: 'utf-32be'}); + +// Encoder prepends BOM (which can be overridden with (addBOM: false}). + +exports.utf32 = Utf32AutoCodec; +exports.ucs4 = 'utf32'; + +function Utf32AutoCodec(options, iconv) { + this.iconv = iconv; +} + +Utf32AutoCodec.prototype.encoder = Utf32AutoEncoder; +Utf32AutoCodec.prototype.decoder = Utf32AutoDecoder; + +// -- Encoding + +function Utf32AutoEncoder(options, codec) { + options = options || {}; + + if (options.addBOM === undefined) + options.addBOM = true; + + this.encoder = codec.iconv.getEncoder(options.defaultEncoding || 'utf-32le', options); +} + +Utf32AutoEncoder.prototype.write = function(str) { + return this.encoder.write(str); +}; + +Utf32AutoEncoder.prototype.end = function() { + return this.encoder.end(); +}; + +// -- Decoding + +function Utf32AutoDecoder(options, codec) { + this.decoder = null; + this.initialBufs = []; + this.initialBufsLen = 0; + this.options = options || {}; + this.iconv = codec.iconv; +} + +Utf32AutoDecoder.prototype.write = function(buf) { + if (!this.decoder) { + // Codec is not chosen yet. Accumulate initial bytes. + this.initialBufs.push(buf); + this.initialBufsLen += buf.length; + + if (this.initialBufsLen < 32) // We need more bytes to use space heuristic (see below) + return ''; + + // We have enough bytes -> detect endianness. + var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding); + this.decoder = this.iconv.getDecoder(encoding, this.options); + + var resStr = ''; + for (var i = 0; i < this.initialBufs.length; i++) + resStr += this.decoder.write(this.initialBufs[i]); + + this.initialBufs.length = this.initialBufsLen = 0; + return resStr; + } + + return this.decoder.write(buf); +}; + +Utf32AutoDecoder.prototype.end = function() { + if (!this.decoder) { + var encoding = detectEncoding(this.initialBufs, this.options.defaultEncoding); + this.decoder = this.iconv.getDecoder(encoding, this.options); + + var resStr = ''; + for (var i = 0; i < this.initialBufs.length; i++) + resStr += this.decoder.write(this.initialBufs[i]); + + var trail = this.decoder.end(); + if (trail) + resStr += trail; + + this.initialBufs.length = this.initialBufsLen = 0; + return resStr; + } + + return this.decoder.end(); +}; + +function detectEncoding(bufs, defaultEncoding) { + var b = []; + var charsProcessed = 0; + var invalidLE = 0, invalidBE = 0; // Number of invalid chars when decoded as LE or BE. + var bmpCharsLE = 0, bmpCharsBE = 0; // Number of BMP chars when decoded as LE or BE. 
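+    // Sketch of the scoring: the BOM-less bytes 48 00 00 00 ("H" as UTF-32LE)
+    // count as a BMP char for LE (b[2] === b[3] === 0) and as invalid for BE
+    // (b[0] !== 0), so the LE score wins in the decision at the bottom.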
+ + outer_loop: + for (var i = 0; i < bufs.length; i++) { + var buf = bufs[i]; + for (var j = 0; j < buf.length; j++) { + b.push(buf[j]); + if (b.length === 4) { + if (charsProcessed === 0) { + // Check BOM first. + if (b[0] === 0xFF && b[1] === 0xFE && b[2] === 0 && b[3] === 0) { + return 'utf-32le'; + } + if (b[0] === 0 && b[1] === 0 && b[2] === 0xFE && b[3] === 0xFF) { + return 'utf-32be'; + } + } + + if (b[0] !== 0 || b[1] > 0x10) invalidBE++; + if (b[3] !== 0 || b[2] > 0x10) invalidLE++; + + if (b[0] === 0 && b[1] === 0 && (b[2] !== 0 || b[3] !== 0)) bmpCharsBE++; + if ((b[0] !== 0 || b[1] !== 0) && b[2] === 0 && b[3] === 0) bmpCharsLE++; + + b.length = 0; + charsProcessed++; + + if (charsProcessed >= 100) { + break outer_loop; + } + } + } + } + + // Make decisions. + if (bmpCharsBE - invalidBE > bmpCharsLE - invalidLE) return 'utf-32be'; + if (bmpCharsBE - invalidBE < bmpCharsLE - invalidLE) return 'utf-32le'; + + // Couldn't decide (likely all zeros or not enough data). + return defaultEncoding || 'utf-32le'; +} diff --git a/node_modules/iconv-lite/encodings/utf7.js b/node_modules/iconv-lite/encodings/utf7.js new file mode 100644 index 0000000..eacae34 --- /dev/null +++ b/node_modules/iconv-lite/encodings/utf7.js @@ -0,0 +1,290 @@ +"use strict"; +var Buffer = require("safer-buffer").Buffer; + +// UTF-7 codec, according to https://tools.ietf.org/html/rfc2152 +// See also below a UTF-7-IMAP codec, according to http://tools.ietf.org/html/rfc3501#section-5.1.3 + +exports.utf7 = Utf7Codec; +exports.unicode11utf7 = 'utf7'; // Alias UNICODE-1-1-UTF-7 +function Utf7Codec(codecOptions, iconv) { + this.iconv = iconv; +}; + +Utf7Codec.prototype.encoder = Utf7Encoder; +Utf7Codec.prototype.decoder = Utf7Decoder; +Utf7Codec.prototype.bomAware = true; + + +// -- Encoding + +var nonDirectChars = /[^A-Za-z0-9'\(\),-\.\/:\? \n\r\t]+/g; + +function Utf7Encoder(options, codec) { + this.iconv = codec.iconv; +} + +Utf7Encoder.prototype.write = function(str) { + // Naive implementation. + // Non-direct chars are encoded as "+-"; single "+" char is encoded as "+-". + return Buffer.from(str.replace(nonDirectChars, function(chunk) { + return "+" + (chunk === '+' ? '' : + this.iconv.encode(chunk, 'utf16-be').toString('base64').replace(/=+$/, '')) + + "-"; + }.bind(this))); +} + +Utf7Encoder.prototype.end = function() { +} + + +// -- Decoding + +function Utf7Decoder(options, codec) { + this.iconv = codec.iconv; + this.inBase64 = false; + this.base64Accum = ''; +} + +var base64Regex = /[A-Za-z0-9\/+]/; +var base64Chars = []; +for (var i = 0; i < 256; i++) + base64Chars[i] = base64Regex.test(String.fromCharCode(i)); + +var plusChar = '+'.charCodeAt(0), + minusChar = '-'.charCodeAt(0), + andChar = '&'.charCodeAt(0); + +Utf7Decoder.prototype.write = function(buf) { + var res = "", lastI = 0, + inBase64 = this.inBase64, + base64Accum = this.base64Accum; + + // The decoder is more involved as we must handle chunks in stream. + + for (var i = 0; i < buf.length; i++) { + if (!inBase64) { // We're in direct mode. + // Write direct chars until '+' + if (buf[i] == plusChar) { + res += this.iconv.decode(buf.slice(lastI, i), "ascii"); // Write direct chars. + lastI = i+1; + inBase64 = true; + } + } else { // We decode base64. + if (!base64Chars[buf[i]]) { // Base64 ended. 
+ if (i == lastI && buf[i] == minusChar) {// "+-" -> "+" + res += "+"; + } else { + var b64str = base64Accum + this.iconv.decode(buf.slice(lastI, i), "ascii"); + res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be"); + } + + if (buf[i] != minusChar) // Minus is absorbed after base64. + i--; + + lastI = i+1; + inBase64 = false; + base64Accum = ''; + } + } + } + + if (!inBase64) { + res += this.iconv.decode(buf.slice(lastI), "ascii"); // Write direct chars. + } else { + var b64str = base64Accum + this.iconv.decode(buf.slice(lastI), "ascii"); + + var canBeDecoded = b64str.length - (b64str.length % 8); // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars. + base64Accum = b64str.slice(canBeDecoded); // The rest will be decoded in future. + b64str = b64str.slice(0, canBeDecoded); + + res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be"); + } + + this.inBase64 = inBase64; + this.base64Accum = base64Accum; + + return res; +} + +Utf7Decoder.prototype.end = function() { + var res = ""; + if (this.inBase64 && this.base64Accum.length > 0) + res = this.iconv.decode(Buffer.from(this.base64Accum, 'base64'), "utf16-be"); + + this.inBase64 = false; + this.base64Accum = ''; + return res; +} + + +// UTF-7-IMAP codec. +// RFC3501 Sec. 5.1.3 Modified UTF-7 (http://tools.ietf.org/html/rfc3501#section-5.1.3) +// Differences: +// * Base64 part is started by "&" instead of "+" +// * Direct characters are 0x20-0x7E, except "&" (0x26) +// * In Base64, "," is used instead of "/" +// * Base64 must not be used to represent direct characters. +// * No implicit shift back from Base64 (should always end with '-') +// * String must end in non-shifted position. +// * "-&" while in base64 is not allowed. + + +exports.utf7imap = Utf7IMAPCodec; +function Utf7IMAPCodec(codecOptions, iconv) { + this.iconv = iconv; +}; + +Utf7IMAPCodec.prototype.encoder = Utf7IMAPEncoder; +Utf7IMAPCodec.prototype.decoder = Utf7IMAPDecoder; +Utf7IMAPCodec.prototype.bomAware = true; + + +// -- Encoding + +function Utf7IMAPEncoder(options, codec) { + this.iconv = codec.iconv; + this.inBase64 = false; + this.base64Accum = Buffer.alloc(6); + this.base64AccumIdx = 0; +} + +Utf7IMAPEncoder.prototype.write = function(str) { + var inBase64 = this.inBase64, + base64Accum = this.base64Accum, + base64AccumIdx = this.base64AccumIdx, + buf = Buffer.alloc(str.length*5 + 10), bufIdx = 0; + + for (var i = 0; i < str.length; i++) { + var uChar = str.charCodeAt(i); + if (0x20 <= uChar && uChar <= 0x7E) { // Direct character or '&'. + if (inBase64) { + if (base64AccumIdx > 0) { + bufIdx += buf.write(base64Accum.slice(0, base64AccumIdx).toString('base64').replace(/\//g, ',').replace(/=+$/, ''), bufIdx); + base64AccumIdx = 0; + } + + buf[bufIdx++] = minusChar; // Write '-', then go to direct mode. + inBase64 = false; + } + + if (!inBase64) { + buf[bufIdx++] = uChar; // Write direct character + + if (uChar === andChar) // Ampersand -> '&-' + buf[bufIdx++] = minusChar; + } + + } else { // Non-direct character + if (!inBase64) { + buf[bufIdx++] = andChar; // Write '&', then go to base64 mode. 
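+                // e.g. '\u263A' (WHITE SMILING FACE): its UTF-16BE bytes 26 3A
+                // base64-encode (with ',' substituted for '/') to "Jjo", so the
+                // IMAP-modified form is "&Jjo-"; a literal '&' itself takes the
+                // direct branch above and comes out as "&-".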
+ inBase64 = true; + } + if (inBase64) { + base64Accum[base64AccumIdx++] = uChar >> 8; + base64Accum[base64AccumIdx++] = uChar & 0xFF; + + if (base64AccumIdx == base64Accum.length) { + bufIdx += buf.write(base64Accum.toString('base64').replace(/\//g, ','), bufIdx); + base64AccumIdx = 0; + } + } + } + } + + this.inBase64 = inBase64; + this.base64AccumIdx = base64AccumIdx; + + return buf.slice(0, bufIdx); +} + +Utf7IMAPEncoder.prototype.end = function() { + var buf = Buffer.alloc(10), bufIdx = 0; + if (this.inBase64) { + if (this.base64AccumIdx > 0) { + bufIdx += buf.write(this.base64Accum.slice(0, this.base64AccumIdx).toString('base64').replace(/\//g, ',').replace(/=+$/, ''), bufIdx); + this.base64AccumIdx = 0; + } + + buf[bufIdx++] = minusChar; // Write '-', then go to direct mode. + this.inBase64 = false; + } + + return buf.slice(0, bufIdx); +} + + +// -- Decoding + +function Utf7IMAPDecoder(options, codec) { + this.iconv = codec.iconv; + this.inBase64 = false; + this.base64Accum = ''; +} + +var base64IMAPChars = base64Chars.slice(); +base64IMAPChars[','.charCodeAt(0)] = true; + +Utf7IMAPDecoder.prototype.write = function(buf) { + var res = "", lastI = 0, + inBase64 = this.inBase64, + base64Accum = this.base64Accum; + + // The decoder is more involved as we must handle chunks in stream. + // It is forgiving, closer to standard UTF-7 (for example, '-' is optional at the end). + + for (var i = 0; i < buf.length; i++) { + if (!inBase64) { // We're in direct mode. + // Write direct chars until '&' + if (buf[i] == andChar) { + res += this.iconv.decode(buf.slice(lastI, i), "ascii"); // Write direct chars. + lastI = i+1; + inBase64 = true; + } + } else { // We decode base64. + if (!base64IMAPChars[buf[i]]) { // Base64 ended. + if (i == lastI && buf[i] == minusChar) { // "&-" -> "&" + res += "&"; + } else { + var b64str = base64Accum + this.iconv.decode(buf.slice(lastI, i), "ascii").replace(/,/g, '/'); + res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be"); + } + + if (buf[i] != minusChar) // Minus may be absorbed after base64. + i--; + + lastI = i+1; + inBase64 = false; + base64Accum = ''; + } + } + } + + if (!inBase64) { + res += this.iconv.decode(buf.slice(lastI), "ascii"); // Write direct chars. + } else { + var b64str = base64Accum + this.iconv.decode(buf.slice(lastI), "ascii").replace(/,/g, '/'); + + var canBeDecoded = b64str.length - (b64str.length % 8); // Minimal chunk: 2 quads -> 2x3 bytes -> 3 chars. + base64Accum = b64str.slice(canBeDecoded); // The rest will be decoded in future. 
+ b64str = b64str.slice(0, canBeDecoded); + + res += this.iconv.decode(Buffer.from(b64str, 'base64'), "utf16-be"); + } + + this.inBase64 = inBase64; + this.base64Accum = base64Accum; + + return res; +} + +Utf7IMAPDecoder.prototype.end = function() { + var res = ""; + if (this.inBase64 && this.base64Accum.length > 0) + res = this.iconv.decode(Buffer.from(this.base64Accum, 'base64'), "utf16-be"); + + this.inBase64 = false; + this.base64Accum = ''; + return res; +} + + diff --git a/node_modules/iconv-lite/lib/bom-handling.js b/node_modules/iconv-lite/lib/bom-handling.js new file mode 100644 index 0000000..1050872 --- /dev/null +++ b/node_modules/iconv-lite/lib/bom-handling.js @@ -0,0 +1,52 @@ +"use strict"; + +var BOMChar = '\uFEFF'; + +exports.PrependBOM = PrependBOMWrapper +function PrependBOMWrapper(encoder, options) { + this.encoder = encoder; + this.addBOM = true; +} + +PrependBOMWrapper.prototype.write = function(str) { + if (this.addBOM) { + str = BOMChar + str; + this.addBOM = false; + } + + return this.encoder.write(str); +} + +PrependBOMWrapper.prototype.end = function() { + return this.encoder.end(); +} + + +//------------------------------------------------------------------------------ + +exports.StripBOM = StripBOMWrapper; +function StripBOMWrapper(decoder, options) { + this.decoder = decoder; + this.pass = false; + this.options = options || {}; +} + +StripBOMWrapper.prototype.write = function(buf) { + var res = this.decoder.write(buf); + if (this.pass || !res) + return res; + + if (res[0] === BOMChar) { + res = res.slice(1); + if (typeof this.options.stripBOM === 'function') + this.options.stripBOM(); + } + + this.pass = true; + return res; +} + +StripBOMWrapper.prototype.end = function() { + return this.decoder.end(); +} + diff --git a/node_modules/iconv-lite/lib/index.d.ts b/node_modules/iconv-lite/lib/index.d.ts new file mode 100644 index 0000000..99f200f --- /dev/null +++ b/node_modules/iconv-lite/lib/index.d.ts @@ -0,0 +1,41 @@ +/*--------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. + * REQUIREMENT: This definition is dependent on the @types/node definition. 
+ * Install with `npm install @types/node --save-dev` + *--------------------------------------------------------------------------------------------*/ + +declare module 'iconv-lite' { + // Basic API + export function decode(buffer: Buffer, encoding: string, options?: Options): string; + + export function encode(content: string, encoding: string, options?: Options): Buffer; + + export function encodingExists(encoding: string): boolean; + + // Stream API + export function decodeStream(encoding: string, options?: Options): NodeJS.ReadWriteStream; + + export function encodeStream(encoding: string, options?: Options): NodeJS.ReadWriteStream; + + // Low-level stream APIs + export function getEncoder(encoding: string, options?: Options): EncoderStream; + + export function getDecoder(encoding: string, options?: Options): DecoderStream; +} + +export interface Options { + stripBOM?: boolean; + addBOM?: boolean; + defaultEncoding?: string; +} + +export interface EncoderStream { + write(str: string): Buffer; + end(): Buffer | undefined; +} + +export interface DecoderStream { + write(buf: Buffer): string; + end(): string | undefined; +} diff --git a/node_modules/iconv-lite/lib/index.js b/node_modules/iconv-lite/lib/index.js new file mode 100644 index 0000000..657701c --- /dev/null +++ b/node_modules/iconv-lite/lib/index.js @@ -0,0 +1,180 @@ +"use strict"; + +var Buffer = require("safer-buffer").Buffer; + +var bomHandling = require("./bom-handling"), + iconv = module.exports; + +// All codecs and aliases are kept here, keyed by encoding name/alias. +// They are lazy loaded in `iconv.getCodec` from `encodings/index.js`. +iconv.encodings = null; + +// Characters emitted in case of error. +iconv.defaultCharUnicode = '�'; +iconv.defaultCharSingleByte = '?'; + +// Public API. +iconv.encode = function encode(str, encoding, options) { + str = "" + (str || ""); // Ensure string. + + var encoder = iconv.getEncoder(encoding, options); + + var res = encoder.write(str); + var trail = encoder.end(); + + return (trail && trail.length > 0) ? Buffer.concat([res, trail]) : res; +} + +iconv.decode = function decode(buf, encoding, options) { + if (typeof buf === 'string') { + if (!iconv.skipDecodeWarning) { + console.error('Iconv-lite warning: decode()-ing strings is deprecated. Refer to https://github.com/ashtuchkin/iconv-lite/wiki/Use-Buffers-when-decoding'); + iconv.skipDecodeWarning = true; + } + + buf = Buffer.from("" + (buf || ""), "binary"); // Ensure buffer. + } + + var decoder = iconv.getDecoder(encoding, options); + + var res = decoder.write(buf); + var trail = decoder.end(); + + return trail ? (res + trail) : res; +} + +iconv.encodingExists = function encodingExists(enc) { + try { + iconv.getCodec(enc); + return true; + } catch (e) { + return false; + } +} + +// Legacy aliases to convert functions +iconv.toEncoding = iconv.encode; +iconv.fromEncoding = iconv.decode; + +// Search for a codec in iconv.encodings. Cache codec data in iconv._codecDataCache. +iconv._codecDataCache = {}; +iconv.getCodec = function getCodec(encoding) { + if (!iconv.encodings) + iconv.encodings = require("../encodings"); // Lazy load all encoding definitions. + + // Canonicalize encoding name: strip all non-alphanumeric chars and appended year. + var enc = iconv._canonicalizeEncoding(encoding); + + // Traverse iconv.encodings to find actual codec. 
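+    // Example of the traversal below (all entries visible in this diff):
+    // 'UCS-4-LE' canonicalizes to 'ucs4le', a string alias for 'utf32le',
+    // which is the options object { type: '_utf32', isLE: true }, which points
+    // at the '_utf32' codec function -- one switch case per hop.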
+ var codecOptions = {}; + while (true) { + var codec = iconv._codecDataCache[enc]; + if (codec) + return codec; + + var codecDef = iconv.encodings[enc]; + + switch (typeof codecDef) { + case "string": // Direct alias to other encoding. + enc = codecDef; + break; + + case "object": // Alias with options. Can be layered. + for (var key in codecDef) + codecOptions[key] = codecDef[key]; + + if (!codecOptions.encodingName) + codecOptions.encodingName = enc; + + enc = codecDef.type; + break; + + case "function": // Codec itself. + if (!codecOptions.encodingName) + codecOptions.encodingName = enc; + + // The codec function must load all tables and return object with .encoder and .decoder methods. + // It'll be called only once (for each different options object). + codec = new codecDef(codecOptions, iconv); + + iconv._codecDataCache[codecOptions.encodingName] = codec; // Save it to be reused later. + return codec; + + default: + throw new Error("Encoding not recognized: '" + encoding + "' (searched as: '"+enc+"')"); + } + } +} + +iconv._canonicalizeEncoding = function(encoding) { + // Canonicalize encoding name: strip all non-alphanumeric chars and appended year. + return (''+encoding).toLowerCase().replace(/:\d{4}$|[^0-9a-z]/g, ""); +} + +iconv.getEncoder = function getEncoder(encoding, options) { + var codec = iconv.getCodec(encoding), + encoder = new codec.encoder(options, codec); + + if (codec.bomAware && options && options.addBOM) + encoder = new bomHandling.PrependBOM(encoder, options); + + return encoder; +} + +iconv.getDecoder = function getDecoder(encoding, options) { + var codec = iconv.getCodec(encoding), + decoder = new codec.decoder(options, codec); + + if (codec.bomAware && !(options && options.stripBOM === false)) + decoder = new bomHandling.StripBOM(decoder, options); + + return decoder; +} + +// Streaming API +// NOTE: Streaming API naturally depends on 'stream' module from Node.js. Unfortunately in browser environments this module can add +// up to 100Kb to the output bundle. To avoid unnecessary code bloat, we don't enable Streaming API in browser by default. +// If you would like to enable it explicitly, please add the following code to your app: +// > iconv.enableStreamingAPI(require('stream')); +iconv.enableStreamingAPI = function enableStreamingAPI(stream_module) { + if (iconv.supportsStreams) + return; + + // Dependency-inject stream module to create IconvLite stream classes. + var streams = require("./streams")(stream_module); + + // Not public API yet, but expose the stream classes. + iconv.IconvLiteEncoderStream = streams.IconvLiteEncoderStream; + iconv.IconvLiteDecoderStream = streams.IconvLiteDecoderStream; + + // Streaming API. + iconv.encodeStream = function encodeStream(encoding, options) { + return new iconv.IconvLiteEncoderStream(iconv.getEncoder(encoding, options), options); + } + + iconv.decodeStream = function decodeStream(encoding, options) { + return new iconv.IconvLiteDecoderStream(iconv.getDecoder(encoding, options), options); + } + + iconv.supportsStreams = true; +} + +// Enable Streaming API automatically if 'stream' module is available and non-empty (the majority of environments). +var stream_module; +try { + stream_module = require("stream"); +} catch (e) {} + +if (stream_module && stream_module.Transform) { + iconv.enableStreamingAPI(stream_module); + +} else { + // In rare cases where 'stream' module is not available by default, throw a helpful exception. 
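+    // Even then, callers can opt in manually with the call documented above:
+    //   iconv.enableStreamingAPI(require('stream'));
+    // after which encodeStream()/decodeStream() behave normally.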
+ iconv.encodeStream = iconv.decodeStream = function() { + throw new Error("iconv-lite Streaming API is not enabled. Use iconv.enableStreamingAPI(require('stream')); to enable it."); + }; +} + +if ("Ā" != "\u0100") { + console.error("iconv-lite warning: js files use non-utf8 encoding. See https://github.com/ashtuchkin/iconv-lite/wiki/Javascript-source-file-encodings for more info."); +} diff --git a/node_modules/iconv-lite/lib/streams.js b/node_modules/iconv-lite/lib/streams.js new file mode 100644 index 0000000..a150648 --- /dev/null +++ b/node_modules/iconv-lite/lib/streams.js @@ -0,0 +1,109 @@ +"use strict"; + +var Buffer = require("safer-buffer").Buffer; + +// NOTE: Due to 'stream' module being pretty large (~100Kb, significant in browser environments), +// we opt to dependency-inject it instead of creating a hard dependency. +module.exports = function(stream_module) { + var Transform = stream_module.Transform; + + // == Encoder stream ======================================================= + + function IconvLiteEncoderStream(conv, options) { + this.conv = conv; + options = options || {}; + options.decodeStrings = false; // We accept only strings, so we don't need to decode them. + Transform.call(this, options); + } + + IconvLiteEncoderStream.prototype = Object.create(Transform.prototype, { + constructor: { value: IconvLiteEncoderStream } + }); + + IconvLiteEncoderStream.prototype._transform = function(chunk, encoding, done) { + if (typeof chunk != 'string') + return done(new Error("Iconv encoding stream needs strings as its input.")); + try { + var res = this.conv.write(chunk); + if (res && res.length) this.push(res); + done(); + } + catch (e) { + done(e); + } + } + + IconvLiteEncoderStream.prototype._flush = function(done) { + try { + var res = this.conv.end(); + if (res && res.length) this.push(res); + done(); + } + catch (e) { + done(e); + } + } + + IconvLiteEncoderStream.prototype.collect = function(cb) { + var chunks = []; + this.on('error', cb); + this.on('data', function(chunk) { chunks.push(chunk); }); + this.on('end', function() { + cb(null, Buffer.concat(chunks)); + }); + return this; + } + + + // == Decoder stream ======================================================= + + function IconvLiteDecoderStream(conv, options) { + this.conv = conv; + options = options || {}; + options.encoding = this.encoding = 'utf8'; // We output strings. 
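+        // Typical use, as a sketch ('file.txt' is a hypothetical path):
+        //   fs.createReadStream('file.txt')
+        //     .pipe(iconv.decodeStream('utf16'))
+        //     .collect(function (err, str) { /* str is the whole decoded text */ });
+        // collect(), defined below, buffers decoded chunks into one string.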
+ Transform.call(this, options); + } + + IconvLiteDecoderStream.prototype = Object.create(Transform.prototype, { + constructor: { value: IconvLiteDecoderStream } + }); + + IconvLiteDecoderStream.prototype._transform = function(chunk, encoding, done) { + if (!Buffer.isBuffer(chunk) && !(chunk instanceof Uint8Array)) + return done(new Error("Iconv decoding stream needs buffers as its input.")); + try { + var res = this.conv.write(chunk); + if (res && res.length) this.push(res, this.encoding); + done(); + } + catch (e) { + done(e); + } + } + + IconvLiteDecoderStream.prototype._flush = function(done) { + try { + var res = this.conv.end(); + if (res && res.length) this.push(res, this.encoding); + done(); + } + catch (e) { + done(e); + } + } + + IconvLiteDecoderStream.prototype.collect = function(cb) { + var res = ''; + this.on('error', cb); + this.on('data', function(chunk) { res += chunk; }); + this.on('end', function() { + cb(null, res); + }); + return this; + } + + return { + IconvLiteEncoderStream: IconvLiteEncoderStream, + IconvLiteDecoderStream: IconvLiteDecoderStream, + }; +}; diff --git a/node_modules/iconv-lite/package.json b/node_modules/iconv-lite/package.json new file mode 100644 index 0000000..d351115 --- /dev/null +++ b/node_modules/iconv-lite/package.json @@ -0,0 +1,44 @@ +{ + "name": "iconv-lite", + "description": "Convert character encodings in pure javascript.", + "version": "0.6.3", + "license": "MIT", + "keywords": [ + "iconv", + "convert", + "charset", + "icu" + ], + "author": "Alexander Shtuchkin ", + "main": "./lib/index.js", + "typings": "./lib/index.d.ts", + "homepage": "https://github.com/ashtuchkin/iconv-lite", + "bugs": "https://github.com/ashtuchkin/iconv-lite/issues", + "repository": { + "type": "git", + "url": "git://github.com/ashtuchkin/iconv-lite.git" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "coverage": "c8 _mocha --grep .", + "test": "mocha --reporter spec --grep ." + }, + "browser": { + "stream": false + }, + "devDependencies": { + "async": "^3.2.0", + "c8": "^7.2.0", + "errto": "^0.2.1", + "iconv": "^2.3.5", + "mocha": "^3.5.3", + "request": "^2.88.2", + "semver": "^6.3.0", + "unorm": "^1.6.0" + }, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + } +} diff --git a/node_modules/imurmurhash/README.md b/node_modules/imurmurhash/README.md new file mode 100644 index 0000000..f35b20a --- /dev/null +++ b/node_modules/imurmurhash/README.md @@ -0,0 +1,122 @@ +iMurmurHash.js +============== + +An incremental implementation of the MurmurHash3 (32-bit) hashing algorithm for JavaScript based on [Gary Court's implementation](https://github.com/garycourt/murmurhash-js) with [kazuyukitanimura's modifications](https://github.com/kazuyukitanimura/murmurhash-js). + +This version works significantly faster than the non-incremental version if you need to hash many small strings into a single hash, since string concatenation (to build the single string to pass the non-incremental version) is fairly costly. In one case tested, using the incremental version was about 50% faster than concatenating 5-10 strings and then hashing. + +Installation +------------ + +To use iMurmurHash in the browser, [download the latest version](https://raw.github.com/jensyt/imurmurhash-js/master/imurmurhash.min.js) and include it as a script on your site. 
+ +```html + + +``` + +--- + +To use iMurmurHash in Node.js, install the module using NPM: + +```bash +npm install imurmurhash +``` + +Then simply include it in your scripts: + +```javascript +MurmurHash3 = require('imurmurhash'); +``` + +Quick Example +------------- + +```javascript +// Create the initial hash +var hashState = MurmurHash3('string'); + +// Incrementally add text +hashState.hash('more strings'); +hashState.hash('even more strings'); + +// All calls can be chained if desired +hashState.hash('and').hash('some').hash('more'); + +// Get a result +hashState.result(); +// returns 0xe4ccfe6b +``` + +Functions +--------- + +### MurmurHash3 ([string], [seed]) +Get a hash state object, optionally initialized with the given _string_ and _seed_. _Seed_ must be a positive integer if provided. Calling this function without the `new` keyword will return a cached state object that has been reset. This is safe to use as long as the object is only used from a single thread and no other hashes are created while operating on this one. If this constraint cannot be met, you can use `new` to create a new state object. For example: + +```javascript +// Use the cached object, calling the function again will return the same +// object (but reset, so the current state would be lost) +hashState = MurmurHash3(); +... + +// Create a new object that can be safely used however you wish. Calling the +// function again will simply return a new state object, and no state loss +// will occur, at the cost of creating more objects. +hashState = new MurmurHash3(); +``` + +Both methods can be mixed however you like if you have different use cases. + +--- + +### MurmurHash3.prototype.hash (string) +Incrementally add _string_ to the hash. This can be called as many times as you want for the hash state object, including after a call to `result()`. Returns `this` so calls can be chained. + +--- + +### MurmurHash3.prototype.result () +Get the result of the hash as a 32-bit positive integer. This performs the tail and finalizer portions of the algorithm, but does not store the result in the state object. This means that it is perfectly safe to get results and then continue adding strings via `hash`. + +```javascript +// Do the whole string at once +MurmurHash3('this is a test string').result(); +// 0x70529328 + +// Do part of the string, get a result, then the other part +var m = MurmurHash3('this is a'); +m.result(); +// 0xbfc4f834 +m.hash(' test string').result(); +// 0x70529328 (same as above) +``` + +--- + +### MurmurHash3.prototype.reset ([seed]) +Reset the state object for reuse, optionally using the given _seed_ (defaults to 0 like the constructor). Returns `this` so calls can be chained. + +--- + +License (MIT) +------------- +Copyright (c) 2013 Gary Court, Jens Taylor + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/imurmurhash/imurmurhash.js b/node_modules/imurmurhash/imurmurhash.js new file mode 100644 index 0000000..e63146a --- /dev/null +++ b/node_modules/imurmurhash/imurmurhash.js @@ -0,0 +1,138 @@ +/** + * @preserve + * JS Implementation of incremental MurmurHash3 (r150) (as of May 10, 2013) + * + * @author Jens Taylor + * @see http://github.com/homebrewing/brauhaus-diff + * @author Gary Court + * @see http://github.com/garycourt/murmurhash-js + * @author Austin Appleby + * @see http://sites.google.com/site/murmurhash/ + */ +(function(){ + var cache; + + // Call this function without `new` to use the cached object (good for + // single-threaded environments), or with `new` to create a new object. + // + // @param {string} key A UTF-16 or ASCII string + // @param {number} seed An optional positive integer + // @return {object} A MurmurHash3 object for incremental hashing + function MurmurHash3(key, seed) { + var m = this instanceof MurmurHash3 ? this : cache; + m.reset(seed) + if (typeof key === 'string' && key.length > 0) { + m.hash(key); + } + + if (m !== this) { + return m; + } + }; + + // Incrementally add a string to this hash + // + // @param {string} key A UTF-16 or ASCII string + // @return {object} this + MurmurHash3.prototype.hash = function(key) { + var h1, k1, i, top, len; + + len = key.length; + this.len += len; + + k1 = this.k1; + i = 0; + switch (this.rem) { + case 0: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) : 0; + case 1: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) << 8 : 0; + case 2: k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) << 16 : 0; + case 3: + k1 ^= len > i ? (key.charCodeAt(i) & 0xff) << 24 : 0; + k1 ^= len > i ? 
(key.charCodeAt(i++) & 0xff00) >> 8 : 0; + } + + this.rem = (len + this.rem) & 3; // & 3 is same as % 4 + len -= this.rem; + if (len > 0) { + h1 = this.h1; + while (1) { + k1 = (k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000) & 0xffffffff; + k1 = (k1 << 15) | (k1 >>> 17); + k1 = (k1 * 0x3593 + (k1 & 0xffff) * 0x1b870000) & 0xffffffff; + + h1 ^= k1; + h1 = (h1 << 13) | (h1 >>> 19); + h1 = (h1 * 5 + 0xe6546b64) & 0xffffffff; + + if (i >= len) { + break; + } + + k1 = ((key.charCodeAt(i++) & 0xffff)) ^ + ((key.charCodeAt(i++) & 0xffff) << 8) ^ + ((key.charCodeAt(i++) & 0xffff) << 16); + top = key.charCodeAt(i++); + k1 ^= ((top & 0xff) << 24) ^ + ((top & 0xff00) >> 8); + } + + k1 = 0; + switch (this.rem) { + case 3: k1 ^= (key.charCodeAt(i + 2) & 0xffff) << 16; + case 2: k1 ^= (key.charCodeAt(i + 1) & 0xffff) << 8; + case 1: k1 ^= (key.charCodeAt(i) & 0xffff); + } + + this.h1 = h1; + } + + this.k1 = k1; + return this; + }; + + // Get the result of this hash + // + // @return {number} The 32-bit hash + MurmurHash3.prototype.result = function() { + var k1, h1; + + k1 = this.k1; + h1 = this.h1; + + if (k1 > 0) { + k1 = (k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000) & 0xffffffff; + k1 = (k1 << 15) | (k1 >>> 17); + k1 = (k1 * 0x3593 + (k1 & 0xffff) * 0x1b870000) & 0xffffffff; + h1 ^= k1; + } + + h1 ^= this.len; + + h1 ^= h1 >>> 16; + h1 = (h1 * 0xca6b + (h1 & 0xffff) * 0x85eb0000) & 0xffffffff; + h1 ^= h1 >>> 13; + h1 = (h1 * 0xae35 + (h1 & 0xffff) * 0xc2b20000) & 0xffffffff; + h1 ^= h1 >>> 16; + + return h1 >>> 0; + }; + + // Reset the hash object for reuse + // + // @param {number} seed An optional positive integer + MurmurHash3.prototype.reset = function(seed) { + this.h1 = typeof seed === 'number' ? seed : 0; + this.rem = this.k1 = this.len = 0; + return this; + }; + + // A cached object to use. This can be safely used if you're in a single- + // threaded environment, otherwise you need to create new hashes to use. 
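+    // A note on the arithmetic in hash()/result() above: a plain 32-bit product
+    // like k1 * 0xcc9e2d51 can exceed JS's 53-bit exact-integer range, so the
+    // constant is split into 16-bit halves:
+    //   (k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000) & 0xffffffff
+    // which stays exact mod 2^32 because the high half only needs k1's low 16 bits.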
+ cache = new MurmurHash3(); + + if (typeof(module) != 'undefined') { + module.exports = MurmurHash3; + } else { + this.MurmurHash3 = MurmurHash3; + } +}()); diff --git a/node_modules/imurmurhash/imurmurhash.min.js b/node_modules/imurmurhash/imurmurhash.min.js new file mode 100644 index 0000000..dc0ee88 --- /dev/null +++ b/node_modules/imurmurhash/imurmurhash.min.js @@ -0,0 +1,12 @@ +/** + * @preserve + * JS Implementation of incremental MurmurHash3 (r150) (as of May 10, 2013) + * + * @author Jens Taylor + * @see http://github.com/homebrewing/brauhaus-diff + * @author Gary Court + * @see http://github.com/garycourt/murmurhash-js + * @author Austin Appleby + * @see http://sites.google.com/site/murmurhash/ + */ +!function(){function t(h,r){var s=this instanceof t?this:e;return s.reset(r),"string"==typeof h&&h.length>0&&s.hash(h),s!==this?s:void 0}var e;t.prototype.hash=function(t){var e,h,r,s,i;switch(i=t.length,this.len+=i,h=this.k1,r=0,this.rem){case 0:h^=i>r?65535&t.charCodeAt(r++):0;case 1:h^=i>r?(65535&t.charCodeAt(r++))<<8:0;case 2:h^=i>r?(65535&t.charCodeAt(r++))<<16:0;case 3:h^=i>r?(255&t.charCodeAt(r))<<24:0,h^=i>r?(65280&t.charCodeAt(r++))>>8:0}if(this.rem=3&i+this.rem,i-=this.rem,i>0){for(e=this.h1;;){if(h=4294967295&11601*h+3432906752*(65535&h),h=h<<15|h>>>17,h=4294967295&13715*h+461832192*(65535&h),e^=h,e=e<<13|e>>>19,e=4294967295&5*e+3864292196,r>=i)break;h=65535&t.charCodeAt(r++)^(65535&t.charCodeAt(r++))<<8^(65535&t.charCodeAt(r++))<<16,s=t.charCodeAt(r++),h^=(255&s)<<24^(65280&s)>>8}switch(h=0,this.rem){case 3:h^=(65535&t.charCodeAt(r+2))<<16;case 2:h^=(65535&t.charCodeAt(r+1))<<8;case 1:h^=65535&t.charCodeAt(r)}this.h1=e}return this.k1=h,this},t.prototype.result=function(){var t,e;return t=this.k1,e=this.h1,t>0&&(t=4294967295&11601*t+3432906752*(65535&t),t=t<<15|t>>>17,t=4294967295&13715*t+461832192*(65535&t),e^=t),e^=this.len,e^=e>>>16,e=4294967295&51819*e+2246770688*(65535&e),e^=e>>>13,e=4294967295&44597*e+3266445312*(65535&e),e^=e>>>16,e>>>0},t.prototype.reset=function(t){return this.h1="number"==typeof t?t:0,this.rem=this.k1=this.len=0,this},e=new t,"undefined"!=typeof module?module.exports=t:this.MurmurHash3=t}(); \ No newline at end of file diff --git a/node_modules/imurmurhash/package.json b/node_modules/imurmurhash/package.json new file mode 100644 index 0000000..8a93edb --- /dev/null +++ b/node_modules/imurmurhash/package.json @@ -0,0 +1,40 @@ +{ + "name": "imurmurhash", + "version": "0.1.4", + "description": "An incremental implementation of MurmurHash3", + "homepage": "https://github.com/jensyt/imurmurhash-js", + "main": "imurmurhash.js", + "files": [ + "imurmurhash.js", + "imurmurhash.min.js", + "package.json", + "README.md" + ], + "repository": { + "type": "git", + "url": "https://github.com/jensyt/imurmurhash-js" + }, + "bugs": { + "url": "https://github.com/jensyt/imurmurhash-js/issues" + }, + "keywords": [ + "murmur", + "murmurhash", + "murmurhash3", + "hash", + "incremental" + ], + "author": { + "name": "Jens Taylor", + "email": "jensyt@gmail.com", + "url": "https://github.com/homebrewing" + }, + "license": "MIT", + "dependencies": { + }, + "devDependencies": { + }, + "engines": { + "node": ">=0.8.19" + } +} diff --git a/node_modules/indent-string/index.d.ts b/node_modules/indent-string/index.d.ts new file mode 100644 index 0000000..1185231 --- /dev/null +++ b/node_modules/indent-string/index.d.ts @@ -0,0 +1,42 @@ +declare namespace indentString { + interface Options { + /** + The string to use for the indent. 
+ + @default ' ' + */ + readonly indent?: string; + + /** + Also indent empty lines. + + @default false + */ + readonly includeEmptyLines?: boolean; + } +} + +/** +Indent each line in a string. + +@param string - The string to indent. +@param count - How many times you want `options.indent` repeated. Default: `1`. + +@example +``` +import indentString = require('indent-string'); + +indentString('Unicorns\nRainbows', 4); +//=> ' Unicorns\n Rainbows' + +indentString('Unicorns\nRainbows', 4, {indent: '♥'}); +//=> '♥♥♥♥Unicorns\n♥♥♥♥Rainbows' +``` +*/ +declare function indentString( + string: string, + count?: number, + options?: indentString.Options +): string; + +export = indentString; diff --git a/node_modules/indent-string/index.js b/node_modules/indent-string/index.js new file mode 100644 index 0000000..e1ab804 --- /dev/null +++ b/node_modules/indent-string/index.js @@ -0,0 +1,35 @@ +'use strict'; + +module.exports = (string, count = 1, options) => { + options = { + indent: ' ', + includeEmptyLines: false, + ...options + }; + + if (typeof string !== 'string') { + throw new TypeError( + `Expected \`input\` to be a \`string\`, got \`${typeof string}\`` + ); + } + + if (typeof count !== 'number') { + throw new TypeError( + `Expected \`count\` to be a \`number\`, got \`${typeof count}\`` + ); + } + + if (typeof options.indent !== 'string') { + throw new TypeError( + `Expected \`options.indent\` to be a \`string\`, got \`${typeof options.indent}\`` + ); + } + + if (count === 0) { + return string; + } + + const regex = options.includeEmptyLines ? /^/gm : /^(?!\s*$)/gm; + + return string.replace(regex, options.indent.repeat(count)); +}; diff --git a/node_modules/indent-string/license b/node_modules/indent-string/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/indent-string/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
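The regex switch in `index.js` above is the whole behavioral difference: `/^(?!\s*$)/gm` skips whitespace-only lines, while `/^/gm` matches the start of every line. A quick sketch of the observable effect (hypothetical inputs, using the module's documented API):

```js
const indentString = require('indent-string');

indentString('a\n\nb', 2, {indent: '>'});
//=> '>>a\n\n>>b'   (the empty line is left alone)

indentString('a\n\nb', 2, {indent: '>', includeEmptyLines: true});
//=> '>>a\n>>\n>>b' (every line, empty or not, gets the indent)
```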
diff --git a/node_modules/indent-string/package.json b/node_modules/indent-string/package.json new file mode 100644 index 0000000..497bb83 --- /dev/null +++ b/node_modules/indent-string/package.json @@ -0,0 +1,37 @@ +{ + "name": "indent-string", + "version": "4.0.0", + "description": "Indent each line in a string", + "license": "MIT", + "repository": "sindresorhus/indent-string", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "indent", + "string", + "pad", + "align", + "line", + "text", + "each", + "every" + ], + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.2", + "xo": "^0.24.0" + } +} diff --git a/node_modules/indent-string/readme.md b/node_modules/indent-string/readme.md new file mode 100644 index 0000000..49967de --- /dev/null +++ b/node_modules/indent-string/readme.md @@ -0,0 +1,70 @@ +# indent-string [![Build Status](https://travis-ci.org/sindresorhus/indent-string.svg?branch=master)](https://travis-ci.org/sindresorhus/indent-string) + +> Indent each line in a string + + +## Install + +``` +$ npm install indent-string +``` + + +## Usage + +```js +const indentString = require('indent-string'); + +indentString('Unicorns\nRainbows', 4); +//=> ' Unicorns\n Rainbows' + +indentString('Unicorns\nRainbows', 4, {indent: '♥'}); +//=> '♥♥♥♥Unicorns\n♥♥♥♥Rainbows' +``` + + +## API + +### indentString(string, [count], [options]) + +#### string + +Type: `string` + +The string to indent. + +#### count + +Type: `number`
+Default: `1` + +How many times you want `options.indent` repeated. + +#### options + +Type: `object` + +##### indent + +Type: `string`
+Default: `' '` + +The string to use for the indent. + +##### includeEmptyLines + +Type: `boolean`
+Default: `false` + +Also indent empty lines. + + +## Related + +- [indent-string-cli](https://github.com/sindresorhus/indent-string-cli) - CLI for this module +- [strip-indent](https://github.com/sindresorhus/strip-indent) - Strip leading whitespace from every line in a string + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/infer-owner/LICENSE b/node_modules/infer-owner/LICENSE new file mode 100644 index 0000000..20a4762 --- /dev/null +++ b/node_modules/infer-owner/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/infer-owner/README.md b/node_modules/infer-owner/README.md new file mode 100644 index 0000000..146caf7 --- /dev/null +++ b/node_modules/infer-owner/README.md @@ -0,0 +1,41 @@ +# infer-owner + +Infer the owner of a path based on the owner of its nearest existing parent + +## USAGE + +```js +const inferOwner = require('infer-owner') + +inferOwner('/some/cache/folder/file').then(owner => { + // owner is {uid, gid} that should be attached to + // the /some/cache/folder/file, based on ownership + // of /some/cache/folder, /some/cache, /some, or /, + // whichever is the first to exist +}) + +// same, but not async +const owner = inferOwner.sync('/some/cache/folder/file') + +// results are cached! to reset the cache (eg, to change +// permissions for whatever reason), do this: +inferOwner.clearCache() +``` + +This module endeavors to be as performant as possible. Parallel requests +for ownership of the same path will only stat the directories one time. + +## API + +* `inferOwner(path) -> Promise<{ uid, gid }>` + + If the path exists, return its uid and gid. If it does not, look to + its parent, then its grandparent, and so on. + +* `inferOwner(path) -> { uid, gid }` + + Sync form of `inferOwner(path)`. + +* `inferOwner.clearCache()` + + Delete all cached ownership information and in-flight tracking. diff --git a/node_modules/infer-owner/index.js b/node_modules/infer-owner/index.js new file mode 100644 index 0000000..a7bddcb --- /dev/null +++ b/node_modules/infer-owner/index.js @@ -0,0 +1,71 @@ +const cache = new Map() +const fs = require('fs') +const { dirname, resolve } = require('path') + + +const lstat = path => new Promise((res, rej) => + fs.lstat(path, (er, st) => er ? rej(er) : res(st))) + +const inferOwner = path => { + path = resolve(path) + if (cache.has(path)) + return Promise.resolve(cache.get(path)) + + const statThen = st => { + const { uid, gid } = st + cache.set(path, { uid, gid }) + return { uid, gid } + } + const parent = dirname(path) + const parentTrap = parent === path ? 
null : er => { + return inferOwner(parent).then((owner) => { + cache.set(path, owner) + return owner + }) + } + return lstat(path).then(statThen, parentTrap) +} + +const inferOwnerSync = path => { + path = resolve(path) + if (cache.has(path)) + return cache.get(path) + + const parent = dirname(path) + + // avoid obscuring call site by re-throwing + // "catch" the error by returning from a finally, + // only if we're not at the root, and the parent call works. + let threw = true + try { + const st = fs.lstatSync(path) + threw = false + const { uid, gid } = st + cache.set(path, { uid, gid }) + return { uid, gid } + } finally { + if (threw && parent !== path) { + const owner = inferOwnerSync(parent) + cache.set(path, owner) + return owner // eslint-disable-line no-unsafe-finally + } + } +} + +const inflight = new Map() +module.exports = path => { + path = resolve(path) + if (inflight.has(path)) + return Promise.resolve(inflight.get(path)) + const p = inferOwner(path).then(owner => { + inflight.delete(path) + return owner + }) + inflight.set(path, p) + return p +} +module.exports.sync = inferOwnerSync +module.exports.clearCache = () => { + cache.clear() + inflight.clear() +} diff --git a/node_modules/infer-owner/package.json b/node_modules/infer-owner/package.json new file mode 100644 index 0000000..c4b2b6e --- /dev/null +++ b/node_modules/infer-owner/package.json @@ -0,0 +1,26 @@ +{ + "name": "infer-owner", + "version": "1.0.4", + "description": "Infer the owner of a path based on the owner of its nearest existing parent", + "author": "Isaac Z. Schlueter (https://izs.me)", + "license": "ISC", + "scripts": { + "test": "tap -J test/*.js --100", + "snap": "TAP_SNAPSHOT=1 tap -J test/*.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags" + }, + "devDependencies": { + "mutate-fs": "^2.1.1", + "tap": "^12.4.2" + }, + "main": "index.js", + "repository": "https://github.com/npm/infer-owner", + "publishConfig": { + "access": "public" + }, + "files": [ + "index.js" + ] +} diff --git a/node_modules/inflight/LICENSE b/node_modules/inflight/LICENSE new file mode 100644 index 0000000..05eeeb8 --- /dev/null +++ b/node_modules/inflight/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/inflight/README.md b/node_modules/inflight/README.md new file mode 100644 index 0000000..6dc8929 --- /dev/null +++ b/node_modules/inflight/README.md @@ -0,0 +1,37 @@ +# inflight + +Add callbacks to requests in flight to avoid async duplication + +## USAGE + +```javascript +var inflight = require('inflight') + +// some request that does some stuff +function req(key, callback) { + // key is any random string. like a url or filename or whatever. 
+  //
+  // will return either a falsey value, indicating that the
+  // request for this key is already in flight, or a new callback
+  // which when called will call all callbacks passed to inflight
+  // with the same key
+  callback = inflight(key, callback)
+
+  // If we got a falsey value back, then there's already a req going
+  if (!callback) return
+
+  // this is where you'd fetch the url or whatever
+  // callback is also once()-ified, so it can safely be assigned
+  // to multiple events etc. First call wins.
+  setTimeout(function() {
+    callback(null, key)
+  }, 100)
+}
+
+// only assigns a single setTimeout
+// when it dings, all cbs get called
+req('foo', cb1)
+req('foo', cb2)
+req('foo', cb3)
+req('foo', cb4)
+```
diff --git a/node_modules/inflight/inflight.js b/node_modules/inflight/inflight.js
new file mode 100644
index 0000000..48202b3
--- /dev/null
+++ b/node_modules/inflight/inflight.js
@@ -0,0 +1,54 @@
+var wrappy = require('wrappy')
+var reqs = Object.create(null)
+var once = require('once')
+
+module.exports = wrappy(inflight)
+
+function inflight (key, cb) {
+  if (reqs[key]) {
+    reqs[key].push(cb)
+    return null
+  } else {
+    reqs[key] = [cb]
+    return makeres(key)
+  }
+}
+
+function makeres (key) {
+  return once(function RES () {
+    var cbs = reqs[key]
+    var len = cbs.length
+    var args = slice(arguments)
+
+    // XXX It's somewhat ambiguous whether a new callback added in this
+    // pass should be queued for later execution if something in the
+    // list of callbacks throws, or if it should just be discarded.
+    // However, it's such an edge case that it hardly matters, and either
+    // choice is likely as surprising as the other.
+    // As it happens, we do go ahead and schedule it for later execution.
+    try {
+      for (var i = 0; i < len; i++) {
+        cbs[i].apply(null, args)
+      }
+    } finally {
+      if (cbs.length > len) {
+        // added more in the interim.
+        // de-zalgo, just in case, but don't call again.
+        cbs.splice(0, len)
+        process.nextTick(function () {
+          RES.apply(null, args)
+        })
+      } else {
+        delete reqs[key]
+      }
+    }
+  })
+}
+
+function slice (args) {
+  var length = args.length
+  var array = []
+
+  for (var i = 0; i < length; i++) array[i] = args[i]
+  return array
+}
diff --git a/node_modules/inflight/package.json b/node_modules/inflight/package.json
new file mode 100644
index 0000000..6084d35
--- /dev/null
+++ b/node_modules/inflight/package.json
@@ -0,0 +1,29 @@
+{
+  "name": "inflight",
+  "version": "1.0.6",
+  "description": "Add callbacks to requests in flight to avoid async duplication",
+  "main": "inflight.js",
+  "files": [
+    "inflight.js"
+  ],
+  "dependencies": {
+    "once": "^1.3.0",
+    "wrappy": "1"
+  },
+  "devDependencies": {
+    "tap": "^7.1.2"
+  },
+  "scripts": {
+    "test": "tap test.js --100"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/inflight.git"
+  },
+  "author": "Isaac Z. Schlueter (http://blog.izs.me/)",
+  "bugs": {
+    "url": "https://github.com/isaacs/inflight/issues"
+  },
+  "homepage": "https://github.com/isaacs/inflight",
+  "license": "ISC"
+}
diff --git a/node_modules/inherits/LICENSE b/node_modules/inherits/LICENSE
new file mode 100644
index 0000000..dea3013
--- /dev/null
+++ b/node_modules/inherits/LICENSE
@@ -0,0 +1,16 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
+
diff --git a/node_modules/inherits/README.md b/node_modules/inherits/README.md
new file mode 100644
index 0000000..b1c5665
--- /dev/null
+++ b/node_modules/inherits/README.md
@@ -0,0 +1,42 @@
+Browser-friendly inheritance fully compatible with standard node.js
+[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor).
+
+This package exports the standard `inherits` from the node.js `util` module in
+a node environment, but also provides an alternative browser-friendly
+implementation through the [browser
+field](https://gist.github.com/shtylman/4339901). The alternative
+implementation is a literal copy of the standard one, kept in a standalone
+module to avoid requiring `util`. It also has a shim for old
+browsers with no `Object.create` support.
+
+While making sure you are using the standard `inherits`
+implementation in a node.js environment, it allows bundlers such as
+[browserify](https://github.com/substack/node-browserify) to leave the full
+`util` package out of your client code when all you need is the
+`inherits` function. This is worthwhile, because the browser shim for the
+`util` package is large and `inherits` is often the single function you
+need from it.
+
+It's recommended to use this package instead of
+`require('util').inherits` for any code that may be used
+not only in node.js but in the browser too.
+
+## usage
+
+```js
+var inherits = require('inherits');
+// then use exactly as the standard one
+```
+
+## note on version ~1.0
+
+Version ~1.0 had a completely different motivation and is compatible
+neither with 2.0 nor with the standard node.js `inherits`.
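Before the migration checklist below, a minimal sketch of 2.x usage may help; the `Dog` constructor and event name are illustrative only, not part of this package:

```js
var inherits = require('inherits');
var EventEmitter = require('events').EventEmitter;

// A constructor deriving from EventEmitter, the classic use case.
function Dog(name) {
  EventEmitter.call(this); // run the superclass constructor on `this`
  this.name = name;
}
inherits(Dog, EventEmitter);

// Methods are added *after* the inherits() call, because 2.x replaces
// the prototype rather than copying onto it.
Dog.prototype.bark = function () {
  this.emit('bark', this.name);
};

var dog = new Dog('Rex');
dog.on('bark', function (name) { console.log(name + ' barked'); });
dog.bark(); // prints "Rex barked"

// 2.x exposes the superclass as `super_` (1.x used `super`):
console.log(Dog.super_ === EventEmitter); // true
```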
+ +If you are using version ~1.0 and planning to switch to ~2.0, be +careful: + +* new version uses `super_` instead of `super` for referencing + superclass +* new version overwrites current prototype while old one preserves any + existing fields on it diff --git a/node_modules/inherits/inherits.js b/node_modules/inherits/inherits.js new file mode 100644 index 0000000..f71f2d9 --- /dev/null +++ b/node_modules/inherits/inherits.js @@ -0,0 +1,9 @@ +try { + var util = require('util'); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = require('./inherits_browser.js'); +} diff --git a/node_modules/inherits/inherits_browser.js b/node_modules/inherits/inherits_browser.js new file mode 100644 index 0000000..86bbb3d --- /dev/null +++ b/node_modules/inherits/inherits_browser.js @@ -0,0 +1,27 @@ +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} diff --git a/node_modules/inherits/package.json b/node_modules/inherits/package.json new file mode 100644 index 0000000..37b4366 --- /dev/null +++ b/node_modules/inherits/package.json @@ -0,0 +1,29 @@ +{ + "name": "inherits", + "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", + "version": "2.0.4", + "keywords": [ + "inheritance", + "class", + "klass", + "oop", + "object-oriented", + "inherits", + "browser", + "browserify" + ], + "main": "./inherits.js", + "browser": "./inherits_browser.js", + "repository": "git://github.com/isaacs/inherits", + "license": "ISC", + "scripts": { + "test": "tap" + }, + "devDependencies": { + "tap": "^14.2.4" + }, + "files": [ + "inherits.js", + "inherits_browser.js" + ] +} diff --git a/node_modules/ip/README.md b/node_modules/ip/README.md new file mode 100644 index 0000000..22e5819 --- /dev/null +++ b/node_modules/ip/README.md @@ -0,0 +1,90 @@ +# IP +[![](https://badge.fury.io/js/ip.svg)](https://www.npmjs.com/package/ip) + +IP address utilities for node.js + +## Installation + +### npm +```shell +npm install ip +``` + +### git + +```shell +git clone https://github.com/indutny/node-ip.git +``` + +## Usage +Get your ip address, compare ip addresses, validate ip addresses, etc. 
+ +```js +var ip = require('ip'); + +ip.address() // my ip address +ip.isEqual('::1', '::0:1'); // true +ip.toBuffer('127.0.0.1') // Buffer([127, 0, 0, 1]) +ip.toString(new Buffer([127, 0, 0, 1])) // 127.0.0.1 +ip.fromPrefixLen(24) // 255.255.255.0 +ip.mask('192.168.1.134', '255.255.255.0') // 192.168.1.0 +ip.cidr('192.168.1.134/26') // 192.168.1.128 +ip.not('255.255.255.0') // 0.0.0.255 +ip.or('192.168.1.134', '0.0.0.255') // 192.168.1.255 +ip.isPrivate('127.0.0.1') // true +ip.isV4Format('127.0.0.1'); // true +ip.isV6Format('::ffff:127.0.0.1'); // true + +// operate on buffers in-place +var buf = new Buffer(128); +var offset = 64; +ip.toBuffer('127.0.0.1', buf, offset); // [127, 0, 0, 1] at offset 64 +ip.toString(buf, offset, 4); // '127.0.0.1' + +// subnet information +ip.subnet('192.168.1.134', '255.255.255.192') +// { networkAddress: '192.168.1.128', +// firstAddress: '192.168.1.129', +// lastAddress: '192.168.1.190', +// broadcastAddress: '192.168.1.191', +// subnetMask: '255.255.255.192', +// subnetMaskLength: 26, +// numHosts: 62, +// length: 64, +// contains: function(addr){...} } +ip.cidrSubnet('192.168.1.134/26') +// Same as previous. + +// range checking +ip.cidrSubnet('192.168.1.134/26').contains('192.168.1.190') // true + + +// ipv4 long conversion +ip.toLong('127.0.0.1'); // 2130706433 +ip.fromLong(2130706433); // '127.0.0.1' +``` + +### License + +This software is licensed under the MIT License. + +Copyright Fedor Indutny, 2012. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. 
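To make the subnet arithmetic from the usage section concrete, here is a short sketch; the variable names are ours, and the printed values follow from the `/26` example above:

```js
var ip = require('ip');

// A /26 prefix leaves 32 - 26 = 6 host bits: 2^6 = 64 addresses,
// of which 62 are usable hosts (network and broadcast are reserved).
var info = ip.cidrSubnet('192.168.1.134/26');
console.log(info.networkAddress);   // '192.168.1.128'
console.log(info.subnetMaskLength); // 26
console.log(info.length);           // 64
console.log(info.numHosts);         // 62

// contains() masks the other address and compares network addresses.
console.log(info.contains('192.168.1.190')); // true  (inside .128 to .191)
console.log(info.contains('192.168.1.195')); // false (next /26 block)

// toLong()/fromLong() round-trip an IPv4 address through its
// unsigned 32-bit integer form.
var n = ip.toLong('192.168.1.134'); // 3232235910
console.log(ip.fromLong(n));        // '192.168.1.134'
```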
diff --git a/node_modules/ip/lib/ip.js b/node_modules/ip/lib/ip.js new file mode 100644 index 0000000..4b2adb5 --- /dev/null +++ b/node_modules/ip/lib/ip.js @@ -0,0 +1,422 @@ +const ip = exports; +const { Buffer } = require('buffer'); +const os = require('os'); + +ip.toBuffer = function (ip, buff, offset) { + offset = ~~offset; + + let result; + + if (this.isV4Format(ip)) { + result = buff || Buffer.alloc(offset + 4); + ip.split(/\./g).map((byte) => { + result[offset++] = parseInt(byte, 10) & 0xff; + }); + } else if (this.isV6Format(ip)) { + const sections = ip.split(':', 8); + + let i; + for (i = 0; i < sections.length; i++) { + const isv4 = this.isV4Format(sections[i]); + let v4Buffer; + + if (isv4) { + v4Buffer = this.toBuffer(sections[i]); + sections[i] = v4Buffer.slice(0, 2).toString('hex'); + } + + if (v4Buffer && ++i < 8) { + sections.splice(i, 0, v4Buffer.slice(2, 4).toString('hex')); + } + } + + if (sections[0] === '') { + while (sections.length < 8) sections.unshift('0'); + } else if (sections[sections.length - 1] === '') { + while (sections.length < 8) sections.push('0'); + } else if (sections.length < 8) { + for (i = 0; i < sections.length && sections[i] !== ''; i++); + const argv = [i, 1]; + for (i = 9 - sections.length; i > 0; i--) { + argv.push('0'); + } + sections.splice(...argv); + } + + result = buff || Buffer.alloc(offset + 16); + for (i = 0; i < sections.length; i++) { + const word = parseInt(sections[i], 16); + result[offset++] = (word >> 8) & 0xff; + result[offset++] = word & 0xff; + } + } + + if (!result) { + throw Error(`Invalid ip address: ${ip}`); + } + + return result; +}; + +ip.toString = function (buff, offset, length) { + offset = ~~offset; + length = length || (buff.length - offset); + + let result = []; + if (length === 4) { + // IPv4 + for (let i = 0; i < length; i++) { + result.push(buff[offset + i]); + } + result = result.join('.'); + } else if (length === 16) { + // IPv6 + for (let i = 0; i < length; i += 2) { + result.push(buff.readUInt16BE(offset + i).toString(16)); + } + result = result.join(':'); + result = result.replace(/(^|:)0(:0)*:0(:|$)/, '$1::$3'); + result = result.replace(/:{3,4}/, '::'); + } + + return result; +}; + +const ipv4Regex = /^(\d{1,3}\.){3,3}\d{1,3}$/; +const ipv6Regex = /^(::)?(((\d{1,3}\.){3}(\d{1,3}){1})?([0-9a-f]){0,4}:{0,2}){1,8}(::)?$/i; + +ip.isV4Format = function (ip) { + return ipv4Regex.test(ip); +}; + +ip.isV6Format = function (ip) { + return ipv6Regex.test(ip); +}; + +function _normalizeFamily(family) { + if (family === 4) { + return 'ipv4'; + } + if (family === 6) { + return 'ipv6'; + } + return family ? 
family.toLowerCase() : 'ipv4'; +} + +ip.fromPrefixLen = function (prefixlen, family) { + if (prefixlen > 32) { + family = 'ipv6'; + } else { + family = _normalizeFamily(family); + } + + let len = 4; + if (family === 'ipv6') { + len = 16; + } + const buff = Buffer.alloc(len); + + for (let i = 0, n = buff.length; i < n; ++i) { + let bits = 8; + if (prefixlen < 8) { + bits = prefixlen; + } + prefixlen -= bits; + + buff[i] = ~(0xff >> bits) & 0xff; + } + + return ip.toString(buff); +}; + +ip.mask = function (addr, mask) { + addr = ip.toBuffer(addr); + mask = ip.toBuffer(mask); + + const result = Buffer.alloc(Math.max(addr.length, mask.length)); + + // Same protocol - do bitwise and + let i; + if (addr.length === mask.length) { + for (i = 0; i < addr.length; i++) { + result[i] = addr[i] & mask[i]; + } + } else if (mask.length === 4) { + // IPv6 address and IPv4 mask + // (Mask low bits) + for (i = 0; i < mask.length; i++) { + result[i] = addr[addr.length - 4 + i] & mask[i]; + } + } else { + // IPv6 mask and IPv4 addr + for (i = 0; i < result.length - 6; i++) { + result[i] = 0; + } + + // ::ffff:ipv4 + result[10] = 0xff; + result[11] = 0xff; + for (i = 0; i < addr.length; i++) { + result[i + 12] = addr[i] & mask[i + 12]; + } + i += 12; + } + for (; i < result.length; i++) { + result[i] = 0; + } + + return ip.toString(result); +}; + +ip.cidr = function (cidrString) { + const cidrParts = cidrString.split('/'); + + const addr = cidrParts[0]; + if (cidrParts.length !== 2) { + throw new Error(`invalid CIDR subnet: ${addr}`); + } + + const mask = ip.fromPrefixLen(parseInt(cidrParts[1], 10)); + + return ip.mask(addr, mask); +}; + +ip.subnet = function (addr, mask) { + const networkAddress = ip.toLong(ip.mask(addr, mask)); + + // Calculate the mask's length. + const maskBuffer = ip.toBuffer(mask); + let maskLength = 0; + + for (let i = 0; i < maskBuffer.length; i++) { + if (maskBuffer[i] === 0xff) { + maskLength += 8; + } else { + let octet = maskBuffer[i] & 0xff; + while (octet) { + octet = (octet << 1) & 0xff; + maskLength++; + } + } + } + + const numberOfAddresses = 2 ** (32 - maskLength); + + return { + networkAddress: ip.fromLong(networkAddress), + firstAddress: numberOfAddresses <= 2 + ? ip.fromLong(networkAddress) + : ip.fromLong(networkAddress + 1), + lastAddress: numberOfAddresses <= 2 + ? ip.fromLong(networkAddress + numberOfAddresses - 1) + : ip.fromLong(networkAddress + numberOfAddresses - 2), + broadcastAddress: ip.fromLong(networkAddress + numberOfAddresses - 1), + subnetMask: mask, + subnetMaskLength: maskLength, + numHosts: numberOfAddresses <= 2 + ? 
numberOfAddresses : numberOfAddresses - 2, + length: numberOfAddresses, + contains(other) { + return networkAddress === ip.toLong(ip.mask(other, mask)); + }, + }; +}; + +ip.cidrSubnet = function (cidrString) { + const cidrParts = cidrString.split('/'); + + const addr = cidrParts[0]; + if (cidrParts.length !== 2) { + throw new Error(`invalid CIDR subnet: ${addr}`); + } + + const mask = ip.fromPrefixLen(parseInt(cidrParts[1], 10)); + + return ip.subnet(addr, mask); +}; + +ip.not = function (addr) { + const buff = ip.toBuffer(addr); + for (let i = 0; i < buff.length; i++) { + buff[i] = 0xff ^ buff[i]; + } + return ip.toString(buff); +}; + +ip.or = function (a, b) { + a = ip.toBuffer(a); + b = ip.toBuffer(b); + + // same protocol + if (a.length === b.length) { + for (let i = 0; i < a.length; ++i) { + a[i] |= b[i]; + } + return ip.toString(a); + + // mixed protocols + } + let buff = a; + let other = b; + if (b.length > a.length) { + buff = b; + other = a; + } + + const offset = buff.length - other.length; + for (let i = offset; i < buff.length; ++i) { + buff[i] |= other[i - offset]; + } + + return ip.toString(buff); +}; + +ip.isEqual = function (a, b) { + a = ip.toBuffer(a); + b = ip.toBuffer(b); + + // Same protocol + if (a.length === b.length) { + for (let i = 0; i < a.length; i++) { + if (a[i] !== b[i]) return false; + } + return true; + } + + // Swap + if (b.length === 4) { + const t = b; + b = a; + a = t; + } + + // a - IPv4, b - IPv6 + for (let i = 0; i < 10; i++) { + if (b[i] !== 0) return false; + } + + const word = b.readUInt16BE(10); + if (word !== 0 && word !== 0xffff) return false; + + for (let i = 0; i < 4; i++) { + if (a[i] !== b[i + 12]) return false; + } + + return true; +}; + +ip.isPrivate = function (addr) { + return /^(::f{4}:)?10\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$/i + .test(addr) + || /^(::f{4}:)?192\.168\.([0-9]{1,3})\.([0-9]{1,3})$/i.test(addr) + || /^(::f{4}:)?172\.(1[6-9]|2\d|30|31)\.([0-9]{1,3})\.([0-9]{1,3})$/i + .test(addr) + || /^(::f{4}:)?127\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})$/i.test(addr) + || /^(::f{4}:)?169\.254\.([0-9]{1,3})\.([0-9]{1,3})$/i.test(addr) + || /^f[cd][0-9a-f]{2}:/i.test(addr) + || /^fe80:/i.test(addr) + || /^::1$/.test(addr) + || /^::$/.test(addr); +}; + +ip.isPublic = function (addr) { + return !ip.isPrivate(addr); +}; + +ip.isLoopback = function (addr) { + return /^(::f{4}:)?127\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})/ + .test(addr) + || /^fe80::1$/.test(addr) + || /^::1$/.test(addr) + || /^::$/.test(addr); +}; + +ip.loopback = function (family) { + // + // Default to `ipv4` + // + family = _normalizeFamily(family); + + if (family !== 'ipv4' && family !== 'ipv6') { + throw new Error('family must be ipv4 or ipv6'); + } + + return family === 'ipv4' ? '127.0.0.1' : 'fe80::1'; +}; + +// +// ### function address (name, family) +// #### @name {string|'public'|'private'} **Optional** Name or security +// of the network interface. +// #### @family {ipv4|ipv6} **Optional** IP family of the address (defaults +// to ipv4). +// +// Returns the address for the network interface on the current system with +// the specified `name`: +// * String: First `family` address of the interface. +// If not found see `undefined`. +// * 'public': the first public ip address of family. +// * 'private': the first private ip address of family. +// * undefined: First address with `ipv4` or loopback address `127.0.0.1`. 
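+// Illustrative calls (examples added here, not from the original docs):
+//   ip.address()               -> first non-loopback ipv4 address, else '127.0.0.1'
+//   ip.address('eth0')         -> first ipv4 address of interface `eth0`, if any
+//   ip.address('eth0', 'ipv6') -> first ipv6 address of interface `eth0`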
+// +ip.address = function (name, family) { + const interfaces = os.networkInterfaces(); + + // + // Default to `ipv4` + // + family = _normalizeFamily(family); + + // + // If a specific network interface has been named, + // return the address. + // + if (name && name !== 'private' && name !== 'public') { + const res = interfaces[name].filter((details) => { + const itemFamily = _normalizeFamily(details.family); + return itemFamily === family; + }); + if (res.length === 0) { + return undefined; + } + return res[0].address; + } + + const all = Object.keys(interfaces).map((nic) => { + // + // Note: name will only be `public` or `private` + // when this is called. + // + const addresses = interfaces[nic].filter((details) => { + details.family = _normalizeFamily(details.family); + if (details.family !== family || ip.isLoopback(details.address)) { + return false; + } if (!name) { + return true; + } + + return name === 'public' ? ip.isPrivate(details.address) + : ip.isPublic(details.address); + }); + + return addresses.length ? addresses[0].address : undefined; + }).filter(Boolean); + + return !all.length ? ip.loopback(family) : all[0]; +}; + +ip.toLong = function (ip) { + let ipl = 0; + ip.split('.').forEach((octet) => { + ipl <<= 8; + ipl += parseInt(octet); + }); + return (ipl >>> 0); +}; + +ip.fromLong = function (ipl) { + return (`${ipl >>> 24}.${ + ipl >> 16 & 255}.${ + ipl >> 8 & 255}.${ + ipl & 255}`); +}; diff --git a/node_modules/ip/package.json b/node_modules/ip/package.json new file mode 100644 index 0000000..f0d95e9 --- /dev/null +++ b/node_modules/ip/package.json @@ -0,0 +1,25 @@ +{ + "name": "ip", + "version": "2.0.0", + "author": "Fedor Indutny ", + "homepage": "https://github.com/indutny/node-ip", + "repository": { + "type": "git", + "url": "http://github.com/indutny/node-ip.git" + }, + "files": [ + "lib", + "README.md" + ], + "main": "lib/ip", + "devDependencies": { + "eslint": "^8.15.0", + "mocha": "^10.0.0" + }, + "scripts": { + "lint": "eslint lib/*.js test/*.js", + "test": "npm run lint && mocha --reporter spec test/*-test.js", + "fix": "npm run lint -- --fix" + }, + "license": "MIT" +} diff --git a/node_modules/is-arrayish/.editorconfig b/node_modules/is-arrayish/.editorconfig new file mode 100644 index 0000000..4c017f8 --- /dev/null +++ b/node_modules/is-arrayish/.editorconfig @@ -0,0 +1,18 @@ +root = true + +[*] +indent_style = tab +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[*.coffee] +indent_style = space + +[{package.json,*.yml}] +indent_style = space +indent_size = 2 + +[*.md] +trim_trailing_whitespace = false diff --git a/node_modules/is-arrayish/.istanbul.yml b/node_modules/is-arrayish/.istanbul.yml new file mode 100644 index 0000000..19fbec3 --- /dev/null +++ b/node_modules/is-arrayish/.istanbul.yml @@ -0,0 +1,4 @@ +instrumentation: + excludes: + - test.js + - test/**/* diff --git a/node_modules/is-arrayish/.npmignore b/node_modules/is-arrayish/.npmignore new file mode 100644 index 0000000..8d5eacb --- /dev/null +++ b/node_modules/is-arrayish/.npmignore @@ -0,0 +1,5 @@ +/coverage/ +/test.js +/test/ +*.sw[a-p] +/node_modules/ diff --git a/node_modules/is-arrayish/.travis.yml b/node_modules/is-arrayish/.travis.yml new file mode 100644 index 0000000..5a04243 --- /dev/null +++ b/node_modules/is-arrayish/.travis.yml @@ -0,0 +1,17 @@ +language: node_js + +script: + - node_modules/.bin/istanbul cover node_modules/.bin/_mocha -- --compilers coffee:coffee-script/register + - cat coverage/lcov.info | 
node_modules/.bin/coveralls +node_js: + - "0.10" + - "0.11" + - "0.12" + - "iojs" +os: + - linux + - osx + +notifications: + slack: + secure: oOt8QGzdrPDsTMcyahtIq5Q+0U1iwfgJgFCxBLsomQ0bpIMn+y5m4viJydA2UinHPGc944HS3LMZS9iKQyv+DjTgbhUyNXqeVjtxCwRe37f5rKQlXVvdfmjHk2kln4H8DcK3r5Qd/+2hd9BeMsp2GImTrkRSud1CZQlhhe5IgZOboSoWpGVMMy1iazWT06tAtiB2LRVhmsdUaFZDWAhGZ+UAvCPf+mnBOAylIj+U0GDrofhfTi25RK0gddG2f/p2M1HCu49O6wECGWkt2hVei233DkNJyLLLJVcvmhf+aXkV5TjMyaoxh/HdcV4DrA7KvYuWmWWKsINa9hlwAsdd/FYmJ6PjRkKWas2JoQ1C+qOzDxyQvn3CaUZFKD99pdsq0rBBZujqXQKZZ/hWb/CE74BI6fKmqQkiEPaD/7uADj04FEg6HVBZaMCyauOaK5b3VC97twbALZ1qVxYV6mU+zSEvnUbpnjjvRO0fSl9ZHA+rzkW73kX3GmHY0wAozEZbSy7QLuZlQ2QtHmBLr+APaGMdL1sFF9qFfzqKy0WDbSE0WS6hpAEJpTsjYmeBrnI8UmK3m++iEgyQPvZoH9LhUT+ek7XIfHZMe04BmC6wuO24/RfpmR6bQK9VMarFCYlBiWxg/z30vkP0KTpUi3o/cqFm7/Noxc0i2LVqM3E0Sy4= diff --git a/node_modules/is-arrayish/LICENSE b/node_modules/is-arrayish/LICENSE new file mode 100644 index 0000000..0a5f461 --- /dev/null +++ b/node_modules/is-arrayish/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 JD Ballard + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-arrayish/README.md b/node_modules/is-arrayish/README.md new file mode 100644 index 0000000..7d36072 --- /dev/null +++ b/node_modules/is-arrayish/README.md @@ -0,0 +1,16 @@ +# node-is-arrayish [![Travis-CI.org Build Status](https://img.shields.io/travis/Qix-/node-is-arrayish.svg?style=flat-square)](https://travis-ci.org/Qix-/node-is-arrayish) [![Coveralls.io Coverage Rating](https://img.shields.io/coveralls/Qix-/node-is-arrayish.svg?style=flat-square)](https://coveralls.io/r/Qix-/node-is-arrayish) +> Determines if an object can be used like an Array + +## Example +```javascript +var isArrayish = require('is-arrayish'); + +isArrayish([]); // true +isArrayish({__proto__: []}); // true +isArrayish({}); // false +isArrayish({length:10}); // false +``` + +## License +Licensed under the [MIT License](http://opensource.org/licenses/MIT). +You can find a copy of it in [LICENSE](LICENSE). 
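A few more examples showing where the duck-typing boundary falls, derived from the `index.js` implementation below (the literals are ours):

```javascript
var isArrayish = require('is-arrayish');

// Real arrays, and anything inheriting from Array, pass.
isArrayish([1, 2, 3]);      // true
isArrayish(new Array(5));   // true

// A bare `length` is not enough...
isArrayish({length: 10});   // false

// ...but `length` plus a splice function duck-types as arrayish.
isArrayish({length: 0, splice: function () {}}); // true

// Strings have a `length` but no `splice`.
isArrayish('abc');          // false
```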
diff --git a/node_modules/is-arrayish/index.js b/node_modules/is-arrayish/index.js new file mode 100644 index 0000000..5b97186 --- /dev/null +++ b/node_modules/is-arrayish/index.js @@ -0,0 +1,10 @@ +'use strict'; + +module.exports = function isArrayish(obj) { + if (!obj) { + return false; + } + + return obj instanceof Array || Array.isArray(obj) || + (obj.length >= 0 && obj.splice instanceof Function); +}; diff --git a/node_modules/is-arrayish/package.json b/node_modules/is-arrayish/package.json new file mode 100644 index 0000000..8b2d1c3 --- /dev/null +++ b/node_modules/is-arrayish/package.json @@ -0,0 +1,34 @@ +{ + "name": "is-arrayish", + "description": "Determines if an object can be used as an array", + "version": "0.2.1", + "author": "Qix (http://github.com/qix-)", + "keywords": [ + "is", + "array", + "duck", + "type", + "arrayish", + "similar", + "proto", + "prototype", + "type" + ], + "license": "MIT", + "scripts": { + "pretest": "xo", + "test": "mocha --compilers coffee:coffee-script/register" + }, + "repository": { + "type": "git", + "url": "https://github.com/qix-/node-is-arrayish.git" + }, + "devDependencies": { + "coffee-script": "^1.9.3", + "coveralls": "^2.11.2", + "istanbul": "^0.3.17", + "mocha": "^2.2.5", + "should": "^7.0.1", + "xo": "^0.6.1" + } +} diff --git a/node_modules/is-core-module/.eslintrc b/node_modules/is-core-module/.eslintrc new file mode 100644 index 0000000..f2e0726 --- /dev/null +++ b/node_modules/is-core-module/.eslintrc @@ -0,0 +1,18 @@ +{ + "extends": "@ljharb", + "root": true, + "rules": { + "func-style": 1, + }, + "overrides": [ + { + "files": "test/**", + "rules": { + "global-require": 0, + "max-depth": 0, + "max-lines-per-function": 0, + "no-negated-condition": 0, + }, + }, + ], +} diff --git a/node_modules/is-core-module/.nycrc b/node_modules/is-core-module/.nycrc new file mode 100644 index 0000000..bdd626c --- /dev/null +++ b/node_modules/is-core-module/.nycrc @@ -0,0 +1,9 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "exclude": [ + "coverage", + "test" + ] +} diff --git a/node_modules/is-core-module/CHANGELOG.md b/node_modules/is-core-module/CHANGELOG.md new file mode 100644 index 0000000..f44fd84 --- /dev/null +++ b/node_modules/is-core-module/CHANGELOG.md @@ -0,0 +1,173 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [v2.13.0](https://github.com/inspect-js/is-core-module/compare/v2.12.1...v2.13.0) - 2023-08-05 + +### Commits + +- [Dev Deps] update `@ljharb/eslint-config`, `aud`, `semver`, `tape` [`c75b263`](https://github.com/inspect-js/is-core-module/commit/c75b263d047cb53430c3970107e5eb64d6cd6c0c) +- [New] `node:test/reporters` and `wasi`/`node:wasi` are in v18.17 [`d76cbf8`](https://github.com/inspect-js/is-core-module/commit/d76cbf8e9b208acfd98913fed5a5f45cb15fe5dc) + +## [v2.12.1](https://github.com/inspect-js/is-core-module/compare/v2.12.0...v2.12.1) - 2023-05-16 + +### Commits + +- [Fix] `test/reporters` now requires the `node:` prefix as of v20.2 [`12183d0`](https://github.com/inspect-js/is-core-module/commit/12183d0d8e4edf56b6ce18a1b3be54bfce10175b) + +## [v2.12.0](https://github.com/inspect-js/is-core-module/compare/v2.11.0...v2.12.0) - 2023-04-10 + +### Commits + +- [actions] update rebase action to use reusable workflow [`c0a7251`](https://github.com/inspect-js/is-core-module/commit/c0a7251f734f3c621932c5fcdfd1bf966b42ca32) +- [Dev Deps] update `@ljharb/eslint-config`, `aud`, `tape` [`9ae8b7f`](https://github.com/inspect-js/is-core-module/commit/9ae8b7fac03c369861d0991b4a2ce8d4848e6a7d) +- [New] `test/reporters` added in v19.9, `wasi` added in v20 [`9d5341a`](https://github.com/inspect-js/is-core-module/commit/9d5341ab32053f25b7fa7db3c0e18461db24a79c) +- [Dev Deps] add missing `in-publish` dep [`5980245`](https://github.com/inspect-js/is-core-module/commit/59802456e9ac919fa748f53be9d8fbf304a197df) + +## [v2.11.0](https://github.com/inspect-js/is-core-module/compare/v2.10.0...v2.11.0) - 2022-10-18 + +### Commits + +- [meta] use `npmignore` to autogenerate an npmignore file [`3360011`](https://github.com/inspect-js/is-core-module/commit/33600118857b46177178072fba2affcdeb009d12) +- [Dev Deps] update `aud`, `tape` [`651c6b0`](https://github.com/inspect-js/is-core-module/commit/651c6b0cc2799d4130866cf43ad333dcade3d26c) +- [New] `inspector/promises` and `node:inspector/promises` is now available in node 19 [`22d332f`](https://github.com/inspect-js/is-core-module/commit/22d332fe22ac050305444e0781ff85af819abcb0) + +## [v2.10.0](https://github.com/inspect-js/is-core-module/compare/v2.9.0...v2.10.0) - 2022-08-03 + +### Commits + +- [New] `node:test` is now available in node ^16.17 [`e8fd36e`](https://github.com/inspect-js/is-core-module/commit/e8fd36e9b86c917775a07cc473b62a3294f459f2) +- [Tests] improve skip message [`c014a4c`](https://github.com/inspect-js/is-core-module/commit/c014a4c0cd6eb15fff573ae4709191775e70cab4) + +## [v2.9.0](https://github.com/inspect-js/is-core-module/compare/v2.8.1...v2.9.0) - 2022-04-19 + +### Commits + +- [New] add `node:test`, in node 18+ [`f853eca`](https://github.com/inspect-js/is-core-module/commit/f853eca801d0a7d4e1dbb670f1b6d9837d9533c5) +- [Tests] use `mock-property` [`03b3644`](https://github.com/inspect-js/is-core-module/commit/03b3644dff4417f4ba5a7d0aa0138f5f6b3e5c46) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `auto-changelog`, `tape` [`7c0e2d0`](https://github.com/inspect-js/is-core-module/commit/7c0e2d06ed2a89acf53abe2ab34d703ed5b03455) +- [meta] simplify "exports" [`d6ed201`](https://github.com/inspect-js/is-core-module/commit/d6ed201eba7fbba0e59814a9050fc49a6e9878c8) + +## [v2.8.1](https://github.com/inspect-js/is-core-module/compare/v2.8.0...v2.8.1) - 2022-01-05 + +### Commits + +- [actions] reuse common workflows [`cd2cf9b`](https://github.com/inspect-js/is-core-module/commit/cd2cf9b3b66c8d328f65610efe41e9325db7716d) +- [Fix] 
update node 0.4 results [`062195d`](https://github.com/inspect-js/is-core-module/commit/062195d89f0876a88b95d378b43f7fcc1205bc5b) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `safe-publish-latest`, `tape` [`0790b62`](https://github.com/inspect-js/is-core-module/commit/0790b6222848c6167132f9f73acc3520fa8d1298) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape` [`7d139a6`](https://github.com/inspect-js/is-core-module/commit/7d139a6d767709eabf0a0251e074ec1fb230c06e) +- [Tests] run `nyc` in `tests-only`, not `test` [`780e8a0`](https://github.com/inspect-js/is-core-module/commit/780e8a049951c71cf78b1707f0871c48a28bde14) + +## [v2.8.0](https://github.com/inspect-js/is-core-module/compare/v2.7.0...v2.8.0) - 2021-10-14 + +### Commits + +- [actions] update codecov uploader [`0cfe94e`](https://github.com/inspect-js/is-core-module/commit/0cfe94e106a7d005ea03e008c0a21dec13a77904) +- [New] add `readline/promises` to node v17+ [`4f78c30`](https://github.com/inspect-js/is-core-module/commit/4f78c3008b1b58b4db6dc91d99610b1bc859da7e) +- [Tests] node ^14.18 supports `node:` prefixes for CJS [`43e2f17`](https://github.com/inspect-js/is-core-module/commit/43e2f177452cea2f0eaf34f61b5407217bbdb6f4) + +## [v2.7.0](https://github.com/inspect-js/is-core-module/compare/v2.6.0...v2.7.0) - 2021-09-27 + +### Commits + +- [New] node `v14.18` added `node:`-prefixed core modules to `require` [`6d943ab`](https://github.com/inspect-js/is-core-module/commit/6d943abe81382b9bbe344384d80fbfebe1cc0526) +- [Tests] add coverage for Object.prototype pollution [`c6baf5f`](https://github.com/inspect-js/is-core-module/commit/c6baf5f942311a1945c1af41167bb80b84df2af7) +- [Dev Deps] update `@ljharb/eslint-config` [`6717f00`](https://github.com/inspect-js/is-core-module/commit/6717f000d063ea57beb772bded36c2f056ac404c) +- [eslint] fix linter warning [`594c10b`](https://github.com/inspect-js/is-core-module/commit/594c10bb7d39d7eb00925c90924199ff596184b2) +- [meta] add `sideEffects` flag [`c32cfa5`](https://github.com/inspect-js/is-core-module/commit/c32cfa5195632944c4dd4284a142b8476e75be13) + +## [v2.6.0](https://github.com/inspect-js/is-core-module/compare/v2.5.0...v2.6.0) - 2021-08-17 + +### Commits + +- [Dev Deps] update `eslint`, `tape` [`6cc928f`](https://github.com/inspect-js/is-core-module/commit/6cc928f8a4bba66aeeccc4f6beeac736d4bd3081) +- [New] add `stream/consumers` to node `>= 16.7` [`a1a423e`](https://github.com/inspect-js/is-core-module/commit/a1a423e467e4cc27df180234fad5bab45943e67d) +- [Refactor] Remove duplicated `&&` operand [`86faea7`](https://github.com/inspect-js/is-core-module/commit/86faea738213a2433c62d1098488dc9314dca832) +- [Tests] include prereleases [`a4da7a6`](https://github.com/inspect-js/is-core-module/commit/a4da7a6abf7568e2aa4fd98e69452179f1850963) + +## [v2.5.0](https://github.com/inspect-js/is-core-module/compare/v2.4.0...v2.5.0) - 2021-07-12 + +### Commits + +- [Dev Deps] update `auto-changelog`, `eslint` [`6334cc9`](https://github.com/inspect-js/is-core-module/commit/6334cc94f3af7469685bd8f236740991baaf2705) +- [New] add `stream/web` to node v16.5+ [`17ac59b`](https://github.com/inspect-js/is-core-module/commit/17ac59b662d63e220a2e5728625f005c24f177b2) + +## [v2.4.0](https://github.com/inspect-js/is-core-module/compare/v2.3.0...v2.4.0) - 2021-05-09 + +### Commits + +- [readme] add actions and codecov badges [`82b7faa`](https://github.com/inspect-js/is-core-module/commit/82b7faa12b56dbe47fbea67e1a5b9e447027ba40) +- [Dev Deps] update `@ljharb/eslint-config`, `aud` 
[`8096868`](https://github.com/inspect-js/is-core-module/commit/8096868c024a161ccd4d44110b136763e92eace8) +- [Dev Deps] update `eslint` [`6726824`](https://github.com/inspect-js/is-core-module/commit/67268249b88230018c510f6532a8046d7326346f) +- [New] add `diagnostics_channel` to node `^14.17` [`86c6563`](https://github.com/inspect-js/is-core-module/commit/86c65634201b8ff9b3e48a9a782594579c7f5c3c) +- [meta] fix prepublish script [`697a01e`](https://github.com/inspect-js/is-core-module/commit/697a01e3c9c0be074066520954f30fb28532ec57) + +## [v2.3.0](https://github.com/inspect-js/is-core-module/compare/v2.2.0...v2.3.0) - 2021-04-24 + +### Commits + +- [meta] do not publish github action workflow files [`060d4bb`](https://github.com/inspect-js/is-core-module/commit/060d4bb971a29451c19ff336eb56bee27f9fa95a) +- [New] add support for `node:` prefix, in node 16+ [`7341223`](https://github.com/inspect-js/is-core-module/commit/73412230a769f6e81c05eea50b6520cebf54ed2f) +- [actions] use `node/install` instead of `node/run`; use `codecov` action [`016269a`](https://github.com/inspect-js/is-core-module/commit/016269abae9f6657a5254adfbb813f09a05067f9) +- [patch] remove unneeded `.0` in version ranges [`cb466a6`](https://github.com/inspect-js/is-core-module/commit/cb466a6d89e52b8389e5c12715efcd550c41cea3) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`, `tape` [`c9f9c39`](https://github.com/inspect-js/is-core-module/commit/c9f9c396ace60ef81906f98059c064e6452473ed) +- [actions] update workflows [`3ee4a89`](https://github.com/inspect-js/is-core-module/commit/3ee4a89fd5a02fccd43882d905448ea6a98e9a3c) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config` [`dee4fed`](https://github.com/inspect-js/is-core-module/commit/dee4fed79690c1d43a22f7fa9426abebdc6d727f) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config` [`7d046ba`](https://github.com/inspect-js/is-core-module/commit/7d046ba07ae8c9292e43652694ca808d7b309de8) +- [meta] use `prepublishOnly` script for npm 7+ [`149e677`](https://github.com/inspect-js/is-core-module/commit/149e6771a5ede6d097e71785b467a9c4b4977cc7) +- [readme] remove travis badge [`903b51d`](https://github.com/inspect-js/is-core-module/commit/903b51d6b69b98abeabfbc3695c345b02646f19c) + +## [v2.2.0](https://github.com/inspect-js/is-core-module/compare/v2.1.0...v2.2.0) - 2020-11-26 + +### Commits + +- [Tests] migrate tests to Github Actions [`c919f57`](https://github.com/inspect-js/is-core-module/commit/c919f573c0a92d10a0acad0b650b5aecb033d426) +- [patch] `core.json`: %s/ /\t/g [`db3f685`](https://github.com/inspect-js/is-core-module/commit/db3f68581f53e73cc09cd675955eb1bdd6a5a39b) +- [Tests] run `nyc` on all tests [`b2f925f`](https://github.com/inspect-js/is-core-module/commit/b2f925f8866f210ef441f39fcc8cc42692ab89b1) +- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `aud`; add `safe-publish-latest` [`89f02a2`](https://github.com/inspect-js/is-core-module/commit/89f02a2b4162246dea303a6ee31bb9a550b05c72) +- [New] add `path/posix`, `path/win32`, `util/types` [`77f94f1`](https://github.com/inspect-js/is-core-module/commit/77f94f1e90ffd7c0be2a3f1aa8574ebf7fd981b3) + +## [v2.1.0](https://github.com/inspect-js/is-core-module/compare/v2.0.0...v2.1.0) - 2020-11-04 + +### Commits + +- [Dev Deps] update `eslint` [`5e0034e`](https://github.com/inspect-js/is-core-module/commit/5e0034eae57c09c8f1bd769f502486a00f56c6e4) +- [New] Add `diagnostics_channel` [`c2d83d0`](https://github.com/inspect-js/is-core-module/commit/c2d83d0a0225a1a658945d9bab7036ea347d29ec) + +## 
[v2.0.0](https://github.com/inspect-js/is-core-module/compare/v1.0.2...v2.0.0) - 2020-09-29 + +### Commits + +- v2 implementation [`865aeb5`](https://github.com/inspect-js/is-core-module/commit/865aeb5ca0e90248a3dfff5d7622e4751fdeb9cd) +- Only apps should have lockfiles [`5a5e660`](https://github.com/inspect-js/is-core-module/commit/5a5e660d568e37eb44e17fb1ebb12a105205fc2b) +- Initial commit for v2 [`5a51524`](https://github.com/inspect-js/is-core-module/commit/5a51524e06f92adece5fbb138c69b7b9748a2348) +- Tests [`116eae4`](https://github.com/inspect-js/is-core-module/commit/116eae4fccd01bc72c1fd3cc4b7561c387afc496) +- [meta] add `auto-changelog` [`c24388b`](https://github.com/inspect-js/is-core-module/commit/c24388bee828d223040519d1f5b226ca35beee63) +- [actions] add "Automatic Rebase" and "require allow edits" actions [`34292db`](https://github.com/inspect-js/is-core-module/commit/34292dbcbadae0868aff03c22dbd8b7b8a11558a) +- [Tests] add `npm run lint` [`4f9eeee`](https://github.com/inspect-js/is-core-module/commit/4f9eeee7ddff10698bbf528620f4dc8d4fa3e697) +- [readme] fix travis badges, https all URLs [`e516a73`](https://github.com/inspect-js/is-core-module/commit/e516a73b0dccce20938c432b1ba512eae8eff9e9) +- [meta] create FUNDING.yml [`1aabebc`](https://github.com/inspect-js/is-core-module/commit/1aabebca98d01f8a04e46bc2e2520fa93cf21ac6) +- [Fix] `domain`: domain landed sometime > v0.7.7 and <= v0.7.12 [`2df7d37`](https://github.com/inspect-js/is-core-module/commit/2df7d37595d41b15eeada732b706b926c2771655) +- [Fix] `sys`: worked in 0.6, not 0.7, and 0.8+ [`a75c134`](https://github.com/inspect-js/is-core-module/commit/a75c134229e1e9441801f6b73f6a52489346eb65) + +## [v1.0.2](https://github.com/inspect-js/is-core-module/compare/v1.0.1...v1.0.2) - 2014-09-28 + +### Commits + +- simpler [`66fe90f`](https://github.com/inspect-js/is-core-module/commit/66fe90f9771581b9adc0c3900baa52c21b5baea2) + +## [v1.0.1](https://github.com/inspect-js/is-core-module/compare/v1.0.0...v1.0.1) - 2014-09-28 + +### Commits + +- remove stupid [`f21f906`](https://github.com/inspect-js/is-core-module/commit/f21f906f882c2bd656a5fc5ed6fbe48ddaffb2ac) +- update readme [`1eff0ec`](https://github.com/inspect-js/is-core-module/commit/1eff0ec69798d1ec65771552d1562911e90a8027) + +## v1.0.0 - 2014-09-28 + +### Commits + +- init [`48e5e76`](https://github.com/inspect-js/is-core-module/commit/48e5e76cac378fddb8c1f7d4055b8dfc943d6b96) diff --git a/node_modules/is-core-module/LICENSE b/node_modules/is-core-module/LICENSE new file mode 100644 index 0000000..2e50287 --- /dev/null +++ b/node_modules/is-core-module/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014 Dave Justice + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/is-core-module/README.md b/node_modules/is-core-module/README.md new file mode 100644 index 0000000..062d906 --- /dev/null +++ b/node_modules/is-core-module/README.md @@ -0,0 +1,40 @@ +# is-core-module [![Version Badge][2]][1] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![dependency status][5]][6] +[![dev dependency status][7]][8] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][11]][1] + +Is this specifier a node.js core module? Optionally provide a node version to check; defaults to the current node version. + +## Example + +```js +var isCore = require('is-core-module'); +var assert = require('assert'); +assert(isCore('fs')); +assert(!isCore('butts')); +``` + +## Tests +Clone the repo, `npm install`, and run `npm test` + +[1]: https://npmjs.org/package/is-core-module +[2]: https://versionbadg.es/inspect-js/is-core-module.svg +[5]: https://david-dm.org/inspect-js/is-core-module.svg +[6]: https://david-dm.org/inspect-js/is-core-module +[7]: https://david-dm.org/inspect-js/is-core-module/dev-status.svg +[8]: https://david-dm.org/inspect-js/is-core-module#info=devDependencies +[11]: https://nodei.co/npm/is-core-module.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/is-core-module.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/is-core-module.svg +[downloads-url]: https://npm-stat.com/charts.html?package=is-core-module +[codecov-image]: https://codecov.io/gh/inspect-js/is-core-module/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/inspect-js/is-core-module/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/inspect-js/is-core-module +[actions-url]: https://github.com/inspect-js/is-core-module/actions diff --git a/node_modules/is-core-module/core.json b/node_modules/is-core-module/core.json new file mode 100644 index 0000000..3cda693 --- /dev/null +++ b/node_modules/is-core-module/core.json @@ -0,0 +1,158 @@ +{ + "assert": true, + "node:assert": [">= 14.18 && < 15", ">= 16"], + "assert/strict": ">= 15", + "node:assert/strict": ">= 16", + "async_hooks": ">= 8", + "node:async_hooks": [">= 14.18 && < 15", ">= 16"], + "buffer_ieee754": ">= 0.5 && < 0.9.7", + "buffer": true, + "node:buffer": [">= 14.18 && < 15", ">= 16"], + "child_process": true, + "node:child_process": [">= 14.18 && < 15", ">= 16"], + "cluster": ">= 0.5", + "node:cluster": [">= 14.18 && < 15", ">= 16"], + "console": true, + "node:console": [">= 14.18 && < 15", ">= 16"], + "constants": true, + "node:constants": [">= 14.18 && < 15", ">= 16"], + "crypto": true, + "node:crypto": [">= 14.18 && < 15", ">= 16"], + "_debug_agent": ">= 1 && < 8", + "_debugger": "< 8", + "dgram": true, + "node:dgram": [">= 14.18 && < 15", ">= 16"], + "diagnostics_channel": [">= 14.17 && < 15", ">= 15.1"], + "node:diagnostics_channel": [">= 14.18 && < 15", ">= 16"], + "dns": true, + "node:dns": [">= 14.18 && < 15", ">= 16"], + "dns/promises": ">= 15", + "node:dns/promises": ">= 16", + "domain": ">= 0.7.12", + "node:domain": [">= 14.18 && < 15", ">= 16"], + "events": true, + "node:events": [">= 14.18 && < 
15", ">= 16"], + "freelist": "< 6", + "fs": true, + "node:fs": [">= 14.18 && < 15", ">= 16"], + "fs/promises": [">= 10 && < 10.1", ">= 14"], + "node:fs/promises": [">= 14.18 && < 15", ">= 16"], + "_http_agent": ">= 0.11.1", + "node:_http_agent": [">= 14.18 && < 15", ">= 16"], + "_http_client": ">= 0.11.1", + "node:_http_client": [">= 14.18 && < 15", ">= 16"], + "_http_common": ">= 0.11.1", + "node:_http_common": [">= 14.18 && < 15", ">= 16"], + "_http_incoming": ">= 0.11.1", + "node:_http_incoming": [">= 14.18 && < 15", ">= 16"], + "_http_outgoing": ">= 0.11.1", + "node:_http_outgoing": [">= 14.18 && < 15", ">= 16"], + "_http_server": ">= 0.11.1", + "node:_http_server": [">= 14.18 && < 15", ">= 16"], + "http": true, + "node:http": [">= 14.18 && < 15", ">= 16"], + "http2": ">= 8.8", + "node:http2": [">= 14.18 && < 15", ">= 16"], + "https": true, + "node:https": [">= 14.18 && < 15", ">= 16"], + "inspector": ">= 8", + "node:inspector": [">= 14.18 && < 15", ">= 16"], + "inspector/promises": [">= 19"], + "node:inspector/promises": [">= 19"], + "_linklist": "< 8", + "module": true, + "node:module": [">= 14.18 && < 15", ">= 16"], + "net": true, + "node:net": [">= 14.18 && < 15", ">= 16"], + "node-inspect/lib/_inspect": ">= 7.6 && < 12", + "node-inspect/lib/internal/inspect_client": ">= 7.6 && < 12", + "node-inspect/lib/internal/inspect_repl": ">= 7.6 && < 12", + "os": true, + "node:os": [">= 14.18 && < 15", ">= 16"], + "path": true, + "node:path": [">= 14.18 && < 15", ">= 16"], + "path/posix": ">= 15.3", + "node:path/posix": ">= 16", + "path/win32": ">= 15.3", + "node:path/win32": ">= 16", + "perf_hooks": ">= 8.5", + "node:perf_hooks": [">= 14.18 && < 15", ">= 16"], + "process": ">= 1", + "node:process": [">= 14.18 && < 15", ">= 16"], + "punycode": ">= 0.5", + "node:punycode": [">= 14.18 && < 15", ">= 16"], + "querystring": true, + "node:querystring": [">= 14.18 && < 15", ">= 16"], + "readline": true, + "node:readline": [">= 14.18 && < 15", ">= 16"], + "readline/promises": ">= 17", + "node:readline/promises": ">= 17", + "repl": true, + "node:repl": [">= 14.18 && < 15", ">= 16"], + "smalloc": ">= 0.11.5 && < 3", + "_stream_duplex": ">= 0.9.4", + "node:_stream_duplex": [">= 14.18 && < 15", ">= 16"], + "_stream_transform": ">= 0.9.4", + "node:_stream_transform": [">= 14.18 && < 15", ">= 16"], + "_stream_wrap": ">= 1.4.1", + "node:_stream_wrap": [">= 14.18 && < 15", ">= 16"], + "_stream_passthrough": ">= 0.9.4", + "node:_stream_passthrough": [">= 14.18 && < 15", ">= 16"], + "_stream_readable": ">= 0.9.4", + "node:_stream_readable": [">= 14.18 && < 15", ">= 16"], + "_stream_writable": ">= 0.9.4", + "node:_stream_writable": [">= 14.18 && < 15", ">= 16"], + "stream": true, + "node:stream": [">= 14.18 && < 15", ">= 16"], + "stream/consumers": ">= 16.7", + "node:stream/consumers": ">= 16.7", + "stream/promises": ">= 15", + "node:stream/promises": ">= 16", + "stream/web": ">= 16.5", + "node:stream/web": ">= 16.5", + "string_decoder": true, + "node:string_decoder": [">= 14.18 && < 15", ">= 16"], + "sys": [">= 0.4 && < 0.7", ">= 0.8"], + "node:sys": [">= 14.18 && < 15", ">= 16"], + "test/reporters": ">= 19.9 && < 20.2", + "node:test/reporters": [">= 18.17 && < 19", ">= 19.9", ">= 20"], + "node:test": [">= 16.17 && < 17", ">= 18"], + "timers": true, + "node:timers": [">= 14.18 && < 15", ">= 16"], + "timers/promises": ">= 15", + "node:timers/promises": ">= 16", + "_tls_common": ">= 0.11.13", + "node:_tls_common": [">= 14.18 && < 15", ">= 16"], + "_tls_legacy": ">= 0.11.3 && < 10", + "_tls_wrap": ">= 0.11.3", + 
"node:_tls_wrap": [">= 14.18 && < 15", ">= 16"], + "tls": true, + "node:tls": [">= 14.18 && < 15", ">= 16"], + "trace_events": ">= 10", + "node:trace_events": [">= 14.18 && < 15", ">= 16"], + "tty": true, + "node:tty": [">= 14.18 && < 15", ">= 16"], + "url": true, + "node:url": [">= 14.18 && < 15", ">= 16"], + "util": true, + "node:util": [">= 14.18 && < 15", ">= 16"], + "util/types": ">= 15.3", + "node:util/types": ">= 16", + "v8/tools/arguments": ">= 10 && < 12", + "v8/tools/codemap": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/consarray": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/csvparser": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/logreader": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/profile_view": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8/tools/splaytree": [">= 4.4 && < 5", ">= 5.2 && < 12"], + "v8": ">= 1", + "node:v8": [">= 14.18 && < 15", ">= 16"], + "vm": true, + "node:vm": [">= 14.18 && < 15", ">= 16"], + "wasi": [">= 13.4 && < 13.5", ">= 18.17 && < 19", ">= 20"], + "node:wasi": [">= 18.17 && < 19", ">= 20"], + "worker_threads": ">= 11.7", + "node:worker_threads": [">= 14.18 && < 15", ">= 16"], + "zlib": ">= 0.5", + "node:zlib": [">= 14.18 && < 15", ">= 16"] +} diff --git a/node_modules/is-core-module/index.js b/node_modules/is-core-module/index.js new file mode 100644 index 0000000..f9637e0 --- /dev/null +++ b/node_modules/is-core-module/index.js @@ -0,0 +1,69 @@ +'use strict'; + +var has = require('has'); + +function specifierIncluded(current, specifier) { + var nodeParts = current.split('.'); + var parts = specifier.split(' '); + var op = parts.length > 1 ? parts[0] : '='; + var versionParts = (parts.length > 1 ? parts[1] : parts[0]).split('.'); + + for (var i = 0; i < 3; ++i) { + var cur = parseInt(nodeParts[i] || 0, 10); + var ver = parseInt(versionParts[i] || 0, 10); + if (cur === ver) { + continue; // eslint-disable-line no-restricted-syntax, no-continue + } + if (op === '<') { + return cur < ver; + } + if (op === '>=') { + return cur >= ver; + } + return false; + } + return op === '>='; +} + +function matchesRange(current, range) { + var specifiers = range.split(/ ?&& ?/); + if (specifiers.length === 0) { + return false; + } + for (var i = 0; i < specifiers.length; ++i) { + if (!specifierIncluded(current, specifiers[i])) { + return false; + } + } + return true; +} + +function versionIncluded(nodeVersion, specifierValue) { + if (typeof specifierValue === 'boolean') { + return specifierValue; + } + + var current = typeof nodeVersion === 'undefined' + ? process.versions && process.versions.node + : nodeVersion; + + if (typeof current !== 'string') { + throw new TypeError(typeof nodeVersion === 'undefined' ? 
'Unable to determine current node version' : 'If provided, a valid node version is required'); + } + + if (specifierValue && typeof specifierValue === 'object') { + for (var i = 0; i < specifierValue.length; ++i) { + if (matchesRange(current, specifierValue[i])) { + return true; + } + } + return false; + } + return matchesRange(current, specifierValue); +} + +var data = require('./core.json'); + +module.exports = function isCore(x, nodeVersion) { + return has(data, x) && versionIncluded(nodeVersion, data[x]); +}; diff --git a/node_modules/is-core-module/package.json b/node_modules/is-core-module/package.json new file mode 100644 index 0000000..1269c80 --- /dev/null +++ b/node_modules/is-core-module/package.json @@ -0,0 +1,73 @@ +{ + "name": "is-core-module", + "version": "2.13.0", + "description": "Is this specifier a node.js core module?", + "main": "index.js", + "sideEffects": false, + "exports": { + ".": "./index.js", + "./package.json": "./package.json" + }, + "scripts": { + "prepack": "npmignore --auto --commentLines=autogenerated", + "prepublish": "not-in-publish || npm run prepublishOnly", + "prepublishOnly": "safe-publish-latest", + "lint": "eslint .", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/**/*.js'", + "test": "npm run tests-only", + "posttest": "aud --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/inspect-js/is-core-module.git" + }, + "keywords": [ + "core", + "modules", + "module", + "npm", + "node", + "dependencies" + ], + "author": "Jordan Harband ", + "funding": { + "url": "https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/inspect-js/is-core-module/issues" + }, + "homepage": "https://github.com/inspect-js/is-core-module", + "dependencies": { + "has": "^1.0.3" + }, + "devDependencies": { + "@ljharb/eslint-config": "^21.1.0", + "aud": "^2.0.3", + "auto-changelog": "^2.4.0", + "eslint": "=8.8.0", + "in-publish": "^2.0.1", + "mock-property": "^1.0.0", + "npmignore": "^0.3.0", + "nyc": "^10.3.2", + "safe-publish-latest": "^2.0.0", + "semver": "^6.3.1", + "tape": "^5.6.6" + }, + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + }, + "publishConfig": { + "ignore": [ + ".github" + ] + } +} diff --git a/node_modules/is-core-module/test/index.js b/node_modules/is-core-module/test/index.js new file mode 100644 index 0000000..912808b --- /dev/null +++ b/node_modules/is-core-module/test/index.js @@ -0,0 +1,133 @@ +'use strict'; + +var test = require('tape'); +var keys = require('object-keys'); +var semver = require('semver'); +var mockProperty = require('mock-property'); + +var isCore = require('../'); +var data = require('../core.json'); + +var supportsNodePrefix = semver.satisfies(process.versions.node, '^14.18 || >= 16', { includePrerelease: true }); + +test('core modules', function (t) { + t.test('isCore()', function (st) { + st.ok(isCore('fs')); + st.ok(isCore('net')); + st.ok(isCore('http')); + + st.ok(!isCore('seq')); + st.ok(!isCore('../')); + + st.ok(!isCore('toString')); + + st.end(); + }); + + t.test('core list', function (st) { + var cores = keys(data); + st.plan(cores.length); + + for (var i = 0; i < cores.length; ++i) 
{
+			var mod = cores[i];
+			var requireFunc = function () { require(mod); }; // eslint-disable-line no-loop-func
+			if (isCore(mod)) {
+				st.doesNotThrow(requireFunc, mod + ' supported; requiring does not throw');
+			} else {
+				st['throws'](requireFunc, mod + ' not supported; requiring throws');
+			}
+		}
+
+		st.end();
+	});
+
+	t.test('core via repl module', { skip: !data.repl }, function (st) {
+		var libs = require('repl')._builtinLibs; // eslint-disable-line no-underscore-dangle
+		if (!libs) {
+			st.skip('repl._builtinLibs does not exist');
+		} else {
+			for (var i = 0; i < libs.length; ++i) {
+				var mod = libs[i];
+				st.ok(data[mod], mod + ' is a core module');
+				st.doesNotThrow(
+					function () { require(mod); }, // eslint-disable-line no-loop-func
+					'requiring ' + mod + ' does not throw'
+				);
+				if (mod.slice(0, 5) !== 'node:') {
+					if (supportsNodePrefix) {
+						st.doesNotThrow(
+							function () { require('node:' + mod); }, // eslint-disable-line no-loop-func
+							'requiring node:' + mod + ' does not throw'
+						);
+					} else {
+						st['throws'](
+							function () { require('node:' + mod); }, // eslint-disable-line no-loop-func
+							'requiring node:' + mod + ' throws'
+						);
+					}
+				}
+			}
+		}
+		st.end();
+	});
+
+	t.test('core via builtinModules list', { skip: !data.module }, function (st) {
+		var libs = require('module').builtinModules;
+		if (!libs) {
+			st.skip('module.builtinModules does not exist');
+		} else {
+			var excludeList = [
+				'_debug_agent',
+				'v8/tools/tickprocessor-driver',
+				'v8/tools/SourceMap',
+				'v8/tools/tickprocessor',
+				'v8/tools/profile'
+			];
+			// see https://github.com/nodejs/node/issues/42785
+			if (semver.satisfies(process.version, '>= 18')) {
+				libs = libs.concat('node:test');
+			}
+			for (var i = 0; i < libs.length; ++i) {
+				var mod = libs[i];
+				if (excludeList.indexOf(mod) === -1) {
+					st.ok(data[mod], mod + ' is a core module');
+					st.doesNotThrow(
+						function () { require(mod); }, // eslint-disable-line no-loop-func
+						'requiring ' + mod + ' does not throw'
+					);
+					if (mod.slice(0, 5) !== 'node:') {
+						if (supportsNodePrefix) {
+							st.doesNotThrow(
+								function () { require('node:' + mod); }, // eslint-disable-line no-loop-func
+								'requiring node:' + mod + ' does not throw'
+							);
+						} else {
+							st['throws'](
+								function () { require('node:' + mod); }, // eslint-disable-line no-loop-func
+								'requiring node:' + mod + ' throws'
+							);
+						}
+					}
+				}
+			}
+		}
+		st.end();
+	});
+
+	t.test('Object.prototype pollution', function (st) {
+		var nonKey = 'not a core module';
+		st.teardown(mockProperty(Object.prototype, 'fs', { value: false }));
+		st.teardown(mockProperty(Object.prototype, 'path', { value: '>= 999999999' }));
+		st.teardown(mockProperty(Object.prototype, 'http', { value: data.http }));
+		st.teardown(mockProperty(Object.prototype, nonKey, { value: true }));
+
+		st.equal(isCore('fs'), true, 'fs is a core module even if Object.prototype lies');
+		st.equal(isCore('path'), true, 'path is a core module even if Object.prototype lies');
+		st.equal(isCore('http'), true, 'http is a core module even if Object.prototype matches data');
+		st.equal(isCore(nonKey), false, '"' + nonKey + '" is not a core module even if Object.prototype lies');
+
+		st.end();
+	});
+
+	t.end();
+});
diff --git a/node_modules/is-fullwidth-code-point/index.d.ts b/node_modules/is-fullwidth-code-point/index.d.ts
new file mode 100644
index 0000000..729d202
--- /dev/null
+++ b/node_modules/is-fullwidth-code-point/index.d.ts
@@ -0,0 +1,17 @@
+/**
+Check if the character represented by a given [Unicode code point](https://en.wikipedia.org/wiki/Code_point) is 
[fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms). + +@param codePoint - The [code point](https://en.wikipedia.org/wiki/Code_point) of a character. + +@example +``` +import isFullwidthCodePoint from 'is-fullwidth-code-point'; + +isFullwidthCodePoint('谢'.codePointAt(0)); +//=> true + +isFullwidthCodePoint('a'.codePointAt(0)); +//=> false +``` +*/ +export default function isFullwidthCodePoint(codePoint: number): boolean; diff --git a/node_modules/is-fullwidth-code-point/index.js b/node_modules/is-fullwidth-code-point/index.js new file mode 100644 index 0000000..671f97f --- /dev/null +++ b/node_modules/is-fullwidth-code-point/index.js @@ -0,0 +1,50 @@ +/* eslint-disable yoda */ +'use strict'; + +const isFullwidthCodePoint = codePoint => { + if (Number.isNaN(codePoint)) { + return false; + } + + // Code points are derived from: + // http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt + if ( + codePoint >= 0x1100 && ( + codePoint <= 0x115F || // Hangul Jamo + codePoint === 0x2329 || // LEFT-POINTING ANGLE BRACKET + codePoint === 0x232A || // RIGHT-POINTING ANGLE BRACKET + // CJK Radicals Supplement .. Enclosed CJK Letters and Months + (0x2E80 <= codePoint && codePoint <= 0x3247 && codePoint !== 0x303F) || + // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A + (0x3250 <= codePoint && codePoint <= 0x4DBF) || + // CJK Unified Ideographs .. Yi Radicals + (0x4E00 <= codePoint && codePoint <= 0xA4C6) || + // Hangul Jamo Extended-A + (0xA960 <= codePoint && codePoint <= 0xA97C) || + // Hangul Syllables + (0xAC00 <= codePoint && codePoint <= 0xD7A3) || + // CJK Compatibility Ideographs + (0xF900 <= codePoint && codePoint <= 0xFAFF) || + // Vertical Forms + (0xFE10 <= codePoint && codePoint <= 0xFE19) || + // CJK Compatibility Forms .. Small Form Variants + (0xFE30 <= codePoint && codePoint <= 0xFE6B) || + // Halfwidth and Fullwidth Forms + (0xFF01 <= codePoint && codePoint <= 0xFF60) || + (0xFFE0 <= codePoint && codePoint <= 0xFFE6) || + // Kana Supplement + (0x1B000 <= codePoint && codePoint <= 0x1B001) || + // Enclosed Ideographic Supplement + (0x1F200 <= codePoint && codePoint <= 0x1F251) || + // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane + (0x20000 <= codePoint && codePoint <= 0x3FFFD) + ) + ) { + return true; + } + + return false; +}; + +module.exports = isFullwidthCodePoint; +module.exports.default = isFullwidthCodePoint; diff --git a/node_modules/is-fullwidth-code-point/license b/node_modules/is-fullwidth-code-point/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/is-fullwidth-code-point/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/is-fullwidth-code-point/package.json b/node_modules/is-fullwidth-code-point/package.json new file mode 100644 index 0000000..2137e88 --- /dev/null +++ b/node_modules/is-fullwidth-code-point/package.json @@ -0,0 +1,42 @@ +{ + "name": "is-fullwidth-code-point", + "version": "3.0.0", + "description": "Check if the character represented by a given Unicode code point is fullwidth", + "license": "MIT", + "repository": "sindresorhus/is-fullwidth-code-point", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd-check" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "fullwidth", + "full-width", + "full", + "width", + "unicode", + "character", + "string", + "codepoint", + "code", + "point", + "is", + "detect", + "check" + ], + "devDependencies": { + "ava": "^1.3.1", + "tsd-check": "^0.5.0", + "xo": "^0.24.0" + } +} diff --git a/node_modules/is-fullwidth-code-point/readme.md b/node_modules/is-fullwidth-code-point/readme.md new file mode 100644 index 0000000..4236bba --- /dev/null +++ b/node_modules/is-fullwidth-code-point/readme.md @@ -0,0 +1,39 @@ +# is-fullwidth-code-point [![Build Status](https://travis-ci.org/sindresorhus/is-fullwidth-code-point.svg?branch=master)](https://travis-ci.org/sindresorhus/is-fullwidth-code-point) + +> Check if the character represented by a given [Unicode code point](https://en.wikipedia.org/wiki/Code_point) is [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) + + +## Install + +``` +$ npm install is-fullwidth-code-point +``` + + +## Usage + +```js +const isFullwidthCodePoint = require('is-fullwidth-code-point'); + +isFullwidthCodePoint('谢'.codePointAt(0)); +//=> true + +isFullwidthCodePoint('a'.codePointAt(0)); +//=> false +``` + + +## API + +### isFullwidthCodePoint(codePoint) + +#### codePoint + +Type: `number` + +The [code point](https://en.wikipedia.org/wiki/Code_point) of a character. 
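A common way to use this check (and the reason packages like `string-width` depend on it) is to sum per-code-point widths over a whole string. A minimal sketch; the `stringWidth` helper below is our own illustration, not part of this package, and it deliberately ignores combining marks and ANSI escapes:

```js
const isFullwidthCodePoint = require('is-fullwidth-code-point');

// Naive terminal-column width: fullwidth code points take 2 columns, the rest 1.
function stringWidth(str) {
	let width = 0;
	for (const ch of str) { // for..of iterates by code point, not by UTF-16 unit
		width += isFullwidthCodePoint(ch.codePointAt(0)) ? 2 : 1;
	}
	return width;
}

console.log(stringWidth('谢abc')); //=> 5  (2 + 1 + 1 + 1)
```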
+ + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/is-lambda/.npmignore b/node_modules/is-lambda/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/is-lambda/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/is-lambda/.travis.yml b/node_modules/is-lambda/.travis.yml new file mode 100644 index 0000000..03dcca5 --- /dev/null +++ b/node_modules/is-lambda/.travis.yml @@ -0,0 +1,8 @@ +language: node_js +node_js: +- '7' +- '6' +- '5' +- '4' +- '0.12' +- '0.10' diff --git a/node_modules/is-lambda/LICENSE b/node_modules/is-lambda/LICENSE new file mode 100644 index 0000000..4a59c94 --- /dev/null +++ b/node_modules/is-lambda/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016-2017 Thomas Watson Steen + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/is-lambda/README.md b/node_modules/is-lambda/README.md new file mode 100644 index 0000000..31a8f56 --- /dev/null +++ b/node_modules/is-lambda/README.md @@ -0,0 +1,27 @@ +# is-lambda + +Returns `true` if the current environment is an [AWS +Lambda](https://aws.amazon.com/lambda/) server. 
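As `index.js` further down in this diff shows, detection is based entirely on two environment variables that the Lambda runtime sets, so the result can be simulated anywhere. A quick sketch (the variable values are illustrative):

```js
// Pretend we are inside a Lambda runtime before requiring the module;
// the module reads the environment once, at require time.
process.env.LAMBDA_TASK_ROOT = '/var/task'
process.env.AWS_EXECUTION_ENV = 'AWS_Lambda_nodejs18.x'

console.log(require('is-lambda')) //=> true
```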
+
+[![Build status](https://travis-ci.org/watson/is-lambda.svg?branch=master)](https://travis-ci.org/watson/is-lambda)
+[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://github.com/feross/standard)
+
+## Installation
+
+```
+npm install is-lambda
+```
+
+## Usage
+
+```js
+var isLambda = require('is-lambda')
+
+if (isLambda) {
+  console.log('The code is running on an AWS Lambda')
+}
+```
+
+## License
+
+MIT
diff --git a/node_modules/is-lambda/index.js b/node_modules/is-lambda/index.js
new file mode 100644
index 0000000..b245ab1
--- /dev/null
+++ b/node_modules/is-lambda/index.js
@@ -0,0 +1,6 @@
+'use strict'
+
+module.exports = !!(
+  (process.env.LAMBDA_TASK_ROOT && process.env.AWS_EXECUTION_ENV) ||
+  false
+)
diff --git a/node_modules/is-lambda/package.json b/node_modules/is-lambda/package.json
new file mode 100644
index 0000000..d855089
--- /dev/null
+++ b/node_modules/is-lambda/package.json
@@ -0,0 +1,35 @@
+{
+  "name": "is-lambda",
+  "version": "1.0.1",
+  "description": "Detect if your code is running on an AWS Lambda server",
+  "main": "index.js",
+  "dependencies": {},
+  "devDependencies": {
+    "clear-require": "^1.0.1",
+    "standard": "^10.0.2"
+  },
+  "scripts": {
+    "test": "standard && node test.js"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/watson/is-lambda.git"
+  },
+  "keywords": [
+    "aws",
+    "hosting",
+    "hosted",
+    "lambda",
+    "detect"
+  ],
+  "author": "Thomas Watson Steen (https://twitter.com/wa7son)",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/watson/is-lambda/issues"
+  },
+  "homepage": "https://github.com/watson/is-lambda",
+  "coordinates": [
+    37.3859955,
+    -122.0838831
+  ]
+}
diff --git a/node_modules/is-lambda/test.js b/node_modules/is-lambda/test.js
new file mode 100644
index 0000000..e8e7325
--- /dev/null
+++ b/node_modules/is-lambda/test.js
@@ -0,0 +1,16 @@
+'use strict'
+
+var assert = require('assert')
+var clearRequire = require('clear-require')
+
+process.env.AWS_EXECUTION_ENV = 'AWS_Lambda_nodejs6.10'
+process.env.LAMBDA_TASK_ROOT = '/var/task'
+
+var isLambda = require('./')
+assert(isLambda)
+
+delete process.env.AWS_EXECUTION_ENV
+
+clearRequire('./')
+isLambda = require('./')
+assert(!isLambda)
diff --git a/node_modules/is-plain-obj/index.js b/node_modules/is-plain-obj/index.js
new file mode 100644
index 0000000..0d1ba9e
--- /dev/null
+++ b/node_modules/is-plain-obj/index.js
@@ -0,0 +1,7 @@
+'use strict';
+var toString = Object.prototype.toString;
+
+module.exports = function (x) {
+	var prototype;
+	return toString.call(x) === '[object Object]' && (prototype = Object.getPrototypeOf(x), prototype === null || prototype === Object.getPrototypeOf({}));
+};
diff --git a/node_modules/is-plain-obj/license b/node_modules/is-plain-obj/license
new file mode 100644
index 0000000..654d0bf
--- /dev/null
+++ b/node_modules/is-plain-obj/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Sindre Sorhus (sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the 
Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/is-plain-obj/package.json b/node_modules/is-plain-obj/package.json new file mode 100644 index 0000000..d331f6e --- /dev/null +++ b/node_modules/is-plain-obj/package.json @@ -0,0 +1,36 @@ +{ + "name": "is-plain-obj", + "version": "1.1.0", + "description": "Check if a value is a plain object", + "license": "MIT", + "repository": "sindresorhus/is-plain-obj", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "obj", + "object", + "is", + "check", + "test", + "type", + "plain", + "vanilla", + "pure", + "simple" + ], + "devDependencies": { + "ava": "0.0.4" + } +} diff --git a/node_modules/is-plain-obj/readme.md b/node_modules/is-plain-obj/readme.md new file mode 100644 index 0000000..269e56a --- /dev/null +++ b/node_modules/is-plain-obj/readme.md @@ -0,0 +1,35 @@ +# is-plain-obj [![Build Status](https://travis-ci.org/sindresorhus/is-plain-obj.svg?branch=master)](https://travis-ci.org/sindresorhus/is-plain-obj) + +> Check if a value is a plain object + +An object is plain if it's created by either `{}`, `new Object()` or `Object.create(null)`. + + +## Install + +``` +$ npm install --save is-plain-obj +``` + + +## Usage + +```js +var isPlainObj = require('is-plain-obj'); + +isPlainObj({foo: 'bar'}); +//=> true + +isPlainObj([1, 2, 3]); +//=> false +``` + + +## Related + +- [is-obj](https://github.com/sindresorhus/is-obj) - Check if a value is an object + + +## License + +MIT © [Sindre Sorhus](http://sindresorhus.com) diff --git a/node_modules/isarray/.npmignore b/node_modules/isarray/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/isarray/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/isarray/.travis.yml b/node_modules/isarray/.travis.yml new file mode 100644 index 0000000..cc4dba2 --- /dev/null +++ b/node_modules/isarray/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/node_modules/isarray/Makefile b/node_modules/isarray/Makefile new file mode 100644 index 0000000..787d56e --- /dev/null +++ b/node_modules/isarray/Makefile @@ -0,0 +1,6 @@ + +test: + @node_modules/.bin/tape test.js + +.PHONY: test + diff --git a/node_modules/isarray/README.md b/node_modules/isarray/README.md new file mode 100644 index 0000000..16d2c59 --- /dev/null +++ b/node_modules/isarray/README.md @@ -0,0 +1,60 @@ + +# isarray + +`Array#isArray` for older browsers. 
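Besides old browsers, the `toString`-based fallback (see `index.js` below) also matters across realms: an array created in another context is not `instanceof` the local `Array`, but its `[[Class]]` is still reported correctly. A small Node sketch, nothing in it is part of the package itself:

```js
var vm = require('vm');
var isArray = require('isarray');

// An array built in a different realm has a different Array constructor,
// so instanceof fails while isarray still recognizes it.
var foreign = vm.runInNewContext('[1, 2, 3]');

console.log(foreign instanceof Array); // => false
console.log(isArray(foreign)); // => true
```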
+ +[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) +[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) + +[![browser support](https://ci.testling.com/juliangruber/isarray.png) +](https://ci.testling.com/juliangruber/isarray) + +## Usage + +```js +var isArray = require('isarray'); + +console.log(isArray([])); // => true +console.log(isArray({})); // => false +``` + +## Installation + +With [npm](http://npmjs.org) do + +```bash +$ npm install isarray +``` + +Then bundle for the browser with +[browserify](https://github.com/substack/browserify). + +With [component](http://component.io) do + +```bash +$ component install juliangruber/isarray +``` + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/isarray/component.json b/node_modules/isarray/component.json new file mode 100644 index 0000000..9e31b68 --- /dev/null +++ b/node_modules/isarray/component.json @@ -0,0 +1,19 @@ +{ + "name" : "isarray", + "description" : "Array#isArray for older browsers", + "version" : "0.0.1", + "repository" : "juliangruber/isarray", + "homepage": "https://github.com/juliangruber/isarray", + "main" : "index.js", + "scripts" : [ + "index.js" + ], + "dependencies" : {}, + "keywords": ["browser","isarray","array"], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT" +} diff --git a/node_modules/isarray/index.js b/node_modules/isarray/index.js new file mode 100644 index 0000000..a57f634 --- /dev/null +++ b/node_modules/isarray/index.js @@ -0,0 +1,5 @@ +var toString = {}.toString; + +module.exports = Array.isArray || function (arr) { + return toString.call(arr) == '[object Array]'; +}; diff --git a/node_modules/isarray/package.json b/node_modules/isarray/package.json new file mode 100644 index 0000000..1a4317a --- /dev/null +++ b/node_modules/isarray/package.json @@ -0,0 +1,45 @@ +{ + "name": "isarray", + "description": "Array#isArray for older browsers", + "version": "1.0.0", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/isarray.git" + }, + "homepage": "https://github.com/juliangruber/isarray", + "main": "index.js", + "dependencies": {}, + "devDependencies": { + "tape": "~2.13.4" + }, + "keywords": [ + "browser", + "isarray", + "array" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test.js", + "browsers": [ + "ie/8..latest", + "firefox/17..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + }, + "scripts": { + "test": "tape test.js" + } +} diff --git a/node_modules/isarray/test.js b/node_modules/isarray/test.js new file mode 100644 index 0000000..e0c3444 --- /dev/null +++ b/node_modules/isarray/test.js @@ -0,0 +1,20 @@ +var isArray = require('./'); +var test = require('tape'); + +test('is array', function(t){ + t.ok(isArray([])); + t.notOk(isArray({})); + t.notOk(isArray(null)); + t.notOk(isArray(false)); + + var obj = {}; + obj[0] = true; + t.notOk(isArray(obj)); + + var arr = []; + arr.foo = 'bar'; + t.ok(isArray(arr)); + + t.end(); +}); + diff --git a/node_modules/isexe/.npmignore b/node_modules/isexe/.npmignore new file mode 100644 index 0000000..c1cb757 --- /dev/null +++ b/node_modules/isexe/.npmignore @@ -0,0 +1,2 @@ +.nyc_output/ +coverage/ diff --git a/node_modules/isexe/LICENSE b/node_modules/isexe/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/isexe/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/isexe/README.md b/node_modules/isexe/README.md new file mode 100644 index 0000000..35769e8 --- /dev/null +++ b/node_modules/isexe/README.md @@ -0,0 +1,51 @@ +# isexe + +Minimal module to check if a file is executable, and a normal file. + +Uses `fs.stat` and tests against the `PATHEXT` environment variable on +Windows. + +## USAGE + +```javascript +var isexe = require('isexe') +isexe('some-file-name', function (err, isExe) { + if (err) { + console.error('probably file does not exist or something', err) + } else if (isExe) { + console.error('this thing can be run') + } else { + console.error('cannot be run') + } +}) + +// same thing but synchronous, throws errors +var isExe = isexe.sync('some-file-name') + +// treat errors as just "not executable" +isexe('maybe-missing-file', { ignoreErrors: true }, callback) +var isExe = isexe.sync('maybe-missing-file', { ignoreErrors: true }) +``` + +## API + +### `isexe(path, [options], [callback])` + +Check if the path is executable. If no callback provided, and a +global `Promise` object is available, then a Promise will be returned. + +Will raise whatever errors may be raised by `fs.stat`, unless +`options.ignoreErrors` is set to true. + +### `isexe.sync(path, [options])` + +Same as `isexe` but returns the value and throws any errors raised. + +### Options + +* `ignoreErrors` Treat all errors as "no, this is not executable", but + don't raise them. +* `uid` Number to use as the user id +* `gid` Number to use as the group id +* `pathExt` List of path extensions to use instead of `PATHEXT` + environment variable on Windows. 
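For example, on Windows the check is extension-based rather than mode-bit-based, so `pathExt` makes the result independent of the machine's `PATHEXT`; on POSIX, `uid`/`gid` let you evaluate the mode bits as a different user. A short sketch (file names are illustrative):

```js
var isexe = require('isexe')

// Windows: only .BAT/.CMD/.EXE count as executable, whatever PATHEXT says.
isexe('deploy.bat', { pathExt: '.BAT;.CMD;.EXE' }, function (err, isExe) {
  if (err) throw err
  console.log(isExe)
})

// POSIX: check the mode bits as root, and treat a missing file as "not executable".
var ok = isexe.sync('/usr/local/bin/deploy', { uid: 0, gid: 0, ignoreErrors: true })
console.log(ok)
```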
diff --git a/node_modules/isexe/index.js b/node_modules/isexe/index.js new file mode 100644 index 0000000..553fb32 --- /dev/null +++ b/node_modules/isexe/index.js @@ -0,0 +1,57 @@ +var fs = require('fs') +var core +if (process.platform === 'win32' || global.TESTING_WINDOWS) { + core = require('./windows.js') +} else { + core = require('./mode.js') +} + +module.exports = isexe +isexe.sync = sync + +function isexe (path, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } + + if (!cb) { + if (typeof Promise !== 'function') { + throw new TypeError('callback not provided') + } + + return new Promise(function (resolve, reject) { + isexe(path, options || {}, function (er, is) { + if (er) { + reject(er) + } else { + resolve(is) + } + }) + }) + } + + core(path, options || {}, function (er, is) { + // ignore EACCES because that just means we aren't allowed to run it + if (er) { + if (er.code === 'EACCES' || options && options.ignoreErrors) { + er = null + is = false + } + } + cb(er, is) + }) +} + +function sync (path, options) { + // my kingdom for a filtered catch + try { + return core.sync(path, options || {}) + } catch (er) { + if (options && options.ignoreErrors || er.code === 'EACCES') { + return false + } else { + throw er + } + } +} diff --git a/node_modules/isexe/mode.js b/node_modules/isexe/mode.js new file mode 100644 index 0000000..1995ea4 --- /dev/null +++ b/node_modules/isexe/mode.js @@ -0,0 +1,41 @@ +module.exports = isexe +isexe.sync = sync + +var fs = require('fs') + +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, options)) + }) +} + +function sync (path, options) { + return checkStat(fs.statSync(path), options) +} + +function checkStat (stat, options) { + return stat.isFile() && checkMode(stat, options) +} + +function checkMode (stat, options) { + var mod = stat.mode + var uid = stat.uid + var gid = stat.gid + + var myUid = options.uid !== undefined ? + options.uid : process.getuid && process.getuid() + var myGid = options.gid !== undefined ? + options.gid : process.getgid && process.getgid() + + var u = parseInt('100', 8) + var g = parseInt('010', 8) + var o = parseInt('001', 8) + var ug = u | g + + var ret = (mod & o) || + (mod & g) && gid === myGid || + (mod & u) && uid === myUid || + (mod & ug) && myUid === 0 + + return ret +} diff --git a/node_modules/isexe/package.json b/node_modules/isexe/package.json new file mode 100644 index 0000000..e452689 --- /dev/null +++ b/node_modules/isexe/package.json @@ -0,0 +1,31 @@ +{ + "name": "isexe", + "version": "2.0.0", + "description": "Minimal module to check if a file is executable.", + "main": "index.js", + "directories": { + "test": "test" + }, + "devDependencies": { + "mkdirp": "^0.5.1", + "rimraf": "^2.5.0", + "tap": "^10.3.0" + }, + "scripts": { + "test": "tap test/*.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/isexe.git" + }, + "keywords": [], + "bugs": { + "url": "https://github.com/isaacs/isexe/issues" + }, + "homepage": "https://github.com/isaacs/isexe#readme" +} diff --git a/node_modules/isexe/test/basic.js b/node_modules/isexe/test/basic.js new file mode 100644 index 0000000..d926df6 --- /dev/null +++ b/node_modules/isexe/test/basic.js @@ -0,0 +1,221 @@ +var t = require('tap') +var fs = require('fs') +var path = require('path') +var fixture = path.resolve(__dirname, 'fixtures') +var meow = fixture + '/meow.cat' +var mine = fixture + '/mine.cat' +var ours = fixture + '/ours.cat' +var fail = fixture + '/fail.false' +var noent = fixture + '/enoent.exe' +var mkdirp = require('mkdirp') +var rimraf = require('rimraf') + +var isWindows = process.platform === 'win32' +var hasAccess = typeof fs.access === 'function' +var winSkip = isWindows && 'windows' +var accessSkip = !hasAccess && 'no fs.access function' +var hasPromise = typeof Promise === 'function' +var promiseSkip = !hasPromise && 'no global Promise' + +function reset () { + delete require.cache[require.resolve('../')] + return require('../') +} + +t.test('setup fixtures', function (t) { + rimraf.sync(fixture) + mkdirp.sync(fixture) + fs.writeFileSync(meow, '#!/usr/bin/env cat\nmeow\n') + fs.chmodSync(meow, parseInt('0755', 8)) + fs.writeFileSync(fail, '#!/usr/bin/env false\n') + fs.chmodSync(fail, parseInt('0644', 8)) + fs.writeFileSync(mine, '#!/usr/bin/env cat\nmine\n') + fs.chmodSync(mine, parseInt('0744', 8)) + fs.writeFileSync(ours, '#!/usr/bin/env cat\nours\n') + fs.chmodSync(ours, parseInt('0754', 8)) + t.end() +}) + +t.test('promise', { skip: promiseSkip }, function (t) { + var isexe = reset() + t.test('meow async', function (t) { + isexe(meow).then(function (is) { + t.ok(is) + t.end() + }) + }) + t.test('fail async', function (t) { + isexe(fail).then(function (is) { + t.notOk(is) + t.end() + }) + }) + t.test('noent async', function (t) { + isexe(noent).catch(function (er) { + t.ok(er) + t.end() + }) + }) + t.test('noent ignore async', function (t) { + isexe(noent, { ignoreErrors: true }).then(function (is) { + t.notOk(is) + t.end() + }) + }) + t.end() +}) + +t.test('no promise', function (t) { + global.Promise = null + var isexe = reset() + t.throws('try to meow a promise', function () { + isexe(meow) + }) + t.end() +}) + +t.test('access', { skip: accessSkip || winSkip }, function (t) { + runTest(t) +}) + +t.test('mode', { skip: winSkip }, function (t) { + delete fs.access + delete fs.accessSync + var isexe = reset() + t.ok(isexe.sync(ours, { uid: 0, gid: 0 })) + t.ok(isexe.sync(mine, { uid: 0, gid: 0 })) + runTest(t) +}) + +t.test('windows', function (t) { + global.TESTING_WINDOWS = true + var pathExt = '.EXE;.CAT;.CMD;.COM' + t.test('pathExt option', function (t) { + runTest(t, { pathExt: '.EXE;.CAT;.CMD;.COM' }) + }) + t.test('pathExt env', function (t) { + process.env.PATHEXT = pathExt + runTest(t) + }) + t.test('no pathExt', function (t) { + // with a pathExt of '', any filename is fine. + // so the "fail" one would still pass. + runTest(t, { pathExt: '', skipFail: true }) + }) + t.test('pathext with empty entry', function (t) { + // with a pathExt of '', any filename is fine. + // so the "fail" one would still pass. 
+ runTest(t, { pathExt: ';' + pathExt, skipFail: true }) + }) + t.end() +}) + +t.test('cleanup', function (t) { + rimraf.sync(fixture) + t.end() +}) + +function runTest (t, options) { + var isexe = reset() + + var optionsIgnore = Object.create(options || {}) + optionsIgnore.ignoreErrors = true + + if (!options || !options.skipFail) { + t.notOk(isexe.sync(fail, options)) + } + t.notOk(isexe.sync(noent, optionsIgnore)) + if (!options) { + t.ok(isexe.sync(meow)) + } else { + t.ok(isexe.sync(meow, options)) + } + + t.ok(isexe.sync(mine, options)) + t.ok(isexe.sync(ours, options)) + t.throws(function () { + isexe.sync(noent, options) + }) + + t.test('meow async', function (t) { + if (!options) { + isexe(meow, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + } else { + isexe(meow, options, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + } + }) + + t.test('mine async', function (t) { + isexe(mine, options, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + }) + + t.test('ours async', function (t) { + isexe(ours, options, function (er, is) { + if (er) { + throw er + } + t.ok(is) + t.end() + }) + }) + + if (!options || !options.skipFail) { + t.test('fail async', function (t) { + isexe(fail, options, function (er, is) { + if (er) { + throw er + } + t.notOk(is) + t.end() + }) + }) + } + + t.test('noent async', function (t) { + isexe(noent, options, function (er, is) { + t.ok(er) + t.notOk(is) + t.end() + }) + }) + + t.test('noent ignore async', function (t) { + isexe(noent, optionsIgnore, function (er, is) { + if (er) { + throw er + } + t.notOk(is) + t.end() + }) + }) + + t.test('directory is not executable', function (t) { + isexe(__dirname, options, function (er, is) { + if (er) { + throw er + } + t.notOk(is) + t.end() + }) + }) + + t.end() +} diff --git a/node_modules/isexe/windows.js b/node_modules/isexe/windows.js new file mode 100644 index 0000000..3499673 --- /dev/null +++ b/node_modules/isexe/windows.js @@ -0,0 +1,42 @@ +module.exports = isexe +isexe.sync = sync + +var fs = require('fs') + +function checkPathExt (path, options) { + var pathext = options.pathExt !== undefined ? + options.pathExt : process.env.PATHEXT + + if (!pathext) { + return true + } + + pathext = pathext.split(';') + if (pathext.indexOf('') !== -1) { + return true + } + for (var i = 0; i < pathext.length; i++) { + var p = pathext[i].toLowerCase() + if (p && path.substr(-p.length).toLowerCase() === p) { + return true + } + } + return false +} + +function checkStat (stat, path, options) { + if (!stat.isSymbolicLink() && !stat.isFile()) { + return false + } + return checkPathExt(path, options) +} + +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, path, options)) + }) +} + +function sync (path, options) { + return checkStat(fs.statSync(path), path, options) +} diff --git a/node_modules/js-base64/LICENSE.md b/node_modules/js-base64/LICENSE.md new file mode 100644 index 0000000..fd579a4 --- /dev/null +++ b/node_modules/js-base64/LICENSE.md @@ -0,0 +1,27 @@ +Copyright (c) 2014, Dan Kogai +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the name of {{{project}}} nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/node_modules/js-base64/README.md b/node_modules/js-base64/README.md
new file mode 100644
index 0000000..4a70bd6
--- /dev/null
+++ b/node_modules/js-base64/README.md
@@ -0,0 +1,123 @@
+[![build status](https://secure.travis-ci.org/dankogai/js-base64.png)](http://travis-ci.org/dankogai/js-base64)
+
+# base64.js
+
+Yet another Base64 transcoder
+
+## Usage
+
+### Install
+
+```bash
+$ npm install --save js-base64
+```
+
+If you are using it with ES6 transpilers, you may also need:
+
+```bash
+$ npm install --save babel-preset-env
+```
+
+Note `js-base64` itself is stand-alone so its `package.json` has no `dependencies`.  However, it is also tested on ES6 environment so `"babel-preset-env": "^1.7.0"` is on `devDependencies`.
+
+### In Browser
+
+* Locally
+
+```html
+<script src="base64.js"></script>
+```
+
+* Directly from CDN. In which case you don't even need to install.
+
+```html
+<script src="https://cdn.jsdelivr.net/npm/js-base64/base64.min.js"></script>
+```
+
+### In an AMD loader
+```js
+require(['async'], function(async) {});
+```
+
+### Promise and async/await
+
+I recommend using [`Aigle`](https://github.com/suguru03/aigle).
+
+It is optimized for Promise handling and has almost the same functionality as `neo-async`. 
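If you would rather avoid an extra dependency, Node's built-in `util.promisify` also works on these callback-style functions, since they all take the callback last. A minimal sketch (not from the neo-async docs):

```js
const util = require('util');
const async = require('neo-async');

const mapAsync = util.promisify(async.map);

(async () => {
  // Results come back in input order even though the timers finish out of order.
  const doubled = await mapAsync([30, 10, 20], (ms, done) => {
    setTimeout(() => done(null, ms * 2), ms);
  });
  console.log(doubled); //=> [60, 20, 40]
})();
```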
+
+### Node.js
+
+#### standard
+
+```bash
+$ npm install neo-async
+```
+```js
+var async = require('neo-async');
+```
+
+#### replacement
+```bash
+$ npm install neo-async
+$ ln -s ./node_modules/neo-async ./node_modules/async
+```
+```js
+var async = require('async');
+```
+
+### Bower
+
+```bash
+bower install neo-async
+```
+
+## Feature
+
+[JSDoc](http://suguru03.github.io/neo-async/doc/async.html)
+
+\* not in Async
+
+### Collections
+
+- [`each`](http://suguru03.github.io/neo-async/doc/async.each.html)
+- [`eachSeries`](http://suguru03.github.io/neo-async/doc/async.eachSeries.html)
+- [`eachLimit`](http://suguru03.github.io/neo-async/doc/async.eachLimit.html)
+- [`forEach`](http://suguru03.github.io/neo-async/doc/async.each.html) -> [`each`](http://suguru03.github.io/neo-async/doc/async.each.html)
+- [`forEachSeries`](http://suguru03.github.io/neo-async/doc/async.eachSeries.html) -> [`eachSeries`](http://suguru03.github.io/neo-async/doc/async.eachSeries.html)
+- [`forEachLimit`](http://suguru03.github.io/neo-async/doc/async.eachLimit.html) -> [`eachLimit`](http://suguru03.github.io/neo-async/doc/async.eachLimit.html)
+- [`eachOf`](http://suguru03.github.io/neo-async/doc/async.each.html) -> [`each`](http://suguru03.github.io/neo-async/doc/async.each.html)
+- [`eachOfSeries`](http://suguru03.github.io/neo-async/doc/async.eachSeries.html) -> [`eachSeries`](http://suguru03.github.io/neo-async/doc/async.eachSeries.html)
+- [`eachOfLimit`](http://suguru03.github.io/neo-async/doc/async.eachLimit.html) -> [`eachLimit`](http://suguru03.github.io/neo-async/doc/async.eachLimit.html)
+- [`forEachOf`](http://suguru03.github.io/neo-async/doc/async.each.html) -> [`each`](http://suguru03.github.io/neo-async/doc/async.each.html)
+- [`forEachOfSeries`](http://suguru03.github.io/neo-async/doc/async.eachSeries.html) -> [`eachSeries`](http://suguru03.github.io/neo-async/doc/async.eachSeries.html)
+- [`forEachOfLimit`](http://suguru03.github.io/neo-async/doc/async.eachLimit.html) -> [`eachLimit`](http://suguru03.github.io/neo-async/doc/async.eachLimit.html)
+- [`map`](http://suguru03.github.io/neo-async/doc/async.map.html)
+- [`mapSeries`](http://suguru03.github.io/neo-async/doc/async.mapSeries.html)
+- [`mapLimit`](http://suguru03.github.io/neo-async/doc/async.mapLimit.html)
+- [`mapValues`](http://suguru03.github.io/neo-async/doc/async.mapValues.html)
+- [`mapValuesSeries`](http://suguru03.github.io/neo-async/doc/async.mapValuesSeries.html)
+- [`mapValuesLimit`](http://suguru03.github.io/neo-async/doc/async.mapValuesLimit.html)
+- [`filter`](http://suguru03.github.io/neo-async/doc/async.filter.html)
+- [`filterSeries`](http://suguru03.github.io/neo-async/doc/async.filterSeries.html)
+- [`filterLimit`](http://suguru03.github.io/neo-async/doc/async.filterLimit.html)
+- [`select`](http://suguru03.github.io/neo-async/doc/async.filter.html) -> [`filter`](http://suguru03.github.io/neo-async/doc/async.filter.html)
+- [`selectSeries`](http://suguru03.github.io/neo-async/doc/async.filterSeries.html) -> [`filterSeries`](http://suguru03.github.io/neo-async/doc/async.filterSeries.html)
+- [`selectLimit`](http://suguru03.github.io/neo-async/doc/async.filterLimit.html) -> [`filterLimit`](http://suguru03.github.io/neo-async/doc/async.filterLimit.html)
+- [`reject`](http://suguru03.github.io/neo-async/doc/async.reject.html)
+- [`rejectSeries`](http://suguru03.github.io/neo-async/doc/async.rejectSeries.html)
+- [`rejectLimit`](http://suguru03.github.io/neo-async/doc/async.rejectLimit.html)
+- [`detect`](http://suguru03.github.io/neo-async/doc/async.detect.html)
+- [`detectSeries`](http://suguru03.github.io/neo-async/doc/async.detectSeries.html)
+- [`detectLimit`](http://suguru03.github.io/neo-async/doc/async.detectLimit.html)
+- [`find`](http://suguru03.github.io/neo-async/doc/async.detect.html) -> [`detect`](http://suguru03.github.io/neo-async/doc/async.detect.html)
+- [`findSeries`](http://suguru03.github.io/neo-async/doc/async.detectSeries.html) -> [`detectSeries`](http://suguru03.github.io/neo-async/doc/async.detectSeries.html)
+- [`findLimit`](http://suguru03.github.io/neo-async/doc/async.detectLimit.html) -> [`detectLimit`](http://suguru03.github.io/neo-async/doc/async.detectLimit.html)
+- [`pick`](http://suguru03.github.io/neo-async/doc/async.pick.html) *
+- [`pickSeries`](http://suguru03.github.io/neo-async/doc/async.pickSeries.html) *
+- [`pickLimit`](http://suguru03.github.io/neo-async/doc/async.pickLimit.html) *
+- [`omit`](http://suguru03.github.io/neo-async/doc/async.omit.html) *
+- [`omitSeries`](http://suguru03.github.io/neo-async/doc/async.omitSeries.html) *
+- [`omitLimit`](http://suguru03.github.io/neo-async/doc/async.omitLimit.html) *
+- [`reduce`](http://suguru03.github.io/neo-async/doc/async.reduce.html)
+- [`inject`](http://suguru03.github.io/neo-async/doc/async.reduce.html) -> [`reduce`](http://suguru03.github.io/neo-async/doc/async.reduce.html)
+- [`foldl`](http://suguru03.github.io/neo-async/doc/async.reduce.html) -> [`reduce`](http://suguru03.github.io/neo-async/doc/async.reduce.html)
+- [`reduceRight`](http://suguru03.github.io/neo-async/doc/async.reduceRight.html)
+- [`foldr`](http://suguru03.github.io/neo-async/doc/async.reduceRight.html) -> [`reduceRight`](http://suguru03.github.io/neo-async/doc/async.reduceRight.html)
+- [`transform`](http://suguru03.github.io/neo-async/doc/async.transform.html)
+- [`transformSeries`](http://suguru03.github.io/neo-async/doc/async.transformSeries.html) *
+- [`transformLimit`](http://suguru03.github.io/neo-async/doc/async.transformLimit.html) *
+- [`sortBy`](http://suguru03.github.io/neo-async/doc/async.sortBy.html)
+- [`sortBySeries`](http://suguru03.github.io/neo-async/doc/async.sortBySeries.html) *
+- [`sortByLimit`](http://suguru03.github.io/neo-async/doc/async.sortByLimit.html) *
+- [`some`](http://suguru03.github.io/neo-async/doc/async.some.html)
+- [`someSeries`](http://suguru03.github.io/neo-async/doc/async.someSeries.html)
+- [`someLimit`](http://suguru03.github.io/neo-async/doc/async.someLimit.html)
+- [`any`](http://suguru03.github.io/neo-async/doc/async.some.html) -> [`some`](http://suguru03.github.io/neo-async/doc/async.some.html)
+- [`anySeries`](http://suguru03.github.io/neo-async/doc/async.someSeries.html) -> [`someSeries`](http://suguru03.github.io/neo-async/doc/async.someSeries.html)
+- [`anyLimit`](http://suguru03.github.io/neo-async/doc/async.someLimit.html) -> [`someLimit`](http://suguru03.github.io/neo-async/doc/async.someLimit.html)
+- [`every`](http://suguru03.github.io/neo-async/doc/async.every.html)
+- [`everySeries`](http://suguru03.github.io/neo-async/doc/async.everySeries.html)
+- [`everyLimit`](http://suguru03.github.io/neo-async/doc/async.everyLimit.html)
+- [`all`](http://suguru03.github.io/neo-async/doc/async.every.html) -> [`every`](http://suguru03.github.io/neo-async/doc/async.every.html)
+- [`allSeries`](http://suguru03.github.io/neo-async/doc/async.everySeries.html) -> [`everySeries`](http://suguru03.github.io/neo-async/doc/async.everySeries.html)
+- [`allLimit`](http://suguru03.github.io/neo-async/doc/async.everyLimit.html) -> [`everyLimit`](http://suguru03.github.io/neo-async/doc/async.everyLimit.html)
+- [`concat`](http://suguru03.github.io/neo-async/doc/async.concat.html)
+- [`concatSeries`](http://suguru03.github.io/neo-async/doc/async.concatSeries.html)
+- [`concatLimit`](http://suguru03.github.io/neo-async/doc/async.concatLimit.html) *
+
+### Control Flow
+
+- [`parallel`](http://suguru03.github.io/neo-async/doc/async.parallel.html)
+- [`series`](http://suguru03.github.io/neo-async/doc/async.series.html)
+- [`parallelLimit`](http://suguru03.github.io/neo-async/doc/async.parallelLimit.html)
+- [`tryEach`](http://suguru03.github.io/neo-async/doc/async.tryEach.html)
+- [`waterfall`](http://suguru03.github.io/neo-async/doc/async.waterfall.html)
+- [`angelFall`](http://suguru03.github.io/neo-async/doc/async.angelFall.html) *
+- [`angelfall`](http://suguru03.github.io/neo-async/doc/async.angelFall.html) -> [`angelFall`](http://suguru03.github.io/neo-async/doc/async.angelFall.html) *
+- [`whilst`](#whilst)
+- [`doWhilst`](#doWhilst)
+- [`until`](#until)
+- [`doUntil`](#doUntil)
+- [`during`](#during)
+- [`doDuring`](#doDuring)
+- [`forever`](#forever)
+- [`compose`](#compose)
+- [`seq`](#seq)
+- [`applyEach`](#applyEach)
+- [`applyEachSeries`](#applyEachSeries)
+- [`queue`](#queue)
+- [`priorityQueue`](#priorityQueue)
+- [`cargo`](#cargo)
+- [`auto`](#auto)
+- [`autoInject`](#autoInject)
+- [`retry`](#retry)
+- [`retryable`](#retryable)
+- [`iterator`](#iterator)
+- [`times`](http://suguru03.github.io/neo-async/doc/async.times.html)
+- [`timesSeries`](http://suguru03.github.io/neo-async/doc/async.timesSeries.html)
+- [`timesLimit`](http://suguru03.github.io/neo-async/doc/async.timesLimit.html)
+- [`race`](#race)
+
+### Utils
+- [`apply`](#apply)
+- [`setImmediate`](#setImmediate)
+- [`nextTick`](#nextTick)
+- [`memoize`](#memoize)
+- [`unmemoize`](#unmemoize)
+- [`ensureAsync`](#ensureAsync)
+- [`constant`](#constant)
+- [`asyncify`](#asyncify)
+- [`wrapSync`](#asyncify) -> [`asyncify`](#asyncify)
+- [`log`](#log)
+- [`dir`](#dir)
+- [`timeout`](http://suguru03.github.io/neo-async/doc/async.timeout.html)
+- [`reflect`](#reflect)
+- [`reflectAll`](#reflectAll)
+- [`createLogger`](#createLogger)
+
+## Mode
+- [`safe`](#safe) *
+- [`fast`](#fast) *
+
+## Benchmark
+
+[Benchmark: Async vs Neo-Async](http://suguru03.hatenablog.com/entry/2016/06/10/135559)
+
+### How to check
+
+```bash
+$ node perf
+```
+
+### Environment
+
+* Darwin 17.3.0 x64
+* Node.js v8.9.4
+* async v2.6.0
+* neo-async v2.5.0
+* benchmark v2.1.4
+
+### Result
+
+The value is the ratio (Neo-Async/Async) of the average speed. 
+
+#### Collections
+|function|benchmark|
+|---|--:|
+|each/forEach|2.43|
+|eachSeries/forEachSeries|1.75|
+|eachLimit/forEachLimit|1.68|
+|eachOf|3.29|
+|eachOfSeries|1.50|
+|eachOfLimit|1.59|
+|map|3.95|
+|mapSeries|1.81|
+|mapLimit|1.27|
+|mapValues|2.73|
+|mapValuesSeries|1.59|
+|mapValuesLimit|1.23|
+|filter|3.00|
+|filterSeries|1.74|
+|filterLimit|1.17|
+|reject|4.59|
+|rejectSeries|2.31|
+|rejectLimit|1.58|
+|detect|4.30|
+|detectSeries|1.86|
+|detectLimit|1.32|
+|reduce|1.82|
+|transform|2.46|
+|sortBy|4.08|
+|some|2.19|
+|someSeries|1.83|
+|someLimit|1.32|
+|every|2.09|
+|everySeries|1.84|
+|everyLimit|1.35|
+|concat|3.79|
+|concatSeries|4.45|
+
+#### Control Flow
+|function|benchmark|
+|---|--:|
+|parallel|2.93|
+|series|1.96|
+|waterfall|1.29|
+|whilst|1.00|
+|doWhilst|1.12|
+|until|1.12|
+|doUntil|1.12|
+|during|1.18|
+|doDuring|2.42|
+|times|4.25|
+|auto|1.97|
+
+
+## License
+[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fsuguru03%2Fneo-async.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fsuguru03%2Fneo-async?ref=badge_large)
diff --git a/node_modules/neo-async/all.js b/node_modules/neo-async/all.js
new file mode 100644
index 0000000..dad54e7
--- /dev/null
+++ b/node_modules/neo-async/all.js
@@ -0,0 +1,3 @@
+'use strict';
+
+module.exports = require('./async').all;
diff --git a/node_modules/neo-async/allLimit.js b/node_modules/neo-async/allLimit.js
new file mode 100644
index 0000000..d9d7aaa
--- /dev/null
+++ b/node_modules/neo-async/allLimit.js
@@ -0,0 +1,3 @@
+'use strict';
+
+module.exports = require('./async').allLimit;
diff --git a/node_modules/neo-async/allSeries.js b/node_modules/neo-async/allSeries.js
new file mode 100644
index 0000000..2a7a8ba
--- /dev/null
+++ b/node_modules/neo-async/allSeries.js
@@ -0,0 +1,3 @@
+'use strict';
+
+module.exports = require('./async').allSeries;
diff --git a/node_modules/neo-async/angelFall.js b/node_modules/neo-async/angelFall.js
new file mode 100644
index 0000000..476c23e
--- /dev/null
+++ b/node_modules/neo-async/angelFall.js
@@ -0,0 +1,3 @@
+'use strict';
+
+module.exports = require('./async').angelfall;
diff --git a/node_modules/neo-async/any.js b/node_modules/neo-async/any.js
new file mode 100644
index 0000000..d6b07bb
--- /dev/null
+++ b/node_modules/neo-async/any.js
@@ -0,0 +1,3 @@
+'use strict';
+
+module.exports = require('./async').any;
diff --git a/node_modules/neo-async/anyLimit.js b/node_modules/neo-async/anyLimit.js
new file mode 100644
index 0000000..34114f8
--- /dev/null
+++ b/node_modules/neo-async/anyLimit.js
@@ -0,0 +1,3 @@
+'use strict';
+
+module.exports = require('./async').anyLimit;
diff --git a/node_modules/neo-async/anySeries.js b/node_modules/neo-async/anySeries.js
new file mode 100644
index 0000000..bb3781f
--- /dev/null
+++ b/node_modules/neo-async/anySeries.js
@@ -0,0 +1,3 @@
+'use strict';
+
+module.exports = require('./async').anySeries;
diff --git a/node_modules/neo-async/apply.js b/node_modules/neo-async/apply.js
new file mode 100644
index 0000000..41135e2
--- /dev/null
+++ b/node_modules/neo-async/apply.js
@@ -0,0 +1,3 @@
+'use strict';
+
+module.exports = require('./async').apply;
diff --git a/node_modules/neo-async/applyEach.js b/node_modules/neo-async/applyEach.js
new file mode 100644
index 0000000..292bd1c
--- /dev/null
+++ b/node_modules/neo-async/applyEach.js
@@ -0,0 +1,3 @@
+'use strict';
+
+module.exports = require('./async').applyEach;
diff --git a/node_modules/neo-async/applyEachSeries.js b/node_modules/neo-async/applyEachSeries.js
new file mode 
100644 index 0000000..0aece7c --- /dev/null +++ b/node_modules/neo-async/applyEachSeries.js @@ -0,0 +1,3 @@ +'use strict'; + +module.exports = require('./async').applyEachSeries; diff --git a/node_modules/neo-async/async.js b/node_modules/neo-async/async.js new file mode 100644 index 0000000..e78eb16 --- /dev/null +++ b/node_modules/neo-async/async.js @@ -0,0 +1,9184 @@ +(function(global, factory) { + /*jshint -W030 */ + 'use strict'; + typeof exports === 'object' && typeof module !== 'undefined' + ? factory(exports) + : typeof define === 'function' && define.amd + ? define(['exports'], factory) + : global.async + ? factory((global.neo_async = global.neo_async || {})) + : factory((global.async = global.async || {})); +})(this, function(exports) { + 'use strict'; + + var noop = function noop() {}; + var throwError = function throwError() { + throw new Error('Callback was already called.'); + }; + + var DEFAULT_TIMES = 5; + var DEFAULT_INTERVAL = 0; + + var obj = 'object'; + var func = 'function'; + var isArray = Array.isArray; + var nativeKeys = Object.keys; + var nativePush = Array.prototype.push; + var iteratorSymbol = typeof Symbol === func && Symbol.iterator; + + var nextTick, asyncNextTick, asyncSetImmediate; + createImmediate(); + + /** + * @memberof async + * @namespace each + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(); + * }, num * 10); + * }; + * async.each(array, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(); + * }, num * 10); + * }; + * async.each(array, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [[1, 0], [2, 2], [3, 1]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(); + * }, num * 10); + * }; + * async.each(object, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(); + * }, num * 10); + * }; + * async.each(object, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [[1, 'a'], [2, 'c'], [3, 'b']] + * }); + * + * @example + * + * // break + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num !== 2); + * }, num * 10); + * }; + * async.each(array, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [1, 2] + * }); + * + */ + var each = createEach(arrayEach, baseEach, symbolEach); + + /** + * @memberof async + * @namespace map + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) 
{ + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.map(array, iterator, function(err, res) { + * console.log(res); // [1, 3, 2]; + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num); + * }, num * 10); + * }; + * async.map(array, iterator, function(err, res) { + * console.log(res); // [1, 3, 2] + * console.log(order); // [[1, 0], [2, 2], [3, 1]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.map(object, iterator, function(err, res) { + * console.log(res); // [1, 3, 2] + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num); + * }, num * 10); + * }; + * async.map(object, iterator, function(err, res) { + * console.log(res); // [1, 3, 2] + * console.log(order); // [[1, 'a'], [2, 'c'], [3, 'b']] + * }); + * + */ + var map = createMap(arrayEachIndex, baseEachIndex, symbolEachIndex, true); + + /** + * @memberof async + * @namespace mapValues + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.mapValues(array, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 3, '2': 2 } + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num); + * }, num * 10); + * }; + * async.mapValues(array, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 3, '2': 2 } + * console.log(order); // [[1, 0], [2, 2], [3, 1]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.mapValues(object, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 3, c: 2 } + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num); + * }, num * 10); + * }; + * async.mapValues(object, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 3, c: 2 } + * console.log(order); // [[1, 'a'], [2, 'c'], [3, 'b']] + * }); + * + */ + var mapValues = createMap(arrayEachIndex, baseEachKey, symbolEachKey, false); + + /** + * @memberof async + * @namespace filter + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = 
function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.filter(array, iterator, function(err, res) { + * console.log(res); // [1, 3]; + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.filter(array, iterator, function(err, res) { + * console.log(res); // [1, 3]; + * console.log(order); // [[1, 0], [2, 2], [3, 1]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.filter(object, iterator, function(err, res) { + * console.log(res); // [1, 3]; + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.filter(object, iterator, function(err, res) { + * console.log(res); // [1, 3]; + * console.log(order); // [[1, 'a'], [2, 'c'], [3, 'b']] + * }); + * + */ + var filter = createFilter(arrayEachIndexValue, baseEachIndexValue, symbolEachIndexValue, true); + + /** + * @memberof async + * @namespace filterSeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.filterSeries(array, iterator, function(err, res) { + * console.log(res); // [1, 3]; + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.filterSeries(array, iterator, function(err, res) { + * console.log(res); // [1, 3] + * console.log(order); // [[1, 0], [3, 1], [2, 2]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.filterSeries(object, iterator, function(err, res) { + * console.log(res); // [1, 3] + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.filterSeries(object, iterator, function(err, res) { + * console.log(res); // [1, 3] + * console.log(order); // [[1, 'a'], [3, 'b'], [2, 'c']] + * }); + * + */ + var filterSeries = createFilterSeries(true); + + /** + * @memberof async + * @namespace filterLimit + * @param {Array|Object} collection + * @param {number} limit - limit >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 
5, 3, 4, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.filterLimit(array, 2, iterator, function(err, res) { + * console.log(res); // [1, 5, 3] + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.filterLimit(array, 2, iterator, function(err, res) { + * console.log(res); // [1, 5, 3] + * console.log(order); // [[1, 0], [3, 2], [5, 1], [2, 4], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.filterLimit(object, 2, iterator, function(err, res) { + * console.log(res); // [1, 5, 3] + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.filterLimit(object, 2, iterator, function(err, res) { + * console.log(res); // [1, 5, 3] + * console.log(order); // [[1, 'a'], [3, 'c'], [5, 'b'], [2, 'e'], [4, 'd']] + * }); + * + */ + var filterLimit = createFilterLimit(true); + + /** + * @memberof async + * @namespace reject + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.reject(array, iterator, function(err, res) { + * console.log(res); // [2]; + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.reject(array, iterator, function(err, res) { + * console.log(res); // [2]; + * console.log(order); // [[1, 0], [2, 2], [3, 1]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.reject(object, iterator, function(err, res) { + * console.log(res); // [2]; + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.reject(object, iterator, function(err, res) { + * console.log(res); // [2]; + * console.log(order); // [[1, 'a'], [2, 'c'], [3, 'b']] + * }); + * + */ + var reject = createFilter(arrayEachIndexValue, baseEachIndexValue, symbolEachIndexValue, false); + + /** + * @memberof async + * @namespace rejectSeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} 
callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.rejectSeries(array, iterator, function(err, res) { + * console.log(res); // [2]; + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.rejectSeries(object, iterator, function(err, res) { + * console.log(res); // [2]; + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.rejectSeries(object, iterator, function(err, res) { + * console.log(res); // [2]; + * console.log(order); // [[1, 'a'], [3, 'b'], [2, 'c']] + * }); + * + */ + var rejectSeries = createFilterSeries(false); + + /** + * @memberof async + * @namespace rejectLimit + * @param {Array|Object} collection + * @param {number} limit - limit >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.rejectLimit(array, 2, iterator, function(err, res) { + * console.log(res); // [4, 2] + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.rejectLimit(array, 2, iterator, function(err, res) { + * console.log(res); // [4, 2] + * console.log(order); // [[1, 0], [3, 2], [5, 1], [2, 4], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.rejectLimit(object, 2, iterator, function(err, res) { + * console.log(res); // [4, 2] + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.rejectLimit(object, 2, iterator, function(err, res) { + * console.log(res); // [4, 2] + * console.log(order); // [[1, 'a'], [3, 'c'], [5, 'b'], [2, 'e'], [4, 'd']] + * }); + * + */ + var rejectLimit = createFilterLimit(false); + + /** + * @memberof async + * @namespace detect + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.detect(array, iterator, function(err, res) { + * console.log(res); // 1 + * 
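+ * // editor's note (hedged): detect reports the first value whose test passes
+ * // and does not wait for the slower timers, so order records only [1] here.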
console.log(order); // [1] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.detect(array, iterator, function(err, res) { + * console.log(res); // 1 + * console.log(order); // [[1, 0]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.detect(object, iterator, function(err, res) { + * console.log(res); // 1 + * console.log(order); // [1] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.detect(object, iterator, function(err, res) { + * console.log(res); // 1 + * console.log(order); // [[1, 'a']] + * }); + * + */ + var detect = createDetect(arrayEachValue, baseEachValue, symbolEachValue, true); + + /** + * @memberof async + * @namespace detectSeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.detectSeries(array, iterator, function(err, res) { + * console.log(res); // 1 + * console.log(order); // [1] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.detectSeries(array, iterator, function(err, res) { + * console.log(res); // 1 + * console.log(order); // [[1, 0]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.detectSeries(object, iterator, function(err, res) { + * console.log(res); // 1 + * console.log(order); // [1] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.detectSeries(object, iterator, function(err, res) { + * console.log(res); // 1 + * console.log(order); // [[1, 'a']] + * }); + * + */ + var detectSeries = createDetectSeries(true); + + /** + * @memberof async + * @namespace detectLimit + * @param {Array|Object} collection + * @param {number} limit - limit >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.detectLimit(array, 2, iterator, function(err, res) { + * console.log(res); // 1 + * console.log(order); // [1] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = 
[1, 5, 3, 4, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.detectLimit(array, 2, iterator, function(err, res) { + * console.log(res); // 1 + * console.log(order); // [[1, 0]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.detectLimit(object, 2, iterator, function(err, res) { + * console.log(res); // 1 + * console.log(order); // [1] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.detectLimit(object, 2, iterator, function(err, res) { + * console.log(res); // 1 + * console.log(order); // [[1, 'a']] + * }); + * + */ + var detectLimit = createDetectLimit(true); + + /** + * @memberof async + * @namespace every + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.every(array, iterator, function(err, res) { + * console.log(res); // false + * console.log(order); // [1, 2] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.every(array, iterator, function(err, res) { + * console.log(res); // false + * console.log(order); // [[1, 0], [2, 2]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.every(object, iterator, function(err, res) { + * console.log(res); // false + * console.log(order); // [1, 2] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.every(object, iterator, function(err, res) { + * console.log(res); // false + * console.log(order); // [[1, 'a'], [2, 'c']] + * }); + * + */ + var every = createEvery(arrayEachValue, baseEachValue, symbolEachValue); + + /** + * @memberof async + * @namespace everySeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.everySeries(array, iterator, function(err, res) { + * console.log(res); // false + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * 
order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.everySeries(array, iterator, function(err, res) { + * console.log(res); // false + * console.log(order); // [[1, 0], [3, 1], [2, 2]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.everySeries(object, iterator, function(err, res) { + * console.log(res); // false + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.everySeries(object, iterator, function(err, res) { + * console.log(res); // false + * console.log(order); // [[1, 'a'], [3, 'b'], [2, 'c']] + * }); + * + */ + var everySeries = createEverySeries(); + + /** + * @memberof async + * @namespace everyLimit + * @param {Array|Object} collection + * @param {number} limit - limit >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.everyLimit(array, 2, iterator, function(err, res) { + * console.log(res); // false + * console.log(order); // [1, 3, 5, 2] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.everyLimit(array, 2, iterator, function(err, res) { + * console.log(res); // false + * console.log(order); // [[1, 0], [3, 2], [5, 1], [2, 4]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.everyLimit(object, 2, iterator, function(err, res) { + * console.log(res); // false + * console.log(order); // [1, 3, 5, 2] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.everyLimit(object, 2, iterator, function(err, res) { + * console.log(res); // false + * console.log(order); // [[1, 'a'], [3, 'c'], [5, 'b'], [2, 'e']] + * }); + * + */ + var everyLimit = createEveryLimit(); + + /** + * @memberof async + * @namespace pick + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2, 4]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.pick(array, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 3 } + * console.log(order); // [1, 2, 3, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2, 4]; + * var iterator = function(num, index,
done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.pick(array, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 3 } + * console.log(order); // [[1, 0], [2, 2], [3, 1], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.pick(object, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 3 } + * console.log(order); // [1, 2, 3, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.pick(object, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 3 } + * console.log(order); // [[1, 'a'], [2, 'c'], [3, 'b'], [4, 'd']] + * }); + * + */ + var pick = createPick(arrayEachIndexValue, baseEachKeyValue, symbolEachKeyValue, true); + + /** + * @memberof async + * @namespace pickSeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2, 4]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.pickSeries(array, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 3 } + * console.log(order); // [1, 3, 2, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2, 4]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.pickSeries(array, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 3 } + * console.log(order); // [[1, 0], [3, 1], [2, 2], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.pickSeries(object, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 3 } + * console.log(order); // [1, 3, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.pickSeries(object, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 3 } + * console.log(order); // [[1, 'a'], [3, 'b'], [2, 'c'], [4, 'd']] + * }); + * + */ + var pickSeries = createPickSeries(true); + + /** + * @memberof async + * @namespace pickLimit + * @param {Array|Object} collection + * @param {number} limit - limit >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.pickLimit(array, 2, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 5, '2': 3 } + *
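+ * // editor's note (hedged): with limit 2 only two timers are in flight at once;
+ * // 1 (10ms) finishes first and frees a slot for 3, which is why 2 and 4 trail.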
console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.pickLimit(array, 2, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 5, '2': 3 } + * console.log(order); // [[1, 0], [3, 2], [5, 1], [2, 4], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.pickLimit(object, 2, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 5, c: 3 } + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.pickLimit(object, 2, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 5, c: 3 } + * console.log(order); // [[1, 'a'], [3, 'c'], [5, 'b'], [2, 'e'], [4, 'd']] + * }); + * + */ + var pickLimit = createPickLimit(true); + + /** + * @memberof async + * @namespace omit + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2, 4]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.omit(array, iterator, function(err, res) { + * console.log(res); // { '2': 2, '3': 4 } + * console.log(order); // [1, 2, 3, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2, 4]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.omit(array, iterator, function(err, res) { + * console.log(res); // { '2': 2, '3': 4 } + * console.log(order); // [[1, 0], [2, 2], [3, 1], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.omit(object, iterator, function(err, res) { + * console.log(res); // { c: 2, d: 4 } + * console.log(order); // [1, 2, 3, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.omit(object, iterator, function(err, res) { + * console.log(res); // { c: 2, d: 4 } + * console.log(order); // [[1, 'a'], [2, 'c'], [3, 'b'], [4, 'd']] + * }); + * + */ + var omit = createPick(arrayEachIndexValue, baseEachKeyValue, symbolEachKeyValue, false); + + /** + * @memberof async + * @namespace omitSeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2, 4]; + * var iterator = function(num, done) { + * setTimeout(function() { + *
order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.omitSeries(array, iterator, function(err, res) { + * console.log(res); // { '2': 2, '3': 4 } + * console.log(order); // [1, 3, 2, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2, 4]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.omitSeries(array, iterator, function(err, res) { + * console.log(res); // { '2': 2, '3': 4 } + * console.log(order); // [[1, 0], [3, 1], [2, 2], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.omitSeries(object, iterator, function(err, res) { + * console.log(res); // { c: 2, d: 4 } + * console.log(order); // [1, 3, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.omitSeries(object, iterator, function(err, res) { + * console.log(res); // { c: 2, d: 4 } + * console.log(order); // [[1, 'a'], [3, 'b'], [2, 'c'], [4, 'd']] + * }); + * + */ + var omitSeries = createPickSeries(false); + + /** + * @memberof async + * @namespace omitLimit + * @param {Array|Object} collection + * @param {number} limit - limit >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.omitLimit(array, 2, iterator, function(err, res) { + * console.log(res); // { '3': 4, '4': 2 } + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.omitLimit(array, 2, iterator, function(err, res) { + * console.log(res); // { '3': 4, '4': 2 } + * console.log(order); // [[1, 0], [3, 2], [5, 1], [2, 4], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.omitLimit(object, 2, iterator, function(err, res) { + * console.log(res); // { d: 4, e: 2 } + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.omitLimit(object, 2, iterator, function(err, res) { + * console.log(res); // { d: 4, e: 2 } + * console.log(order); // [[1, 'a'], [3, 'c'], [5, 'b'], [2, 'e'], [4, 'd']] + * }); + * + */ + var omitLimit = createPickLimit(false); + + /** + * @memberof async + * @namespace transform + * @param {Array|Object} collection + * @param {Array|Object|Function}
[accumulator] + * @param {Function} [iterator] + * @param {Function} [callback] + * @example + * + * // array + * var order = []; + * var collection = [1, 3, 2, 4]; + * var iterator = function(result, num, done) { + * setTimeout(function() { + * order.push(num); + * result.push(num); + * done(); + * }, num * 10); + * }; + * async.transform(collection, iterator, function(err, res) { + * console.log(res); // [1, 2, 3, 4] + * console.log(order); // [1, 2, 3, 4] + * }); + * + * @example + * + * // array with index and accumulator + * var order = []; + * var collection = [1, 3, 2, 4]; + * var iterator = function(result, num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * result[index] = num; + * done(); + * }, num * 10); + * }; + * async.transform(collection, {}, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 3, '2': 2, '3': 4 } + * console.log(order); // [[1, 0], [2, 2], [3, 1], [4, 3]] + * }); + * + * @example + * + * // object with accumulator + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(result, num, done) { + * setTimeout(function() { + * order.push(num); + * result.push(num); + * done(); + * }, num * 10); + * }; + * async.transform(object, [], iterator, function(err, res) { + * console.log(res); // [1, 2, 3, 4] + * console.log(order); // [1, 2, 3, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(result, num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * result[key] = num; + * done(); + * }, num * 10); + * }; + * async.transform(object, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 3, c: 2, d: 4 } + * console.log(order); // [[1, 'a'], [2, 'c'], [3, 'b'], [4, 'd']] + * }); + * + */ + var transform = createTransform(arrayEachResult, baseEachResult, symbolEachResult); + + /** + * @memberof async + * @namespace sortBy + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.sortBy(array, iterator, function(err, res) { + * console.log(res); // [1, 2, 3]; + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num); + * }, num * 10); + * }; + * async.sortBy(array, iterator, function(err, res) { + * console.log(res); // [1, 2, 3] + * console.log(order); // [[1, 0], [2, 2], [3, 1]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.sortBy(object, iterator, function(err, res) { + * console.log(res); // [1, 2, 3] + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num); + * }, num * 10); + * }; + * async.sortBy(object, iterator, function(err, res) { + * console.log(res); // [1, 2, 3] + *
console.log(order); // [[1, 'a'], [2, 'c'], [3, 'b']] + * }); + * + */ + var sortBy = createSortBy(arrayEachIndexValue, baseEachIndexValue, symbolEachIndexValue); + + /** + * @memberof async + * @namespace concat + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, [num]); + * }, num * 10); + * }; + * async.concat(array, iterator, function(err, res) { + * console.log(res); // [1, 2, 3]; + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, [num]); + * }, num * 10); + * }; + * async.concat(array, iterator, function(err, res) { + * console.log(res); // [1, 2, 3] + * console.log(order); // [[1, 0], [2, 2], [3, 1]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, [num]); + * }, num * 10); + * }; + * async.concat(object, iterator, function(err, res) { + * console.log(res); // [1, 2, 3] + * console.log(order); // [1, 2, 3] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, [num]); + * }, num * 10); + * }; + * async.concat(object, iterator, function(err, res) { + * console.log(res); // [1, 2, 3] + * console.log(order); // [[1, 'a'], [2, 'c'], [3, 'b']] + * }); + * + */ + var concat = createConcat(arrayEachIndex, baseEachIndex, symbolEachIndex); + + /** + * @memberof async + * @namespace groupBy + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [4.2, 6.4, 6.1]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, Math.floor(num)); + * }, num * 10); + * }; + * async.groupBy(array, iterator, function(err, res) { + * console.log(res); // { '4': [4.2], '6': [6.1, 6.4] } + * console.log(order); // [4.2, 6.1, 6.4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [4.2, 6.4, 6.1]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, Math.floor(num)); + * }, num * 10); + * }; + * async.groupBy(array, iterator, function(err, res) { + * console.log(res); // { '4': [4.2], '6': [6.1, 6.4] } + * console.log(order); // [[4.2, 0], [6.1, 2], [6.4, 1]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 4.2, b: 6.4, c: 6.1 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, Math.floor(num)); + * }, num * 10); + * }; + * async.groupBy(object, iterator, function(err, res) { + * console.log(res); // { '4': [4.2], '6': [6.1, 6.4] } + * console.log(order); // [4.2, 6.1, 6.4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 4.2, b: 6.4, c: 6.1 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, 
Math.floor(num)); + * }, num * 10); + * }; + * async.groupBy(object, iterator, function(err, res) { + * console.log(res); // { '4': [4.2], '6': [6.1, 6.4] } + * console.log(order); // [[4.2, 'a'], [6.1, 'c'], [6.4, 'b']] + * }); + * + */ + var groupBy = createGroupBy(arrayEachValue, baseEachValue, symbolEachValue); + + /** + * @memberof async + * @namespace parallel + * @param {Array|Object} tasks - functions + * @param {Function} callback + * @example + * + * var order = []; + * var tasks = [ + * function(done) { + * setTimeout(function() { + * order.push(1); + * done(null, 1); + * }, 10); + * }, + * function(done) { + * setTimeout(function() { + * order.push(2); + * done(null, 2); + * }, 30); + * }, + * function(done) { + * setTimeout(function() { + * order.push(3); + * done(null, 3); + * }, 40); + * }, + * function(done) { + * setTimeout(function() { + * order.push(4); + * done(null, 4); + * }, 20); + * } + * ]; + * async.parallel(tasks, function(err, res) { + * console.log(res); // [1, 2, 3, 4]; + * console.log(order); // [1, 4, 2, 3] + * }); + * + * @example + * + * var order = []; + * var tasks = { + * 'a': function(done) { + * setTimeout(function() { + * order.push(1); + * done(null, 1); + * }, 10); + * }, + * 'b': function(done) { + * setTimeout(function() { + * order.push(2); + * done(null, 2); + * }, 30); + * }, + * 'c': function(done) { + * setTimeout(function() { + * order.push(3); + * done(null, 3); + * }, 40); + * }, + * 'd': function(done) { + * setTimeout(function() { + * order.push(4); + * done(null, 4); + * }, 20); + * } + * }; + * async.parallel(tasks, function(err, res) { + * console.log(res); // { a: 1, b: 2, c: 3, d:4 } + * console.log(order); // [1, 4, 2, 3] + * }); + * + */ + var parallel = createParallel(arrayEachFunc, baseEachFunc); + + /** + * @memberof async + * @namespace applyEach + */ + var applyEach = createApplyEach(map); + + /** + * @memberof async + * @namespace applyEachSeries + */ + var applyEachSeries = createApplyEach(mapSeries); + + /** + * @memberof async + * @namespace log + */ + var log = createLogger('log'); + + /** + * @memberof async + * @namespace dir + */ + var dir = createLogger('dir'); + + /** + * @version 2.6.2 + * @namespace async + */ + var index = { + VERSION: '2.6.2', + + // Collections + each: each, + eachSeries: eachSeries, + eachLimit: eachLimit, + forEach: each, + forEachSeries: eachSeries, + forEachLimit: eachLimit, + eachOf: each, + eachOfSeries: eachSeries, + eachOfLimit: eachLimit, + forEachOf: each, + forEachOfSeries: eachSeries, + forEachOfLimit: eachLimit, + map: map, + mapSeries: mapSeries, + mapLimit: mapLimit, + mapValues: mapValues, + mapValuesSeries: mapValuesSeries, + mapValuesLimit: mapValuesLimit, + filter: filter, + filterSeries: filterSeries, + filterLimit: filterLimit, + select: filter, + selectSeries: filterSeries, + selectLimit: filterLimit, + reject: reject, + rejectSeries: rejectSeries, + rejectLimit: rejectLimit, + detect: detect, + detectSeries: detectSeries, + detectLimit: detectLimit, + find: detect, + findSeries: detectSeries, + findLimit: detectLimit, + pick: pick, + pickSeries: pickSeries, + pickLimit: pickLimit, + omit: omit, + omitSeries: omitSeries, + omitLimit: omitLimit, + reduce: reduce, + inject: reduce, + foldl: reduce, + reduceRight: reduceRight, + foldr: reduceRight, + transform: transform, + transformSeries: transformSeries, + transformLimit: transformLimit, + sortBy: sortBy, + sortBySeries: sortBySeries, + sortByLimit: sortByLimit, + some: some, + someSeries: someSeries, + someLimit: 
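+ // editor's note (hedged): each alias in this table points at the same function
+ // object, mirroring caolan/async naming; a quick sketch, assuming the usual
+ // require path:
+ //   var async = require('neo-async');
+ //   console.assert(async.forEach === async.each && async.any === async.some);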
someLimit, + any: some, + anySeries: someSeries, + anyLimit: someLimit, + every: every, + everySeries: everySeries, + everyLimit: everyLimit, + all: every, + allSeries: everySeries, + allLimit: everyLimit, + concat: concat, + concatSeries: concatSeries, + concatLimit: concatLimit, + groupBy: groupBy, + groupBySeries: groupBySeries, + groupByLimit: groupByLimit, + + // Control Flow + parallel: parallel, + series: series, + parallelLimit: parallelLimit, + tryEach: tryEach, + waterfall: waterfall, + angelFall: angelFall, + angelfall: angelFall, + whilst: whilst, + doWhilst: doWhilst, + until: until, + doUntil: doUntil, + during: during, + doDuring: doDuring, + forever: forever, + compose: compose, + seq: seq, + applyEach: applyEach, + applyEachSeries: applyEachSeries, + queue: queue, + priorityQueue: priorityQueue, + cargo: cargo, + auto: auto, + autoInject: autoInject, + retry: retry, + retryable: retryable, + iterator: iterator, + times: times, + timesSeries: timesSeries, + timesLimit: timesLimit, + race: race, + + // Utils + apply: apply, + nextTick: asyncNextTick, + setImmediate: asyncSetImmediate, + memoize: memoize, + unmemoize: unmemoize, + ensureAsync: ensureAsync, + constant: constant, + asyncify: asyncify, + wrapSync: asyncify, + log: log, + dir: dir, + reflect: reflect, + reflectAll: reflectAll, + timeout: timeout, + createLogger: createLogger, + + // Mode + safe: safe, + fast: fast + }; + + exports['default'] = index; + baseEachSync( + index, + function(func, key) { + exports[key] = func; + }, + nativeKeys(index) + ); + + /** + * @private + */ + function createImmediate(safeMode) { + var delay = function delay(fn) { + var args = slice(arguments, 1); + setTimeout(function() { + fn.apply(null, args); + }); + }; + asyncSetImmediate = typeof setImmediate === func ? setImmediate : delay; + if (typeof process === obj && typeof process.nextTick === func) { + nextTick = /^v0.10/.test(process.version) ? asyncSetImmediate : process.nextTick; + asyncNextTick = /^v0/.test(process.version) ? asyncSetImmediate : process.nextTick; + } else { + asyncNextTick = nextTick = asyncSetImmediate; + } + if (safeMode === false) { + nextTick = function(cb) { + cb(); + }; + } + } + + /* sync functions based on lodash */ + + /** + * Converts `arguments` to an array. + * + * @private + * @param {Array} array - The array to slice. + */ + function createArray(array) { + var index = -1; + var size = array.length; + var result = Array(size); + + while (++index < size) { + result[index] = array[index]; + } + return result; + } + + /** + * Creates an array sliced from `start`. + * + * @private + * @param {Array} array - The array to slice. + * @param {number} start - The start position. + */ + function slice(array, start) { + var end = array.length; + var index = -1; + var size = end - start; + if (size <= 0) { + return []; + } + var result = Array(size); + + while (++index < size) { + result[index] = array[index + start]; + } + return result; + } + + /** + * @private + * @param {Object} object + */ + function objectClone(object) { + var keys = nativeKeys(object); + var size = keys.length; + var index = -1; + var result = {}; + + while (++index < size) { + var key = keys[index]; + result[key] = object[key]; + } + return result; + } + + /** + * Creates an array with all falsey values removed. + * + * @private + * @param {Array} array - The array to compact.
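+ * @example
+ * // editor's note (hedged): every falsey slot is dropped, which is how the
+ * // filter/reject helpers shrink their pre-sized result arrays:
+ * // compact([1, undefined, 2, 0, 3]); // => [1, 2, 3]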
+ */ + function compact(array) { + var index = -1; + var size = array.length; + var result = []; + + while (++index < size) { + var value = array[index]; + if (value) { + result[result.length] = value; + } + } + return result; + } + + /** + * Creates a reversed copy of the array. + * + * @private + * @param {Array} array - The array to reverse. + */ + function reverse(array) { + var index = -1; + var size = array.length; + var result = Array(size); + var resIndex = size; + + while (++index < size) { + result[--resIndex] = array[index]; + } + return result; + } + + /** + * Checks if the key is an own property of the object. + * + * @private + * @param {Object} object - The object to inspect. + * @param {string} key - The key to check. + */ + function has(object, key) { + return object.hasOwnProperty(key); + } + + /** + * Checks that the target is not included in the array. + * @private + * @param {Array} array + * @param {*} target + */ + function notInclude(array, target) { + var index = -1; + var size = array.length; + + while (++index < size) { + if (array[index] === target) { + return false; + } + } + return true; + } + + /** + * @private + * @param {Array} array - The array to iterate over. + * @param {Function} iterator - The function invoked per iteration. + */ + function arrayEachSync(array, iterator) { + var index = -1; + var size = array.length; + + while (++index < size) { + iterator(array[index], index); + } + return array; + } + + /** + * @private + * @param {Object} object - The object to iterate over. + * @param {Function} iterator - The function invoked per iteration. + * @param {Array} keys + */ + function baseEachSync(object, iterator, keys) { + var index = -1; + var size = keys.length; + + while (++index < size) { + var key = keys[index]; + iterator(object[key], key); + } + return object; + } + + /** + * @private + * @param {number} n + * @param {Function} iterator + */ + function timesSync(n, iterator) { + var index = -1; + while (++index < n) { + iterator(index); + } + } + + /** + * @private + * @param {Array} array + * @param {number[]} criteria + */ + function sortByCriteria(array, criteria) { + var l = array.length; + var indices = Array(l); + var i; + for (i = 0; i < l; i++) { + indices[i] = i; + } + quickSort(criteria, 0, l - 1, indices); + var result = Array(l); + for (var n = 0; n < l; n++) { + i = indices[n]; + result[n] = i === undefined ? array[n] : array[i]; + } + return result; + } + + function partition(array, i, j, mid, indices) { + var l = i; + var r = j; + while (l <= r) { + i = l; + while (l < r && array[l] < mid) { + l++; + } + while (r >= i && array[r] >= mid) { + r--; + } + if (l > r) { + break; + } + swap(array, indices, l++, r--); + } + return l; + } + + function swap(array, indices, l, r) { + var n = array[l]; + array[l] = array[r]; + array[r] = n; + var i = indices[l]; + indices[l] = indices[r]; + indices[r] = i; + } + + function quickSort(array, i, j, indices) { + if (i === j) { + return; + } + var k = i; + while (++k <= j && array[i] === array[k]) { + var l = k - 1; + if (indices[l] > indices[k]) { + var index = indices[l]; + indices[l] = indices[k]; + indices[k] = index; + } + } + if (k > j) { + return; + } + var p = array[i] > array[k] ?
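+ /* editor's note (hedged): partition/swap keep `indices` in lockstep with the
+  * criteria array being sorted, so sortByCriteria can map each sorted slot back
+  * to its original value; sketch: sortByCriteria(['b', 'c', 'a'], [2, 3, 1])
+  * yields ['a', 'b', 'c'] because indices end up as [2, 0, 1]. */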
i : k; + k = partition(array, i, j, array[p], indices); + quickSort(array, i, k - 1, indices); + quickSort(array, k, j, indices); + } + + /** + * @private + */ + function makeConcatResult(array) { + var result = []; + arrayEachSync(array, function(value) { + if (value === noop) { + return; + } + if (isArray(value)) { + nativePush.apply(result, value); + } else { + result.push(value); + } + }); + return result; + } + + /* async functions */ + + /** + * @private + */ + function arrayEach(array, iterator, callback) { + var index = -1; + var size = array.length; + + if (iterator.length === 3) { + while (++index < size) { + iterator(array[index], index, onlyOnce(callback)); + } + } else { + while (++index < size) { + iterator(array[index], onlyOnce(callback)); + } + } + } + + /** + * @private + */ + function baseEach(object, iterator, callback, keys) { + var key; + var index = -1; + var size = keys.length; + + if (iterator.length === 3) { + while (++index < size) { + key = keys[index]; + iterator(object[key], key, onlyOnce(callback)); + } + } else { + while (++index < size) { + iterator(object[keys[index]], onlyOnce(callback)); + } + } + } + + /** + * @private + */ + function symbolEach(collection, iterator, callback) { + var iter = collection[iteratorSymbol](); + var index = 0; + var item; + if (iterator.length === 3) { + while ((item = iter.next()).done === false) { + iterator(item.value, index++, onlyOnce(callback)); + } + } else { + while ((item = iter.next()).done === false) { + index++; + iterator(item.value, onlyOnce(callback)); + } + } + return index; + } + + /** + * @private + */ + function arrayEachResult(array, result, iterator, callback) { + var index = -1; + var size = array.length; + + if (iterator.length === 4) { + while (++index < size) { + iterator(result, array[index], index, onlyOnce(callback)); + } + } else { + while (++index < size) { + iterator(result, array[index], onlyOnce(callback)); + } + } + } + + /** + * @private + */ + function baseEachResult(object, result, iterator, callback, keys) { + var key; + var index = -1; + var size = keys.length; + + if (iterator.length === 4) { + while (++index < size) { + key = keys[index]; + iterator(result, object[key], key, onlyOnce(callback)); + } + } else { + while (++index < size) { + iterator(result, object[keys[index]], onlyOnce(callback)); + } + } + } + + /** + * @private + */ + function symbolEachResult(collection, result, iterator, callback) { + var item; + var index = 0; + var iter = collection[iteratorSymbol](); + + if (iterator.length === 4) { + while ((item = iter.next()).done === false) { + iterator(result, item.value, index++, onlyOnce(callback)); + } + } else { + while ((item = iter.next()).done === false) { + index++; + iterator(result, item.value, onlyOnce(callback)); + } + } + return index; + } + + /** + * @private + */ + function arrayEachFunc(array, createCallback) { + var index = -1; + var size = array.length; + + while (++index < size) { + array[index](createCallback(index)); + } + } + + /** + * @private + */ + function baseEachFunc(object, createCallback, keys) { + var key; + var index = -1; + var size = keys.length; + + while (++index < size) { + key = keys[index]; + object[key](createCallback(key)); + } + } + + /** + * @private + */ + function arrayEachIndex(array, iterator, createCallback) { + var index = -1; + var size = array.length; + + if (iterator.length === 3) { + while (++index < size) { + iterator(array[index], index, createCallback(index)); + } + } else { + while (++index < size) { +
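+ /* editor's note (hedged): dispatch keys off Function.length, so an iterator
+  * declared (value, done) lands in this two-argument branch; note that ES2015
+  * default or rest parameters shrink a function's reported length and would
+  * route such an iterator here as well. */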
iterator(array[index], createCallback(index)); + } + } + } + + /** + * @private + */ + function baseEachIndex(object, iterator, createCallback, keys) { + var key; + var index = -1; + var size = keys.length; + + if (iterator.length === 3) { + while (++index < size) { + key = keys[index]; + iterator(object[key], key, createCallback(index)); + } + } else { + while (++index < size) { + iterator(object[keys[index]], createCallback(index)); + } + } + } + + /** + * @private + */ + function symbolEachIndex(collection, iterator, createCallback) { + var item; + var index = 0; + var iter = collection[iteratorSymbol](); + + if (iterator.length === 3) { + while ((item = iter.next()).done === false) { + iterator(item.value, index, createCallback(index++)); + } + } else { + while ((item = iter.next()).done === false) { + iterator(item.value, createCallback(index++)); + } + } + return index; + } + + /** + * @private + */ + function baseEachKey(object, iterator, createCallback, keys) { + var key; + var index = -1; + var size = keys.length; + + if (iterator.length === 3) { + while (++index < size) { + key = keys[index]; + iterator(object[key], key, createCallback(key)); + } + } else { + while (++index < size) { + key = keys[index]; + iterator(object[key], createCallback(key)); + } + } + } + + /** + * @private + */ + function symbolEachKey(collection, iterator, createCallback) { + var item; + var index = 0; + var iter = collection[iteratorSymbol](); + + if (iterator.length === 3) { + while ((item = iter.next()).done === false) { + iterator(item.value, index, createCallback(index++)); + } + } else { + while ((item = iter.next()).done === false) { + iterator(item.value, createCallback(index++)); + } + } + return index; + } + + /** + * @private + */ + function arrayEachValue(array, iterator, createCallback) { + var value; + var index = -1; + var size = array.length; + + if (iterator.length === 3) { + while (++index < size) { + value = array[index]; + iterator(value, index, createCallback(value)); + } + } else { + while (++index < size) { + value = array[index]; + iterator(value, createCallback(value)); + } + } + } + + /** + * @private + */ + function baseEachValue(object, iterator, createCallback, keys) { + var key, value; + var index = -1; + var size = keys.length; + + if (iterator.length === 3) { + while (++index < size) { + key = keys[index]; + value = object[key]; + iterator(value, key, createCallback(value)); + } + } else { + while (++index < size) { + value = object[keys[index]]; + iterator(value, createCallback(value)); + } + } + } + + /** + * @private + */ + function symbolEachValue(collection, iterator, createCallback) { + var value, item; + var index = 0; + var iter = collection[iteratorSymbol](); + + if (iterator.length === 3) { + while ((item = iter.next()).done === false) { + value = item.value; + iterator(value, index++, createCallback(value)); + } + } else { + while ((item = iter.next()).done === false) { + index++; + value = item.value; + iterator(value, createCallback(value)); + } + } + return index; + } + + /** + * @private + */ + function arrayEachIndexValue(array, iterator, createCallback) { + var value; + var index = -1; + var size = array.length; + + if (iterator.length === 3) { + while (++index < size) { + value = array[index]; + iterator(value, index, createCallback(index, value)); + } + } else { + while (++index < size) { + value = array[index]; + iterator(value, createCallback(index, value)); + } + } + } + + /** + * @private + */ + function baseEachIndexValue(object, iterator, 
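+ /* editor's note (hedged): the (index, value) pair handed to createCallback is
+  * what lets filter/reject park each passing value in its original slot, so the
+  * compact()ed result keeps input order despite out-of-order completion. */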
createCallback, keys) { + var key, value; + var index = -1; + var size = keys.length; + + if (iterator.length === 3) { + while (++index < size) { + key = keys[index]; + value = object[key]; + iterator(value, key, createCallback(index, value)); + } + } else { + while (++index < size) { + value = object[keys[index]]; + iterator(value, createCallback(index, value)); + } + } + } + + /** + * @private + */ + function symbolEachIndexValue(collection, iterator, createCallback) { + var value, item; + var index = 0; + var iter = collection[iteratorSymbol](); + + if (iterator.length === 3) { + while ((item = iter.next()).done === false) { + value = item.value; + iterator(value, index, createCallback(index++, value)); + } + } else { + while ((item = iter.next()).done === false) { + value = item.value; + iterator(value, createCallback(index++, value)); + } + } + return index; + } + + /** + * @private + */ + function baseEachKeyValue(object, iterator, createCallback, keys) { + var key, value; + var index = -1; + var size = keys.length; + + if (iterator.length === 3) { + while (++index < size) { + key = keys[index]; + value = object[key]; + iterator(value, key, createCallback(key, value)); + } + } else { + while (++index < size) { + key = keys[index]; + value = object[key]; + iterator(value, createCallback(key, value)); + } + } + } + + /** + * @private + */ + function symbolEachKeyValue(collection, iterator, createCallback) { + var value, item; + var index = 0; + var iter = collection[iteratorSymbol](); + + if (iterator.length === 3) { + while ((item = iter.next()).done === false) { + value = item.value; + iterator(value, index, createCallback(index++, value)); + } + } else { + while ((item = iter.next()).done === false) { + value = item.value; + iterator(value, createCallback(index++, value)); + } + } + return index; + } + + /** + * @private + * @param {Function} func + */ + function onlyOnce(func) { + return function(err, res) { + var fn = func; + func = throwError; + fn(err, res); + }; + } + + /** + * @private + * @param {Function} func + */ + function once(func) { + return function(err, res) { + var fn = func; + func = noop; + fn(err, res); + }; + } + + /** + * @private + * @param {Function} arrayEach + * @param {Function} baseEach + */ + function createEach(arrayEach, baseEach, symbolEach) { + return function each(collection, iterator, callback) { + callback = once(callback || noop); + var size, keys; + var completed = 0; + if (isArray(collection)) { + size = collection.length; + arrayEach(collection, iterator, done); + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = symbolEach(collection, iterator, done); + size && size === completed && callback(null); + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + baseEach(collection, iterator, done, keys); + } + if (!size) { + callback(null); + } + + function done(err, bool) { + if (err) { + callback = once(callback); + callback(err); + } else if (++completed === size) { + callback(null); + } else if (bool === false) { + callback = once(callback); + callback(null); + } + } + }; + } + + /** + * @private + * @param {Function} arrayEach + * @param {Function} baseEach + * @param {Function} symbolEach + */ + function createMap(arrayEach, baseEach, symbolEach, useArray) { + var init, clone; + if (useArray) { + init = Array; + clone = createArray; + } else { + init = function() { + return {}; + }; + clone = objectClone; + } + + return function(collection, iterator, 
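+ /* editor's note (hedged): for async.map, init/clone are Array/createArray and
+  * results land by numeric index; for mapValues they are {}/objectClone and land
+  * by key, so an early error still hands the callback a clone of the partial
+  * result. */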
callback) { + callback = callback || noop; + var size, keys, result; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + result = init(size); + arrayEach(collection, iterator, createCallback); + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + // TODO: size could be changed + result = init(0); + size = symbolEach(collection, iterator, createCallback); + size && size === completed && callback(null, result); + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + result = init(size); + baseEach(collection, iterator, createCallback, keys); + } + if (!size) { + callback(null, init()); + } + + function createCallback(key) { + return function done(err, res) { + if (key === null) { + throwError(); + } + if (err) { + key = null; + callback = once(callback); + callback(err, clone(result)); + return; + } + result[key] = res; + key = null; + if (++completed === size) { + callback(null, result); + } + }; + } + }; + } + + /** + * @private + * @param {Function} arrayEach + * @param {Function} baseEach + * @param {Function} symbolEach + * @param {boolean} bool + */ + function createFilter(arrayEach, baseEach, symbolEach, bool) { + return function(collection, iterator, callback) { + callback = callback || noop; + var size, keys, result; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + result = Array(size); + arrayEach(collection, iterator, createCallback); + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + result = []; + size = symbolEach(collection, iterator, createCallback); + size && size === completed && callback(null, compact(result)); + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + result = Array(size); + baseEach(collection, iterator, createCallback, keys); + } + if (!size) { + return callback(null, []); + } + + function createCallback(index, value) { + return function done(err, res) { + if (index === null) { + throwError(); + } + if (err) { + index = null; + callback = once(callback); + callback(err); + return; + } + if (!!res === bool) { + result[index] = value; + } + index = null; + if (++completed === size) { + callback(null, compact(result)); + } + }; + } + }; + } + + /** + * @private + * @param {boolean} bool + */ + function createFilterSeries(bool) { + return function(collection, iterator, callback) { + callback = onlyOnce(callback || noop); + var size, key, value, keys, iter, item, iterate; + var sync = false; + var completed = 0; + var result = []; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size) { + return callback(null, []); + } + iterate(); + + function arrayIterator() { + value = collection[completed]; + iterator(value, done); + } + + function arrayIteratorWithIndex() { + value = collection[completed]; + iterator(value, completed, done); + } + + function symbolIterator() { + item = iter.next(); + value = item.value; + item.done ? 
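+ // (editorial note, not part of the original source) when the ES2015 iterator is + // exhausted, the series finishes with whatever passed the test so far. Hedged + // usage sketch, assuming the exported filterSeries built from this factory: + // async.filterSeries([1, 3, 2], function(n, done) { done(null, n % 2 === 1); }, + // function(err, res) { console.log(res); /* [1, 3] */ });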
callback(null, result) : iterator(value, done); + } + + function symbolIteratorWithKey() { + item = iter.next(); + value = item.value; + item.done ? callback(null, result) : iterator(value, completed, done); + } + + function objectIterator() { + key = keys[completed]; + value = collection[key]; + iterator(value, done); + } + + function objectIteratorWithKey() { + key = keys[completed]; + value = collection[key]; + iterator(value, key, done); + } + + function done(err, res) { + if (err) { + callback(err); + return; + } + if (!!res === bool) { + result[result.length] = value; + } + if (++completed === size) { + iterate = throwError; + callback(null, result); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + } + }; + } + + /** + * @private + * @param {boolean} bool + */ + function createFilterLimit(bool) { + return function(collection, limit, iterator, callback) { + callback = callback || noop; + var size, index, key, value, keys, iter, item, iterate, result; + var sync = false; + var started = 0; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + result = []; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size || isNaN(limit) || limit < 1) { + return callback(null, []); + } + result = result || Array(size); + timesSync(limit > size ? size : limit, iterate); + + function arrayIterator() { + index = started++; + if (index < size) { + value = collection[index]; + iterator(value, createCallback(value, index)); + } + } + + function arrayIteratorWithIndex() { + index = started++; + if (index < size) { + value = collection[index]; + iterator(value, index, createCallback(value, index)); + } + } + + function symbolIterator() { + item = iter.next(); + if (item.done === false) { + value = item.value; + iterator(value, createCallback(value, started++)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, compact(result)); + } + } + + function symbolIteratorWithKey() { + item = iter.next(); + if (item.done === false) { + value = item.value; + iterator(value, started, createCallback(value, started++)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, compact(result)); + } + } + + function objectIterator() { + index = started++; + if (index < size) { + value = collection[keys[index]]; + iterator(value, createCallback(value, index)); + } + } + + function objectIteratorWithKey() { + index = started++; + if (index < size) { + key = keys[index]; + value = collection[key]; + iterator(value, key, createCallback(value, index)); + } + } + + function createCallback(value, index) { + return function(err, res) { + if (index === null) { + throwError(); + } + if (err) { + index = null; + iterate = noop; + callback = once(callback); + callback(err); + return; + } + if (!!res === bool) { + result[index] = value; + } + index = null; + if (++completed === size) { + callback = onlyOnce(callback); + callback(null, compact(result)); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = 
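+ // (editorial note, not part of the original source) the sync flag below is a + // trampoline: while iterate() is still running synchronously, a synchronous + // done() re-entry schedules the next step with nextTick instead of recursing, + // so fully synchronous iteratees cannot overflow the call stack.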
false; + }; + } + }; + } + + /** + * @memberof async + * @namespace eachSeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(); + * }, num * 10); + * }; + * async.eachSeries(array, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(); + * }, num * 10); + * }; + * async.eachSeries(array, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [[1, 0], [3, 1], [2, 2]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(); + * }, num * 10); + * }; + * async.eachSeries(object, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(); + * }, num * 10); + * }; + * async.eachSeries(object, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [[1, 'a'], [3, 'b'], [2, 'c']] + * }); + * + * @example + * + * // break + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num !== 3); + * }, num * 10); + * }; + * async.eachSeries(array, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [1, 3] + * }); + */ + function eachSeries(collection, iterator, callback) { + callback = onlyOnce(callback || noop); + var size, key, keys, iter, item, iterate; + var sync = false; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size) { + return callback(null); + } + iterate(); + + function arrayIterator() { + iterator(collection[completed], done); + } + + function arrayIteratorWithIndex() { + iterator(collection[completed], completed, done); + } + + function symbolIterator() { + item = iter.next(); + item.done ? callback(null) : iterator(item.value, done); + } + + function symbolIteratorWithKey() { + item = iter.next(); + item.done ? 
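+ // (editorial note, not part of the original source) iterator exhaustion ends the + // series here with callback(null); independently, an iteratee can stop early by + // calling done(null, false), as the "break" example in the JSDoc above shows.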
callback(null) : iterator(item.value, completed, done); + } + + function objectIterator() { + iterator(collection[keys[completed]], done); + } + + function objectIteratorWithKey() { + key = keys[completed]; + iterator(collection[key], key, done); + } + + function done(err, bool) { + if (err) { + callback(err); + } else if (++completed === size || bool === false) { + iterate = throwError; + callback(null); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + } + } + + /** + * @memberof async + * @namespace eachLimit + * @param {Array|Object} collection + * @param {number} limit - limit >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(); + * }, num * 10); + * }; + * async.eachLimit(array, 2, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(); + * }, num * 10); + * }; + * async.eachLimit(array, 2, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [[1, 0], [3, 2], [5, 1], [2, 4], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(); + * }, num * 10); + * }; + * async.eachLimit(object, 2, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(); + * }, num * 10); + * }; + * async.eachLimit(object, 2, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [[1, 'a'], [3, 'c'], [5, 'b'], [2, 'e'], [4, 'd']] + * }); + * + * @example + * + * // break + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num !== 5); + * }, num * 10); + * }; + * async.eachLimit(array, 2, iterator, function(err, res) { + * console.log(res); // undefined + * console.log(order); // [1, 3, 5] + * }); + * + */ + function eachLimit(collection, limit, iterator, callback) { + callback = callback || noop; + var size, index, key, keys, iter, item, iterate; + var sync = false; + var started = 0; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } else { + return callback(null); + } + if (!size || isNaN(limit) || limit < 1) { + return callback(null); + } + timesSync(limit > size ? 
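+ // (editorial note, not part of the original source) bootstrap: timesSync kicks + // off min(limit, size) concurrent "lanes" synchronously; each lane re-arms + // itself by calling iterate() again from done() until the collection is drained.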
size : limit, iterate); + + function arrayIterator() { + if (started < size) { + iterator(collection[started++], done); + } + } + + function arrayIteratorWithIndex() { + index = started++; + if (index < size) { + iterator(collection[index], index, done); + } + } + + function symbolIterator() { + item = iter.next(); + if (item.done === false) { + started++; + iterator(item.value, done); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null); + } + } + + function symbolIteratorWithKey() { + item = iter.next(); + if (item.done === false) { + iterator(item.value, started++, done); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null); + } + } + + function objectIterator() { + if (started < size) { + iterator(collection[keys[started++]], done); + } + } + + function objectIteratorWithKey() { + index = started++; + if (index < size) { + key = keys[index]; + iterator(collection[key], key, done); + } + } + + function done(err, bool) { + if (err || bool === false) { + iterate = noop; + callback = once(callback); + callback(err); + } else if (++completed === size) { + iterator = noop; + iterate = throwError; + callback = onlyOnce(callback); + callback(null); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + } + } + + /** + * @memberof async + * @namespace mapSeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.mapSeries(array, iterator, function(err, res) { + * console.log(res); // [1, 3, 2]; + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num); + * }, num * 10); + * }; + * async.mapSeries(array, iterator, function(err, res) { + * console.log(res); // [1, 3, 2] + * console.log(order); // [[1, 0], [3, 1], [2, 2]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.mapSeries(object, iterator, function(err, res) { + * console.log(res); // [1, 3, 2] + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num); + * }, num * 10); + * }; + * async.mapSeries(object, iterator, function(err, res) { + * console.log(res); // [1, 3, 2] + * console.log(order); // [[1, 'a'], [3, 'b'], [2, 'c']] + * }); + * + */ + function mapSeries(collection, iterator, callback) { + callback = callback || noop; + var size, key, keys, iter, item, result, iterate; + var sync = false; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? 
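+ // (editorial note, not part of the original source) dispatch on declared arity: + // a 3-argument iteratee such as function(num, index, done) {} has length 3 and + // receives (value, index, done); a 2-argument one receives only (value, done).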
arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + result = []; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size) { + return callback(null, []); + } + result = result || Array(size); + iterate(); + + function arrayIterator() { + iterator(collection[completed], done); + } + + function arrayIteratorWithIndex() { + iterator(collection[completed], completed, done); + } + + function symbolIterator() { + item = iter.next(); + item.done ? callback(null, result) : iterator(item.value, done); + } + + function symbolIteratorWithKey() { + item = iter.next(); + item.done ? callback(null, result) : iterator(item.value, completed, done); + } + + function objectIterator() { + iterator(collection[keys[completed]], done); + } + + function objectIteratorWithKey() { + key = keys[completed]; + iterator(collection[key], key, done); + } + + function done(err, res) { + if (err) { + iterate = throwError; + callback = onlyOnce(callback); + callback(err, createArray(result)); + return; + } + result[completed] = res; + if (++completed === size) { + iterate = throwError; + callback(null, result); + callback = throwError; + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + } + } + + /** + * @memberof async + * @namespace mapLimit + * @param {Array|Object} collection + * @param {number} limit - limit >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.mapLimit(array, 2, iterator, function(err, res) { + * console.log(res); // [1, 5, 3, 4, 2] + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num); + * }, num * 10); + * }; + * async.mapLimit(array, 2, iterator, function(err, res) { + * console.log(res); // [1, 5, 3, 4, 2] + * console.log(order); // [[1, 0], [3, 2], [5, 1], [2, 4], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.mapLimit(object, 2, iterator, function(err, res) { + * console.log(res); // [1, 5, 3, 4, 2] + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num); + * }, num * 10); + * }; + * async.mapLimit(object, 2, iterator, function(err, res) { + * console.log(res); // [1, 5, 3, 4, 2] + * console.log(order); // [[1, 'a'], [3, 'c'], [5, 'b'], [2, 'e'], [4, 'd']] + * }); + * + */ + function mapLimit(collection, limit, iterator, callback) { + callback = callback || noop; + var size, index, key, keys, iter, item, 
result, iterate; + var sync = false; + var started = 0; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + result = []; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size || isNaN(limit) || limit < 1) { + return callback(null, []); + } + result = result || Array(size); + timesSync(limit > size ? size : limit, iterate); + + function arrayIterator() { + index = started++; + if (index < size) { + iterator(collection[index], createCallback(index)); + } + } + + function arrayIteratorWithIndex() { + index = started++; + if (index < size) { + iterator(collection[index], index, createCallback(index)); + } + } + + function symbolIterator() { + item = iter.next(); + if (item.done === false) { + iterator(item.value, createCallback(started++)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, result); + } + } + + function symbolIteratorWithKey() { + item = iter.next(); + if (item.done === false) { + iterator(item.value, started, createCallback(started++)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, result); + } + } + + function objectIterator() { + index = started++; + if (index < size) { + iterator(collection[keys[index]], createCallback(index)); + } + } + + function objectIteratorWithKey() { + index = started++; + if (index < size) { + key = keys[index]; + iterator(collection[key], key, createCallback(index)); + } + } + + function createCallback(index) { + return function(err, res) { + if (index === null) { + throwError(); + } + if (err) { + index = null; + iterate = noop; + callback = once(callback); + callback(err, createArray(result)); + return; + } + result[index] = res; + index = null; + if (++completed === size) { + iterate = throwError; + callback(null, result); + callback = throwError; + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + }; + } + } + + /** + * @memberof async + * @namespace mapValuesSeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.mapValuesSeries(array, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 3, '2': 2 } + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num); + * }, num * 10); + * }; + * async.mapValuesSeries(array, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 3, '2': 2 } + * console.log(order); // [[1, 0], [3, 1], [2, 2]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * 
}, num * 10); + * }; + * async.mapValuesSeries(object, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 3, c: 2 } + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num); + * }, num * 10); + * }; + * async.mapValuesSeries(object, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 3, c: 2 } + * console.log(order); // [[1, 'a'], [3, 'b'], [2, 'c']] + * }); + * + */ + function mapValuesSeries(collection, iterator, callback) { + callback = callback || noop; + var size, key, keys, iter, item, iterate; + var sync = false; + var result = {}; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size) { + return callback(null, result); + } + iterate(); + + function arrayIterator() { + key = completed; + iterator(collection[completed], done); + } + + function arrayIteratorWithIndex() { + key = completed; + iterator(collection[completed], completed, done); + } + + function symbolIterator() { + key = completed; + item = iter.next(); + item.done ? callback(null, result) : iterator(item.value, done); + } + + function symbolIteratorWithKey() { + key = completed; + item = iter.next(); + item.done ? 
callback(null, result) : iterator(item.value, completed, done); + } + + function objectIterator() { + key = keys[completed]; + iterator(collection[key], done); + } + + function objectIteratorWithKey() { + key = keys[completed]; + iterator(collection[key], key, done); + } + + function done(err, res) { + if (err) { + iterate = throwError; + callback = onlyOnce(callback); + callback(err, objectClone(result)); + return; + } + result[key] = res; + if (++completed === size) { + iterate = throwError; + callback(null, result); + callback = throwError; + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + } + } + + /** + * @memberof async + * @namespace mapValuesLimit + * @param {Array|Object} collection + * @param {number} limit - limit >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.mapValuesLimit(array, 2, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 5, '2': 3, '3': 4, '4': 2 } + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num); + * }, num * 10); + * }; + * async.mapValuesLimit(array, 2, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 5, '2': 3, '3': 4, '4': 2 } + * console.log(order); // [[1, 0], [3, 2], [5, 1], [2, 4], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.mapValuesLimit(object, 2, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 5, c: 3, d: 4, e: 2 } + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num); + * }, num * 10); + * }; + * async.mapValuesLimit(object, 2, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 5, c: 3, d: 4, e: 2 } + * console.log(order); // [[1, 'a'], [3, 'c'], [5, 'b'], [2, 'e'], [4, 'd']] + * }); + * + */ + function mapValuesLimit(collection, limit, iterator, callback) { + callback = callback || noop; + var size, index, key, keys, iter, item, iterate; + var sync = false; + var result = {}; + var started = 0; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size || isNaN(limit) || limit < 1) { + return callback(null, result); + } + timesSync(limit > size ? 
size : limit, iterate); + + function arrayIterator() { + index = started++; + if (index < size) { + iterator(collection[index], createCallback(index)); + } + } + + function arrayIteratorWithIndex() { + index = started++; + if (index < size) { + iterator(collection[index], index, createCallback(index)); + } + } + + function symbolIterator() { + item = iter.next(); + if (item.done === false) { + iterator(item.value, createCallback(started++)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, result); + } + } + + function symbolIteratorWithKey() { + item = iter.next(); + if (item.done === false) { + iterator(item.value, started, createCallback(started++)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, result); + } + } + + function objectIterator() { + index = started++; + if (index < size) { + key = keys[index]; + iterator(collection[key], createCallback(key)); + } + } + + function objectIteratorWithKey() { + index = started++; + if (index < size) { + key = keys[index]; + iterator(collection[key], key, createCallback(key)); + } + } + + function createCallback(key) { + return function(err, res) { + if (key === null) { + throwError(); + } + if (err) { + key = null; + iterate = noop; + callback = once(callback); + callback(err, objectClone(result)); + return; + } + result[key] = res; + key = null; + if (++completed === size) { + callback(null, result); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + }; + } + } + + /** + * @private + * @param {Function} arrayEach + * @param {Function} baseEach + * @param {Function} symbolEach + * @param {boolean} bool + */ + function createDetect(arrayEach, baseEach, symbolEach, bool) { + return function(collection, iterator, callback) { + callback = callback || noop; + var size, keys; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + arrayEach(collection, iterator, createCallback); + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = symbolEach(collection, iterator, createCallback); + size && size === completed && callback(null); + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + baseEach(collection, iterator, createCallback, keys); + } + if (!size) { + callback(null); + } + + function createCallback(value) { + var called = false; + return function done(err, res) { + if (called) { + throwError(); + } + called = true; + if (err) { + callback = once(callback); + callback(err); + } else if (!!res === bool) { + callback = once(callback); + callback(null, value); + } else if (++completed === size) { + callback(null); + } + }; + } + }; + } + + /** + * @private + * @param {boolean} bool + */ + function createDetectSeries(bool) { + return function(collection, iterator, callback) { + callback = onlyOnce(callback || noop); + var size, key, value, keys, iter, item, iterate; + var sync = false; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? 
objectIteratorWithKey : objectIterator; + } + if (!size) { + return callback(null); + } + iterate(); + + function arrayIterator() { + value = collection[completed]; + iterator(value, done); + } + + function arrayIteratorWithIndex() { + value = collection[completed]; + iterator(value, completed, done); + } + + function symbolIterator() { + item = iter.next(); + value = item.value; + item.done ? callback(null) : iterator(value, done); + } + + function symbolIteratorWithKey() { + item = iter.next(); + value = item.value; + item.done ? callback(null) : iterator(value, completed, done); + } + + function objectIterator() { + value = collection[keys[completed]]; + iterator(value, done); + } + + function objectIteratorWithKey() { + key = keys[completed]; + value = collection[key]; + iterator(value, key, done); + } + + function done(err, res) { + if (err) { + callback(err); + } else if (!!res === bool) { + iterate = throwError; + callback(null, value); + } else if (++completed === size) { + iterate = throwError; + callback(null); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + } + }; + } + + /** + * @private + * @param {boolean} bool + */ + function createDetectLimit(bool) { + return function(collection, limit, iterator, callback) { + callback = callback || noop; + var size, index, key, value, keys, iter, item, iterate; + var sync = false; + var started = 0; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size || isNaN(limit) || limit < 1) { + return callback(null); + } + timesSync(limit > size ? 
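+ // (editorial note, not part of the original source) under limit-style + // concurrency, "first match" means first to complete, not lowest index. Hedged + // usage sketch, assuming the exported detectLimit built from this factory: + // async.detectLimit([1, 5, 3], 2, function(n, done) { done(null, n % 2 === 0); }, + // function(err, res) { console.log(res); /* undefined: no even value */ });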
size : limit, iterate); + + function arrayIterator() { + index = started++; + if (index < size) { + value = collection[index]; + iterator(value, createCallback(value)); + } + } + + function arrayIteratorWithIndex() { + index = started++; + if (index < size) { + value = collection[index]; + iterator(value, index, createCallback(value)); + } + } + + function symbolIterator() { + item = iter.next(); + if (item.done === false) { + started++; + value = item.value; + iterator(value, createCallback(value)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null); + } + } + + function symbolIteratorWithKey() { + item = iter.next(); + if (item.done === false) { + value = item.value; + iterator(value, started++, createCallback(value)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null); + } + } + + function objectIterator() { + index = started++; + if (index < size) { + value = collection[keys[index]]; + iterator(value, createCallback(value)); + } + } + + function objectIteratorWithKey() { + if (started < size) { + key = keys[started++]; + value = collection[key]; + iterator(value, key, createCallback(value)); + } + } + + function createCallback(value) { + var called = false; + return function(err, res) { + if (called) { + throwError(); + } + called = true; + if (err) { + iterate = noop; + callback = once(callback); + callback(err); + } else if (!!res === bool) { + iterate = noop; + callback = once(callback); + callback(null, value); + } else if (++completed === size) { + callback(null); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + }; + } + }; + } + + /** + * @private + * @param {Function} arrayEach + * @param {Function} baseEach + * @param {Function} symbolEach + * @param {boolean} bool + */ + function createPick(arrayEach, baseEach, symbolEach, bool) { + return function(collection, iterator, callback) { + callback = callback || noop; + var size, keys; + var completed = 0; + var result = {}; + + if (isArray(collection)) { + size = collection.length; + arrayEach(collection, iterator, createCallback); + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = symbolEach(collection, iterator, createCallback); + size && size === completed && callback(null, result); + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + baseEach(collection, iterator, createCallback, keys); + } + if (!size) { + return callback(null, {}); + } + + function createCallback(key, value) { + return function done(err, res) { + if (key === null) { + throwError(); + } + if (err) { + key = null; + callback = once(callback); + callback(err, objectClone(result)); + return; + } + if (!!res === bool) { + result[key] = value; + } + key = null; + if (++completed === size) { + callback(null, result); + } + }; + } + }; + } + + /** + * @private + * @param {boolean} bool + */ + function createPickSeries(bool) { + return function(collection, iterator, callback) { + callback = onlyOnce(callback || noop); + var size, key, value, keys, iter, item, iterate; + var sync = false; + var result = {}; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? 
arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size) { + return callback(null, {}); + } + iterate(); + + function arrayIterator() { + key = completed; + value = collection[completed]; + iterator(value, done); + } + + function arrayIteratorWithIndex() { + key = completed; + value = collection[completed]; + iterator(value, completed, done); + } + + function symbolIterator() { + key = completed; + item = iter.next(); + value = item.value; + item.done ? callback(null, result) : iterator(value, done); + } + + function symbolIteratorWithKey() { + key = completed; + item = iter.next(); + value = item.value; + item.done ? callback(null, result) : iterator(value, key, done); + } + + function objectIterator() { + key = keys[completed]; + value = collection[key]; + iterator(value, done); + } + + function objectIteratorWithKey() { + key = keys[completed]; + value = collection[key]; + iterator(value, key, done); + } + + function done(err, res) { + if (err) { + callback(err, result); + return; + } + if (!!res === bool) { + result[key] = value; + } + if (++completed === size) { + iterate = throwError; + callback(null, result); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + } + }; + } + + /** + * @private + * @param {boolean} bool + */ + function createPickLimit(bool) { + return function(collection, limit, iterator, callback) { + callback = callback || noop; + var size, index, key, value, keys, iter, item, iterate; + var sync = false; + var result = {}; + var started = 0; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size || isNaN(limit) || limit < 1) { + return callback(null, {}); + } + timesSync(limit > size ? 
size : limit, iterate); + + function arrayIterator() { + index = started++; + if (index < size) { + value = collection[index]; + iterator(value, createCallback(value, index)); + } + } + + function arrayIteratorWithIndex() { + index = started++; + if (index < size) { + value = collection[index]; + iterator(value, index, createCallback(value, index)); + } + } + + function symbolIterator() { + item = iter.next(); + if (item.done === false) { + value = item.value; + iterator(value, createCallback(value, started++)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, result); + } + } + + function symbolIteratorWithKey() { + item = iter.next(); + if (item.done === false) { + value = item.value; + iterator(value, started, createCallback(value, started++)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, result); + } + } + + function objectIterator() { + if (started < size) { + key = keys[started++]; + value = collection[key]; + iterator(value, createCallback(value, key)); + } + } + + function objectIteratorWithKey() { + if (started < size) { + key = keys[started++]; + value = collection[key]; + iterator(value, key, createCallback(value, key)); + } + } + + function createCallback(value, key) { + return function(err, res) { + if (key === null) { + throwError(); + } + if (err) { + key = null; + iterate = noop; + callback = once(callback); + callback(err, objectClone(result)); + return; + } + if (!!res === bool) { + result[key] = value; + } + key = null; + if (++completed === size) { + iterate = throwError; + callback = onlyOnce(callback); + callback(null, result); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + }; + } + }; + } + + /** + * @memberof async + * @namespace reduce + * @param {Array|Object} collection + * @param {*} result + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var collection = [1, 3, 2, 4]; + * var iterator = function(result, num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, result + num); + * }, num * 10); + * }; + * async.reduce(collection, 0, iterator, function(err, res) { + * console.log(res); // 10 + * console.log(order); // [1, 3, 2, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var collection = [1, 3, 2, 4]; + * var iterator = function(result, num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, result + num); + * }, num * 10); + * }; + * async.reduce(collection, '', iterator, function(err, res) { + * console.log(res); // '1324' + * console.log(order); // [[1, 0], [3, 1], [2, 2], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(result, num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, result + num); + * }, num * 10); + * }; + * async.reduce(object, '', iterator, function(err, res) { + * console.log(res); // '1324' + * console.log(order); // [1, 3, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(result, num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, result + num); + * }, num * 10); + * }; + * async.reduce(object, 0, iterator, function(err, res) { + * console.log(res); // 10 + * 
console.log(order); // [[1, 'a'], [3, 'b'], [2, 'c'], [4, 'd']] + * }); + * + */ + function reduce(collection, result, iterator, callback) { + callback = onlyOnce(callback || noop); + var size, key, keys, iter, item, iterate; + var sync = false; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 4 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 4 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 4 ? objectIteratorWithKey : objectIterator; + } + if (!size) { + return callback(null, result); + } + iterate(result); + + function arrayIterator(result) { + iterator(result, collection[completed], done); + } + + function arrayIteratorWithIndex(result) { + iterator(result, collection[completed], completed, done); + } + + function symbolIterator(result) { + item = iter.next(); + item.done ? callback(null, result) : iterator(result, item.value, done); + } + + function symbolIteratorWithKey(result) { + item = iter.next(); + item.done ? callback(null, result) : iterator(result, item.value, completed, done); + } + + function objectIterator(result) { + iterator(result, collection[keys[completed]], done); + } + + function objectIteratorWithKey(result) { + key = keys[completed]; + iterator(result, collection[key], key, done); + } + + function done(err, result) { + if (err) { + callback(err, result); + } else if (++completed === size) { + iterator = throwError; + callback(null, result); + } else if (sync) { + nextTick(function() { + iterate(result); + }); + } else { + sync = true; + iterate(result); + } + sync = false; + } + } + + /** + * @memberof async + * @namespace reduceRight + * @param {Array|Object} collection + * @param {*} result + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var collection = [1, 3, 2, 4]; + * var iterator = function(result, num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, result + num); + * }, num * 10); + * }; + * async.reduceRight(collection, 0, iterator, function(err, res) { + * console.log(res); // 10 + * console.log(order); // [4, 2, 3, 1] + * }); + * + * @example + * + * // array with index + * var order = []; + * var collection = [1, 3, 2, 4]; + * var iterator = function(result, num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, result + num); + * }, num * 10); + * }; + * async.reduceRight(collection, '', iterator, function(err, res) { + * console.log(res); // '4231' + * console.log(order); // [[4, 3], [2, 2], [3, 1], [1, 0]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(result, num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, result + num); + * }, num * 10); + * }; + * async.reduceRight(object, '', iterator, function(err, res) { + * console.log(res); // '4231' + * console.log(order); // [4, 2, 3, 1] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(result, num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, result + num); + * }, num * 10); 
+ * }; + * async.reduceRight(object, 0, iterator, function(err, res) { + * console.log(res); // 10 + * console.log(order); // [[4, 'd'], [2, 'c'], [3, 'b'], [1, 'a']] + * }); + * + */ + function reduceRight(collection, result, iterator, callback) { + callback = onlyOnce(callback || noop); + var resIndex, index, key, keys, iter, item, col, iterate; + var sync = false; + + if (isArray(collection)) { + resIndex = collection.length; + iterate = iterator.length === 4 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + col = []; + iter = collection[iteratorSymbol](); + index = -1; + while ((item = iter.next()).done === false) { + col[++index] = item.value; + } + collection = col; + resIndex = col.length; + iterate = iterator.length === 4 ? arrayIteratorWithIndex : arrayIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + resIndex = keys.length; + iterate = iterator.length === 4 ? objectIteratorWithKey : objectIterator; + } + if (!resIndex) { + return callback(null, result); + } + iterate(result); + + function arrayIterator(result) { + iterator(result, collection[--resIndex], done); + } + + function arrayIteratorWithIndex(result) { + iterator(result, collection[--resIndex], resIndex, done); + } + + function objectIterator(result) { + iterator(result, collection[keys[--resIndex]], done); + } + + function objectIteratorWithKey(result) { + key = keys[--resIndex]; + iterator(result, collection[key], key, done); + } + + function done(err, result) { + if (err) { + callback(err, result); + } else if (resIndex === 0) { + iterate = throwError; + callback(null, result); + } else if (sync) { + nextTick(function() { + iterate(result); + }); + } else { + sync = true; + iterate(result); + } + sync = false; + } + } + + /** + * @private + * @param {Function} arrayEach + * @param {Function} baseEach + * @param {Function} symbolEach + */ + function createTransform(arrayEach, baseEach, symbolEach) { + return function transform(collection, accumulator, iterator, callback) { + if (arguments.length === 3) { + callback = iterator; + iterator = accumulator; + accumulator = undefined; + } + callback = callback || noop; + var size, keys, result; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + result = accumulator !== undefined ? accumulator : []; + arrayEach(collection, result, iterator, done); + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + result = accumulator !== undefined ? accumulator : {}; + size = symbolEach(collection, result, iterator, done); + size && size === completed && callback(null, result); + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + result = accumulator !== undefined ? accumulator : {}; + baseEach(collection, result, iterator, done, keys); + } + if (!size) { + callback(null, accumulator !== undefined ? accumulator : result || {}); + } + + function done(err, bool) { + if (err) { + callback = once(callback); + callback(err, isArray(result) ? createArray(result) : objectClone(result)); + } else if (++completed === size) { + callback(null, result); + } else if (bool === false) { + callback = once(callback); + callback(null, isArray(result) ? 
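+ // (editorial note, not part of the original source) on an early break + // (bool === false) the accumulator is shallow-cloned before delivery, so + // iteratees still in flight that keep mutating result do not alter what the + // caller already received.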
createArray(result) : objectClone(result)); + } + } + }; + } + + /** + * @memberof async + * @namespace transformSeries + * @param {Array|Object} collection + * @param {Array|Object|Function} [accumulator] + * @param {Function} [iterator] + * @param {Function} [callback] + * @example + * + * // array + * var order = []; + * var collection = [1, 3, 2, 4]; + * var iterator = function(result, num, done) { + * setTimeout(function() { + * order.push(num); + * result.push(num); + * done(); + * }, num * 10); + * }; + * async.transformSeries(collection, iterator, function(err, res) { + * console.log(res); // [1, 3, 2, 4] + * console.log(order); // [1, 3, 2, 4] + * }); + * + * @example + * + * // array with index and accumulator + * var order = []; + * var collection = [1, 3, 2, 4]; + * var iterator = function(result, num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * result[index] = num; + * done(); + * }, num * 10); + * }; + * async.transformSeries(collection, {}, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 3, '2': 2, '3': 4 } + * console.log(order); // [[1, 0], [3, 1], [2, 2], [4, 3]] + * }); + * + * @example + * + * // object with accumulator + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(result, num, done) { + * setTimeout(function() { + * order.push(num); + * result.push(num); + * done(); + * }, num * 10); + * }; + * async.transformSeries(object, [], iterator, function(err, res) { + * console.log(res); // [1, 3, 2, 4] + * console.log(order); // [1, 3, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2, d: 4 }; + * var iterator = function(result, num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * result[key] = num; + * done(); + * }, num * 10); + * }; + * async.transformSeries(object, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 3, c: 2, d: 4 } + * console.log(order); // [[1, 'a'], [3, 'b'], [2, 'c'], [4, 'd']] + * }); + * + */ + function transformSeries(collection, accumulator, iterator, callback) { + if (arguments.length === 3) { + callback = iterator; + iterator = accumulator; + accumulator = undefined; + } + callback = onlyOnce(callback || noop); + var size, key, keys, iter, item, iterate, result; + var sync = false; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + result = accumulator !== undefined ? accumulator : []; + iterate = iterator.length === 4 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + result = accumulator !== undefined ? accumulator : {}; + iterate = iterator.length === 4 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + result = accumulator !== undefined ? accumulator : {}; + iterate = iterator.length === 4 ? objectIteratorWithKey : objectIterator; + } + if (!size) { + return callback(null, accumulator !== undefined ? accumulator : result || {}); + } + iterate(); + + function arrayIterator() { + iterator(result, collection[completed], done); + } + + function arrayIteratorWithIndex() { + iterator(result, collection[completed], completed, done); + } + + function symbolIterator() { + item = iter.next(); + item.done ? 
callback(null, result) : iterator(result, item.value, done); + } + + function symbolIteratorWithKey() { + item = iter.next(); + item.done ? callback(null, result) : iterator(result, item.value, completed, done); + } + + function objectIterator() { + iterator(result, collection[keys[completed]], done); + } + + function objectIteratorWithKey() { + key = keys[completed]; + iterator(result, collection[key], key, done); + } + + function done(err, bool) { + if (err) { + callback(err, result); + } else if (++completed === size || bool === false) { + iterate = throwError; + callback(null, result); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + } + } + + /** + * @memberof async + * @namespace transformLimit + * @param {Array|Object} collection + * @param {number} limit - limit >= 1 + * @param {Array|Object|Function} [accumulator] + * @param {Function} [iterator] + * @param {Function} [callback] + * @example + * + * // array + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(result, num, done) { + * setTimeout(function() { + * order.push(num); + * result.push(num); + * done(); + * }, num * 10); + * }; + * async.transformLimit(array, 2, iterator, function(err, res) { + * console.log(res); // [1, 3, 5, 2, 4] + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // array with index and accumulator + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(result, num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * result[index] = num; + * done(); + * }, num * 10); + * }; + * async.transformLimit(array, 2, {}, iterator, function(err, res) { + * console.log(res); // { '0': 1, '1': 5, '2': 3, '3': 4, '4': 2 } + * console.log(order); // [[1, 0], [3, 2], [5, 1], [2, 4], [4, 3]] + * }); + * + * @example + * + * // object with accumulator + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(result, num, done) { + * setTimeout(function() { + * order.push(num); + * result.push(num); + * done(); + * }, num * 10); + * }; + * async.transformLimit(object, 2, [], iterator, function(err, res) { + * console.log(res); // [1, 3, 5, 2, 4] + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(result, num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * result[key] = num; + * done(); + * }, num * 10); + * }; + * async.transformLimit(object, 2, iterator, function(err, res) { + * console.log(res); // { a: 1, b: 5, c: 3, d: 4, e: 2 } + * console.log(order); // [[1, 'a'], [3, 'c'], [5, 'b'], [2, 'e'], [4, 'd']] + * }); + * + */ + function transformLimit(collection, limit, accumulator, iterator, callback) { + if (arguments.length === 4) { + callback = iterator; + iterator = accumulator; + accumulator = undefined; + } + callback = callback || noop; + var size, index, key, keys, iter, item, iterate, result; + var sync = false; + var started = 0; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + result = accumulator !== undefined ? accumulator : []; + iterate = iterator.length === 4 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + result = accumulator !== undefined ? 
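+ // (editorial note, not part of the original source) default accumulators: [] + // for array input (chosen in the isArray branch above), {} for iterator and + // plain-object input; a caller can override by passing an explicit accumulator, + // e.g. async.transformLimit(object, 2, [], iterator, callback) as in the JSDoc.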
accumulator : {}; + iterate = iterator.length === 4 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + result = accumulator !== undefined ? accumulator : {}; + iterate = iterator.length === 4 ? objectIteratorWithKey : objectIterator; + } + if (!size || isNaN(limit) || limit < 1) { + return callback(null, accumulator !== undefined ? accumulator : result || {}); + } + timesSync(limit > size ? size : limit, iterate); + + function arrayIterator() { + index = started++; + if (index < size) { + iterator(result, collection[index], onlyOnce(done)); + } + } + + function arrayIteratorWithIndex() { + index = started++; + if (index < size) { + iterator(result, collection[index], index, onlyOnce(done)); + } + } + + function symbolIterator() { + item = iter.next(); + if (item.done === false) { + started++; + iterator(result, item.value, onlyOnce(done)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, result); + } + } + + function symbolIteratorWithKey() { + item = iter.next(); + if (item.done === false) { + iterator(result, item.value, started++, onlyOnce(done)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, result); + } + } + + function objectIterator() { + index = started++; + if (index < size) { + iterator(result, collection[keys[index]], onlyOnce(done)); + } + } + + function objectIteratorWithKey() { + index = started++; + if (index < size) { + key = keys[index]; + iterator(result, collection[key], key, onlyOnce(done)); + } + } + + function done(err, bool) { + if (err || bool === false) { + iterate = noop; + callback(err || null, isArray(result) ? createArray(result) : objectClone(result)); + callback = noop; + } else if (++completed === size) { + iterator = noop; + callback(null, result); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + } + } + + /** + * @private + * @param {function} arrayEach + * @param {function} baseEach + * @param {function} symbolEach + */ + function createSortBy(arrayEach, baseEach, symbolEach) { + return function sortBy(collection, iterator, callback) { + callback = callback || noop; + var size, array, criteria; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + array = Array(size); + criteria = Array(size); + arrayEach(collection, iterator, createCallback); + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + array = []; + criteria = []; + size = symbolEach(collection, iterator, createCallback); + size && size === completed && callback(null, sortByCriteria(array, criteria)); + } else if (typeof collection === obj) { + var keys = nativeKeys(collection); + size = keys.length; + array = Array(size); + criteria = Array(size); + baseEach(collection, iterator, createCallback, keys); + } + if (!size) { + callback(null, []); + } + + function createCallback(index, value) { + var called = false; + array[index] = value; + return function done(err, criterion) { + if (called) { + throwError(); + } + called = true; + criteria[index] = criterion; + if (err) { + callback = once(callback); + callback(err); + } else if (++completed === size) { + callback(null, sortByCriteria(array, criteria)); + } + }; + } + }; + } + + /** + * @memberof async + * @namespace sortBySeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * 
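// Note: these examples assume `async` is this module, e.g.
+   * // `var async = require('neo-async');`. The result array is ordered by
+   * // the criteria each iterator passes to `done(null, criterion)`.
+   * //
+   * 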
// array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.sortBySeries(array, iterator, function(err, res) { + * console.log(res); // [1, 2, 3]; + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num); + * }, num * 10); + * }; + * async.sortBySeries(array, iterator, function(err, res) { + * console.log(res); // [1, 2, 3] + * console.log(order); // [[1, 0], [3, 1], [2, 2]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.sortBySeries(object, iterator, function(err, res) { + * console.log(res); // [1, 2, 3] + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num); + * }, num * 10); + * }; + * async.sortBySeries(object, iterator, function(err, res) { + * console.log(res); // [1, 2, 3] + * console.log(order); // [[1, 'a'], [3, 'b'], [2, 'c']] + * }); + * + */ + function sortBySeries(collection, iterator, callback) { + callback = onlyOnce(callback || noop); + var size, key, value, keys, iter, item, array, criteria, iterate; + var sync = false; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + array = collection; + criteria = Array(size); + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + array = []; + criteria = []; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + array = Array(size); + criteria = Array(size); + iterate = iterator.length === 3 ? 
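+      // a 3-ary iterator also receives the object key, so use the keyed variant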
objectIteratorWithKey : objectIterator; + } + if (!size) { + return callback(null, []); + } + iterate(); + + function arrayIterator() { + value = collection[completed]; + iterator(value, done); + } + + function arrayIteratorWithIndex() { + value = collection[completed]; + iterator(value, completed, done); + } + + function symbolIterator() { + item = iter.next(); + if (item.done) { + return callback(null, sortByCriteria(array, criteria)); + } + value = item.value; + array[completed] = value; + iterator(value, done); + } + + function symbolIteratorWithKey() { + item = iter.next(); + if (item.done) { + return callback(null, sortByCriteria(array, criteria)); + } + value = item.value; + array[completed] = value; + iterator(value, completed, done); + } + + function objectIterator() { + value = collection[keys[completed]]; + array[completed] = value; + iterator(value, done); + } + + function objectIteratorWithKey() { + key = keys[completed]; + value = collection[key]; + array[completed] = value; + iterator(value, key, done); + } + + function done(err, criterion) { + criteria[completed] = criterion; + if (err) { + callback(err); + } else if (++completed === size) { + iterate = throwError; + callback(null, sortByCriteria(array, criteria)); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + } + } + + /** + * @memberof async + * @namespace sortByLimit + * @param {Array|Object} collection + * @param {number} limit - limit >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.sortByLimit(array, 2, iterator, function(err, res) { + * console.log(res); // [1, 2, 3, 4, 5] + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num); + * }, num * 10); + * }; + * async.sortByLimit(array, 2, iterator, function(err, res) { + * console.log(res); // [1, 2, 3, 4, 5] + * console.log(order); // [[1, 0], [3, 2], [5, 1], [2, 4], [4, 3]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num); + * }, num * 10); + * }; + * async.sortByLimit(object, 2, iterator, function(err, res) { + * console.log(res); // [1, 2, 3, 4, 5] + * console.log(order); // [1, 3, 5, 2, 4] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num); + * }, num * 10); + * }; + * async.sortByLimit(object, 2, iterator, function(err, res) { + * console.log(res); // [1, 2, 3, 4, 5] + * console.log(order); // [[1, 'a'], [3, 'c'], [5, 'b'], [2, 'e'], [4, 'd']] + * }); + * + */ + function sortByLimit(collection, limit, iterator, callback) { + callback = callback || noop; + var size, index, key, value, array, keys, iter, item, criteria, iterate; + var sync = false; + var started = 0; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + array = collection; + iterate = 
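+      // dispatch on iterator arity: a 3-ary iterator is also passed the index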
iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + array = []; + criteria = []; + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + array = Array(size); + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size || isNaN(limit) || limit < 1) { + return callback(null, []); + } + criteria = criteria || Array(size); + timesSync(limit > size ? size : limit, iterate); + + function arrayIterator() { + if (started < size) { + value = collection[started]; + iterator(value, createCallback(value, started++)); + } + } + + function arrayIteratorWithIndex() { + index = started++; + if (index < size) { + value = collection[index]; + iterator(value, index, createCallback(value, index)); + } + } + + function symbolIterator() { + item = iter.next(); + if (item.done === false) { + value = item.value; + array[started] = value; + iterator(value, createCallback(value, started++)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, sortByCriteria(array, criteria)); + } + } + + function symbolIteratorWithKey() { + item = iter.next(); + if (item.done === false) { + value = item.value; + array[started] = value; + iterator(value, started, createCallback(value, started++)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, sortByCriteria(array, criteria)); + } + } + + function objectIterator() { + if (started < size) { + value = collection[keys[started]]; + array[started] = value; + iterator(value, createCallback(value, started++)); + } + } + + function objectIteratorWithKey() { + if (started < size) { + key = keys[started]; + value = collection[key]; + array[started] = value; + iterator(value, key, createCallback(value, started++)); + } + } + + function createCallback(value, index) { + var called = false; + return function(err, criterion) { + if (called) { + throwError(); + } + called = true; + criteria[index] = criterion; + if (err) { + iterate = noop; + callback(err); + callback = noop; + } else if (++completed === size) { + callback(null, sortByCriteria(array, criteria)); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + }; + } + } + + /** + * @memberof async + * @namespace some + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.some(array, iterator, function(err, res) { + * console.log(res); // true + * console.log(order); // [1] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.some(array, iterator, function(err, res) { + * console.log(res); // true + * console.log(order); // [[1, 0]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * 
done(null, num % 2); + * }, num * 10); + * }; + * async.some(object, iterator, function(err, res) { + * console.log(res); // true + * console.log(order); // [1] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.some(object, iterator, function(err, res) { + * console.log(res); // true + * console.log(order); // [[1, 'a']] + * }); + * + */ + function some(collection, iterator, callback) { + callback = callback || noop; + detect(collection, iterator, done); + + function done(err, res) { + if (err) { + return callback(err); + } + callback(null, !!res); + } + } + + /** + * @memberof async + * @namespace someSeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.someSeries(array, iterator, function(err, res) { + * console.log(res); // true + * console.log(order); // [1] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.someSeries(array, iterator, function(err, res) { + * console.log(res); // true + * console.log(order); // [[1, 0]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.someSeries(object, iterator, function(err, res) { + * console.log(res); // true + * console.log(order); // [1] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.someSeries(object, iterator, function(err, res) { + * console.log(res); // true + * console.log(order); // [[1, 'a']] + * }); + * + */ + function someSeries(collection, iterator, callback) { + callback = callback || noop; + detectSeries(collection, iterator, done); + + function done(err, res) { + if (err) { + return callback(err); + } + callback(null, !!res); + } + } + + /** + * @memberof async + * @namespace someLimit + * @param {Array|Object} collection + * @param {number} limit - limit >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.someLimit(array, 2, iterator, function(err, res) { + * console.log(res); // true + * console.log(order); // [1] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 5, 3, 4, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.someLimit(array, 2, iterator, function(err, res) { + * console.log(res); // true + 
* console.log(order); // [[1, 0]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, num % 2); + * }, num * 10); + * }; + * async.someLimit(object, 2, iterator, function(err, res) { + * console.log(res); // true + * console.log(order); // [1] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, num % 2); + * }, num * 10); + * }; + * async.someLimit(object, 2, iterator, function(err, res) { + * console.log(res); // true + * console.log(order); // [[1, 'a']] + * }); + * + */ + function someLimit(collection, limit, iterator, callback) { + callback = callback || noop; + detectLimit(collection, limit, iterator, done); + + function done(err, res) { + if (err) { + return callback(err); + } + callback(null, !!res); + } + } + + /** + * @private + * @param {Function} arrayEach + * @param {Function} baseEach + * @param {Function} symbolEach + */ + function createEvery(arrayEach, baseEach, symbolEach) { + var deny = createDetect(arrayEach, baseEach, symbolEach, false); + + return function every(collection, iterator, callback) { + callback = callback || noop; + deny(collection, iterator, done); + + function done(err, res) { + if (err) { + return callback(err); + } + callback(null, !res); + } + }; + } + + /** + * @private + */ + function createEverySeries() { + var denySeries = createDetectSeries(false); + + return function everySeries(collection, iterator, callback) { + callback = callback || noop; + denySeries(collection, iterator, done); + + function done(err, res) { + if (err) { + return callback(err); + } + callback(null, !res); + } + }; + } + + /** + * @private + */ + function createEveryLimit() { + var denyLimit = createDetectLimit(false); + + return function everyLimit(collection, limit, iterator, callback) { + callback = callback || noop; + denyLimit(collection, limit, iterator, done); + + function done(err, res) { + if (err) { + return callback(err); + } + callback(null, !res); + } + }; + } + + /** + * @private + * @param {Function} arrayEach + * @param {Function} baseEach + * @param {Function} symbolEach + */ + function createConcat(arrayEach, baseEach, symbolEach) { + return function concat(collection, iterator, callback) { + callback = callback || noop; + var size, result; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + result = Array(size); + arrayEach(collection, iterator, createCallback); + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + result = []; + size = symbolEach(collection, iterator, createCallback); + size && size === completed && callback(null, result); + } else if (typeof collection === obj) { + var keys = nativeKeys(collection); + size = keys.length; + result = Array(size); + baseEach(collection, iterator, createCallback, keys); + } + if (!size) { + callback(null, []); + } + + function createCallback(index) { + return function done(err, res) { + if (index === null) { + throwError(); + } + if (err) { + index = null; + callback = once(callback); + arrayEachSync(result, function(array, index) { + if (array === undefined) { + result[index] = noop; + } + }); + callback(err, makeConcatResult(result)); + return; + } + switch (arguments.length) { + case 0: + 
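+            // fall through: no result was passed, so mark this slot with the
+            // `noop` sentinel and let the final merge skip it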
case 1: + result[index] = noop; + break; + case 2: + result[index] = res; + break; + default: + result[index] = slice(arguments, 1); + break; + } + index = null; + if (++completed === size) { + callback(null, makeConcatResult(result)); + } + }; + } + }; + } + + /** + * @memberof async + * @namespace concatSeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, [num]); + * }, num * 10); + * }; + * async.concatSeries(array, iterator, function(err, res) { + * console.log(res); // [1, 3, 2]; + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [1, 3, 2]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, [num]); + * }, num * 10); + * }; + * async.concatSeries(array, iterator, function(err, res) { + * console.log(res); // [1, 3, 2] + * console.log(order); // [[1, 0], [3, 1], [2, 2]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, [num]); + * }, num * 10); + * }; + * async.concatSeries(object, iterator, function(err, res) { + * console.log(res); // [1, 3, 2] + * console.log(order); // [1, 3, 2] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 1, b: 3, c: 2 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, [num]); + * }, num * 10); + * }; + * async.concatSeries(object, iterator, function(err, res) { + * console.log(res); // [1, 3, 2] + * console.log(order); // [[1, 'a'], [3, 'b'], [2, 'c']] + * }); + * + */ + function concatSeries(collection, iterator, callback) { + callback = onlyOnce(callback || noop); + var size, key, keys, iter, item, iterate; + var sync = false; + var result = []; + var completed = 0; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size) { + return callback(null, result); + } + iterate(); + + function arrayIterator() { + iterator(collection[completed], done); + } + + function arrayIteratorWithIndex() { + iterator(collection[completed], completed, done); + } + + function symbolIterator() { + item = iter.next(); + item.done ? callback(null, result) : iterator(item.value, done); + } + + function symbolIteratorWithKey() { + item = iter.next(); + item.done ? 
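+      // iterator exhausted: return everything concatenated so far; otherwise
+      // pass the next value and its running index along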
callback(null, result) : iterator(item.value, completed, done);
+    }
+
+    function objectIterator() {
+      iterator(collection[keys[completed]], done);
+    }
+
+    function objectIteratorWithKey() {
+      key = keys[completed];
+      iterator(collection[key], key, done);
+    }
+
+    function done(err, array) {
+      if (isArray(array)) {
+        nativePush.apply(result, array);
+      } else if (arguments.length >= 2) {
+        nativePush.apply(result, slice(arguments, 1));
+      }
+      if (err) {
+        callback(err, result);
+      } else if (++completed === size) {
+        iterate = throwError;
+        callback(null, result);
+      } else if (sync) {
+        nextTick(iterate);
+      } else {
+        sync = true;
+        iterate();
+      }
+      sync = false;
+    }
+  }
+
+  /**
+   * @memberof async
+   * @namespace concatLimit
+   * @param {Array|Object} collection
+   * @param {number} limit - limit >= 1
+   * @param {Function} iterator
+   * @param {Function} callback
+   * @example
+   *
+   * // array
+   * var order = [];
+   * var array = [1, 5, 3, 4, 2];
+   * var iterator = function(num, done) {
+   *   setTimeout(function() {
+   *     order.push(num);
+   *     done(null, [num]);
+   *   }, num * 10);
+   * };
+   * async.concatLimit(array, 2, iterator, function(err, res) {
+   *   console.log(res); // [1, 3, 5, 2, 4]
+   *   console.log(order); // [1, 3, 5, 2, 4]
+   * });
+   *
+   * @example
+   *
+   * // array with index
+   * var order = [];
+   * var array = [1, 5, 3, 4, 2];
+   * var iterator = function(num, index, done) {
+   *   setTimeout(function() {
+   *     order.push([num, index]);
+   *     done(null, [num]);
+   *   }, num * 10);
+   * };
+   * async.concatLimit(array, 2, iterator, function(err, res) {
+   *   console.log(res); // [1, 3, 5, 2, 4]
+   *   console.log(order); // [[1, 0], [3, 2], [5, 1], [2, 4], [4, 3]]
+   * });
+   *
+   * @example
+   *
+   * // object
+   * var order = [];
+   * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 };
+   * var iterator = function(num, done) {
+   *   setTimeout(function() {
+   *     order.push(num);
+   *     done(null, [num]);
+   *   }, num * 10);
+   * };
+   * async.concatLimit(object, 2, iterator, function(err, res) {
+   *   console.log(res); // [1, 3, 5, 2, 4]
+   *   console.log(order); // [1, 3, 5, 2, 4]
+   * });
+   *
+   * @example
+   *
+   * // object with key
+   * var order = [];
+   * var object = { a: 1, b: 5, c: 3, d: 4, e: 2 };
+   * var iterator = function(num, key, done) {
+   *   setTimeout(function() {
+   *     order.push([num, key]);
+   *     done(null, [num]);
+   *   }, num * 10);
+   * };
+   * async.concatLimit(object, 2, iterator, function(err, res) {
+   *   console.log(res); // [1, 3, 5, 2, 4]
+   *   console.log(order); // [[1, 'a'], [3, 'c'], [5, 'b'], [2, 'e'], [4, 'd']]
+   * });
+   *
+   */
+  function concatLimit(collection, limit, iterator, callback) {
+    callback = callback || noop;
+    var size, key, iter, item, iterate, result;
+    var sync = false;
+    var started = 0;
+    var completed = 0;
+
+    if (isArray(collection)) {
+      size = collection.length;
+      iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator;
+    } else if (!collection) {
+    } else if (iteratorSymbol && collection[iteratorSymbol]) {
+      size = Infinity;
+      result = [];
+      iter = collection[iteratorSymbol]();
+      iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator;
+    } else if (typeof collection === obj) {
+      var keys = nativeKeys(collection);
+      size = keys.length;
+      iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator;
+    }
+    if (!size || isNaN(limit) || limit < 1) {
+      return callback(null, []);
+    }
+    result = result || Array(size);
+    timesSync(limit > size ?
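+    // start at most `limit` concurrent workers, never more than `size`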
size : limit, iterate); + + function arrayIterator() { + if (started < size) { + iterator(collection[started], createCallback(started++)); + } + } + + function arrayIteratorWithIndex() { + if (started < size) { + iterator(collection[started], started, createCallback(started++)); + } + } + + function symbolIterator() { + item = iter.next(); + if (item.done === false) { + iterator(item.value, createCallback(started++)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, makeConcatResult(result)); + } + } + + function symbolIteratorWithKey() { + item = iter.next(); + if (item.done === false) { + iterator(item.value, started, createCallback(started++)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, makeConcatResult(result)); + } + } + + function objectIterator() { + if (started < size) { + iterator(collection[keys[started]], createCallback(started++)); + } + } + + function objectIteratorWithKey() { + if (started < size) { + key = keys[started]; + iterator(collection[key], key, createCallback(started++)); + } + } + + function createCallback(index) { + return function(err, res) { + if (index === null) { + throwError(); + } + if (err) { + index = null; + iterate = noop; + callback = once(callback); + arrayEachSync(result, function(array, index) { + if (array === undefined) { + result[index] = noop; + } + }); + callback(err, makeConcatResult(result)); + return; + } + switch (arguments.length) { + case 0: + case 1: + result[index] = noop; + break; + case 2: + result[index] = res; + break; + default: + result[index] = slice(arguments, 1); + break; + } + index = null; + if (++completed === size) { + iterate = throwError; + callback(null, makeConcatResult(result)); + callback = throwError; + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + }; + } + } + + /** + * @private + * @param {Function} arrayEach + * @param {Function} baseEach + * @param {Function} symbolEach + */ + function createGroupBy(arrayEach, baseEach, symbolEach) { + return function groupBy(collection, iterator, callback) { + callback = callback || noop; + var size; + var completed = 0; + var result = {}; + + if (isArray(collection)) { + size = collection.length; + arrayEach(collection, iterator, createCallback); + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = symbolEach(collection, iterator, createCallback); + size && size === completed && callback(null, result); + } else if (typeof collection === obj) { + var keys = nativeKeys(collection); + size = keys.length; + baseEach(collection, iterator, createCallback, keys); + } + if (!size) { + callback(null, {}); + } + + function createCallback(value) { + var called = false; + return function done(err, key) { + if (called) { + throwError(); + } + called = true; + if (err) { + callback = once(callback); + callback(err, objectClone(result)); + return; + } + var array = result[key]; + if (!array) { + result[key] = [value]; + } else { + array.push(value); + } + if (++completed === size) { + callback(null, result); + } + }; + } + }; + } + + /** + * @memberof async + * @namespace groupBySeries + * @param {Array|Object} collection + * @param {Function} iterator + * @param {Function} callback + * @example + * + * // array + * var order = []; + * var array = [4.2, 6.4, 6.1]; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, Math.floor(num)); + * }, num * 
10); + * }; + * async.groupBySeries(array, iterator, function(err, res) { + * console.log(res); // { '4': [4.2], '6': [6.4, 6.1] } + * console.log(order); // [4.2, 6.4, 6.1] + * }); + * + * @example + * + * // array with index + * var order = []; + * var array = [4.2, 6.4, 6.1]; + * var iterator = function(num, index, done) { + * setTimeout(function() { + * order.push([num, index]); + * done(null, Math.floor(num)); + * }, num * 10); + * }; + * async.groupBySeries(array, iterator, function(err, res) { + * console.log(res); // { '4': [4.2], '6': [6.4, 6.1] } + * console.log(order); // [[4.2, 0], [6.4, 1], [6.1, 2]] + * }); + * + * @example + * + * // object + * var order = []; + * var object = { a: 4.2, b: 6.4, c: 6.1 }; + * var iterator = function(num, done) { + * setTimeout(function() { + * order.push(num); + * done(null, Math.floor(num)); + * }, num * 10); + * }; + * async.groupBySeries(object, iterator, function(err, res) { + * console.log(res); // { '4': [4.2], '6': [6.4, 6.1] } + * console.log(order); // [4.2, 6.4, 6.1] + * }); + * + * @example + * + * // object with key + * var order = []; + * var object = { a: 4.2, b: 6.4, c: 6.1 }; + * var iterator = function(num, key, done) { + * setTimeout(function() { + * order.push([num, key]); + * done(null, Math.floor(num)); + * }, num * 10); + * }; + * async.groupBySeries(object, iterator, function(err, res) { + * console.log(res); // { '4': [4.2], '6': [6.4, 6.1] } + * console.log(order); // [[4.2, 'a'], [6.4, 'b'], [6.1, 'c']] + * }); + * + */ + function groupBySeries(collection, iterator, callback) { + callback = onlyOnce(callback || noop); + var size, key, value, keys, iter, item, iterate; + var sync = false; + var completed = 0; + var result = {}; + + if (isArray(collection)) { + size = collection.length; + iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator; + } else if (!collection) { + } else if (iteratorSymbol && collection[iteratorSymbol]) { + size = Infinity; + iter = collection[iteratorSymbol](); + iterate = iterator.length === 3 ? symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size) { + return callback(null, result); + } + iterate(); + + function arrayIterator() { + value = collection[completed]; + iterator(value, done); + } + + function arrayIteratorWithIndex() { + value = collection[completed]; + iterator(value, completed, done); + } + + function symbolIterator() { + item = iter.next(); + value = item.value; + item.done ? callback(null, result) : iterator(value, done); + } + + function symbolIteratorWithKey() { + item = iter.next(); + value = item.value; + item.done ? 
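+      // once the source is exhausted, return the groups built so far;
+      // otherwise classify the next value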
callback(null, result) : iterator(value, completed, done);
+    }
+
+    function objectIterator() {
+      value = collection[keys[completed]];
+      iterator(value, done);
+    }
+
+    function objectIteratorWithKey() {
+      key = keys[completed];
+      value = collection[key];
+      iterator(value, key, done);
+    }
+
+    function done(err, key) {
+      if (err) {
+        iterate = throwError;
+        callback = onlyOnce(callback);
+        callback(err, objectClone(result));
+        return;
+      }
+      var array = result[key];
+      if (!array) {
+        result[key] = [value];
+      } else {
+        array.push(value);
+      }
+      if (++completed === size) {
+        iterate = throwError;
+        callback(null, result);
+      } else if (sync) {
+        nextTick(iterate);
+      } else {
+        sync = true;
+        iterate();
+      }
+      sync = false;
+    }
+  }
+
+  /**
+   * @memberof async
+   * @namespace groupByLimit
+   * @param {Array|Object} collection
+   * @param {number} limit - limit >= 1
+   * @param {Function} iterator
+   * @param {Function} callback
+   * @example
+   *
+   * // array
+   * var order = [];
+   * var array = [1.1, 5.9, 3.2, 3.9, 2.1];
+   * var iterator = function(num, done) {
+   *   setTimeout(function() {
+   *     order.push(num);
+   *     done(null, Math.floor(num));
+   *   }, num * 10);
+   * };
+   * async.groupByLimit(array, 2, iterator, function(err, res) {
+   *   console.log(res); // { '1': [1.1], '3': [3.2, 3.9], '5': [5.9], '2': [2.1] }
+   *   console.log(order); // [1.1, 3.2, 5.9, 2.1, 3.9]
+   * });
+   *
+   * @example
+   *
+   * // array with index
+   * var order = [];
+   * var array = [1.1, 5.9, 3.2, 3.9, 2.1];
+   * var iterator = function(num, index, done) {
+   *   setTimeout(function() {
+   *     order.push([num, index]);
+   *     done(null, Math.floor(num));
+   *   }, num * 10);
+   * };
+   * async.groupByLimit(array, 2, iterator, function(err, res) {
+   *   console.log(res); // { '1': [1.1], '3': [3.2, 3.9], '5': [5.9], '2': [2.1] }
+   *   console.log(order); // [[1.1, 0], [3.2, 2], [5.9, 1], [2.1, 4], [3.9, 3]]
+   * });
+   *
+   * @example
+   *
+   * // object
+   * var order = [];
+   * var object = { a: 1.1, b: 5.9, c: 3.2, d: 3.9, e: 2.1 };
+   * var iterator = function(num, done) {
+   *   setTimeout(function() {
+   *     order.push(num);
+   *     done(null, Math.floor(num));
+   *   }, num * 10);
+   * };
+   * async.groupByLimit(object, 2, iterator, function(err, res) {
+   *   console.log(res); // { '1': [1.1], '3': [3.2, 3.9], '5': [5.9], '2': [2.1] }
+   *   console.log(order); // [1.1, 3.2, 5.9, 2.1, 3.9]
+   * });
+   *
+   * @example
+   *
+   * // object with key
+   * var order = [];
+   * var object = { a: 1.1, b: 5.9, c: 3.2, d: 3.9, e: 2.1 };
+   * var iterator = function(num, key, done) {
+   *   setTimeout(function() {
+   *     order.push([num, key]);
+   *     done(null, Math.floor(num));
+   *   }, num * 10);
+   * };
+   * async.groupByLimit(object, 2, iterator, function(err, res) {
+   *   console.log(res); // { '1': [1.1], '3': [3.2, 3.9], '5': [5.9], '2': [2.1] }
+   *   console.log(order); // [[1.1, 'a'], [3.2, 'c'], [5.9, 'b'], [2.1, 'e'], [3.9, 'd']]
+   * });
+   *
+   */
+  function groupByLimit(collection, limit, iterator, callback) {
+    callback = callback || noop;
+    var size, index, key, value, keys, iter, item, iterate;
+    var sync = false;
+    var started = 0;
+    var completed = 0;
+    var result = {};
+
+    if (isArray(collection)) {
+      size = collection.length;
+      iterate = iterator.length === 3 ? arrayIteratorWithIndex : arrayIterator;
+    } else if (!collection) {
+    } else if (iteratorSymbol && collection[iteratorSymbol]) {
+      size = Infinity;
+      iter = collection[iteratorSymbol]();
+      iterate = iterator.length === 3 ?
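+      // for iterable sources the keyed variant receives a running index as its key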
symbolIteratorWithKey : symbolIterator; + } else if (typeof collection === obj) { + keys = nativeKeys(collection); + size = keys.length; + iterate = iterator.length === 3 ? objectIteratorWithKey : objectIterator; + } + if (!size || isNaN(limit) || limit < 1) { + return callback(null, result); + } + timesSync(limit > size ? size : limit, iterate); + + function arrayIterator() { + if (started < size) { + value = collection[started++]; + iterator(value, createCallback(value)); + } + } + + function arrayIteratorWithIndex() { + index = started++; + if (index < size) { + value = collection[index]; + iterator(value, index, createCallback(value)); + } + } + + function symbolIterator() { + item = iter.next(); + if (item.done === false) { + started++; + value = item.value; + iterator(value, createCallback(value)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, result); + } + } + + function symbolIteratorWithKey() { + item = iter.next(); + if (item.done === false) { + value = item.value; + iterator(value, started++, createCallback(value)); + } else if (completed === started && iterator !== noop) { + iterator = noop; + callback(null, result); + } + } + + function objectIterator() { + if (started < size) { + value = collection[keys[started++]]; + iterator(value, createCallback(value)); + } + } + + function objectIteratorWithKey() { + if (started < size) { + key = keys[started++]; + value = collection[key]; + iterator(value, key, createCallback(value)); + } + } + + function createCallback(value) { + var called = false; + return function(err, key) { + if (called) { + throwError(); + } + called = true; + if (err) { + iterate = noop; + callback = once(callback); + callback(err, objectClone(result)); + return; + } + var array = result[key]; + if (!array) { + result[key] = [value]; + } else { + array.push(value); + } + if (++completed === size) { + callback(null, result); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + }; + } + } + + /** + * @private + * @param {Function} arrayEach + * @param {Function} baseEach + */ + function createParallel(arrayEach, baseEach) { + return function parallel(tasks, callback) { + callback = callback || noop; + var size, keys, result; + var completed = 0; + + if (isArray(tasks)) { + size = tasks.length; + result = Array(size); + arrayEach(tasks, createCallback); + } else if (tasks && typeof tasks === obj) { + keys = nativeKeys(tasks); + size = keys.length; + result = {}; + baseEach(tasks, createCallback, keys); + } + if (!size) { + callback(null, result); + } + + function createCallback(key) { + return function(err, res) { + if (key === null) { + throwError(); + } + if (err) { + key = null; + callback = once(callback); + callback(err, result); + return; + } + result[key] = arguments.length <= 2 ? 
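+          // a single task result is stored as-is; extra callback arguments
+          // are collected into an array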
res : slice(arguments, 1);
+          key = null;
+          if (++completed === size) {
+            callback(null, result);
+          }
+        };
+      }
+    };
+  }
+
+  /**
+   * @memberof async
+   * @namespace series
+   * @param {Array|Object} tasks - functions
+   * @param {Function} callback
+   * @example
+   *
+   * var order = [];
+   * var tasks = [
+   *   function(done) {
+   *     setTimeout(function() {
+   *       order.push(1);
+   *       done(null, 1);
+   *     }, 10);
+   *   },
+   *   function(done) {
+   *     setTimeout(function() {
+   *       order.push(2);
+   *       done(null, 2);
+   *     }, 30);
+   *   },
+   *   function(done) {
+   *     setTimeout(function() {
+   *       order.push(3);
+   *       done(null, 3);
+   *     }, 40);
+   *   },
+   *   function(done) {
+   *     setTimeout(function() {
+   *       order.push(4);
+   *       done(null, 4);
+   *     }, 20);
+   *   }
+   * ];
+   * async.series(tasks, function(err, res) {
+   *   console.log(res); // [1, 2, 3, 4];
+   *   console.log(order); // [1, 2, 3, 4]
+   * });
+   *
+   * @example
+   *
+   * var order = [];
+   * var tasks = {
+   *   'a': function(done) {
+   *     setTimeout(function() {
+   *       order.push(1);
+   *       done(null, 1);
+   *     }, 10);
+   *   },
+   *   'b': function(done) {
+   *     setTimeout(function() {
+   *       order.push(2);
+   *       done(null, 2);
+   *     }, 30);
+   *   },
+   *   'c': function(done) {
+   *     setTimeout(function() {
+   *       order.push(3);
+   *       done(null, 3);
+   *     }, 40);
+   *   },
+   *   'd': function(done) {
+   *     setTimeout(function() {
+   *       order.push(4);
+   *       done(null, 4);
+   *     }, 20);
+   *   }
+   * };
+   * async.series(tasks, function(err, res) {
+   *   console.log(res); // { a: 1, b: 2, c: 3, d: 4 }
+   *   console.log(order); // [1, 2, 3, 4]
+   * });
+   *
+   */
+  function series(tasks, callback) {
+    callback = callback || noop;
+    var size, key, keys, result, iterate;
+    var sync = false;
+    var completed = 0;
+
+    if (isArray(tasks)) {
+      size = tasks.length;
+      result = Array(size);
+      iterate = arrayIterator;
+    } else if (tasks && typeof tasks === obj) {
+      keys = nativeKeys(tasks);
+      size = keys.length;
+      result = {};
+      iterate = objectIterator;
+    } else {
+      return callback(null);
+    }
+    if (!size) {
+      return callback(null, result);
+    }
+    iterate();
+
+    function arrayIterator() {
+      key = completed;
+      tasks[completed](done);
+    }
+
+    function objectIterator() {
+      key = keys[completed];
+      tasks[key](done);
+    }
+
+    function done(err, res) {
+      if (err) {
+        iterate = throwError;
+        callback = onlyOnce(callback);
+        callback(err, result);
+        return;
+      }
+      result[key] = arguments.length <= 2 ?
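+      // one result is stored directly; additional callback arguments are
+      // kept as an array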
res : slice(arguments, 1); + if (++completed === size) { + iterate = throwError; + callback(null, result); + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + } + } + + /** + * @memberof async + * @namespace parallelLimit + * @param {Array|Object} tasks - functions + * @param {number} limit - limit >= 1 + * @param {Function} callback + * @example + * + * var order = []; + * var tasks = [ + * function(done) { + * setTimeout(function() { + * order.push(1); + * done(null, 1); + * }, 10); + * }, + * function(done) { + * setTimeout(function() { + * order.push(2); + * done(null, 2); + * }, 50); + * }, + * function(done) { + * setTimeout(function() { + * order.push(3); + * done(null, 3); + * }, 30); + * }, + * function(done) { + * setTimeout(function() { + * order.push(4); + * done(null, 4); + * }, 40); + * } + * ]; + * async.parallelLimit(tasks, 2, function(err, res) { + * console.log(res); // [1, 2, 3, 4]; + * console.log(order); // [1, 3, 2, 4] + * }); + * + * @example + * + * var order = []; + * var tasks = { + * 'a': function(done) { + * setTimeout(function() { + * order.push(1); + * done(null, 1); + * }, 10); + * }, + * 'b': function(done) { + * setTimeout(function() { + * order.push(2); + * done(null, 2); + * }, 50); + * }, + * 'c': function(done) { + * setTimeout(function() { + * order.push(3); + * done(null, 3); + * }, 20); + * }, + * 'd': function(done) { + * setTimeout(function() { + * order.push(4); + * done(null, 4); + * }, 40); + * } + * }; + * async.parallelLimit(tasks, 2, function(err, res) { + * console.log(res); // { a: 1, b: 2, c: 3, d:4 } + * console.log(order); // [1, 3, 2, 4] + * }); + * + */ + function parallelLimit(tasks, limit, callback) { + callback = callback || noop; + var size, index, key, keys, result, iterate; + var sync = false; + var started = 0; + var completed = 0; + + if (isArray(tasks)) { + size = tasks.length; + result = Array(size); + iterate = arrayIterator; + } else if (tasks && typeof tasks === obj) { + keys = nativeKeys(tasks); + size = keys.length; + result = {}; + iterate = objectIterator; + } + if (!size || isNaN(limit) || limit < 1) { + return callback(null, result); + } + timesSync(limit > size ? size : limit, iterate); + + function arrayIterator() { + index = started++; + if (index < size) { + tasks[index](createCallback(index)); + } + } + + function objectIterator() { + if (started < size) { + key = keys[started++]; + tasks[key](createCallback(key)); + } + } + + function createCallback(key) { + return function(err, res) { + if (key === null) { + throwError(); + } + if (err) { + key = null; + iterate = noop; + callback = once(callback); + callback(err, result); + return; + } + result[key] = arguments.length <= 2 ? 
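+        // as in `parallel`, multiple results for one task are kept as an array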
res : slice(arguments, 1);
+        key = null;
+        if (++completed === size) {
+          callback(null, result);
+        } else if (sync) {
+          nextTick(iterate);
+        } else {
+          sync = true;
+          iterate();
+        }
+        sync = false;
+      };
+    }
+  }
+
+  /**
+   * @memberof async
+   * @namespace tryEach
+   * @param {Array|Object} tasks - functions
+   * @param {Function} callback
+   * @example
+   *
+   * var tasks = [
+   *   function(done) {
+   *     setTimeout(function() {
+   *       done(new Error('error'));
+   *     }, 10);
+   *   },
+   *   function(done) {
+   *     setTimeout(function() {
+   *       done(null, 2);
+   *     }, 10);
+   *   }
+   * ];
+   * async.tryEach(tasks, function(err, res) {
+   *   console.log(res); // 2
+   * });
+   *
+   * @example
+   *
+   * var tasks = [
+   *   function(done) {
+   *     setTimeout(function() {
+   *       done(new Error('error1'));
+   *     }, 10);
+   *   },
+   *   function(done) {
+   *     setTimeout(function() {
+   *       done(new Error('error2'));
+   *     }, 10);
+   *   }
+   * ];
+   * async.tryEach(tasks, function(err, res) {
+   *   console.log(err); // error2
+   *   console.log(res); // undefined
+   * });
+   *
+   */
+  function tryEach(tasks, callback) {
+    callback = callback || noop;
+    var size, keys, iterate;
+    var sync = false;
+    var completed = 0;
+
+    if (isArray(tasks)) {
+      size = tasks.length;
+      iterate = arrayIterator;
+    } else if (tasks && typeof tasks === obj) {
+      keys = nativeKeys(tasks);
+      size = keys.length;
+      iterate = objectIterator;
+    }
+    if (!size) {
+      return callback(null);
+    }
+    iterate();
+
+    function arrayIterator() {
+      tasks[completed](done);
+    }
+
+    function objectIterator() {
+      tasks[keys[completed]](done);
+    }
+
+    function done(err, res) {
+      if (!err) {
+        if (arguments.length <= 2) {
+          callback(null, res);
+        } else {
+          callback(null, slice(arguments, 1));
+        }
+      } else if (++completed === size) {
+        callback(err);
+      } else {
+        sync = true;
+        iterate();
+      }
+      sync = false;
+    }
+  }
+
+  /**
+   * check for waterfall tasks
+   * @private
+   * @param {Array} tasks
+   * @param {Function} callback
+   * @return {boolean}
+   */
+  function checkWaterfallTasks(tasks, callback) {
+    if (!isArray(tasks)) {
+      callback(new Error('First argument to waterfall must be an array of functions'));
+      return false;
+    }
+    if (tasks.length === 0) {
+      callback(null);
+      return false;
+    }
+    return true;
+  }
+
+  /**
+   * apply the given arguments and `next` callback to a waterfall task
+   * @private
+   * @param {function} func
+   * @param {Array|Object} args - arguments
+   * @param {function} next
+   */
+  function waterfallIterator(func, args, next) {
+    switch (args.length) {
+      case 0:
+      case 1:
+        return func(next);
+      case 2:
+        return func(args[1], next);
+      case 3:
+        return func(args[1], args[2], next);
+      case 4:
+        return func(args[1], args[2], args[3], next);
+      case 5:
+        return func(args[1], args[2], args[3], args[4], next);
+      case 6:
+        return func(args[1], args[2], args[3], args[4], args[5], next);
+      default:
+        args = slice(args, 1);
+        args.push(next);
+        return func.apply(null, args);
+    }
+  }
+
+  /**
+   * @memberof async
+   * @namespace waterfall
+   * @param {Array} tasks - functions
+   * @param {Function} callback
+   * @example
+   *
+   * var order = [];
+   * var tasks = [
+   *   function(next) {
+   *     setTimeout(function() {
+   *       order.push(1);
+   *       next(null, 1);
+   *     }, 10);
+   *   },
+   *   function(arg1, next) {
+   *     setTimeout(function() {
+   *       order.push(2);
+   *       next(null, 1, 2);
+   *     }, 30);
+   *   },
+   *   function(arg1, arg2, next) {
+   *     setTimeout(function() {
+   *       order.push(3);
+   *       next(null, 3);
+   *     }, 20);
+   *   },
+   *   function(arg1, next) {
+   *     setTimeout(function() {
+   *       order.push(4);
+   *       next(null, 1, 2, 3, 4);
+   *     }, 40);
+   *   }
+   * ];
+   * async.waterfall(tasks, function(err, arg1, arg2, arg3, arg4) {
+   *   console.log(arg1, arg2, arg3, arg4); // 1 2 3 4
+   * });
+   *
+   */
+  function waterfall(tasks, callback) {
+    callback = callback || noop;
+    if (!checkWaterfallTasks(tasks, callback)) {
+      return;
+    }
+    var func, args, done, sync;
+    var completed = 0;
+    var size = tasks.length;
+    waterfallIterator(tasks[0], [], createCallback(0));
+
+    function iterate() {
+      waterfallIterator(func, args, createCallback(func));
+    }
+
+    function createCallback(index) {
+      return function next(err, res) {
+        if (index === undefined) {
+          callback = noop;
+          throwError();
+        }
+        index = undefined;
+        if (err) {
+          done = callback;
+          callback = throwError;
+          done(err);
+          return;
+        }
+        if (++completed === size) {
+          done = callback;
+          callback = throwError;
+          if (arguments.length <= 2) {
+            done(err, res);
+          } else {
+            done.apply(null, createArray(arguments));
+          }
+          return;
+        }
+        if (sync) {
+          args = arguments;
+          func = tasks[completed] || throwError;
+          nextTick(iterate);
+        } else {
+          sync = true;
+          waterfallIterator(tasks[completed] || throwError, arguments, createCallback(completed));
+        }
+        sync = false;
+      };
+    }
+  }
+
+  /**
+   * `angelFall` is like `waterfall`, but it injects the callback into the
+   * last declared argument of each task.
+   *
+   * @memberof async
+   * @namespace angelFall
+   * @param {Array} tasks - functions
+   * @param {Function} callback
+   * @example
+   *
+   * var order = [];
+   * var tasks = [
+   *   function(next) {
+   *     setTimeout(function() {
+   *       order.push(1);
+   *       next(null, 1);
+   *     }, 10);
+   *   },
+   *   function(arg1, empty, next) {
+   *     setTimeout(function() {
+   *       order.push(2);
+   *       next(null, 1, 2);
+   *     }, 30);
+   *   },
+   *   function(next) {
+   *     setTimeout(function() {
+   *       order.push(3);
+   *       next(null, 3);
+   *     }, 20);
+   *   },
+   *   function(arg1, empty1, empty2, empty3, next) {
+   *     setTimeout(function() {
+   *       order.push(4);
+   *       next(null, 1, 2, 3, 4);
+   *     }, 40);
+   *   }
+   * ];
+   * async.angelFall(tasks, function(err, arg1, arg2, arg3, arg4) {
+   *   console.log(arg1, arg2, arg3, arg4); // 1 2 3 4
+   * });
+   *
+   */
+  function angelFall(tasks, callback) {
+    callback = callback || noop;
+    if (!checkWaterfallTasks(tasks, callback)) {
+      return;
+    }
+    var completed = 0;
+    var sync = false;
+    var size = tasks.length;
+    var func = tasks[completed];
+    var args = [];
+    var iterate = function() {
+      switch (func.length) {
+        case 0:
+          try {
+            next(null, func());
+          } catch (e) {
+            next(e);
+          }
+          return;
+        case 1:
+          return func(next);
+        case 2:
+          return func(args[1], next);
+        case 3:
+          return func(args[1], args[2], next);
+        case 4:
+          return func(args[1], args[2], args[3], next);
+        case 5:
+          return func(args[1], args[2], args[3], args[4], next);
+        default:
+          args = slice(args, 1);
+          args[func.length - 1] = next;
+          return func.apply(null, args);
+      }
+    };
+    iterate();
+
+    function next(err, res) {
+      if (err) {
+        iterate = throwError;
+        callback = onlyOnce(callback);
+        callback(err);
+        return;
+      }
+      if (++completed === size) {
+        iterate = throwError;
+        var done = callback;
+        callback = throwError;
+        if (arguments.length === 2) {
+          done(err, res);
+        } else {
+          done.apply(null, createArray(arguments));
+        }
+        return;
+      }
+      func = tasks[completed];
+      args = arguments;
+      if (sync) {
+        nextTick(iterate);
+      } else {
+        sync = true;
+        iterate();
+      }
+      sync = false;
+    }
+  }
+
+  /**
+   * @memberof async
+   * @namespace whilst
+   * @param {Function} test
+   * @param {Function} iterator
+   * @param {Function} callback
+   */
+  function whilst(test, iterator, callback) {
+    callback = callback || noop;
+    var sync = false;
+    if (test()) {
+      iterate();
+    } else {
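+      // the test failed before the first iteration, so finish right away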
+ callback(null); + } + + function iterate() { + if (sync) { + nextTick(next); + } else { + sync = true; + iterator(done); + } + sync = false; + } + + function next() { + iterator(done); + } + + function done(err, arg) { + if (err) { + return callback(err); + } + if (arguments.length <= 2) { + if (test(arg)) { + iterate(); + } else { + callback(null, arg); + } + return; + } + arg = slice(arguments, 1); + if (test.apply(null, arg)) { + iterate(); + } else { + callback.apply(null, [null].concat(arg)); + } + } + } + + /** + * @memberof async + * @namespace doWhilst + * @param {Function} iterator + * @param {Function} test + * @param {Function} callback + */ + function doWhilst(iterator, test, callback) { + callback = callback || noop; + var sync = false; + next(); + + function iterate() { + if (sync) { + nextTick(next); + } else { + sync = true; + iterator(done); + } + sync = false; + } + + function next() { + iterator(done); + } + + function done(err, arg) { + if (err) { + return callback(err); + } + if (arguments.length <= 2) { + if (test(arg)) { + iterate(); + } else { + callback(null, arg); + } + return; + } + arg = slice(arguments, 1); + if (test.apply(null, arg)) { + iterate(); + } else { + callback.apply(null, [null].concat(arg)); + } + } + } + + /** + * @memberof async + * @namespace until + * @param {Function} test + * @param {Function} iterator + * @param {Function} callback + */ + function until(test, iterator, callback) { + callback = callback || noop; + var sync = false; + if (!test()) { + iterate(); + } else { + callback(null); + } + + function iterate() { + if (sync) { + nextTick(next); + } else { + sync = true; + iterator(done); + } + sync = false; + } + + function next() { + iterator(done); + } + + function done(err, arg) { + if (err) { + return callback(err); + } + if (arguments.length <= 2) { + if (!test(arg)) { + iterate(); + } else { + callback(null, arg); + } + return; + } + arg = slice(arguments, 1); + if (!test.apply(null, arg)) { + iterate(); + } else { + callback.apply(null, [null].concat(arg)); + } + } + } + + /** + * @memberof async + * @namespace doUntil + * @param {Function} iterator + * @param {Function} test + * @param {Function} callback + */ + function doUntil(iterator, test, callback) { + callback = callback || noop; + var sync = false; + next(); + + function iterate() { + if (sync) { + nextTick(next); + } else { + sync = true; + iterator(done); + } + sync = false; + } + + function next() { + iterator(done); + } + + function done(err, arg) { + if (err) { + return callback(err); + } + if (arguments.length <= 2) { + if (!test(arg)) { + iterate(); + } else { + callback(null, arg); + } + return; + } + arg = slice(arguments, 1); + if (!test.apply(null, arg)) { + iterate(); + } else { + callback.apply(null, [null].concat(arg)); + } + } + } + + /** + * @memberof async + * @namespace during + * @param {Function} test + * @param {Function} iterator + * @param {Function} callback + */ + function during(test, iterator, callback) { + callback = callback || noop; + _test(); + + function _test() { + test(iterate); + } + + function iterate(err, truth) { + if (err) { + return callback(err); + } + if (truth) { + iterator(done); + } else { + callback(null); + } + } + + function done(err) { + if (err) { + return callback(err); + } + _test(); + } + } + + /** + * @memberof async + * @namespace doDuring + * @param {Function} test + * @param {Function} iterator + * @param {Function} callback + */ + function doDuring(iterator, test, callback) { + callback = callback || noop; + 
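+    // Usage sketch (an assumed illustration, not from the original docs):
+    // run the iterator once, then repeat while the asynchronous test holds:
+    //
+    //   var count = 0;
+    //   async.doDuring(
+    //     function(done) { done(null, ++count); },
+    //     function(res, truth) { truth(null, res < 3); },
+    //     function(err) { /* finishes once res < 3 is false */ }
+    //   );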
iterate(null, true); + + function iterate(err, truth) { + if (err) { + return callback(err); + } + if (truth) { + iterator(done); + } else { + callback(null); + } + } + + function done(err, res) { + if (err) { + return callback(err); + } + switch (arguments.length) { + case 0: + case 1: + test(iterate); + break; + case 2: + test(res, iterate); + break; + default: + var args = slice(arguments, 1); + args.push(iterate); + test.apply(null, args); + break; + } + } + } + + /** + * @memberof async + * @namespace forever + */ + function forever(iterator, callback) { + var sync = false; + iterate(); + + function iterate() { + iterator(next); + } + + function next(err) { + if (err) { + if (callback) { + return callback(err); + } + throw err; + } + if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + } + } + + /** + * @memberof async + * @namespace compose + */ + function compose() { + return seq.apply(null, reverse(arguments)); + } + + /** + * @memberof async + * @namespace seq + */ + function seq(/* functions... */) { + var fns = createArray(arguments); + + return function() { + var self = this; + var args = createArray(arguments); + var callback = args[args.length - 1]; + if (typeof callback === func) { + args.pop(); + } else { + callback = noop; + } + reduce(fns, args, iterator, done); + + function iterator(newargs, fn, callback) { + var func = function(err) { + var nextargs = slice(arguments, 1); + callback(err, nextargs); + }; + newargs.push(func); + fn.apply(self, newargs); + } + + function done(err, res) { + res = isArray(res) ? res : [res]; + res.unshift(err); + callback.apply(self, res); + } + }; + } + + function createApplyEach(func) { + return function applyEach(fns /* arguments */) { + var go = function() { + var self = this; + var args = createArray(arguments); + var callback = args.pop() || noop; + return func(fns, iterator, callback); + + function iterator(fn, done) { + fn.apply(self, args.concat([done])); + } + }; + if (arguments.length > 1) { + var args = slice(arguments, 1); + return go.apply(this, args); + } else { + return go; + } + }; + } + + /** + * @see https://github.com/caolan/async/blob/master/lib/internal/DoublyLinkedList.js + */ + function DLL() { + this.head = null; + this.tail = null; + this.length = 0; + } + + DLL.prototype._removeLink = function(node) { + var prev = node.prev; + var next = node.next; + if (prev) { + prev.next = next; + } else { + this.head = next; + } + if (next) { + next.prev = prev; + } else { + this.tail = prev; + } + node.prev = null; + node.next = null; + this.length--; + return node; + }; + + DLL.prototype.empty = DLL; + + DLL.prototype._setInitial = function(node) { + this.length = 1; + this.head = this.tail = node; + }; + + DLL.prototype.insertBefore = function(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev) { + node.prev.next = newNode; + } else { + this.head = newNode; + } + node.prev = newNode; + this.length++; + }; + + DLL.prototype.unshift = function(node) { + if (this.head) { + this.insertBefore(this.head, node); + } else { + this._setInitial(node); + } + }; + + DLL.prototype.push = function(node) { + var tail = this.tail; + if (tail) { + node.prev = tail; + node.next = tail.next; + this.tail = node; + tail.next = node; + this.length++; + } else { + this._setInitial(node); + } + }; + + DLL.prototype.shift = function() { + return this.head && this._removeLink(this.head); + }; + + DLL.prototype.splice = function(end) { + var task; + var tasks = []; + while 
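+    // shift up to `end` tasks off the head of the list (used for cargo payloads)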
(end-- && (task = this.shift())) { + tasks.push(task); + } + return tasks; + }; + + DLL.prototype.remove = function(test) { + var node = this.head; + while (node) { + if (test(node)) { + this._removeLink(node); + } + node = node.next; + } + return this; + }; + + /** + * @private + */ + function baseQueue(isQueue, worker, concurrency, payload) { + if (concurrency === undefined) { + concurrency = 1; + } else if (isNaN(concurrency) || concurrency < 1) { + throw new Error('Concurrency must not be zero'); + } + + var workers = 0; + var workersList = []; + var _callback, _unshift; + + var q = { + _tasks: new DLL(), + concurrency: concurrency, + payload: payload, + saturated: noop, + unsaturated: noop, + buffer: concurrency / 4, + empty: noop, + drain: noop, + error: noop, + started: false, + paused: false, + push: push, + kill: kill, + unshift: unshift, + remove: remove, + process: isQueue ? runQueue : runCargo, + length: getLength, + running: running, + workersList: getWorkersList, + idle: idle, + pause: pause, + resume: resume, + _worker: worker + }; + return q; + + function push(tasks, callback) { + _insert(tasks, callback); + } + + function unshift(tasks, callback) { + _insert(tasks, callback, true); + } + + function _exec(task) { + var item = { + data: task, + callback: _callback + }; + if (_unshift) { + q._tasks.unshift(item); + } else { + q._tasks.push(item); + } + nextTick(q.process); + } + + function _insert(tasks, callback, unshift) { + if (callback == null) { + callback = noop; + } else if (typeof callback !== 'function') { + throw new Error('task callback must be a function'); + } + q.started = true; + var _tasks = isArray(tasks) ? tasks : [tasks]; + + if (tasks === undefined || !_tasks.length) { + if (q.idle()) { + nextTick(q.drain); + } + return; + } + + _unshift = unshift; + _callback = callback; + arrayEachSync(_tasks, _exec); + // Avoid leaking the callback + _callback = undefined; + } + + function kill() { + q.drain = noop; + q._tasks.empty(); + } + + function _next(q, tasks) { + var called = false; + return function done(err, res) { + if (called) { + throwError(); + } + called = true; + + workers--; + var task; + var index = -1; + var size = workersList.length; + var taskIndex = -1; + var taskSize = tasks.length; + var useApply = arguments.length > 2; + var args = useApply && createArray(arguments); + while (++taskIndex < taskSize) { + task = tasks[taskIndex]; + while (++index < size) { + if (workersList[index] === task) { + if (index === 0) { + workersList.shift(); + } else { + workersList.splice(index, 1); + } + index = size; + size--; + } + } + index = -1; + if (useApply) { + task.callback.apply(task, args); + } else { + task.callback(err, res); + } + if (err) { + q.error(err, task.data); + } + } + + if (workers <= q.concurrency - q.buffer) { + q.unsaturated(); + } + + if (q._tasks.length + workers === 0) { + q.drain(); + } + q.process(); + }; + } + + function runQueue() { + while (!q.paused && workers < q.concurrency && q._tasks.length) { + var task = q._tasks.shift(); + workers++; + workersList.push(task); + if (q._tasks.length === 0) { + q.empty(); + } + if (workers === q.concurrency) { + q.saturated(); + } + var done = _next(q, [task]); + worker(task.data, done); + } + } + + function runCargo() { + while (!q.paused && workers < q.concurrency && q._tasks.length) { + var tasks = q._tasks.splice(q.payload || q._tasks.length); + var index = -1; + var size = tasks.length; + var data = Array(size); + while (++index < size) { + data[index] = tasks[index].data; + } + workers++; 
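+        // Unlike queue(), each cargo worker call receives an array of up to
+        // `payload` data items spliced off the task list in one batch.
+        // Illustrative usage (a sketch; `saveBatch` is a hypothetical function):
+        //
+        //   var c = async.cargo(function(batch, done) {
+        //     saveBatch(batch, done); // batch is an array of pushed items
+        //   }, 2);
+        //   c.push([1, 2, 3]); // worker receives [1, 2], then [3]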
+ nativePush.apply(workersList, tasks); + if (q._tasks.length === 0) { + q.empty(); + } + if (workers === q.concurrency) { + q.saturated(); + } + var done = _next(q, tasks); + worker(data, done); + } + } + + function getLength() { + return q._tasks.length; + } + + function running() { + return workers; + } + + function getWorkersList() { + return workersList; + } + + function idle() { + return q.length() + workers === 0; + } + + function pause() { + q.paused = true; + } + + function _resume() { + nextTick(q.process); + } + + function resume() { + if (q.paused === false) { + return; + } + q.paused = false; + var count = q.concurrency < q._tasks.length ? q.concurrency : q._tasks.length; + timesSync(count, _resume); + } + + /** + * @param {Function} test + */ + function remove(test) { + q._tasks.remove(test); + } + } + + /** + * @memberof async + * @namespace queue + */ + function queue(worker, concurrency) { + return baseQueue(true, worker, concurrency); + } + + /** + * @memberof async + * @namespace priorityQueue + */ + function priorityQueue(worker, concurrency) { + var q = baseQueue(true, worker, concurrency); + q.push = push; + delete q.unshift; + return q; + + function push(tasks, priority, callback) { + q.started = true; + priority = priority || 0; + var _tasks = isArray(tasks) ? tasks : [tasks]; + var taskSize = _tasks.length; + + if (tasks === undefined || taskSize === 0) { + if (q.idle()) { + nextTick(q.drain); + } + return; + } + + callback = typeof callback === func ? callback : noop; + var nextNode = q._tasks.head; + while (nextNode && priority >= nextNode.priority) { + nextNode = nextNode.next; + } + while (taskSize--) { + var item = { + data: _tasks[taskSize], + priority: priority, + callback: callback + }; + if (nextNode) { + q._tasks.insertBefore(nextNode, item); + } else { + q._tasks.push(item); + } + nextTick(q.process); + } + } + } + + /** + * @memberof async + * @namespace cargo + */ + function cargo(worker, payload) { + return baseQueue(false, worker, 1, payload); + } + + /** + * @memberof async + * @namespace auto + * @param {Object} tasks + * @param {number} [concurrency] + * @param {Function} [callback] + */ + function auto(tasks, concurrency, callback) { + if (typeof concurrency === func) { + callback = concurrency; + concurrency = null; + } + var keys = nativeKeys(tasks); + var rest = keys.length; + var results = {}; + if (rest === 0) { + return callback(null, results); + } + var runningTasks = 0; + var readyTasks = new DLL(); + var listeners = Object.create(null); + callback = onlyOnce(callback || noop); + concurrency = concurrency || rest; + + baseEachSync(tasks, iterator, keys); + proceedQueue(); + + function iterator(task, key) { + // no dependencies + var _task, _taskSize; + if (!isArray(task)) { + _task = task; + _taskSize = 0; + readyTasks.push([_task, _taskSize, done]); + return; + } + var dependencySize = task.length - 1; + _task = task[dependencySize]; + _taskSize = dependencySize; + if (dependencySize === 0) { + readyTasks.push([_task, _taskSize, done]); + return; + } + // dependencies + var index = -1; + while (++index < dependencySize) { + var dependencyName = task[index]; + if (notInclude(keys, dependencyName)) { + var msg = + 'async.auto task `' + + key + + '` has non-existent dependency `' + + dependencyName + + '` in ' + + task.join(', '); + throw new Error(msg); + } + var taskListeners = listeners[dependencyName]; + if (!taskListeners) { + taskListeners = listeners[dependencyName] = []; + } + taskListeners.push(taskListener); + } + + function 
done(err, arg) { + if (key === null) { + throwError(); + } + arg = arguments.length <= 2 ? arg : slice(arguments, 1); + if (err) { + rest = 0; + runningTasks = 0; + readyTasks.length = 0; + var safeResults = objectClone(results); + safeResults[key] = arg; + key = null; + var _callback = callback; + callback = noop; + _callback(err, safeResults); + return; + } + runningTasks--; + rest--; + results[key] = arg; + taskComplete(key); + key = null; + } + + function taskListener() { + if (--dependencySize === 0) { + readyTasks.push([_task, _taskSize, done]); + } + } + } + + function proceedQueue() { + if (readyTasks.length === 0 && runningTasks === 0) { + if (rest !== 0) { + throw new Error('async.auto task has cyclic dependencies'); + } + return callback(null, results); + } + while (readyTasks.length && runningTasks < concurrency && callback !== noop) { + runningTasks++; + var array = readyTasks.shift(); + if (array[1] === 0) { + array[0](array[2]); + } else { + array[0](results, array[2]); + } + } + } + + function taskComplete(key) { + var taskListeners = listeners[key] || []; + arrayEachSync(taskListeners, function(task) { + task(); + }); + proceedQueue(); + } + } + + var FN_ARGS = /^(function)?\s*[^\(]*\(\s*([^\)]*)\)/m; + var FN_ARG_SPLIT = /,/; + var FN_ARG = /(=.+)?(\s*)$/; + var STRIP_COMMENTS = /((\/\/.*$)|(\/\*[\s\S]*?\*\/))/gm; + + /** + * parse function arguments for `autoInject` + * + * @private + */ + function parseParams(func) { + func = func.toString().replace(STRIP_COMMENTS, ''); + func = func.match(FN_ARGS)[2].replace(' ', ''); + func = func ? func.split(FN_ARG_SPLIT) : []; + func = func.map(function(arg) { + return arg.replace(FN_ARG, '').trim(); + }); + return func; + } + + /** + * @memberof async + * @namespace autoInject + * @param {Object} tasks + * @param {number} [concurrency] + * @param {Function} [callback] + */ + function autoInject(tasks, concurrency, callback) { + var newTasks = {}; + baseEachSync(tasks, iterator, nativeKeys(tasks)); + auto(newTasks, concurrency, callback); + + function iterator(task, key) { + var params; + var taskLength = task.length; + + if (isArray(task)) { + if (taskLength === 0) { + throw new Error('autoInject task functions require explicit parameters.'); + } + params = createArray(task); + taskLength = params.length - 1; + task = params[taskLength]; + if (taskLength === 0) { + newTasks[key] = task; + return; + } + } else if (taskLength === 1) { + newTasks[key] = task; + return; + } else { + params = parseParams(task); + if (taskLength === 0 && params.length === 0) { + throw new Error('autoInject task functions require explicit parameters.'); + } + taskLength = params.length - 1; + } + params[taskLength] = newTask; + newTasks[key] = params; + + function newTask(results, done) { + switch (taskLength) { + case 1: + task(results[params[0]], done); + break; + case 2: + task(results[params[0]], results[params[1]], done); + break; + case 3: + task(results[params[0]], results[params[1]], results[params[2]], done); + break; + default: + var i = -1; + while (++i < taskLength) { + params[i] = results[params[i]]; + } + params[i] = done; + task.apply(null, params); + break; + } + } + } + } + + /** + * @memberof async + * @namespace retry + * @param {integer|Object|Function} opts + * @param {Function} [task] + * @param {Function} [callback] + */ + function retry(opts, task, callback) { + var times, intervalFunc, errorFilter; + var count = 0; + if (arguments.length < 3 && typeof opts === func) { + callback = task || noop; + task = opts; + opts = null; + 
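+      // Called as retry(task, callback): fall back to the default retry count.
+      // Illustrative usage of the options form (a sketch; `flaky` is a
+      // hypothetical task function):
+      //
+      //   async.retry({ times: 3, interval: 100 }, flaky, function(err, res) {
+      //     // res is the first successful result, or err after 3 failed tries
+      //   });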
times = DEFAULT_TIMES; + } else { + callback = callback || noop; + switch (typeof opts) { + case 'object': + if (typeof opts.errorFilter === func) { + errorFilter = opts.errorFilter; + } + var interval = opts.interval; + switch (typeof interval) { + case func: + intervalFunc = interval; + break; + case 'string': + case 'number': + interval = +interval; + intervalFunc = interval + ? function() { + return interval; + } + : function() { + return DEFAULT_INTERVAL; + }; + break; + } + times = +opts.times || DEFAULT_TIMES; + break; + case 'number': + times = opts || DEFAULT_TIMES; + break; + case 'string': + times = +opts || DEFAULT_TIMES; + break; + default: + throw new Error('Invalid arguments for async.retry'); + } + } + if (typeof task !== 'function') { + throw new Error('Invalid arguments for async.retry'); + } + + if (intervalFunc) { + task(intervalCallback); + } else { + task(simpleCallback); + } + + function simpleIterator() { + task(simpleCallback); + } + + function simpleCallback(err, res) { + if (++count === times || !err || (errorFilter && !errorFilter(err))) { + if (arguments.length <= 2) { + return callback(err, res); + } + var args = createArray(arguments); + return callback.apply(null, args); + } + simpleIterator(); + } + + function intervalIterator() { + task(intervalCallback); + } + + function intervalCallback(err, res) { + if (++count === times || !err || (errorFilter && !errorFilter(err))) { + if (arguments.length <= 2) { + return callback(err, res); + } + var args = createArray(arguments); + return callback.apply(null, args); + } + setTimeout(intervalIterator, intervalFunc(count)); + } + } + + function retryable(opts, task) { + if (!task) { + task = opts; + opts = null; + } + return done; + + function done() { + var taskFn; + var args = createArray(arguments); + var lastIndex = args.length - 1; + var callback = args[lastIndex]; + switch (task.length) { + case 1: + taskFn = task1; + break; + case 2: + taskFn = task2; + break; + case 3: + taskFn = task3; + break; + default: + taskFn = task4; + } + if (opts) { + retry(opts, taskFn, callback); + } else { + retry(taskFn, callback); + } + + function task1(done) { + task(done); + } + + function task2(done) { + task(args[0], done); + } + + function task3(done) { + task(args[0], args[1], done); + } + + function task4(callback) { + args[lastIndex] = callback; + task.apply(null, args); + } + } + } + + /** + * @memberof async + * @namespace iterator + */ + function iterator(tasks) { + var size = 0; + var keys = []; + if (isArray(tasks)) { + size = tasks.length; + } else { + keys = nativeKeys(tasks); + size = keys.length; + } + return makeCallback(0); + + function makeCallback(index) { + var fn = function() { + if (size) { + var key = keys[index] || index; + tasks[key].apply(null, createArray(arguments)); + } + return fn.next(); + }; + fn.next = function() { + return index < size - 1 ? 
makeCallback(index + 1) : null; + }; + return fn; + } + } + + /** + * @memberof async + * @namespace apply + */ + function apply(func) { + switch (arguments.length) { + case 0: + case 1: + return func; + case 2: + return func.bind(null, arguments[1]); + case 3: + return func.bind(null, arguments[1], arguments[2]); + case 4: + return func.bind(null, arguments[1], arguments[2], arguments[3]); + case 5: + return func.bind(null, arguments[1], arguments[2], arguments[3], arguments[4]); + default: + var size = arguments.length; + var index = 0; + var args = Array(size); + args[index] = null; + while (++index < size) { + args[index] = arguments[index]; + } + return func.bind.apply(func, args); + } + } + + /** + * @memberof async + * @namespace timeout + * @param {Function} func + * @param {number} millisec + * @param {*} info + */ + function timeout(func, millisec, info) { + var callback, timer; + return wrappedFunc; + + function wrappedFunc() { + timer = setTimeout(timeoutCallback, millisec); + var args = createArray(arguments); + var lastIndex = args.length - 1; + callback = args[lastIndex]; + args[lastIndex] = injectedCallback; + simpleApply(func, args); + } + + function timeoutCallback() { + var name = func.name || 'anonymous'; + var err = new Error('Callback function "' + name + '" timed out.'); + err.code = 'ETIMEDOUT'; + if (info) { + err.info = info; + } + timer = null; + callback(err); + } + + function injectedCallback() { + if (timer !== null) { + simpleApply(callback, createArray(arguments)); + clearTimeout(timer); + } + } + + function simpleApply(func, args) { + switch (args.length) { + case 0: + func(); + break; + case 1: + func(args[0]); + break; + case 2: + func(args[0], args[1]); + break; + default: + func.apply(null, args); + break; + } + } + } + + /** + * @memberof async + * @namespace times + * @param {number} n - n >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * var iterator = function(n, done) { + * done(null, n); + * }; + * async.times(4, iterator, function(err, res) { + * console.log(res); // [0, 1, 2, 3]; + * }); + * + */ + function times(n, iterator, callback) { + callback = callback || noop; + n = +n; + if (isNaN(n) || n < 1) { + return callback(null, []); + } + var result = Array(n); + timesSync(n, iterate); + + function iterate(num) { + iterator(num, createCallback(num)); + } + + function createCallback(index) { + return function(err, res) { + if (index === null) { + throwError(); + } + result[index] = res; + index = null; + if (err) { + callback(err); + callback = noop; + } else if (--n === 0) { + callback(null, result); + } + }; + } + } + + /** + * @memberof async + * @namespace timesSeries + * @param {number} n - n >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * var iterator = function(n, done) { + * done(null, n); + * }; + * async.timesSeries(4, iterator, function(err, res) { + * console.log(res); // [0, 1, 2, 3]; + * }); + * + */ + function timesSeries(n, iterator, callback) { + callback = callback || noop; + n = +n; + if (isNaN(n) || n < 1) { + return callback(null, []); + } + var result = Array(n); + var sync = false; + var completed = 0; + iterate(); + + function iterate() { + iterator(completed, done); + } + + function done(err, res) { + result[completed] = res; + if (err) { + callback(err); + callback = throwError; + } else if (++completed >= n) { + callback(null, result); + callback = throwError; + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } 
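+    // The `sync` flag defers a synchronous callback through nextTick so a
+    // fast iterator cannot re-enter `iterate` and grow the stack without bound.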
+ sync = false; + } + } + + /** + * @memberof async + * @namespace timesLimit + * @param {number} n - n >= 1 + * @param {number} limit - n >= 1 + * @param {Function} iterator + * @param {Function} callback + * @example + * + * var iterator = function(n, done) { + * done(null, n); + * }; + * async.timesLimit(4, 2, iterator, function(err, res) { + * console.log(res); // [0, 1, 2, 3]; + * }); + * + */ + function timesLimit(n, limit, iterator, callback) { + callback = callback || noop; + n = +n; + if (isNaN(n) || n < 1 || isNaN(limit) || limit < 1) { + return callback(null, []); + } + var result = Array(n); + var sync = false; + var started = 0; + var completed = 0; + timesSync(limit > n ? n : limit, iterate); + + function iterate() { + var index = started++; + if (index < n) { + iterator(index, createCallback(index)); + } + } + + function createCallback(index) { + return function(err, res) { + if (index === null) { + throwError(); + } + result[index] = res; + index = null; + if (err) { + callback(err); + callback = noop; + } else if (++completed >= n) { + callback(null, result); + callback = throwError; + } else if (sync) { + nextTick(iterate); + } else { + sync = true; + iterate(); + } + sync = false; + }; + } + } + + /** + * @memberof async + * @namespace race + * @param {Array|Object} tasks - functions + * @param {Function} callback + * @example + * + * // array + * var called = 0; + * var tasks = [ + * function(done) { + * setTimeout(function() { + * called++; + * done(null, '1'); + * }, 30); + * }, + * function(done) { + * setTimeout(function() { + * called++; + * done(null, '2'); + * }, 20); + * }, + * function(done) { + * setTimeout(function() { + * called++; + * done(null, '3'); + * }, 10); + * } + * ]; + * async.race(tasks, function(err, res) { + * console.log(res); // '3' + * console.log(called); // 1 + * setTimeout(function() { + * console.log(called); // 3 + * }, 50); + * }); + * + * @example + * + * // object + * var called = 0; + * var tasks = { + * 'test1': function(done) { + * setTimeout(function() { + * called++; + * done(null, '1'); + * }, 30); + * }, + * 'test2': function(done) { + * setTimeout(function() { + * called++; + * done(null, '2'); + * }, 20); + * }, + * 'test3': function(done) { + * setTimeout(function() { + * called++; + * done(null, '3'); + * }, 10); + * } + * }; + * async.race(tasks, function(err, res) { + * console.log(res); // '3' + * console.log(called); // 1 + * setTimeout(function() { + * console.log(called); // 3 + * done(); + * }, 50); + * }); + * + */ + function race(tasks, callback) { + callback = once(callback || noop); + var size, keys; + var index = -1; + if (isArray(tasks)) { + size = tasks.length; + while (++index < size) { + tasks[index](callback); + } + } else if (tasks && typeof tasks === obj) { + keys = nativeKeys(tasks); + size = keys.length; + while (++index < size) { + tasks[keys[index]](callback); + } + } else { + return callback(new TypeError('First argument to race must be a collection of functions')); + } + if (!size) { + callback(null); + } + } + + /** + * @memberof async + * @namespace memoize + */ + function memoize(fn, hasher) { + hasher = + hasher || + function(hash) { + return hash; + }; + + var memo = {}; + var queues = {}; + var memoized = function() { + var args = createArray(arguments); + var callback = args.pop(); + var key = hasher.apply(null, args); + if (has(memo, key)) { + nextTick(function() { + callback.apply(null, memo[key]); + }); + return; + } + if (has(queues, key)) { + return queues[key].push(callback); + } + + 
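+      // First call for this key: register the waiter list and run `fn` once;
+      // concurrent calls for the same key queue up above and all fire when
+      // `done` runs. Note the default hasher keys on the first argument only.
+      // Illustrative usage (a sketch; `slowAdd` is a hypothetical function):
+      //
+      //   var slowAdd = async.memoize(function(a, b, cb) {
+      //     setTimeout(function() { cb(null, a + b); }, 100);
+      //   });
+      //   slowAdd(1, 2, function(err, sum) { /* sum === 3 */ });
+      //   slowAdd(1, 2, function(err, sum) { /* served from memo or queue */ });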
queues[key] = [callback]; + args.push(done); + fn.apply(null, args); + + function done(err) { + var args = createArray(arguments); + if (!err) { + memo[key] = args; + } + var q = queues[key]; + delete queues[key]; + + var i = -1; + var size = q.length; + while (++i < size) { + q[i].apply(null, args); + } + } + }; + memoized.memo = memo; + memoized.unmemoized = fn; + return memoized; + } + + /** + * @memberof async + * @namespace unmemoize + */ + function unmemoize(fn) { + return function() { + return (fn.unmemoized || fn).apply(null, arguments); + }; + } + + /** + * @memberof async + * @namespace ensureAsync + */ + function ensureAsync(fn) { + return function(/* ...args, callback */) { + var args = createArray(arguments); + var lastIndex = args.length - 1; + var callback = args[lastIndex]; + var sync = true; + args[lastIndex] = done; + fn.apply(this, args); + sync = false; + + function done() { + var innerArgs = createArray(arguments); + if (sync) { + nextTick(function() { + callback.apply(null, innerArgs); + }); + } else { + callback.apply(null, innerArgs); + } + } + }; + } + + /** + * @memberof async + * @namespace constant + */ + function constant(/* values... */) { + var args = [null].concat(createArray(arguments)); + return function(callback) { + callback = arguments[arguments.length - 1]; + callback.apply(this, args); + }; + } + + function asyncify(fn) { + return function(/* args..., callback */) { + var args = createArray(arguments); + var callback = args.pop(); + var result; + try { + result = fn.apply(this, args); + } catch (e) { + return callback(e); + } + if (result && typeof result.then === func) { + result.then( + function(value) { + invokeCallback(callback, null, value); + }, + function(err) { + invokeCallback(callback, err && err.message ? 
err : new Error(err)); + } + ); + } else { + callback(null, result); + } + }; + } + + function invokeCallback(callback, err, value) { + try { + callback(err, value); + } catch (e) { + nextTick(rethrow, e); + } + } + + function rethrow(error) { + throw error; + } + + /** + * @memberof async + * @namespace reflect + * @param {Function} func + * @return {Function} + */ + function reflect(func) { + return function(/* args..., callback */) { + var callback; + switch (arguments.length) { + case 1: + callback = arguments[0]; + return func(done); + case 2: + callback = arguments[1]; + return func(arguments[0], done); + default: + var args = createArray(arguments); + var lastIndex = args.length - 1; + callback = args[lastIndex]; + args[lastIndex] = done; + func.apply(this, args); + } + + function done(err, res) { + if (err) { + return callback(null, { + error: err + }); + } + if (arguments.length > 2) { + res = slice(arguments, 1); + } + callback(null, { + value: res + }); + } + }; + } + + /** + * @memberof async + * @namespace reflectAll + * @param {Array[]|Object} tasks + * @return {Function} + */ + function reflectAll(tasks) { + var newTasks, keys; + if (isArray(tasks)) { + newTasks = Array(tasks.length); + arrayEachSync(tasks, iterate); + } else if (tasks && typeof tasks === obj) { + keys = nativeKeys(tasks); + newTasks = {}; + baseEachSync(tasks, iterate, keys); + } + return newTasks; + + function iterate(func, key) { + newTasks[key] = reflect(func); + } + } + + /** + * @memberof async + * @namespace createLogger + */ + function createLogger(name) { + return function(fn) { + var args = slice(arguments, 1); + args.push(done); + fn.apply(null, args); + }; + + function done(err) { + if (typeof console === obj) { + if (err) { + if (console.error) { + console.error(err); + } + return; + } + if (console[name]) { + var args = slice(arguments, 1); + arrayEachSync(args, function(arg) { + console[name](arg); + }); + } + } + } + } + + /** + * @memberof async + * @namespace safe + */ + function safe() { + createImmediate(); + return exports; + } + + /** + * @memberof async + * @namespace fast + */ + function fast() { + createImmediate(false); + return exports; + } +}); diff --git a/node_modules/neo-async/async.min.js b/node_modules/neo-async/async.min.js new file mode 100644 index 0000000..4161a3f --- /dev/null +++ b/node_modules/neo-async/async.min.js @@ -0,0 +1,80 @@ +(function(N,O){"object"===typeof exports&&"undefined"!==typeof module?O(exports):"function"===typeof define&&define.amd?define(["exports"],O):N.async?O(N.neo_async=N.neo_async||{}):O(N.async=N.async||{})})(this,function(N){function O(a){var c=function(a){var d=J(arguments,1);setTimeout(function(){a.apply(null,d)})};T="function"===typeof setImmediate?setImmediate:c;"object"===typeof process&&"function"===typeof process.nextTick?(D=/^v0.10/.test(process.version)?T:process.nextTick,ba=/^v0/.test(process.version)? 
+T:process.nextTick):ba=D=T;!1===a&&(D=function(a){a()})}function H(a){for(var c=-1,b=a.length,d=Array(b);++c=d)return[];for(var e=Array(d);++bd[e]){var g=d[f]; +d[f]=d[e];d[e]=g}}if(!(e>b)){for(var l,e=a[a[c]>a[e]?c:e],f=c,g=b;f<=g;){for(l=f;f=l&&a[g]>=e;)g--;if(f>g)break;var q=a;l=d;var s=f++,h=g--,k=q[s];q[s]=q[h];q[h]=k;q=l[s];l[s]=l[h];l[h]=q}e=f;ca(a,c,e-1,d);ca(a,e,b,d)}}}function S(a){var c=[];Q(a,function(a){a!==w&&(C(a)?X.apply(c,a):c.push(a))});return c}function da(a,c,b){var d=-1,e=a.length;if(3===c.length)for(;++db)return e(null,[]);y=y||Array(m);K(b>m?m:b,x)}}function Y(a,c,b){function d(){c(a[v],s)}function e(){c(a[v],v,s)}function f(){n=r.next();n.done?b(null):c(n.value,s)}function g(){n=r.next();n.done?b(null):c(n.value,v,s)}function l(){c(a[m[v]],s)}function q(){k=m[v];c(a[k],k,s)}function s(a,d){a?b(a):++v===h||!1===d?(p=A,b(null)):u?D(p): +(u=!0,p());u=!1}b=E(b||w);var h,k,m,r,n,p,u=!1,v=0;C(a)?(h=a.length,p=3===c.length?e:d):a&&(z&&a[z]?(h=Infinity,r=a[z](),p=3===c.length?g:f):"object"===typeof a&&(m=F(a),h=m.length,p=3===c.length?q:l));if(!h)return b(null);p()}function Z(a,c,b,d){function e(){xc)return d(null);K(c>k?k:c,v)}function za(a,c,b){function d(){c(a[t],s)}function e(){c(a[t],t,s)}function f(){n=r.next(); +n.done?b(null,p):c(n.value,s)}function g(){n=r.next();n.done?b(null,p):c(n.value,t,s)}function l(){c(a[m[t]],s)}function q(){k=m[t];c(a[k],k,s)}function s(a,d){a?(u=A,b=E(b),b(a,H(p))):(p[t]=d,++t===h?(u=A,b(null,p),b=A):v?D(u):(v=!0,u()),v=!1)}b=b||w;var h,k,m,r,n,p,u,v=!1,t=0;C(a)?(h=a.length,u=3===c.length?e:d):a&&(z&&a[z]?(h=Infinity,p=[],r=a[z](),u=3===c.length?g:f):"object"===typeof a&&(m=F(a),h=m.length,u=3===c.length?q:l));if(!h)return b(null,[]);p=p||Array(h);u()}function Aa(a,c,b,d){return function(e, +f,g){function l(a){var b=!1;return function(c,e){b&&A();b=!0;c?(g=I(g),g(c)):!!e===d?(g=I(g),g(null,a)):++h===q&&g(null)}}g=g||w;var q,s,h=0;C(e)?(q=e.length,a(e,f,l)):e&&(z&&e[z]?(q=b(e,f,l))&&q===h&&g(null):"object"===typeof e&&(s=F(e),q=s.length,c(e,f,l,s)));q||g(null)}}function Ba(a){return function(c,b,d){function e(){r=c[x];b(r,h)}function f(){r=c[x];b(r,x,h)}function g(){u=p.next();r=u.value;u.done?d(null):b(r,h)}function l(){u=p.next();r=u.value;u.done?d(null):b(r,x,h)}function q(){r=c[n[x]]; +b(r,h)}function s(){m=n[x];r=c[m];b(r,m,h)}function h(b,c){b?d(b):!!c===a?(v=A,d(null,r)):++x===k?(v=A,d(null)):t?D(v):(t=!0,v());t=!1}d=E(d||w);var k,m,r,n,p,u,v,t=!1,x=0;C(c)?(k=c.length,v=3===b.length?f:e):c&&(z&&c[z]?(k=Infinity,p=c[z](),v=3===b.length?l:g):"object"===typeof c&&(n=F(c),k=n.length,v=3===b.length?s:q));if(!k)return d(null);v()}}function Ca(a){return function(c,b,d,e){function f(){r=G++;rb)return e(null);K(b>m?m:b,x)}}function Da(a,c,b,d){return function(e,f,g){function l(a,b){return function(c,e){null===a&&A();c?(a=null,g=I(g),g(c,L(k))):(!!e===d&&(k[a]=b),a=null,++h===q&&g(null,k))}}g=g||w;var q,s,h=0,k={};C(e)?(q=e.length,a(e,f,l)):e&&(z&&e[z]?(q=b(e,f,l))&&q===h&&g(null,k):"object"===typeof e&&(s=F(e),q=s.length,c(e,f,l,s)));if(!q)return g(null,{})}}function Ea(a){return function(c, +b,d){function e(){m=y;r=c[y];b(r,h)}function f(){m=y;r=c[y];b(r,y,h)}function g(){m=y;u=p.next();r=u.value;u.done?d(null,x):b(r,h)}function l(){m=y;u=p.next();r=u.value;u.done?d(null,x):b(r,m,h)}function q(){m=n[y];r=c[m];b(r,h)}function s(){m=n[y];r=c[m];b(r,m,h)}function h(b,c){b?d(b,x):(!!c===a&&(x[m]=r),++y===k?(v=A,d(null,x)):t?D(v):(t=!0,v()),t=!1)}d=E(d||w);var 
k,m,r,n,p,u,v,t=!1,x={},y=0;C(c)?(k=c.length,v=3===b.length?f:e):c&&(z&&c[z]?(k=Infinity,p=c[z](),v=3===b.length?l:g):"object"===typeof c&& +(n=F(c),k=n.length,v=3===b.length?s:q));if(!k)return d(null,{});v()}}function Fa(a){return function(c,b,d,e){function f(){r=B++;rb)return e(null,{});K(b>m?m:b,x)}}function $(a,c,b,d){function e(d){b(d,a[t],h)}function f(d){b(d,a[t],t,h)}function g(a){p=n.next();p.done?d(null,a):b(a,p.value, +h)}function l(a){p=n.next();p.done?d(null,a):b(a,p.value,t,h)}function q(d){b(d,a[r[t]],h)}function s(d){m=r[t];b(d,a[m],m,h)}function h(a,c){a?d(a,c):++t===k?(b=A,d(null,c)):v?D(function(){u(c)}):(v=!0,u(c));v=!1}d=E(d||w);var k,m,r,n,p,u,v=!1,t=0;C(a)?(k=a.length,u=4===b.length?f:e):a&&(z&&a[z]?(k=Infinity,n=a[z](),u=4===b.length?l:g):"object"===typeof a&&(r=F(a),k=r.length,u=4===b.length?s:q));if(!k)return d(null,c);u(c)}function Ga(a,c,b,d){function e(d){b(d,a[--s],q)}function f(d){b(d,a[--s], +s,q)}function g(d){b(d,a[m[--s]],q)}function l(d){k=m[--s];b(d,a[k],k,q)}function q(a,b){a?d(a,b):0===s?(u=A,d(null,b)):v?D(function(){u(b)}):(v=!0,u(b));v=!1}d=E(d||w);var s,h,k,m,r,n,p,u,v=!1;if(C(a))s=a.length,u=4===b.length?f:e;else if(a)if(z&&a[z]){p=[];r=a[z]();for(h=-1;!1===(n=r.next()).done;)p[++h]=n.value;a=p;s=p.length;u=4===b.length?f:e}else"object"===typeof a&&(m=F(a),s=m.length,u=4===b.length?l:g);if(!s)return d(null,c);u(c)}function Ha(a,c,b){b=b||w;ja(a,c,function(a,c){if(a)return b(a); +b(null,!!c)})}function Ia(a,c,b){b=b||w;ka(a,c,function(a,c){if(a)return b(a);b(null,!!c)})}function Ja(a,c,b,d){d=d||w;la(a,c,b,function(a,b){if(a)return d(a);d(null,!!b)})}function Ka(a,c){return C(a)?0===a.length?(c(null),!1):!0:(c(Error("First argument to waterfall must be an array of functions")),!1)}function ma(a,c,b){switch(c.length){case 0:case 1:return a(b);case 2:return a(c[1],b);case 3:return a(c[1],c[2],b);case 4:return a(c[1],c[2],c[3],b);case 5:return a(c[1],c[2],c[3],c[4],b);case 6:return a(c[1], +c[2],c[3],c[4],c[5],b);default:return c=J(c,1),c.push(b),a.apply(null,c)}}function La(a,c){function b(b,h){if(b)q=A,c=E(c),c(b);else if(++d===f){q=A;var k=c;c=A;2===arguments.length?k(b,h):k.apply(null,H(arguments))}else g=a[d],l=arguments,e?D(q):(e=!0,q()),e=!1}c=c||w;if(Ka(a,c)){var d=0,e=!1,f=a.length,g=a[d],l=[],q=function(){switch(g.length){case 0:try{b(null,g())}catch(a){b(a)}break;case 1:return g(b);case 2:return g(l[1],b);case 3:return g(l[1],l[2],b);case 4:return g(l[1],l[2],l[3],b);case 5:return g(l[1], +l[2],l[3],l[4],b);default:return l=J(l,1),l[g.length-1]=b,g.apply(null,l)}};q()}}function Ma(){var a=H(arguments);return function(){var c=this,b=H(arguments),d=b[b.length-1];"function"===typeof d?b.pop():d=w;$(a,b,function(a,b,d){a.push(function(a){var b=J(arguments,1);d(a,b)});b.apply(c,a)},function(a,b){b=C(b)?b:[b];b.unshift(a);d.apply(c,b)})}}function Na(a){return function(c){var b=function(){var b=this,d=H(arguments),g=d.pop()||w;return a(c,function(a,c){a.apply(b,d.concat([c]))},g)};if(1b)throw Error("Concurrency must not be zero");var h=0,k=[],m,r,n={_tasks:new M,concurrency:b,payload:d,saturated:w,unsaturated:w,buffer:b/4,empty:w,drain:w,error:w,started:!1,paused:!1,push:function(a, +b){f(a,b)},kill:function(){n.drain=w;n._tasks.empty()},unshift:function(a,b){f(a,b,!0)},remove:function(a){n._tasks.remove(a)},process:a?l:q,length:function(){return n._tasks.length},running:function(){return h},workersList:function(){return k},idle:function(){return 
0===n.length()+h},pause:function(){n.paused=!0},resume:function(){!1!==n.paused&&(n.paused=!1,K(n.concurrency=arguments.length?f:J(arguments,1);if(a){q=g=0;s.length=0;var k=L(l);k[d]=f;d=null;var h= +b;b=w;h(a,k)}else q--,g--,l[d]=f,e(d),d=null}function n(){0===--v&&s.push([p,u,c])}var p,u;if(C(a)){var v=a.length-1;p=a[v];u=v;if(0===v)s.push([p,u,c]);else for(var t=-1;++t=arguments.length)return b(a,e);var f=H(arguments);return b.apply(null,f)}c(d)}function e(){c(f)}function f(a,d){if(++s===g||!a||q&&!q(a)){if(2>=arguments.length)return b(a,d);var c=H(arguments);return b.apply(null,c)}setTimeout(e,l(s))}var g,l,q,s=0;if(3>arguments.length&&"function"===typeof a)b=c||w,c=a,a=null,g=5;else switch(b=b||w,typeof a){case "object":"function"===typeof a.errorFilter&&(q=a.errorFilter);var h=a.interval; +switch(typeof h){case "function":l=h;break;case "string":case "number":l=(h=+h)?function(){return h}:function(){return 0}}g=+a.times||5;break;case "number":g=a||5;break;case "string":g=+a||5;break;default:throw Error("Invalid arguments for async.retry");}if("function"!==typeof c)throw Error("Invalid arguments for async.retry");l?c(f):c(d)}function Pa(a){return function(){var c=H(arguments),b=c.pop(),d;try{d=a.apply(this,c)}catch(e){return b(e)}d&&"function"===typeof d.then?d.then(function(a){try{b(null, +a)}catch(d){D(Qa,d)}},function(a){a=a&&a.message?a:Error(a);try{b(a,void 0)}catch(d){D(Qa,d)}}):b(null,d)}}function Qa(a){throw a;}function Ra(a){return function(){function c(a,d){if(a)return b(null,{error:a});2=arguments.length?c:J(arguments,1),a=null,++q===f&&d(null,l))}}d=d||w;var f,g,l,q=0;C(b)?(f=b.length,l=Array(f),a(b,e)):b&&"object"===typeof b&&(g=F(b),f=g.length,l={},c(b,e,g));f||d(null,l)}}(function(a,c){for(var b=-1,d=a.length;++bc)return d(null,[]);v=v||Array(k);K(c>k?k:c,t)},mapValues:fb,mapValuesSeries:function(a,c,b){function d(){k=t;c(a[t], +s)}function e(){k=t;c(a[t],t,s)}function f(){k=t;n=r.next();n.done?b(null,v):c(n.value,s)}function g(){k=t;n=r.next();n.done?b(null,v):c(n.value,t,s)}function l(){k=m[t];c(a[k],s)}function q(){k=m[t];c(a[k],k,s)}function s(a,d){a?(p=A,b=E(b),b(a,L(v))):(v[k]=d,++t===h?(p=A,b(null,v),b=A):u?D(p):(u=!0,p()),u=!1)}b=b||w;var h,k,m,r,n,p,u=!1,v={},t=0;C(a)?(h=a.length,p=3===c.length?e:d):a&&(z&&a[z]?(h=Infinity,r=a[z](),p=3===c.length?g:f):"object"===typeof a&&(m=F(a),h=m.length,p=3===c.length?q:l)); +if(!h)return b(null,v);p()},mapValuesLimit:function(a,c,b,d){function e(){m=y++;mc)return d(null,x);K(c>k?k:c,v)},filter:Ta,filterSeries:Ua,filterLimit:Va,select:Ta,selectSeries:Ua,selectLimit:Va,reject:gb,rejectSeries:hb,rejectLimit:ib,detect:ja,detectSeries:ka,detectLimit:la,find:ja,findSeries:ka,findLimit:la,pick:jb,pickSeries:kb, +pickLimit:lb,omit:mb,omitSeries:nb,omitLimit:ob,reduce:$,inject:$,foldl:$,reduceRight:Ga,foldr:Ga,transform:pb,transformSeries:function(a,c,b,d){function e(){b(v,a[x],h)}function f(){b(v,a[x],x,h)}function g(){p=n.next();p.done?d(null,v):b(v,p.value,h)}function l(){p=n.next();p.done?d(null,v):b(v,p.value,x,h)}function q(){b(v,a[r[x]],h)}function s(){m=r[x];b(v,a[m],m,h)}function h(a,b){a?d(a,v):++x===k||!1===b?(u=A,d(null,v)):t?D(u):(t=!0,u());t=!1}3===arguments.length&&(d=b,b=c,c=void 0);d=E(d|| +w);var k,m,r,n,p,u,v,t=!1,x=0;C(a)?(k=a.length,v=void 0!==c?c:[],u=4===b.length?f:e):a&&(z&&a[z]?(k=Infinity,n=a[z](),v=void 0!==c?c:{},u=4===b.length?l:g):"object"===typeof a&&(r=F(a),k=r.length,v=void 0!==c?c:{},u=4===b.length?s:q));if(!k)return d(null,void 0!==c?c:v||{});u()},transformLimit:function(a,c,b,d,e){function 
f(){r=A++;rc)return e(null,void 0!==b?b:x||{});K(c>m?m:c,t)},sortBy:qb,sortBySeries:function(a,c,b){function d(){m=a[y];c(m,s)}function e(){m=a[y];c(m,y,s)}function f(){p=n.next();if(p.done)return b(null,P(u,v));m=p.value;u[y]=m;c(m,s)}function g(){p=n.next();if(p.done)return b(null,P(u,v));m=p.value;u[y]=m;c(m,y,s)}function l(){m=a[r[y]];u[y]=m;c(m,s)}function q(){k=r[y];m=a[k];u[y]=m;c(m,k,s)}function s(a,d){v[y]=d;a?b(a):++y===h?(t=A,b(null, +P(u,v))):x?D(t):(x=!0,t());x=!1}b=E(b||w);var h,k,m,r,n,p,u,v,t,x=!1,y=0;C(a)?(h=a.length,u=a,v=Array(h),t=3===c.length?e:d):a&&(z&&a[z]?(h=Infinity,u=[],v=[],n=a[z](),t=3===c.length?g:f):"object"===typeof a&&(r=F(a),h=r.length,u=Array(h),v=Array(h),t=3===c.length?q:l));if(!h)return b(null,[]);t()},sortByLimit:function(a,c,b,d){function e(){Bc)return d(null,[]);x=x||Array(k);K(c>k?k:c,y)},some:Ha,someSeries:Ia,someLimit:Ja,any:Ha,anySeries:Ia,anyLimit:Ja,every:Wa,everySeries:Xa,everyLimit:Ya,all:Wa,allSeries:Xa,allLimit:Ya,concat:rb,concatSeries:function(a,c,b){function d(){c(a[t],s)}function e(){c(a[t],t,s)}function f(){n=r.next();n.done?b(null,v):c(n.value,s)}function g(){n=r.next();n.done?b(null,v):c(n.value,t,s)}function l(){c(a[m[t]], +s)}function q(){k=m[t];c(a[k],k,s)}function s(a,d){C(d)?X.apply(v,d):2<=arguments.length&&X.apply(v,J(arguments,1));a?b(a,v):++t===h?(p=A,b(null,v)):u?D(p):(u=!0,p());u=!1}b=E(b||w);var h,k,m,r,n,p,u=!1,v=[],t=0;C(a)?(h=a.length,p=3===c.length?e:d):a&&(z&&a[z]?(h=Infinity,r=a[z](),p=3===c.length?g:f):"object"===typeof a&&(m=F(a),h=m.length,p=3===c.length?q:l));if(!h)return b(null,v);p()},concatLimit:function(a,c,b,d){function e(){tc)return d(null,[]);u=u||Array(k);K(c>k?k:c,p)},groupBy:sb,groupBySeries:function(a,c,b){function d(){m=a[t];c(m,s)}function e(){m=a[t];c(m,t,s)}function f(){p=n.next();m=p.value;p.done?b(null,x):c(m,s)}function g(){p=n.next();m=p.value;p.done? +b(null,x):c(m,t,s)}function l(){m=a[r[t]];c(m,s)}function q(){k=r[t];m=a[k];c(m,k,s)}function s(a,d){if(a)u=A,b=E(b),b(a,L(x));else{var c=x[d];c?c.push(m):x[d]=[m];++t===h?(u=A,b(null,x)):v?D(u):(v=!0,u());v=!1}}b=E(b||w);var h,k,m,r,n,p,u,v=!1,t=0,x={};C(a)?(h=a.length,u=3===c.length?e:d):a&&(z&&a[z]?(h=Infinity,n=a[z](),u=3===c.length?g:f):"object"===typeof a&&(r=F(a),h=r.length,u=3===c.length?q:l));if(!h)return b(null,x);u()},groupByLimit:function(a,c,b,d){function e(){yc)return d(null,B);K(c>k?k:c,t)},parallel:tb,series:function(a,c){function b(){g=k;a[k](e)}function d(){g=l[k];a[g](e)}function e(a,b){a?(s=A,c=E(c),c(a,q)):(q[g]=2>=arguments.length?b:J(arguments,1),++k===f?(s=A,c(null,q)):h?D(s):(h=!0,s()),h=!1)}c=c||w;var f,g,l,q,s,h=!1,k=0;if(C(a))f=a.length,q=Array(f), +s=b;else if(a&&"object"===typeof a)l=F(a),f=l.length,q={},s=d;else return c(null);if(!f)return c(null,q);s()},parallelLimit:function(a,c,b){function d(){l=r++;if(l=arguments.length?c:J(arguments,1),a=null,++n===g?b(null,h):m?D(k):(m=!0,k()),m=!1)}}b=b||w;var g,l,q,s,h,k,m=!1,r=0,n=0;C(a)?(g=a.length,h=Array(g),k=d):a&&"object"===typeof a&&(s=F(a),g=s.length,h= +{},k=e);if(!g||isNaN(c)||1>c)return b(null,h);K(c>g?g:c,k)},tryEach:function(a,c){function b(){a[q](e)}function d(){a[g[q]](e)}function e(a,b){a?++q===f?c(a):l():2>=arguments.length?c(null,b):c(null,J(arguments,1))}c=c||w;var f,g,l,q=0;C(a)?(f=a.length,l=b):a&&"object"===typeof a&&(g=F(a),f=g.length,l=d);if(!f)return c(null);l()},waterfall:function(a,c){function b(){ma(e,f,d(e))}function d(h){return function(k,m){void 0===h&&(c=w,A());h=void 0;k?(g=c,c=A,g(k)):++q===s?(g=c,c=A,2>=arguments.length? 
+g(k,m):g.apply(null,H(arguments))):(l?(f=arguments,e=a[q]||A,D(b)):(l=!0,ma(a[q]||A,arguments,d(q))),l=!1)}}c=c||w;if(Ka(a,c)){var e,f,g,l,q=0,s=a.length;ma(a[0],[],d(0))}},angelFall:La,angelfall:La,whilst:function(a,c,b){function d(){g?D(e):(g=!0,c(f));g=!1}function e(){c(f)}function f(c,e){if(c)return b(c);2>=arguments.length?a(e)?d():b(null,e):(e=J(arguments,1),a.apply(null,e)?d():b.apply(null,[null].concat(e)))}b=b||w;var g=!1;a()?d():b(null)},doWhilst:function(a,c,b){function d(){g?D(e):(g=!0, +a(f));g=!1}function e(){a(f)}function f(a,e){if(a)return b(a);2>=arguments.length?c(e)?d():b(null,e):(e=J(arguments,1),c.apply(null,e)?d():b.apply(null,[null].concat(e)))}b=b||w;var g=!1;e()},until:function(a,c,b){function d(){g?D(e):(g=!0,c(f));g=!1}function e(){c(f)}function f(c,e){if(c)return b(c);2>=arguments.length?a(e)?b(null,e):d():(e=J(arguments,1),a.apply(null,e)?b.apply(null,[null].concat(e)):d())}b=b||w;var g=!1;a()?b(null):d()},doUntil:function(a,c,b){function d(){g?D(e):(g=!0,a(f));g= +!1}function e(){a(f)}function f(a,e){if(a)return b(a);2>=arguments.length?c(e)?b(null,e):d():(e=J(arguments,1),c.apply(null,e)?b.apply(null,[null].concat(e)):d())}b=b||w;var g=!1;e()},during:function(a,c,b){function d(a,d){if(a)return b(a);d?c(e):b(null)}function e(c){if(c)return b(c);a(d)}b=b||w;a(d)},doDuring:function(a,c,b){function d(d,c){if(d)return b(d);c?a(e):b(null)}function e(a,e){if(a)return b(a);switch(arguments.length){case 0:case 1:c(d);break;case 2:c(e,d);break;default:var l=J(arguments, +1);l.push(d);c.apply(null,l)}}b=b||w;d(null,!0)},forever:function(a,c){function b(){a(d)}function d(a){if(a){if(c)return c(a);throw a;}e?D(b):(e=!0,b());e=!1}var e=!1;b()},compose:function(){return Ma.apply(null,Za(arguments))},seq:Ma,applyEach:ub,applyEachSeries:vb,queue:function(a,c){return na(!0,a,c)},priorityQueue:function(a,c){var b=na(!0,a,c);b.push=function(a,c,f){b.started=!0;c=c||0;var g=C(a)?a:[a],l=g.length;if(void 0===a||0===l)b.idle()&&D(b.drain);else{f="function"===typeof f?f:w;for(a= +b._tasks.head;a&&c>=a.priority;)a=a.next;for(;l--;){var q={data:g[l],priority:c,callback:f};a?b._tasks.insertBefore(a,q):b._tasks.push(q);D(b.process)}}};delete b.unshift;return b},cargo:function(a,c){return na(!1,a,1,c)},auto:Oa,autoInject:function(a,c,b){var d={};W(a,function(a,b){var c,l=a.length;if(C(a)){if(0===l)throw Error("autoInject task functions require explicit parameters.");c=H(a);l=c.length-1;a=c[l];if(0===l){d[b]=a;return}}else{if(1===l){d[b]=a;return}c=ab(a);if(0===l&&0===c.length)throw Error("autoInject task functions require explicit parameters."); +l=c.length-1}c[l]=function(b,d){switch(l){case 1:a(b[c[0]],d);break;case 2:a(b[c[0]],b[c[1]],d);break;case 3:a(b[c[0]],b[c[1]],b[c[2]],d);break;default:for(var f=-1;++fa)return b(null,[]);var e=Array(a);K(a,function(a){c(a,d(a))})},timesSeries:function(a,c,b){function d(){c(l, +e)}function e(c,e){f[l]=e;c?(b(c),b=A):++l>=a?(b(null,f),b=A):g?D(d):(g=!0,d());g=!1}b=b||w;a=+a;if(isNaN(a)||1>a)return b(null,[]);var f=Array(a),g=!1,l=0;d()},timesLimit:function(a,c,b,d){function e(){var c=q++;c=a?(d(null,g),d=A):l?D(e):(l=!0,e());l=!1}}d=d||w;a=+a;if(isNaN(a)||1>a||isNaN(c)||1>c)return d(null,[]);var g=Array(a),l=!1,q=0,s=0;K(c>a?a:c,e)},race:function(a,c){c=I(c||w);var b,d,e=-1;if(C(a))for(b= +a.length;++e + +Please look thru your error log for the string `gyp info using node-gyp@` and if the version number is less than the [current release of node-gyp](https://github.com/nodejs/node-gyp/releases) then __please upgrade__ using the instructions at 
https://github.com/nodejs/node-gyp/blob/master/docs/Updating-npm-bundled-node-gyp.md and try your command again.
+
+Requests for help with [`node-sass` are very common](https://github.com/nodejs/node-gyp/issues?q=label%3A%22Node+Sass+--%3E+Dart+Sass%22). Please be aware that this package is deprecated; seek alternatives and avoid opening new issues about it here.
+
+* **Node Version**:
+* **Platform**:
+* **Compiler**:
+* **Module**:
+
+
Verbose output (from npm or node-gyp): + +``` +Paste your log here, between the backticks. It can be: + - npm --verbose output, + - or contents of npm-debug.log, + - or output of node-gyp rebuild --verbose. +Include the command you were trying to run. + +This should look like this: + +>npm --verbose +npm info it worked if it ends with ok +npm verb cli [ +npm verb cli 'C:\\...\\node\\13.9.0\\x64\\node.exe', +npm verb cli 'C:\\...\\node\\13.9.0\\x64\\node_modules\\npm\\bin\\npm-cli.js', +npm verb cli '--verbose' +npm verb cli ] +npm info using npm@6.13.7 +npm info using node@v13.9.0 + +Usage: npm +(...) +``` + +
+ + diff --git a/node_modules/node-gyp/.github/PULL_REQUEST_TEMPLATE.md b/node_modules/node-gyp/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 0000000..bcc4bb1 --- /dev/null +++ b/node_modules/node-gyp/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,17 @@ + + +##### Checklist + + +- [ ] `npm install && npm test` passes +- [ ] tests are included +- [ ] documentation is changed or added +- [ ] commit message follows [commit guidelines](https://github.com/googleapis/release-please#how-should-i-write-my-commits) + +##### Description of change + + diff --git a/node_modules/node-gyp/.github/workflows/release-please.yml b/node_modules/node-gyp/.github/workflows/release-please.yml new file mode 100644 index 0000000..7d3cf9d --- /dev/null +++ b/node_modules/node-gyp/.github/workflows/release-please.yml @@ -0,0 +1,56 @@ +name: release-please + +on: + push: + branches: + - master + +jobs: + release-please: + runs-on: ubuntu-latest + steps: + - uses: google-github-actions/release-please-action@v2 + id: release + with: + package-name: node-gyp + release-type: node + changelog-types: > + [{"type":"feat","section":"Features","hidden":false}, + {"type":"fix","section":"Bug Fixes","hidden":false}, + {"type":"bin","section":"Core","hidden":false}, + {"type":"gyp","section":"Core","hidden":false}, + {"type":"lib","section":"Core","hidden":false}, + {"type":"src","section":"Core","hidden":false}, + {"type":"test","section":"Tests","hidden":false}, + {"type":"build","section":"Core","hidden":false}, + {"type":"clean","section":"Core","hidden":false}, + {"type":"configure","section":"Core","hidden":false}, + {"type":"install","section":"Core","hidden":false}, + {"type":"list","section":"Core","hidden":false}, + {"type":"rebuild","section":"Core","hidden":false}, + {"type":"remove","section":"Core","hidden":false}, + {"type":"deps","section":"Core","hidden":false}, + {"type":"python","section":"Core","hidden":false}, + {"type":"lin","section":"Core","hidden":false}, + {"type":"linux","section":"Core","hidden":false}, + {"type":"mac","section":"Core","hidden":false}, + {"type":"macos","section":"Core","hidden":false}, + {"type":"win","section":"Core","hidden":false}, + {"type":"windows","section":"Core","hidden":false}, + {"type":"zos","section":"Core","hidden":false}, + {"type":"doc","section":"Doc","hidden":false}, + {"type":"docs","section":"Doc","hidden":false}, + {"type":"readme","section":"Doc","hidden":false}, + {"type":"chore","section":"Miscellaneous","hidden":false}, + {"type":"refactor","section":"Miscellaneous","hidden":false}, + {"type":"ci","section":"Miscellaneous","hidden":false}, + {"type":"meta","section":"Miscellaneous","hidden":false}] + + # Standard Conventional Commits: `feat` and `fix` + # node-gyp subdirectories: `bin`, `gyp`, `lib`, `src`, `test` + # node-gyp subcommands: `build`, `clean`, `configure`, `install`, `list`, `rebuild`, `remove` + # Core abstract category: `deps` + # Languages/platforms: `python`, `lin`, `linux`, `mac`, `macos`, `win`, `window`, `zos` + # Documentation: `doc`, `docs`, `readme` + # Standard Conventional Commits: `chore` (under "Miscellaneous") + # Miscellaneous abstract categories: `refactor`, `ci`, `meta` diff --git a/node_modules/node-gyp/.github/workflows/tests.yml b/node_modules/node-gyp/.github/workflows/tests.yml new file mode 100644 index 0000000..f7c9b97 --- /dev/null +++ b/node_modules/node-gyp/.github/workflows/tests.yml @@ -0,0 +1,45 @@ +# TODO: Line 43, enable pytest --doctest-modules + +name: Tests +on: [push, pull_request] +jobs: + Tests: 
+ strategy: + fail-fast: false + max-parallel: 15 + matrix: + node: [12.x, 14.x, 16.x] + python: ["3.6", "3.8", "3.10"] + os: [macos-latest, ubuntu-latest, windows-latest] + runs-on: ${{ matrix.os }} + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + - name: Use Node.js ${{ matrix.node }} + uses: actions/setup-node@v2 + with: + node-version: ${{ matrix.node }} + - name: Use Python ${{ matrix.python }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + env: + PYTHON_VERSION: ${{ matrix.python }} + - name: Install Dependencies + run: | + npm install --no-progress + pip install flake8 pytest + - name: Set Windows environment + if: matrix.os == 'windows-latest' + run: | + echo 'GYP_MSVS_VERSION=2015' >> $Env:GITHUB_ENV + echo 'GYP_MSVS_OVERRIDE_PATH=C:\\Dummy' >> $Env:GITHUB_ENV + - name: Lint Python + if: matrix.os == 'ubuntu-latest' + run: flake8 . --ignore=E203,W503 --max-complexity=101 --max-line-length=88 --show-source --statistics + - name: Run Python tests + run: python -m pytest + # - name: Run doctests with pytest + # run: python -m pytest --doctest-modules + - name: Run Node tests + run: npm test diff --git a/node_modules/node-gyp/.github/workflows/visual-studio.yml b/node_modules/node-gyp/.github/workflows/visual-studio.yml new file mode 100644 index 0000000..6bb4574 --- /dev/null +++ b/node_modules/node-gyp/.github/workflows/visual-studio.yml @@ -0,0 +1,25 @@ +name: Tests on Windows +on: [push, pull_request] +jobs: + Tests: + strategy: + fail-fast: false + max-parallel: 15 + matrix: + os: [windows-2022] + runs-on: ${{ matrix.os }} + steps: + - name: Checkout Repository + uses: actions/checkout@v2 + - name: Install Dependencies + run: | + npm install --no-progress + - name: Set Windows environment + if: matrix.os == 'windows-latest' + run: | + echo 'GYP_MSVS_VERSION=2015' >> $Env:GITHUB_ENV + echo 'GYP_MSVS_OVERRIDE_PATH=C:\\Dummy' >> $Env:GITHUB_ENV + - name: Environment Information + run: npx envinfo + - name: Run Node tests + run: npm test diff --git a/node_modules/node-gyp/CHANGELOG.md b/node_modules/node-gyp/CHANGELOG.md new file mode 100644 index 0000000..1e54fd6 --- /dev/null +++ b/node_modules/node-gyp/CHANGELOG.md @@ -0,0 +1,687 @@ +# Changelog + +### [8.4.1](https://www.github.com/nodejs/node-gyp/compare/v8.4.0...v8.4.1) (2021-11-19) + + +### Bug Fixes + +* windows command missing space ([#2553](https://www.github.com/nodejs/node-gyp/issues/2553)) ([cc37b88](https://www.github.com/nodejs/node-gyp/commit/cc37b880690706d3c5d04d5a68c76c392a0a23ed)) + + +### Doc + +* fix typo in powershell node-gyp update ([787cf7f](https://www.github.com/nodejs/node-gyp/commit/787cf7f8e5ddd5039e02b64ace6b7b15e06fe0a4)) + + +### Core + +* npmlog@6.0.0 ([8083f6b](https://www.github.com/nodejs/node-gyp/commit/8083f6b855bd7f3326af04c5f5269fc28d7f2508)) + +## [8.4.0](https://www.github.com/nodejs/node-gyp/compare/v8.3.0...v8.4.0) (2021-11-05) + + +### Features + +* build with config.gypi from node headers ([a27dc08](https://www.github.com/nodejs/node-gyp/commit/a27dc08696911c6d81e76cc228697243069103c1)) +* support vs2022 ([#2533](https://www.github.com/nodejs/node-gyp/issues/2533)) ([5a00387](https://www.github.com/nodejs/node-gyp/commit/5a00387e5f8018264a1822f6c4d5dbf425f21cf6)) + +## [8.3.0](https://www.github.com/nodejs/node-gyp/compare/v8.2.0...v8.3.0) (2021-10-11) + + +### Features + +* **gyp:** update gyp to v0.10.0 ([#2521](https://www.github.com/nodejs/node-gyp/issues/2521)) 
([5585792](https://www.github.com/nodejs/node-gyp/commit/5585792922a97f0629f143c560efd74470eae87f))
+
+
+### Tests
+
+* Python 3.10 was released on Oct. 4th ([#2504](https://www.github.com/nodejs/node-gyp/issues/2504)) ([0a67dcd](https://www.github.com/nodejs/node-gyp/commit/0a67dcd1307f3560495219253241eafcbf4e2a69))
+
+
+### Miscellaneous
+
+* **deps:** bump make-fetch-happen from 8.0.14 to 9.1.0 ([b05b4fe](https://www.github.com/nodejs/node-gyp/commit/b05b4fe9891f718f40edf547e9b50e982826d48a))
+* refactor the creation of config.gypi file ([f2ad87f](https://www.github.com/nodejs/node-gyp/commit/f2ad87ff65f98ad66daa7225ad59d99b759a2b07))
+
+## [8.2.0](https://www.github.com/nodejs/node-gyp/compare/v8.1.0...v8.2.0) (2021-08-23)
+
+
+### Features
+
+* **gyp:** update gyp to v0.9.6 ([#2481](https://www.github.com/nodejs/node-gyp/issues/2481)) ([ed9a9ed](https://www.github.com/nodejs/node-gyp/commit/ed9a9ed653a17c84afa3c327161992d0da7d0cea))
+
+
+### Bug Fixes
+
+* add error arg back into catch block for older Node.js users ([5cde818](https://www.github.com/nodejs/node-gyp/commit/5cde818aac715477e9e9747966bb6b4c4ed070a8))
+* change default gyp update message ([#2420](https://www.github.com/nodejs/node-gyp/issues/2420)) ([cfd12ff](https://www.github.com/nodejs/node-gyp/commit/cfd12ff3bb0eb4525173413ef6a94b3cd8398cad))
+* doc how to update node-gyp independently from npm ([c8c0af7](https://www.github.com/nodejs/node-gyp/commit/c8c0af72e78141a02b5da4cd4d704838333a90bd))
+* missing spaces ([f0882b1](https://www.github.com/nodejs/node-gyp/commit/f0882b1264b2fa701adbc81a3be0b3cba80e333d))
+
+
+### Core
+
+* deep-copy process.config during configure ([#2368](https://www.github.com/nodejs/node-gyp/issues/2368)) ([5f1a06c](https://www.github.com/nodejs/node-gyp/commit/5f1a06c50f3b0c3d292f64948f85a004cfcc5c87))
+
+
+### Miscellaneous
+
+* **deps:** bump tar from 6.1.0 to 6.1.2 ([#2474](https://www.github.com/nodejs/node-gyp/issues/2474)) ([ec15a3e](https://www.github.com/nodejs/node-gyp/commit/ec15a3e5012004172713c11eebcc9d852d32d380))
+* fix typos discovered by codespell ([#2442](https://www.github.com/nodejs/node-gyp/issues/2442)) ([2d0ce55](https://www.github.com/nodejs/node-gyp/commit/2d0ce5595e232a3fc7c562cdf39efb77e2312cc1))
+* GitHub Actions Test on node: [12.x, 14.x, 16.x] ([#2439](https://www.github.com/nodejs/node-gyp/issues/2439)) ([b7bccdb](https://www.github.com/nodejs/node-gyp/commit/b7bccdb527d93b0bb0ce99713f083ce2985fe85c))
+
+
+### Doc
+
+* correct link to "binding.gyp files out in the wild" ([#2483](https://www.github.com/nodejs/node-gyp/issues/2483)) ([660dd7b](https://www.github.com/nodejs/node-gyp/commit/660dd7b2a822c184be8027b300e68be67b366772))
+* **wiki:** Add a link to the node-midi binding.gyp file. ([b354711](https://www.github.com/nodejs/node-gyp/commit/b3547115f6e356358138310e857c7f1ec627a8a7))
+* **wiki:** add bcrypt ([e199cfa](https://www.github.com/nodejs/node-gyp/commit/e199cfa8fc6161492d2a6ade2190510d0ebf7c0f))
+* **wiki:** Add helpful information ([4eda827](https://www.github.com/nodejs/node-gyp/commit/4eda8275c03dae6d2f5c40f3c1dbe930d84b0f2b))
+* **wiki:** Add node-canvas ([13a9553](https://www.github.com/nodejs/node-gyp/commit/13a955317b39caf98fd1f412d8d3f41599e979fd))
+* **wiki:** Add node-openvg-canvas and node-openvg.
([61f709e](https://www.github.com/nodejs/node-gyp/commit/61f709ec4d9f256a6467e9ff84430a48eeb629d1)) +* **wiki:** add one more example ([77f3632](https://www.github.com/nodejs/node-gyp/commit/77f363272930d3d4d24fd3973be22e6237128fcc)) +* **wiki:** add topcube, node-osmium, and node-osrm ([1a75d2b](https://www.github.com/nodejs/node-gyp/commit/1a75d2bf2f562ba50846893a516e111cfbb50885)) +* **wiki:** Added details for properly fixing ([3d4d9d5](https://www.github.com/nodejs/node-gyp/commit/3d4d9d52d6b5b49de06bb0bb5b68e2686d2b7ebd)) +* **wiki:** Added Ghostscript4JS ([bf4bed1](https://www.github.com/nodejs/node-gyp/commit/bf4bed1b96a7d22fba6f97f4552ad09f32ac3737)) +* **wiki:** added levelup ([1575bce](https://www.github.com/nodejs/node-gyp/commit/1575bce3a53db628bfb023fd6f3258fdf98c3195)) +* **wiki:** Added nk-mysql (nodamysql) ([5b4f2d0](https://www.github.com/nodejs/node-gyp/commit/5b4f2d0e1d5d3eadfd03aaf9c1668340f76c4bea)) +* **wiki:** Added nk-xrm-installer .gyp references, including .py scripts for providing complete reference to examples of fetching source via http, extracting, and moving files (as opposed to copying) ([ceb3088](https://www.github.com/nodejs/node-gyp/commit/ceb30885b74f6789374ef52267b84767be93ebe4)) +* **wiki:** Added tip about resolving frustrating LNK1181 error ([e64798d](https://www.github.com/nodejs/node-gyp/commit/e64798de8cac6031ad598a86d7599e81b4d20b17)) +* **wiki:** ADDED: Node.js binding to OpenCV ([e2dc777](https://www.github.com/nodejs/node-gyp/commit/e2dc77730b09d7ee8682d7713a7603a2d7aacabd)) +* **wiki:** Adding link to node-cryptopp's gyp file ([875adbe](https://www.github.com/nodejs/node-gyp/commit/875adbe2a4669fa5f2be0250ffbf98fb55e800fd)) +* **wiki:** Adding the sharp library to the list ([9dce0e4](https://www.github.com/nodejs/node-gyp/commit/9dce0e41650c3fa973e6135a79632d022c662a1d)) +* **wiki:** Adds node-fann ([23e3d48](https://www.github.com/nodejs/node-gyp/commit/23e3d485ed894ba7c631e9c062f5e366b50c416c)) +* **wiki:** Adds node-inotify and v8-profiler ([b6e542f](https://www.github.com/nodejs/node-gyp/commit/b6e542f644dbbfe22b88524ec500696e06ee4af7)) +* **wiki:** Bumping Python version from 2.3 to 2.7 as per the node-gyp readme ([55ebd6e](https://www.github.com/nodejs/node-gyp/commit/55ebd6ebacde975bf84f7bf4d8c66e64cc7cd0da)) +* **wiki:** C++ build tools version upgraded ([5b899b7](https://www.github.com/nodejs/node-gyp/commit/5b899b70db729c392ced7c98e8e17590c6499fc3)) +* **wiki:** change bcrypt url to binding.gyp file ([e11bdd8](https://www.github.com/nodejs/node-gyp/commit/e11bdd84de6144492d3eb327d67cbf2d62da1a76)) +* **wiki:** Clarification + direct link to VS2010 ([531c724](https://www.github.com/nodejs/node-gyp/commit/531c724561d947b5d870de8d52dd8c3c51c5ec2d)) +* **wiki:** Correcting the link to node-osmium ([fae7516](https://www.github.com/nodejs/node-gyp/commit/fae7516a1d2829b6e234eaded74fb112ebd79a05)) +* **wiki:** Created "binding.gyp" files out in the wild (markdown) ([d4fd143](https://www.github.com/nodejs/node-gyp/commit/d4fd14355bbe57f229f082f47bb2b3670868203f)) +* **wiki:** Created Common issues (markdown) ([a38299e](https://www.github.com/nodejs/node-gyp/commit/a38299ea340ceb0e732c6dc6a1b4760257644839)) +* **wiki:** Created Error: "pre" versions of node cannot be installed (markdown) ([98bc80d](https://www.github.com/nodejs/node-gyp/commit/98bc80d7a62ba70c881f3c39d94f804322e57852)) +* **wiki:** Created Linking to OpenSSL (markdown) ([c46d00d](https://www.github.com/nodejs/node-gyp/commit/c46d00d83bac5173dea8bbbb175a1a7de74fdaca)) +* 
**wiki:** Created Updating npm's bundled node gyp (markdown) ([e0ac8d1](https://www.github.com/nodejs/node-gyp/commit/e0ac8d15af46aadd1c220599e63199b154a514e6)) +* **wiki:** Created use of undeclared identifier 'TypedArray' (markdown) ([65ba711](https://www.github.com/nodejs/node-gyp/commit/65ba71139e9b7f64ac823e575ee9dbf17d937ce4)) +* **wiki:** Created Visual Studio 2010 Setup (markdown) ([5b80e83](https://www.github.com/nodejs/node-gyp/commit/5b80e834c8f79dda9fb2770a876ff3cf649c06f3)) +* **wiki:** Created Visual studio 2012 setup (markdown) ([becef31](https://www.github.com/nodejs/node-gyp/commit/becef316b6c46a33e783667720ee074a0141d1a5)) +* **wiki:** Destroyed Visual Studio 2010 Setup (markdown) ([93423b4](https://www.github.com/nodejs/node-gyp/commit/93423b43606de9664aeb79635825f5e9941ec9bc)) +* **wiki:** Destroyed Visual studio 2012 setup (markdown) ([3601508](https://www.github.com/nodejs/node-gyp/commit/3601508bb10fa05da0ddc7e70d57e4b4dd679657)) +* **wiki:** Different commands for Windows npm v6 vs. v7 ([0fce46b](https://www.github.com/nodejs/node-gyp/commit/0fce46b53340c85e8091cde347d5ed23a443c82f)) +* **wiki:** Drop in favor of ([9285ff6](https://www.github.com/nodejs/node-gyp/commit/9285ff6e451c52c070a05f05f0a9602621d91d53)) +* **wiki:** Explicit link to Visual C++ 2010 Express ([378c363](https://www.github.com/nodejs/node-gyp/commit/378c3632f02c096ed819ec8f2611c65bef0c0554)) +* **wiki:** fix link to gyp file used to build libsqlite3 ([54db8d7](https://www.github.com/nodejs/node-gyp/commit/54db8d7ac33e3f98220960b5d86cfa18a75b53cb)) +* **wiki:** Fix link to node-zipfile ([92e49a8](https://www.github.com/nodejs/node-gyp/commit/92e49a858ed69cb4847a26a5676ab56ef5e2de33)) +* **wiki:** fixed node-serialport link ([954ee53](https://www.github.com/nodejs/node-gyp/commit/954ee530b3972d1db591fce32368e4e31b5a25d8)) +* **wiki:** added this to Common issues, since every Windows beginner runs into it ([d617fae](https://www.github.com/nodejs/node-gyp/commit/d617faee29c40871ca5c8f93efd0ce929a40d541)) +* **wiki:** found that `-h` did not help; GitHub showed support for Visual Studio 2015, but node-red would not install because the key 2015 was missing; the local gyp Python file was not up to date with the GitHub repo, and updating only succeeded after dropping the `-g` option ([408b72f](https://www.github.com/nodejs/node-gyp/commit/408b72f561329408daeb17834436e381406efcc8)) +* **wiki:** If permissions error, please try and then the command.
([ee8e1c1](https://www.github.com/nodejs/node-gyp/commit/ee8e1c1e5334096d58e0d6bca6c006f2ee9c88cb)) +* **wiki:** Improve Unix instructions ([c3e5487](https://www.github.com/nodejs/node-gyp/commit/c3e548736645b535ea5bce613d74ca3e98598243)) +* **wiki:** link to docs/ from README ([b52e487](https://www.github.com/nodejs/node-gyp/commit/b52e487eac1eb421573d1e67114a242eeff45a00)) +* **wiki:** Lower case L ([3aa2c6b](https://www.github.com/nodejs/node-gyp/commit/3aa2c6bdb07971b87505e32e32548d75264bd19f)) +* **wiki:** Make changes discussed in https://github.com/nodejs/node-gyp/issues/2416 ([1dcad87](https://www.github.com/nodejs/node-gyp/commit/1dcad873539027511a5f0243baf770ea90f6f4e2)) +* **wiki:** move wiki docs into doc/ ([f0a4835](https://www.github.com/nodejs/node-gyp/commit/f0a48355d86534ec3bdabcdb3ce3340fa2e17f39)) +* **wiki:** node-sass in the wild ([d310a73](https://www.github.com/nodejs/node-gyp/commit/d310a73d64d0065050377baac7047472f7424a1b)) +* **wiki:** node-srs was a 404 ([bbca21a](https://www.github.com/nodejs/node-gyp/commit/bbca21a1e1ede4c473aff365ca71989a5bda7b57)) +* **wiki:** Note: VS2010 seems to be no longer available! VS2013 or nothing! ([7b5dcaf](https://www.github.com/nodejs/node-gyp/commit/7b5dcafafccdceae4b8f2b53ac9081a694b6ade8)) +* **wiki:** safer doc names, remove unnecessary TypedArray doc ([161c235](https://www.github.com/nodejs/node-gyp/commit/161c2353ef5b562f4acfb2fd77608fcbd0800fc0)) +* **wiki:** sorry, forgot to mention a specific windows version. ([d69dffc](https://www.github.com/nodejs/node-gyp/commit/d69dffc16c2b1e3c60dcb5d1c35a49270ba22a35)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([7444b47](https://www.github.com/nodejs/node-gyp/commit/7444b47a7caac1e14d1da474a7fcfcf88d328017)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([d766b74](https://www.github.com/nodejs/node-gyp/commit/d766b7427851e6c2edc02e2504a7be9be7e330c0)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([d319b0e](https://www.github.com/nodejs/node-gyp/commit/d319b0e98c7085de8e51bc5595eba4264b99a7d5)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([3c6692d](https://www.github.com/nodejs/node-gyp/commit/3c6692d538f0ce973869aa237118b7d2483feccd)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([93392d5](https://www.github.com/nodejs/node-gyp/commit/93392d559ce6f250b9c7fe8177e6c88603809dc1)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([8841158](https://www.github.com/nodejs/node-gyp/commit/88411588f300e9b7c00fe516ecd977a1feeeb15c)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([81bfa1f](https://www.github.com/nodejs/node-gyp/commit/81bfa1f1b63d522a9f8a9ae9ca0c7ae90fe75140)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([d1cd237](https://www.github.com/nodejs/node-gyp/commit/d1cd237bad06fa507adb354b9e2181a14dc63d24)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([3de9e17](https://www.github.com/nodejs/node-gyp/commit/3de9e17e0b8a387eafe7bd18d0ec1e3191d118e8)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([a9b7096](https://www.github.com/nodejs/node-gyp/commit/a9b70968fb956eab3b95672048b94350e1565ca3)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([3236069](https://www.github.com/nodejs/node-gyp/commit/3236069689e7e0eb15b324fce74ab58158956f98)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) 
([1462755](https://www.github.com/nodejs/node-gyp/commit/14627556966e5d513bdb8e5208f0e1300f68991f)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([7ab1337](https://www.github.com/nodejs/node-gyp/commit/7ab133752a6c402bb96dcd3d671d73e03e9487ad)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([640895d](https://www.github.com/nodejs/node-gyp/commit/640895d36b7448c646a3b850c1e159106f83c724)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([ced8c96](https://www.github.com/nodejs/node-gyp/commit/ced8c968457f285ab8989c291d28173d7730833c)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([27b883a](https://www.github.com/nodejs/node-gyp/commit/27b883a350ad0db6b9130d7b996f35855ec34c7a)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([d29fb13](https://www.github.com/nodejs/node-gyp/commit/d29fb134f1c4b9dd729ba95f2979e69e0934809f)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([2765891](https://www.github.com/nodejs/node-gyp/commit/27658913e6220cf0371b4b73e25a0e4ab11108a1)) +* **wiki:** Updated "binding.gyp" files out in the wild (markdown) ([dc97766](https://www.github.com/nodejs/node-gyp/commit/dc9776648d432bca6775c176641f16da14522d4c)) +* **wiki:** Updated Error: "pre" versions of node cannot be installed (markdown) ([e9f8b33](https://www.github.com/nodejs/node-gyp/commit/e9f8b33d1f87d04f22cb09a814d7c55d0fa38446)) +* **wiki:** Updated Home (markdown) ([3407109](https://www.github.com/nodejs/node-gyp/commit/3407109325cf7ba1e925656b9eb75feffab0557c)) +* **wiki:** Updated Home (markdown) ([6e392bc](https://www.github.com/nodejs/node-gyp/commit/6e392bcdd3dd1691773e6e16e1dffc35931b81e0)) +* **wiki:** Updated Home (markdown) ([65efe32](https://www.github.com/nodejs/node-gyp/commit/65efe32ccb8d446ce569453364f922dd9d27c945)) +* **wiki:** Updated Home (markdown) ([ea28f09](https://www.github.com/nodejs/node-gyp/commit/ea28f0947af91fa638be355143f5df89d2e431c8)) +* **wiki:** Updated Home (markdown) ([0e37ff4](https://www.github.com/nodejs/node-gyp/commit/0e37ff48b306c12149661b375895741d3d710da7)) +* **wiki:** Updated Home (markdown) ([b398ef4](https://www.github.com/nodejs/node-gyp/commit/b398ef46f660d2b1506508550dadfb4c35639e4b)) +* **wiki:** Updated Linking to OpenSSL (markdown) ([8919028](https://www.github.com/nodejs/node-gyp/commit/8919028921fd304f08044098434f0dc6071fb7cf)) +* **wiki:** Updated Linking to OpenSSL (markdown) ([c00eb77](https://www.github.com/nodejs/node-gyp/commit/c00eb778fc7dc27e4dab3a9219035ea20458b33b)) +* **wiki:** Updated node-levelup to node-leveldown (broken links) ([59668bb](https://www.github.com/nodejs/node-gyp/commit/59668bb0b904feccf3c09afa2fd37378c77af967)) +* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([d314854](https://www.github.com/nodejs/node-gyp/commit/d31485415ef69d46effa6090c95698341965de1b)) +* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([11858b0](https://www.github.com/nodejs/node-gyp/commit/11858b0655d1eee00c62ad628e719d4378803d14)) +* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([33561e9](https://www.github.com/nodejs/node-gyp/commit/33561e9cbf5f4eb46111318503c77df2c6eb484a)) +* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([4a7f2d0](https://www.github.com/nodejs/node-gyp/commit/4a7f2d0d869a65c99a78504976567017edadf657)) +* **wiki:** Updated Updating npm's bundled node gyp (markdown) 
([979a706](https://www.github.com/nodejs/node-gyp/commit/979a7063b950c088a7f4896fc3a48e1d00dfd231)) +* **wiki:** Updated Updating npm's bundled node gyp (markdown) ([e50e04d](https://www.github.com/nodejs/node-gyp/commit/e50e04d7b6a3754ea0aa11fe8cef491b3bc5bdd4)) + +## [8.1.0](https://www.github.com/nodejs/node-gyp/compare/v8.0.0...v8.1.0) (2021-05-28) + + +### Features + +* **gyp:** update gyp to v0.9.1 ([#2402](https://www.github.com/nodejs/node-gyp/issues/2402)) ([814b1b0](https://www.github.com/nodejs/node-gyp/commit/814b1b0eda102afb9fc87e81638a9cf5b650bb10)) + + +### Miscellaneous + +* add `release-please-action` for automated releases ([#2395](https://www.github.com/nodejs/node-gyp/issues/2395)) ([07e9d7c](https://www.github.com/nodejs/node-gyp/commit/07e9d7c7ee80ba119ea760c635f72fd8e7efe198)) + + +### Core + +* fail gracefully if we can't find the username ([#2375](https://www.github.com/nodejs/node-gyp/issues/2375)) ([fca4795](https://www.github.com/nodejs/node-gyp/commit/fca4795512c67dc8420aaa0d913b5b89a4b147f3)) +* log as yes/no whether build dir was created ([#2370](https://www.github.com/nodejs/node-gyp/issues/2370)) ([245dee5](https://www.github.com/nodejs/node-gyp/commit/245dee5b62581309946872ae253226ea3a42c0e3)) + + +### Doc + +* fix v8.0.0 release date ([4b83c3d](https://www.github.com/nodejs/node-gyp/commit/4b83c3de7300457919d53f26d96ea9ad6f6bedd8)) +* remove redundant version info ([#2403](https://www.github.com/nodejs/node-gyp/issues/2403)) ([1423670](https://www.github.com/nodejs/node-gyp/commit/14236709de64b100a424396b91a5115639daa0ef)) +* Update README.md Visual Studio Community page polski to auto ([#2371](https://www.github.com/nodejs/node-gyp/issues/2371)) ([1b4697a](https://www.github.com/nodejs/node-gyp/commit/1b4697abf69ef574a48faf832a7098f4c6c224a5)) + +## v8.0.0 2021-04-03 + +* [[`0d8a6f1b19`](https://github.com/nodejs/node-gyp/commit/0d8a6f1b19)] - **ci**: update actions/setup-node to v2 (#2302) (Sora Morimoto) [#2302](https://github.com/nodejs/node-gyp/pull/2302) +* [[`15a5c7d45b`](https://github.com/nodejs/node-gyp/commit/15a5c7d45b)] - **ci**: migrate deprecated grammar (#2285) (Jiawen Geng) [#2285](https://github.com/nodejs/node-gyp/pull/2285) +* [[`06ddde27f9`](https://github.com/nodejs/node-gyp/commit/06ddde27f9)] - **deps**: sync mutual dependencies with npm (DeeDeeG) [#2348](https://github.com/nodejs/node-gyp/pull/2348) +* [[`a5fd1f41e3`](https://github.com/nodejs/node-gyp/commit/a5fd1f41e3)] - **doc**: add downloads badge (#2352) (Jiawen Geng) [#2352](https://github.com/nodejs/node-gyp/pull/2352) +* [[`cc1cbce056`](https://github.com/nodejs/node-gyp/commit/cc1cbce056)] - **doc**: update macOS\_Catalina.md (#2293) (iMrLopez) [#2293](https://github.com/nodejs/node-gyp/pull/2293) +* [[`6287118fc4`](https://github.com/nodejs/node-gyp/commit/6287118fc4)] - **doc**: updated README.md to copy easily (#2281) (மனோஜ்குமார் பழனிச்சாமி) [#2281](https://github.com/nodejs/node-gyp/pull/2281) +* [[`66c0f04467`](https://github.com/nodejs/node-gyp/commit/66c0f04467)] - **doc**: add missing `sudo` to Catalina doc (Karl Horky) [#2244](https://github.com/nodejs/node-gyp/pull/2244) +* [[`0da2e0140d`](https://github.com/nodejs/node-gyp/commit/0da2e0140d)] - **gyp**: update gyp to v0.8.1 (#2355) (DeeDeeG) [#2355](https://github.com/nodejs/node-gyp/pull/2355) +* [[`0093ec8646`](https://github.com/nodejs/node-gyp/commit/0093ec8646)] - **gyp**: Improve our flake8 linting tests (Christian Clauss) [#2356](https://github.com/nodejs/node-gyp/pull/2356) +* 
[[`a78b584236`](https://github.com/nodejs/node-gyp/commit/a78b584236)] - **(SEMVER-MAJOR)** **gyp**: remove support for Python 2 (#2300) (Christian Clauss) [#2300](https://github.com/nodejs/node-gyp/pull/2300) +* [[`c3c510d89e`](https://github.com/nodejs/node-gyp/commit/c3c510d89e)] - **gyp**: update gyp to v0.8.0 (#2318) (Christian Clauss) [#2318](https://github.com/nodejs/node-gyp/pull/2318) +* [[`9e1397c52e`](https://github.com/nodejs/node-gyp/commit/9e1397c52e)] - **(SEMVER-MAJOR)** **gyp**: update gyp to v0.7.0 (#2284) (Jiawen Geng) [#2284](https://github.com/nodejs/node-gyp/pull/2284) +* [[`1bd18f3e77`](https://github.com/nodejs/node-gyp/commit/1bd18f3e77)] - **(SEMVER-MAJOR)** **lib**: drop Python 2 support in find-python.js (#2333) (DeeDeeG) [#2333](https://github.com/nodejs/node-gyp/pull/2333) +* [[`e81602ef55`](https://github.com/nodejs/node-gyp/commit/e81602ef55)] - **(SEMVER-MAJOR)** **lib**: migrate requests to fetch (#2220) (Matias Lopez) [#2220](https://github.com/nodejs/node-gyp/pull/2220) +* [[`392b7760b4`](https://github.com/nodejs/node-gyp/commit/392b7760b4)] - **lib**: avoid changing process.config (#2322) (Michaël Zasso) [#2322](https://github.com/nodejs/node-gyp/pull/2322) + +## v7.1.2 2020-10-17 + +* [[`096e3aded5`](https://github.com/nodejs/node-gyp/commit/096e3aded5)] - **gyp**: update gyp to 0.6.2 (Myles Borins) [#2241](https://github.com/nodejs/node-gyp/pull/2241) +* [[`54f97cd243`](https://github.com/nodejs/node-gyp/commit/54f97cd243)] - **doc**: add cmd to reset `xcode-select` to initial state (Valera Rozuvan) [#2235](https://github.com/nodejs/node-gyp/pull/2235) + +## v7.1.1 2020-10-15 + +This release restores the location of shared library builds to the pre-v7 +location. From v7.0.0 until this release, shared library outputs were placed +in a lib.target subdirectory inside the build/{Release,Debug} directory for +builds using `make` (Linux, etc.). This is inconsistent with macOS (Xcode) +behavior and with previous node-gyp behavior, so the change has been reverted. +We consider this a bug-fix rather than a semver-major change.
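+
+A minimal sketch of the layout change described above, assuming a hypothetical `binding.gyp` target named `hello` with `"type": "shared_library"`, built in Release mode with the `make` generator on Linux (the target and file names are illustrative, not taken from this changelog):
+
+```sh
+# v7.0.0 through v7.1.0: make-generated shared libraries landed in a
+# lib.target subdirectory of the build output directory:
+ls build/Release/lib.target/   # e.g. libhello.so
+# v7.1.1 and pre-v7 releases use the top-level output directory,
+# matching what Xcode builds on macOS always did:
+ls build/Release/              # e.g. libhello.so
+```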
+ +* [[`18bf2d1d38`](https://github.com/nodejs/node-gyp/commit/18bf2d1d38)] - **deps**: update deps to match npm@7 (Rod Vagg) [#2240](https://github.com/nodejs/node-gyp/pull/2240) +* [[`ee6a837cb7`](https://github.com/nodejs/node-gyp/commit/ee6a837cb7)] - **gyp**: update gyp to 0.6.1 (Rod Vagg) [#2238](https://github.com/nodejs/node-gyp/pull/2238) +* [[`3e7f8ccafc`](https://github.com/nodejs/node-gyp/commit/3e7f8ccafc)] - **lib**: better log message when ps fails (Martin Midtgaard) [#2229](https://github.com/nodejs/node-gyp/pull/2229) +* [[`7fb314339f`](https://github.com/nodejs/node-gyp/commit/7fb314339f)] - **test**: GitHub Actions: Test on Python 3.9 (Christian Clauss) [#2230](https://github.com/nodejs/node-gyp/pull/2230) +* [[`754996b9ec`](https://github.com/nodejs/node-gyp/commit/754996b9ec)] - **doc**: replace status badges with new Actions badge (Rod Vagg) [#2218](https://github.com/nodejs/node-gyp/pull/2218) +* [[`2317dc400c`](https://github.com/nodejs/node-gyp/commit/2317dc400c)] - **ci**: switch to GitHub Actions (Shelley Vohr) [#2210](https://github.com/nodejs/node-gyp/pull/2210) +* [[`2cca9b74f7`](https://github.com/nodejs/node-gyp/commit/2cca9b74f7)] - **doc**: drop the --production flag for installing windows-build-tools (DeeDeeG) [#2206](https://github.com/nodejs/node-gyp/pull/2206) + +## v7.1.0 2020-08-12 + +* [[`aaf33c3029`](https://github.com/nodejs/node-gyp/commit/aaf33c3029)] - **build**: add update-gyp script (Samuel Attard) [#2167](https://github.com/nodejs/node-gyp/pull/2167) +* [[`3baa4e4172`](https://github.com/nodejs/node-gyp/commit/3baa4e4172)] - **(SEMVER-MINOR)** **gyp**: update gyp to 0.4.0 (Samuel Attard) [#2165](https://github.com/nodejs/node-gyp/pull/2165) +* [[`f461d56c53`](https://github.com/nodejs/node-gyp/commit/f461d56c53)] - **(SEMVER-MINOR)** **build**: support apple silicon (arm64 darwin) builds (Samuel Attard) [#2165](https://github.com/nodejs/node-gyp/pull/2165) +* [[`ee6fa7d3bc`](https://github.com/nodejs/node-gyp/commit/ee6fa7d3bc)] - **docs**: note that node-gyp@7 should solve Catalina CLT issues (Rod Vagg) [#2156](https://github.com/nodejs/node-gyp/pull/2156) +* [[`4fc8ff179d`](https://github.com/nodejs/node-gyp/commit/4fc8ff179d)] - **doc**: silence curl for macOS Catalina acid test (Chia Wei Ong) [#2150](https://github.com/nodejs/node-gyp/pull/2150) +* [[`7857cb2eb1`](https://github.com/nodejs/node-gyp/commit/7857cb2eb1)] - **deps**: increase "engines" to "node" : "\>= 10.12.0" (DeeDeeG) [#2153](https://github.com/nodejs/node-gyp/pull/2153) + +## v7.0.0 2020-06-03 + +* [[`e18a61afc1`](https://github.com/nodejs/node-gyp/commit/e18a61afc1)] - **build**: shrink bloated addon binaries on windows (Shelley Vohr) [#2060](https://github.com/nodejs/node-gyp/pull/2060) +* [[`4937722cf5`](https://github.com/nodejs/node-gyp/commit/4937722cf5)] - **(SEMVER-MAJOR)** **deps**: replace mkdirp with {recursive} mkdir (Rod Vagg) [#2123](https://github.com/nodejs/node-gyp/pull/2123) +* [[`d45438a047`](https://github.com/nodejs/node-gyp/commit/d45438a047)] - **(SEMVER-MAJOR)** **deps**: update deps, match to npm@7 (Rod Vagg) [#2126](https://github.com/nodejs/node-gyp/pull/2126) +* [[`ba4f34b7d6`](https://github.com/nodejs/node-gyp/commit/ba4f34b7d6)] - **doc**: update catalina xcode clt download link (Dario Vladovic) [#2133](https://github.com/nodejs/node-gyp/pull/2133) +* [[`f7bfce96ed`](https://github.com/nodejs/node-gyp/commit/f7bfce96ed)] - **doc**: update acid test and introduce curl|bash test script (Dario Vladovic)
[#2105](https://github.com/nodejs/node-gyp/pull/2105) +* [[`e529f3309d`](https://github.com/nodejs/node-gyp/commit/e529f3309d)] - **doc**: update README to reflect upgrade to gyp-next (Ujjwal Sharma) [#2092](https://github.com/nodejs/node-gyp/pull/2092) +* [[`9aed6286a3`](https://github.com/nodejs/node-gyp/commit/9aed6286a3)] - **doc**: give more attention to Catalina issues doc (Matheus Marchini) [#2134](https://github.com/nodejs/node-gyp/pull/2134) +* [[`963f2a7b48`](https://github.com/nodejs/node-gyp/commit/963f2a7b48)] - **doc**: improve Catalina discoverability for search engines (Matheus Marchini) [#2135](https://github.com/nodejs/node-gyp/pull/2135) +* [[`7b75af349b`](https://github.com/nodejs/node-gyp/commit/7b75af349b)] - **doc**: add macOS Catalina software update info (Karl Horky) [#2078](https://github.com/nodejs/node-gyp/pull/2078) +* [[`4f23c7bee2`](https://github.com/nodejs/node-gyp/commit/4f23c7bee2)] - **doc**: update link to the code of conduct (#2073) (Michaël Zasso) [#2073](https://github.com/nodejs/node-gyp/pull/2073) +* [[`473cfa283f`](https://github.com/nodejs/node-gyp/commit/473cfa283f)] - **doc**: note in README that Python 3.8 is supported (#2072) (Michaël Zasso) [#2072](https://github.com/nodejs/node-gyp/pull/2072) +* [[`e7402b4a7c`](https://github.com/nodejs/node-gyp/commit/e7402b4a7c)] - **doc**: update catalina xcode cli tools download link (#2044) (Dario Vladović) [#2044](https://github.com/nodejs/node-gyp/pull/2044) +* [[`35de45984f`](https://github.com/nodejs/node-gyp/commit/35de45984f)] - **doc**: update catalina xcode cli tools download link; formatting (Jonathan Hult) [#2034](https://github.com/nodejs/node-gyp/pull/2034) +* [[`48642191f5`](https://github.com/nodejs/node-gyp/commit/48642191f5)] - **doc**: add download link for Command Line Tools for Xcode (Przemysław Bitkowski) [#2029](https://github.com/nodejs/node-gyp/pull/2029) +* [[`ae5b150051`](https://github.com/nodejs/node-gyp/commit/ae5b150051)] - **doc**: Catalina suggestion: remove /Library/Developer/CommandLineTools (Christian Clauss) [#2022](https://github.com/nodejs/node-gyp/pull/2022) +* [[`d1dea13fe4`](https://github.com/nodejs/node-gyp/commit/d1dea13fe4)] - **doc**: fix changelog 6.1.0 release year to be 2020 (Quentin Vernot) [#2021](https://github.com/nodejs/node-gyp/pull/2021) +* [[`6356117b08`](https://github.com/nodejs/node-gyp/commit/6356117b08)] - **doc, bin**: stop suggesting opening node-gyp issues (Bartosz Sosnowski) [#2096](https://github.com/nodejs/node-gyp/pull/2096) +* [[`a6b76a8b48`](https://github.com/nodejs/node-gyp/commit/a6b76a8b48)] - **gyp**: update gyp to 0.2.1 (Ujjwal Sharma) [#2092](https://github.com/nodejs/node-gyp/pull/2092) +* [[`ebc34ec823`](https://github.com/nodejs/node-gyp/commit/ebc34ec823)] - **gyp**: update gyp to 0.2.0 (Ujjwal Sharma) [#2092](https://github.com/nodejs/node-gyp/pull/2092) +* [[`972780bde7`](https://github.com/nodejs/node-gyp/commit/972780bde7)] - **(SEMVER-MAJOR)** **gyp**: sync code base with nodejs repo (#1975) (Michaël Zasso) [#1975](https://github.com/nodejs/node-gyp/pull/1975) +* [[`c255ffbf6a`](https://github.com/nodejs/node-gyp/commit/c255ffbf6a)] - **lib**: drop "-2" flag for "py.exe" launcher (DeeDeeG) [#2131](https://github.com/nodejs/node-gyp/pull/2131) +* [[`1f7e1e93b5`](https://github.com/nodejs/node-gyp/commit/1f7e1e93b5)] - **lib**: ignore VS instances that cause COMExceptions (Andrew Casey) [#2018](https://github.com/nodejs/node-gyp/pull/2018) +* [[`741ab096d5`](https://github.com/nodejs/node-gyp/commit/741ab096d5)] - 
**test**: remove support for EOL versions of Node.js (Shelley Vohr) +* [[`ca86ef2539`](https://github.com/nodejs/node-gyp/commit/ca86ef2539)] - **test**: bump actions/checkout from v1 to v2 (BSKY) [#2063](https://github.com/nodejs/node-gyp/pull/2063) + +## v6.1.0 2020-01-08 + +* [[`9a7dd16b76`](https://github.com/nodejs/node-gyp/commit/9a7dd16b76)] - **doc**: remove backticks from Python version list (Rod Vagg) [#2011](https://github.com/nodejs/node-gyp/pull/2011) +* [[`26cd6eaea6`](https://github.com/nodejs/node-gyp/commit/26cd6eaea6)] - **doc**: add GitHub Actions badge (#1994) (Rod Vagg) [#1994](https://github.com/nodejs/node-gyp/pull/1994) +* [[`312c12ef4f`](https://github.com/nodejs/node-gyp/commit/312c12ef4f)] - **doc**: update macOS\_Catalina.md (#1992) (James Home) [#1992](https://github.com/nodejs/node-gyp/pull/1992) +* [[`f7b6b6b77b`](https://github.com/nodejs/node-gyp/commit/f7b6b6b77b)] - **doc**: fix typo in README.md (#1985) (Suraneti Rodsuwan) [#1985](https://github.com/nodejs/node-gyp/pull/1985) +* [[`6b8f2652dd`](https://github.com/nodejs/node-gyp/commit/6b8f2652dd)] - **doc**: add travis badge (Rod Vagg) [#1971](https://github.com/nodejs/node-gyp/pull/1971) +* [[`20aa0b44f7`](https://github.com/nodejs/node-gyp/commit/20aa0b44f7)] - **doc**: macOS Catalina add two commands (Christian Clauss) [#1962](https://github.com/nodejs/node-gyp/pull/1962) +* [[`14f2a07a39`](https://github.com/nodejs/node-gyp/commit/14f2a07a39)] - **gyp**: list(dict) so we can del dict(key) while iterating (Christian Clauss) [#2009](https://github.com/nodejs/node-gyp/pull/2009) +* [[`f242ce4d2c`](https://github.com/nodejs/node-gyp/commit/f242ce4d2c)] - **lib**: compatibility with semver ≥ 7 (`new` for semver.Range) (Xavier Guimard) [#2006](https://github.com/nodejs/node-gyp/pull/2006) +* [[`3bcba2a01a`](https://github.com/nodejs/node-gyp/commit/3bcba2a01a)] - **(SEMVER-MINOR)** **lib**: noproxy support, match proxy detection to `request` (Matias Lopez) [#1978](https://github.com/nodejs/node-gyp/pull/1978) +* [[`470cc2178e`](https://github.com/nodejs/node-gyp/commit/470cc2178e)] - **test**: remove old docker test harness (#1993) (Rod Vagg) [#1993](https://github.com/nodejs/node-gyp/pull/1993) +* [[`31ecc8421d`](https://github.com/nodejs/node-gyp/commit/31ecc8421d)] - **test**: add Windows to GitHub Actions testing (#1996) (Christian Clauss) [#1996](https://github.com/nodejs/node-gyp/pull/1996) +* [[`5a729e86ee`](https://github.com/nodejs/node-gyp/commit/5a729e86ee)] - **test**: fix typo in header download test (#2001) (Richard Lau) [#2001](https://github.com/nodejs/node-gyp/pull/2001) +* [[`345c70e56d`](https://github.com/nodejs/node-gyp/commit/345c70e56d)] - **test**: direct python invocation & simpler pyenv (Matias Lopez) [#1979](https://github.com/nodejs/node-gyp/pull/1979) +* [[`d6a7e0e1fb`](https://github.com/nodejs/node-gyp/commit/d6a7e0e1fb)] - **test**: fix macOS Travis on Python 2.7 & 3.7 (Christian Clauss) [#1979](https://github.com/nodejs/node-gyp/pull/1979) +* [[`5a64e9bd32`](https://github.com/nodejs/node-gyp/commit/5a64e9bd32)] - **test**: initial Github Actions with Ubuntu & macOS (Christian Clauss) [#1985](https://github.com/nodejs/node-gyp/pull/1985) +* [[`04da736d38`](https://github.com/nodejs/node-gyp/commit/04da736d38)] - **test**: fix Python unittests (cclauss) [#1961](https://github.com/nodejs/node-gyp/pull/1961) +* [[`0670e5189d`](https://github.com/nodejs/node-gyp/commit/0670e5189d)] - **test**: add header download test (Rod Vagg) 
[#1796](https://github.com/nodejs/node-gyp/pull/1796) +* [[`c506a6a150`](https://github.com/nodejs/node-gyp/commit/c506a6a150)] - **test**: configure proper devDir for invoking configure() (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796) + +## v6.0.1 2019-11-01 + +* [[`8ec2e681d5`](https://github.com/nodejs/node-gyp/commit/8ec2e681d5)] - **doc**: add macOS\_Catalina.md document (cclauss) [#1940](https://github.com/nodejs/node-gyp/pull/1940) +* [[`1b11be63cc`](https://github.com/nodejs/node-gyp/commit/1b11be63cc)] - **gyp**: python3 fixes: utf8 decode, use of 'None' in eval (Wilfried Goesgens) [#1925](https://github.com/nodejs/node-gyp/pull/1925) +* [[`c0282daa48`](https://github.com/nodejs/node-gyp/commit/c0282daa48)] - **gyp**: iteritems() -\> items() in compile\_commands\_json.py (cclauss) [#1947](https://github.com/nodejs/node-gyp/pull/1947) +* [[`d8e09a1b6a`](https://github.com/nodejs/node-gyp/commit/d8e09a1b6a)] - **gyp**: make cmake python3 compatible (gengjiawen) [#1944](https://github.com/nodejs/node-gyp/pull/1944) +* [[`9c0f3404f0`](https://github.com/nodejs/node-gyp/commit/9c0f3404f0)] - **gyp**: fix TypeError in XcodeVersion() (Christian Clauss) [#1939](https://github.com/nodejs/node-gyp/pull/1939) +* [[`bb2eb72a3f`](https://github.com/nodejs/node-gyp/commit/bb2eb72a3f)] - **gyp**: finish decode stdout on Python 3 (Christian Clauss) [#1937](https://github.com/nodejs/node-gyp/pull/1937) +* [[`f0693413d9`](https://github.com/nodejs/node-gyp/commit/f0693413d9)] - **src,win**: allow 403 errors for arm64 node.lib (Richard Lau) [#1934](https://github.com/nodejs/node-gyp/pull/1934) +* [[`c60c22de58`](https://github.com/nodejs/node-gyp/commit/c60c22de58)] - **deps**: update deps to roughly match current npm@6 (Rod Vagg) [#1920](https://github.com/nodejs/node-gyp/pull/1920) +* [[`b91718eefc`](https://github.com/nodejs/node-gyp/commit/b91718eefc)] - **test**: upgrade Linux Travis CI to Python 3.8 (Christian Clauss) [#1923](https://github.com/nodejs/node-gyp/pull/1923) +* [[`3538a317b6`](https://github.com/nodejs/node-gyp/commit/3538a317b6)] - **doc**: adjustments to the README.md for new users (Dan Pike) [#1919](https://github.com/nodejs/node-gyp/pull/1919) +* [[`4fff8458c0`](https://github.com/nodejs/node-gyp/commit/4fff8458c0)] - **travis**: ignore failed `brew upgrade npm`, update xcode (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932) +* [[`60e4488f08`](https://github.com/nodejs/node-gyp/commit/60e4488f08)] - **build**: avoid bare exceptions in xcode\_emulation.py (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932) +* [[`032db2a2d0`](https://github.com/nodejs/node-gyp/commit/032db2a2d0)] - **lib,install**: always download SHA sums on Windows (Sam Hughes) [#1926](https://github.com/nodejs/node-gyp/pull/1926) +* [[`5a83630c33`](https://github.com/nodejs/node-gyp/commit/5a83630c33)] - **travis**: add Windows + Python 3.8 to the mix (Rod Vagg) [#1921](https://github.com/nodejs/node-gyp/pull/1921) + +## v6.0.0 2019-10-04 + +* [[`dd0e97ef0b`](https://github.com/nodejs/node-gyp/commit/dd0e97ef0b)] - **(SEMVER-MAJOR)** **lib**: try to find `python` after `python3` (Sam Roberts) [#1907](https://github.com/nodejs/node-gyp/pull/1907) +* [[`f60ed47d14`](https://github.com/nodejs/node-gyp/commit/f60ed47d14)] - **travis**: add Python 3.5 and 3.6 tests on Linux (Christian Clauss) [#1903](https://github.com/nodejs/node-gyp/pull/1903) +* [[`c763ca1838`](https://github.com/nodejs/node-gyp/commit/c763ca1838)] - **(SEMVER-MAJOR)** **doc**: 
Declare that node-gyp is Python 3 compatible (cclauss) [#1811](https://github.com/nodejs/node-gyp/pull/1811) +* [[`3d1c60ab81`](https://github.com/nodejs/node-gyp/commit/3d1c60ab81)] - **(SEMVER-MAJOR)** **lib**: accept Python 3 by default (João Reis) [#1844](https://github.com/nodejs/node-gyp/pull/1844) +* [[`c6e3b65a23`](https://github.com/nodejs/node-gyp/commit/c6e3b65a23)] - **(SEMVER-MAJOR)** **lib**: raise the minimum Python version from 2.6 to 2.7 (cclauss) [#1818](https://github.com/nodejs/node-gyp/pull/1818) + +## v5.1.1 2020-05-25 + +* [[`bdd3a79abe`](https://github.com/nodejs/node-gyp/commit/bdd3a79abe)] - **build**: shrink bloated addon binaries on windows (Shelley Vohr) [#2060](https://github.com/nodejs/node-gyp/pull/2060) +* [[`1f2ba75bc0`](https://github.com/nodejs/node-gyp/commit/1f2ba75bc0)] - **doc**: add macOS Catalina software update info (Karl Horky) [#2078](https://github.com/nodejs/node-gyp/pull/2078) +* [[`c106d915f5`](https://github.com/nodejs/node-gyp/commit/c106d915f5)] - **doc**: update catalina xcode cli tools download link (#2044) (Dario Vladović) [#2044](https://github.com/nodejs/node-gyp/pull/2044) +* [[`9a6fea92e2`](https://github.com/nodejs/node-gyp/commit/9a6fea92e2)] - **doc**: update catalina xcode cli tools download link; formatting (Jonathan Hult) [#2034](https://github.com/nodejs/node-gyp/pull/2034) +* [[`59b0b1add8`](https://github.com/nodejs/node-gyp/commit/59b0b1add8)] - **doc**: add download link for Command Line Tools for Xcode (Przemysław Bitkowski) [#2029](https://github.com/nodejs/node-gyp/pull/2029) +* [[`bb8d0e7b10`](https://github.com/nodejs/node-gyp/commit/bb8d0e7b10)] - **doc**: Catalina suggestion: remove /Library/Developer/CommandLineTools (Christian Clauss) [#2022](https://github.com/nodejs/node-gyp/pull/2022) +* [[`fb2e80d4e3`](https://github.com/nodejs/node-gyp/commit/fb2e80d4e3)] - **doc**: update link to the code of conduct (#2073) (Michaël Zasso) [#2073](https://github.com/nodejs/node-gyp/pull/2073) +* [[`251d9c885c`](https://github.com/nodejs/node-gyp/commit/251d9c885c)] - **doc**: note in README that Python 3.8 is supported (#2072) (Michaël Zasso) [#2072](https://github.com/nodejs/node-gyp/pull/2072) +* [[`2b6fc3c8d6`](https://github.com/nodejs/node-gyp/commit/2b6fc3c8d6)] - **doc, bin**: stop suggesting opening node-gyp issues (Bartosz Sosnowski) [#2096](https://github.com/nodejs/node-gyp/pull/2096) +* [[`a876ae58ad`](https://github.com/nodejs/node-gyp/commit/a876ae58ad)] - **test**: bump actions/checkout from v1 to v2 (BSKY) [#2063](https://github.com/nodejs/node-gyp/pull/2063) + +## v5.1.0 2020-02-05 + +* [[`f37a8b40d0`](https://github.com/nodejs/node-gyp/commit/f37a8b40d0)] - **doc**: add GitHub Actions badge (#1994) (Rod Vagg) [#1994](https://github.com/nodejs/node-gyp/pull/1994) +* [[`cb3f6aae5e`](https://github.com/nodejs/node-gyp/commit/cb3f6aae5e)] - **doc**: update macOS\_Catalina.md (#1992) (James Home) [#1992](https://github.com/nodejs/node-gyp/pull/1992) +* [[`0607596a4c`](https://github.com/nodejs/node-gyp/commit/0607596a4c)] - **doc**: fix typo in README.md (#1985) (Suraneti Rodsuwan) [#1985](https://github.com/nodejs/node-gyp/pull/1985) +* [[`0d5a415a14`](https://github.com/nodejs/node-gyp/commit/0d5a415a14)] - **doc**: add travis badge (Rod Vagg) [#1971](https://github.com/nodejs/node-gyp/pull/1971) +* [[`103740cd95`](https://github.com/nodejs/node-gyp/commit/103740cd95)] - **gyp**: list(dict) so we can del dict(key) while iterating (Christian Clauss) [#2009](https://github.com/nodejs/node-gyp/pull/2009) +* 
[[`278dcddbdd`](https://github.com/nodejs/node-gyp/commit/278dcddbdd)] - **lib**: ignore VS instances that cause COMExceptions (Andrew Casey) [#2018](https://github.com/nodejs/node-gyp/pull/2018) +* [[`1694907bbf`](https://github.com/nodejs/node-gyp/commit/1694907bbf)] - **lib**: compatibility with semver ≥ 7 (`new` for semver.Range) (Xavier Guimard) [#2006](https://github.com/nodejs/node-gyp/pull/2006) +* [[`a3f1143514`](https://github.com/nodejs/node-gyp/commit/a3f1143514)] - **(SEMVER-MINOR)** **lib**: noproxy support, match proxy detection to `request` (Matias Lopez) [#1978](https://github.com/nodejs/node-gyp/pull/1978) +* [[`52365819c7`](https://github.com/nodejs/node-gyp/commit/52365819c7)] - **test**: remove old docker test harness (#1993) (Rod Vagg) [#1993](https://github.com/nodejs/node-gyp/pull/1993) +* [[`bc509c511d`](https://github.com/nodejs/node-gyp/commit/bc509c511d)] - **test**: add Windows to GitHub Actions testing (#1996) (Christian Clauss) [#1996](https://github.com/nodejs/node-gyp/pull/1996) +* [[`91ee26dd48`](https://github.com/nodejs/node-gyp/commit/91ee26dd48)] - **test**: fix typo in header download test (#2001) (Richard Lau) [#2001](https://github.com/nodejs/node-gyp/pull/2001) +* [[`0923f344c9`](https://github.com/nodejs/node-gyp/commit/0923f344c9)] - **test**: direct python invocation & simpler pyenv (Matias Lopez) [#1979](https://github.com/nodejs/node-gyp/pull/1979) +* [[`32c8744b34`](https://github.com/nodejs/node-gyp/commit/32c8744b34)] - **test**: fix macOS Travis on Python 2.7 & 3.7 (Christian Clauss) [#1979](https://github.com/nodejs/node-gyp/pull/1979) +* [[`fd4b1351e4`](https://github.com/nodejs/node-gyp/commit/fd4b1351e4)] - **test**: initial Github Actions with Ubuntu & macOS (Christian Clauss) [#1985](https://github.com/nodejs/node-gyp/pull/1985) + +## v5.0.7 2019-12-16 + +Republish of v5.0.6 with unnecessary tarball removed from pack file. 
+ +## v5.0.6 2019-12-16 + +* [[`cdec00286f`](https://github.com/nodejs/node-gyp/commit/cdec00286f)] - **doc**: adjustments to the README.md for new users (Dan Pike) [#1919](https://github.com/nodejs/node-gyp/pull/1919) +* [[`b7c8233ef2`](https://github.com/nodejs/node-gyp/commit/b7c8233ef2)] - **test**: fix Python unittests (cclauss) [#1961](https://github.com/nodejs/node-gyp/pull/1961) +* [[`e12b00ab0a`](https://github.com/nodejs/node-gyp/commit/e12b00ab0a)] - **doc**: macOS Catalina add two commands (Christian Clauss) [#1962](https://github.com/nodejs/node-gyp/pull/1962) +* [[`70b9890c0d`](https://github.com/nodejs/node-gyp/commit/70b9890c0d)] - **test**: add header download test (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796) +* [[`4029fa8629`](https://github.com/nodejs/node-gyp/commit/4029fa8629)] - **test**: configure proper devDir for invoking configure() (Rod Vagg) [#1796](https://github.com/nodejs/node-gyp/pull/1796) +* [[`fe8b02cc8b`](https://github.com/nodejs/node-gyp/commit/fe8b02cc8b)] - **doc**: add macOS\_Catalina.md document (cclauss) [#1940](https://github.com/nodejs/node-gyp/pull/1940) +* [[`8ea47ce365`](https://github.com/nodejs/node-gyp/commit/8ea47ce365)] - **gyp**: python3 fixes: utf8 decode, use of 'None' in eval (Wilfried Goesgens) [#1925](https://github.com/nodejs/node-gyp/pull/1925) +* [[`c7229716ba`](https://github.com/nodejs/node-gyp/commit/c7229716ba)] - **gyp**: iteritems() -\> items() in compile\_commands\_json.py (cclauss) [#1947](https://github.com/nodejs/node-gyp/pull/1947) +* [[`2a18b2a0f8`](https://github.com/nodejs/node-gyp/commit/2a18b2a0f8)] - **gyp**: make cmake python3 compatible (gengjiawen) [#1944](https://github.com/nodejs/node-gyp/pull/1944) +* [[`70f391e844`](https://github.com/nodejs/node-gyp/commit/70f391e844)] - **gyp**: fix TypeError in XcodeVersion() (Christian Clauss) [#1939](https://github.com/nodejs/node-gyp/pull/1939) +* [[`9f4f0fa34e`](https://github.com/nodejs/node-gyp/commit/9f4f0fa34e)] - **gyp**: finish decode stdout on Python 3 (Christian Clauss) [#1937](https://github.com/nodejs/node-gyp/pull/1937) +* [[`7cf507906d`](https://github.com/nodejs/node-gyp/commit/7cf507906d)] - **src,win**: allow 403 errors for arm64 node.lib (Richard Lau) [#1934](https://github.com/nodejs/node-gyp/pull/1934) +* [[`ad0d182c01`](https://github.com/nodejs/node-gyp/commit/ad0d182c01)] - **deps**: update deps to roughly match current npm@6 (Rod Vagg) [#1920](https://github.com/nodejs/node-gyp/pull/1920) +* [[`1553081ed6`](https://github.com/nodejs/node-gyp/commit/1553081ed6)] - **test**: upgrade Linux Travis CI to Python 3.8 (Christian Clauss) [#1923](https://github.com/nodejs/node-gyp/pull/1923) +* [[`0705cae9aa`](https://github.com/nodejs/node-gyp/commit/0705cae9aa)] - **travis**: ignore failed `brew upgrade npm`, update xcode (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932) +* [[`7bfdb6f5bf`](https://github.com/nodejs/node-gyp/commit/7bfdb6f5bf)] - **build**: avoid bare exceptions in xcode\_emulation.py (Christian Clauss) [#1932](https://github.com/nodejs/node-gyp/pull/1932) +* [[`7edf7658fa`](https://github.com/nodejs/node-gyp/commit/7edf7658fa)] - **lib,install**: always download SHA sums on Windows (Sam Hughes) [#1926](https://github.com/nodejs/node-gyp/pull/1926) +* [[`69056d04fe`](https://github.com/nodejs/node-gyp/commit/69056d04fe)] - **travis**: add Windows + Python 3.8 to the mix (Rod Vagg) [#1921](https://github.com/nodejs/node-gyp/pull/1921) + +## v5.0.5 2019-10-04 + +* 
[[`3891391746`](https://github.com/nodejs/node-gyp/commit/3891391746)] - **doc**: reconcile README with Python 3 compat changes (Rod Vagg) [#1911](https://github.com/nodejs/node-gyp/pull/1911) +* [[`07f81f1920`](https://github.com/nodejs/node-gyp/commit/07f81f1920)] - **lib**: accept Python 3 after Python 2 (Sam Roberts) [#1910](https://github.com/nodejs/node-gyp/pull/1910) +* [[`04ce59f4a2`](https://github.com/nodejs/node-gyp/commit/04ce59f4a2)] - **doc**: clarify Python configuration, etc (Sam Roberts) [#1908](https://github.com/nodejs/node-gyp/pull/1908) +* [[`01c46ee3df`](https://github.com/nodejs/node-gyp/commit/01c46ee3df)] - **gyp**: add \_\_lt\_\_ to MSVSSolutionEntry (João Reis) [#1904](https://github.com/nodejs/node-gyp/pull/1904) +* [[`735d961b99`](https://github.com/nodejs/node-gyp/commit/735d961b99)] - **win**: support VS 2017 Desktop Express (João Reis) [#1902](https://github.com/nodejs/node-gyp/pull/1902) +* [[`3834156a92`](https://github.com/nodejs/node-gyp/commit/3834156a92)] - **test**: add Python 3.5 and 3.6 tests on Linux (cclauss) [#1909](https://github.com/nodejs/node-gyp/pull/1909) +* [[`1196e990d8`](https://github.com/nodejs/node-gyp/commit/1196e990d8)] - **src**: update to standard@14 (Rod Vagg) [#1899](https://github.com/nodejs/node-gyp/pull/1899) +* [[`53ee7dfe89`](https://github.com/nodejs/node-gyp/commit/53ee7dfe89)] - **gyp**: fix undefined name: cflags --\> ldflags (Christian Clauss) [#1901](https://github.com/nodejs/node-gyp/pull/1901) +* [[`5871dcf6c9`](https://github.com/nodejs/node-gyp/commit/5871dcf6c9)] - **src,win**: add support for fetching arm64 node.lib (Richard Townsend) [#1875](https://github.com/nodejs/node-gyp/pull/1875) + +## v5.0.4 2019-09-27 + +* [[`1236869ffc`](https://github.com/nodejs/node-gyp/commit/1236869ffc)] - **gyp**: modify XcodeVersion() to convert "4.2" to "0420" and "10.0" to "1000" (Christian Clauss) [#1895](https://github.com/nodejs/node-gyp/pull/1895) +* [[`36638afe48`](https://github.com/nodejs/node-gyp/commit/36638afe48)] - **gyp**: more decode stdout on Python 3 (cclauss) [#1894](https://github.com/nodejs/node-gyp/pull/1894) +* [[`f753c167c5`](https://github.com/nodejs/node-gyp/commit/f753c167c5)] - **gyp**: decode stdout on Python 3 (cclauss) [#1890](https://github.com/nodejs/node-gyp/pull/1890) +* [[`60a4083523`](https://github.com/nodejs/node-gyp/commit/60a4083523)] - **doc**: update xcode install instructions to match Node's BUILDING (Nhan Khong) [#1884](https://github.com/nodejs/node-gyp/pull/1884) +* [[`19dbc9ac32`](https://github.com/nodejs/node-gyp/commit/19dbc9ac32)] - **deps**: update tar to 4.4.12 (Matheus Marchini) [#1889](https://github.com/nodejs/node-gyp/pull/1889) +* [[`5f3ed92181`](https://github.com/nodejs/node-gyp/commit/5f3ed92181)] - **bin**: fix the usage instructions (Halit Ogunc) [#1888](https://github.com/nodejs/node-gyp/pull/1888) +* [[`aab118edf1`](https://github.com/nodejs/node-gyp/commit/aab118edf1)] - **lib**: adding keep-alive header to download requests (Milad Farazmand) [#1863](https://github.com/nodejs/node-gyp/pull/1863) +* [[`1186e89326`](https://github.com/nodejs/node-gyp/commit/1186e89326)] - **lib**: ignore non-critical os.userInfo() failures (Rod Vagg) [#1835](https://github.com/nodejs/node-gyp/pull/1835) +* [[`785e527c3d`](https://github.com/nodejs/node-gyp/commit/785e527c3d)] - **doc**: fix missing argument for setting python path (lagorsse) [#1802](https://github.com/nodejs/node-gyp/pull/1802) +* [[`a97615196c`](https://github.com/nodejs/node-gyp/commit/a97615196c)] - **gyp**: rm 
semicolons (Python != JavaScript) (MattIPv4) [#1858](https://github.com/nodejs/node-gyp/pull/1858) +* [[`06019bac24`](https://github.com/nodejs/node-gyp/commit/06019bac24)] - **gyp**: assorted typo fixes (XhmikosR) [#1853](https://github.com/nodejs/node-gyp/pull/1853) +* [[`3f4972c1ca`](https://github.com/nodejs/node-gyp/commit/3f4972c1ca)] - **gyp**: use "is" when comparing to None (Vladyslav Burzakovskyy) [#1860](https://github.com/nodejs/node-gyp/pull/1860) +* [[`1cb4708073`](https://github.com/nodejs/node-gyp/commit/1cb4708073)] - **src,win**: improve unmanaged handling (Peter Sabath) [#1852](https://github.com/nodejs/node-gyp/pull/1852) +* [[`5553cd910e`](https://github.com/nodejs/node-gyp/commit/5553cd910e)] - **gyp**: improve Windows+Cygwin compatibility (Jose Quijada) [#1817](https://github.com/nodejs/node-gyp/pull/1817) +* [[`8bcb1fbb43`](https://github.com/nodejs/node-gyp/commit/8bcb1fbb43)] - **gyp**: Python 3 Windows fixes (João Reis) [#1843](https://github.com/nodejs/node-gyp/pull/1843) +* [[`2e24d0a326`](https://github.com/nodejs/node-gyp/commit/2e24d0a326)] - **test**: accept Python 3 in test-find-python.js (João Reis) [#1843](https://github.com/nodejs/node-gyp/pull/1843) +* [[`1267b4dc1c`](https://github.com/nodejs/node-gyp/commit/1267b4dc1c)] - **build**: add test run Python 3.7 on macOS (Christian Clauss) [#1843](https://github.com/nodejs/node-gyp/pull/1843) +* [[`da1b031aa3`](https://github.com/nodejs/node-gyp/commit/da1b031aa3)] - **build**: import StringIO on Python 2 and Python 3 (Christian Clauss) [#1836](https://github.com/nodejs/node-gyp/pull/1836) +* [[`fa0ed4aa42`](https://github.com/nodejs/node-gyp/commit/fa0ed4aa42)] - **build**: more Python 3 compat, replace compile with ast (cclauss) [#1820](https://github.com/nodejs/node-gyp/pull/1820) +* [[`18d5c7c9d0`](https://github.com/nodejs/node-gyp/commit/18d5c7c9d0)] - **win,src**: update win\_delay\_load\_hook.cc to work with /clr (Ivan Petrovic) [#1819](https://github.com/nodejs/node-gyp/pull/1819) + +## v5.0.3 2019-07-17 + +* [[`66ad305775`](https://github.com/nodejs/node-gyp/commit/66ad305775)] - **python**: accept Python 3 conditionally (João Reis) [#1815](https://github.com/nodejs/node-gyp/pull/1815) +* [[`7e7fce3fed`](https://github.com/nodejs/node-gyp/commit/7e7fce3fed)] - **python**: move Python detection to its own file (João Reis) [#1815](https://github.com/nodejs/node-gyp/pull/1815) +* [[`e40c99e283`](https://github.com/nodejs/node-gyp/commit/e40c99e283)] - **src**: implement standard.js linting (Rod Vagg) [#1794](https://github.com/nodejs/node-gyp/pull/1794) +* [[`bb92c761a9`](https://github.com/nodejs/node-gyp/commit/bb92c761a9)] - **test**: add Node.js 6 on Windows to Travis CI (João Reis) [#1812](https://github.com/nodejs/node-gyp/pull/1812) +* [[`7fd924079f`](https://github.com/nodejs/node-gyp/commit/7fd924079f)] - **test**: increase tap timeout (João Reis) [#1812](https://github.com/nodejs/node-gyp/pull/1812) +* [[`7e8127068f`](https://github.com/nodejs/node-gyp/commit/7e8127068f)] - **test**: cover supported node versions with travis (Rod Vagg) [#1809](https://github.com/nodejs/node-gyp/pull/1809) +* [[`24109148df`](https://github.com/nodejs/node-gyp/commit/24109148df)] - **test**: downgrade to tap@^12 for continued Node 6 support (Rod Vagg) [#1808](https://github.com/nodejs/node-gyp/pull/1808) +* [[`656117cc4a`](https://github.com/nodejs/node-gyp/commit/656117cc4a)] - **win**: make VS path match case-insensitive (João Reis) [#1806](https://github.com/nodejs/node-gyp/pull/1806) + +## v5.0.2 
2019-06-27 + +* [[`2761afbf73`](https://github.com/nodejs/node-gyp/commit/2761afbf73)] - **build,test**: add duplicate symbol test (Gabriel Schulhof) [#1689](https://github.com/nodejs/node-gyp/pull/1689) +* [[`82f129d6de`](https://github.com/nodejs/node-gyp/commit/82f129d6de)] - **gyp**: replace optparse to argparse (KiYugadgeter) [#1591](https://github.com/nodejs/node-gyp/pull/1591) +* [[`afaaa29c61`](https://github.com/nodejs/node-gyp/commit/afaaa29c61)] - **gyp**: remove from \_\_future\_\_ import with\_statement (cclauss) [#1799](https://github.com/nodejs/node-gyp/pull/1799) +* [[`a991f633d6`](https://github.com/nodejs/node-gyp/commit/a991f633d6)] - **gyp**: fix the remaining Python 3 issues (cclauss) [#1793](https://github.com/nodejs/node-gyp/pull/1793) +* [[`f952b08f84`](https://github.com/nodejs/node-gyp/commit/f952b08f84)] - **gyp**: move from \_\_future\_\_ import to the top of the file (cclauss) [#1789](https://github.com/nodejs/node-gyp/pull/1789) +* [[`4f4a677dfa`](https://github.com/nodejs/node-gyp/commit/4f4a677dfa)] - **gyp**: use different default compiler for z/OS (Shuowang (Wayne) Zhang) [#1768](https://github.com/nodejs/node-gyp/pull/1768) +* [[`03683f09d6`](https://github.com/nodejs/node-gyp/commit/03683f09d6)] - **lib**: code de-duplication (Pavel Medvedev) [#965](https://github.com/nodejs/node-gyp/pull/965) +* [[`611bc3c89f`](https://github.com/nodejs/node-gyp/commit/611bc3c89f)] - **lib**: add .json suffix for explicit require (Rod Vagg) [#1787](https://github.com/nodejs/node-gyp/pull/1787) +* [[`d3478d7b0b`](https://github.com/nodejs/node-gyp/commit/d3478d7b0b)] - **meta**: add to .gitignore (Refael Ackermann) [#1573](https://github.com/nodejs/node-gyp/pull/1573) +* [[`7a9a038e9e`](https://github.com/nodejs/node-gyp/commit/7a9a038e9e)] - **test**: add parallel test runs on macOS and Windows (cclauss) [#1800](https://github.com/nodejs/node-gyp/pull/1800) +* [[`7dd7f2b2a2`](https://github.com/nodejs/node-gyp/commit/7dd7f2b2a2)] - **test**: fix Python syntax error in test-adding.js (cclauss) [#1793](https://github.com/nodejs/node-gyp/pull/1793) +* [[`395f843de0`](https://github.com/nodejs/node-gyp/commit/395f843de0)] - **test**: replace self-signed cert with 'localhost' (Rod Vagg) [#1795](https://github.com/nodejs/node-gyp/pull/1795) +* [[`a52c6eb9e8`](https://github.com/nodejs/node-gyp/commit/a52c6eb9e8)] - **test**: migrate from tape to tap (Rod Vagg) [#1795](https://github.com/nodejs/node-gyp/pull/1795) +* [[`ec2eb44a30`](https://github.com/nodejs/node-gyp/commit/ec2eb44a30)] - **test**: use Nan in duplicate\_symbols (Gabriel Schulhof) [#1689](https://github.com/nodejs/node-gyp/pull/1689) +* [[`1597c84aad`](https://github.com/nodejs/node-gyp/commit/1597c84aad)] - **test**: use Travis CI to run tests on every pull request (cclauss) [#1752](https://github.com/nodejs/node-gyp/pull/1752) +* [[`dd9bf929ac`](https://github.com/nodejs/node-gyp/commit/dd9bf929ac)] - **zos**: update compiler options (Shuowang (Wayne) Zhang) [#1768](https://github.com/nodejs/node-gyp/pull/1768) + +## v5.0.1 2019-06-20 + +* [[`e3861722ed`](https://github.com/nodejs/node-gyp/commit/e3861722ed)] - **doc**: document --jobs max (David Sanders) [#1770](https://github.com/nodejs/node-gyp/pull/1770) +* [[`1cfdb28886`](https://github.com/nodejs/node-gyp/commit/1cfdb28886)] - **lib**: reintroduce support for iojs file naming for releases \>= 1 && \< 4 (Samuel Attard) [#1777](https://github.com/nodejs/node-gyp/pull/1777) + +## v5.0.0 2019-06-13 + +* 
[[`8a83972743`](https://github.com/nodejs/node-gyp/commit/8a83972743)] - **(SEMVER-MAJOR)** **bin**: follow XDG OS conventions for storing data (Selwyn) [#1570](https://github.com/nodejs/node-gyp/pull/1570) +* [[`9e46872ea3`](https://github.com/nodejs/node-gyp/commit/9e46872ea3)] - **bin,lib**: remove extra comments/lines/spaces (Jon Moss) [#1508](https://github.com/nodejs/node-gyp/pull/1508) +* [[`8098ebdeb4`](https://github.com/nodejs/node-gyp/commit/8098ebdeb4)] - **deps**: replace `osenv` dependency with native `os` (Selwyn) +* [[`f83b457e03`](https://github.com/nodejs/node-gyp/commit/f83b457e03)] - **deps**: bump request to 2.8.7, fixes hoek/hawk issues (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492) +* [[`323cee7323`](https://github.com/nodejs/node-gyp/commit/323cee7323)] - **deps**: pin `request` version range (Refael Ackermann) [#1300](https://github.com/nodejs/node-gyp/pull/1300) +* [[`c515912d08`](https://github.com/nodejs/node-gyp/commit/c515912d08)] - **doc**: improve issue template (Bartosz Sosnowski) [#1618](https://github.com/nodejs/node-gyp/pull/1618) +* [[`cca2d66727`](https://github.com/nodejs/node-gyp/commit/cca2d66727)] - **doc**: python info needs own header (Taylor D. Lee) [#1245](https://github.com/nodejs/node-gyp/pull/1245) +* [[`3e64c780f5`](https://github.com/nodejs/node-gyp/commit/3e64c780f5)] - **doc**: lint README.md (Jon Moss) [#1498](https://github.com/nodejs/node-gyp/pull/1498) +* [[`a20faedc91`](https://github.com/nodejs/node-gyp/commit/a20faedc91)] - **(SEMVER-MAJOR)** **gyp**: enable MARMASM items only on new VS versions (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) +* [[`721eb691cf`](https://github.com/nodejs/node-gyp/commit/721eb691cf)] - **gyp**: teach MSVS generator about MARMASM Items (Jon Kunkee) [#1679](https://github.com/nodejs/node-gyp/pull/1679) +* [[`91744bfecc`](https://github.com/nodejs/node-gyp/commit/91744bfecc)] - **gyp**: add support for Windows on Arm (Richard Townsend) [#1739](https://github.com/nodejs/node-gyp/pull/1739) +* [[`a6e0a6c7ed`](https://github.com/nodejs/node-gyp/commit/a6e0a6c7ed)] - **gyp**: move compile\_commands\_json (Paul Maréchal) [#1661](https://github.com/nodejs/node-gyp/pull/1661) +* [[`92e8b52cee`](https://github.com/nodejs/node-gyp/commit/92e8b52cee)] - **gyp**: fix target --\> self.target (cclauss) +* [[`febdfa2137`](https://github.com/nodejs/node-gyp/commit/febdfa2137)] - **gyp**: fix syntax error (cclauss) [#1333](https://github.com/nodejs/node-gyp/pull/1333) +* [[`588d333c14`](https://github.com/nodejs/node-gyp/commit/588d333c14)] - **gyp**: \_winreg module was renamed to winreg in Python 3. (Craig Rodrigues) +* [[`98226d198c`](https://github.com/nodejs/node-gyp/commit/98226d198c)] - **gyp**: replace basestring with str, but only on Python 3. (Craig Rodrigues) +* [[`7535e4478e`](https://github.com/nodejs/node-gyp/commit/7535e4478e)] - **gyp**: replace deprecated functions (Craig Rodrigues) +* [[`2040cd21cc`](https://github.com/nodejs/node-gyp/commit/2040cd21cc)] - **gyp**: use print as a function, as specified in PEP 3105.
(Craig Rodrigues) +* [[`abef93ded5`](https://github.com/nodejs/node-gyp/commit/abef93ded5)] - **gyp**: get ready for python 3 (cclauss) +* [[`43031fadcb`](https://github.com/nodejs/node-gyp/commit/43031fadcb)] - **python**: clean-up detection (João Reis) [#1582](https://github.com/nodejs/node-gyp/pull/1582) +* [[`49ab79d221`](https://github.com/nodejs/node-gyp/commit/49ab79d221)] - **python**: more informative error (Refael Ackermann) [#1269](https://github.com/nodejs/node-gyp/pull/1269) +* [[`997bc3c748`](https://github.com/nodejs/node-gyp/commit/997bc3c748)] - **readme**: add ARM64 info to MSVC setup instructions (Jon Kunkee) [#1655](https://github.com/nodejs/node-gyp/pull/1655) +* [[`788e767179`](https://github.com/nodejs/node-gyp/commit/788e767179)] - **test**: remove unused variable (João Reis) +* [[`6f5a408934`](https://github.com/nodejs/node-gyp/commit/6f5a408934)] - **tools**: fix usage of inherited -fPIC and -fPIE (Jens) [#1340](https://github.com/nodejs/node-gyp/pull/1340) +* [[`0efb8fb34b`](https://github.com/nodejs/node-gyp/commit/0efb8fb34b)] - **(SEMVER-MAJOR)** **win**: support running in VS Command Prompt (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) +* [[`360ddbdf3a`](https://github.com/nodejs/node-gyp/commit/360ddbdf3a)] - **(SEMVER-MAJOR)** **win**: add support for Visual Studio 2019 (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) +* [[`8f43f68275`](https://github.com/nodejs/node-gyp/commit/8f43f68275)] - **(SEMVER-MAJOR)** **win**: detect all VS versions in node-gyp (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) +* [[`7fe4095974`](https://github.com/nodejs/node-gyp/commit/7fe4095974)] - **(SEMVER-MAJOR)** **win**: generic Visual Studio 2017 detection (João Reis) [#1762](https://github.com/nodejs/node-gyp/pull/1762) +* [[`7a71d68bce`](https://github.com/nodejs/node-gyp/commit/7a71d68bce)] - **win**: use msbuild from the configure stage (Bartosz Sosnowski) [#1654](https://github.com/nodejs/node-gyp/pull/1654) +* [[`d3b21220a0`](https://github.com/nodejs/node-gyp/commit/d3b21220a0)] - **win**: fix delay-load hook for electron 4 (Andy Dill) +* [[`81f3a92338`](https://github.com/nodejs/node-gyp/commit/81f3a92338)] - Update list of Node.js versions to test against. (Ben Noordhuis) [#1670](https://github.com/nodejs/node-gyp/pull/1670) +* [[`4748f6ab75`](https://github.com/nodejs/node-gyp/commit/4748f6ab75)] - Remove deprecated compatibility code. 
(Ben Noordhuis) [#1670](https://github.com/nodejs/node-gyp/pull/1670) +* [[`45e3221fd4`](https://github.com/nodejs/node-gyp/commit/45e3221fd4)] - Remove an outdated workaround for Python 2.4 (cclauss) [#1650](https://github.com/nodejs/node-gyp/pull/1650) +* [[`721dc7d314`](https://github.com/nodejs/node-gyp/commit/721dc7d314)] - Add ARM64 to MSBuild /Platform logic (Jon Kunkee) [#1655](https://github.com/nodejs/node-gyp/pull/1655) +* [[`a5b7410497`](https://github.com/nodejs/node-gyp/commit/a5b7410497)] - Add ESLint no-unused-vars rule (Jon Moss) [#1497](https://github.com/nodejs/node-gyp/pull/1497) + +## v4.0.0 2019-04-24 + +* [[`ceed5cbe10`](https://github.com/nodejs/node-gyp/commit/ceed5cbe10)] - **deps**: updated tar package version to 4.4.8 (Pobegaylo Maksim) [#1713](https://github.com/nodejs/node-gyp/pull/1713) +* [[`374519e066`](https://github.com/nodejs/node-gyp/commit/374519e066)] - **(SEMVER-MAJOR)** Upgrade to tar v3 (isaacs) [#1212](https://github.com/nodejs/node-gyp/pull/1212) +* [[`e6699d13cd`](https://github.com/nodejs/node-gyp/commit/e6699d13cd)] - **test**: fix addon test for Node.js 12 and V8 7.4 (Richard Lau) [#1705](https://github.com/nodejs/node-gyp/pull/1705) +* [[`0c6bf530a0`](https://github.com/nodejs/node-gyp/commit/0c6bf530a0)] - **lib**: use print() for python version detection (GreenAddress) [#1534](https://github.com/nodejs/node-gyp/pull/1534) + +## v3.8.0 2018-08-09 + +* [[`c5929cb4fe`](https://github.com/nodejs/node-gyp/commit/c5929cb4fe)] - **doc**: update Xcode preferences tab name. (Ivan Daniluk) [#1330](https://github.com/nodejs/node-gyp/pull/1330) +* [[`8b488da8b9`](https://github.com/nodejs/node-gyp/commit/8b488da8b9)] - **doc**: update link to commit guidelines (Jonas Hermsmeier) [#1456](https://github.com/nodejs/node-gyp/pull/1456) +* [[`b4fe8c16f9`](https://github.com/nodejs/node-gyp/commit/b4fe8c16f9)] - **doc**: fix visual studio links (Bartosz Sosnowski) [#1490](https://github.com/nodejs/node-gyp/pull/1490) +* [[`536759c7e9`](https://github.com/nodejs/node-gyp/commit/536759c7e9)] - **configure**: use sys.version\_info to get python version (Yang Guo) [#1504](https://github.com/nodejs/node-gyp/pull/1504) +* [[`94c39c604e`](https://github.com/nodejs/node-gyp/commit/94c39c604e)] - **gyp**: fix ninja build failure (GYP patch) (Daniel Bevenius) [nodejs/node#12484](https://github.com/nodejs/node/pull/12484) +* [[`e8ea74e0fa`](https://github.com/nodejs/node-gyp/commit/e8ea74e0fa)] - **tools**: patch gyp to avoid xcrun errors (Ujjwal Sharma) [nodejs/node#21520](https://github.com/nodejs/node/pull/21520) +* [[`ea9aff44f2`](https://github.com/nodejs/node-gyp/commit/ea9aff44f2)] - **tools**: fix "the the" typos in comments (Masashi Hirano) [nodejs/node#20716](https://github.com/nodejs/node/pull/20716) +* [[`207e5aa4fd`](https://github.com/nodejs/node-gyp/commit/207e5aa4fd)] - **gyp**: implement LD/LDXX for ninja and FIPS (Sam Roberts) +* [[`b416c5f4b7`](https://github.com/nodejs/node-gyp/commit/b416c5f4b7)] - **gyp**: enable cctest to use objects (gyp part) (Daniel Bevenius) [nodejs/node#12450](https://github.com/nodejs/node/pull/12450) +* [[`40692d016b`](https://github.com/nodejs/node-gyp/commit/40692d016b)] - **gyp**: add compile\_commands.json gyp generator (Ben Noordhuis) [nodejs/node#12450](https://github.com/nodejs/node/pull/12450) +* [[`fc3c4e2b10`](https://github.com/nodejs/node-gyp/commit/fc3c4e2b10)] - **gyp**: float gyp patch for long filenames (Anna Henningsen) [nodejs/node#7963](https://github.com/nodejs/node/pull/7963) +* 
[[`8aedbfdef6`](https://github.com/nodejs/node-gyp/commit/8aedbfdef6)] - **gyp**: backport GYP fix to fix AIX shared suffix (Stewart Addison) +* [[`6cd84b84fc`](https://github.com/nodejs/node-gyp/commit/6cd84b84fc)] - **test**: formatting and minor fixes for execFileSync replacement (Rod Vagg) [#1521](https://github.com/nodejs/node-gyp/pull/1521) +* [[`60e421363f`](https://github.com/nodejs/node-gyp/commit/60e421363f)] - **test**: added test/processExecSync.js for when execFileSync is not available. (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492) +* [[`969447c5bd`](https://github.com/nodejs/node-gyp/commit/969447c5bd)] - **deps**: bump request to 2.8.7, fixes heok/hawk issues (Rohit Hazra) [#1492](https://github.com/nodejs/node-gyp/pull/1492) +* [[`340403ccfe`](https://github.com/nodejs/node-gyp/commit/340403ccfe)] - **win**: improve parsing of SDK version (Alessandro Vergani) [#1516](https://github.com/nodejs/node-gyp/pull/1516) + +## v3.7.0 2018-06-08 + +* [[`84cea7b30d`](https://github.com/nodejs/node-gyp/commit/84cea7b30d)] - Remove unused gyp test scripts. (Ben Noordhuis) [#1458](https://github.com/nodejs/node-gyp/pull/1458) +* [[`0540e4ec63`](https://github.com/nodejs/node-gyp/commit/0540e4ec63)] - **gyp**: escape spaces in filenames in make generator (Jeff Senn) [#1436](https://github.com/nodejs/node-gyp/pull/1436) +* [[`88fc6fa0ec`](https://github.com/nodejs/node-gyp/commit/88fc6fa0ec)] - Drop dependency on minimatch. (Brian Woodward) [#1158](https://github.com/nodejs/node-gyp/pull/1158) +* [[`1e203c5148`](https://github.com/nodejs/node-gyp/commit/1e203c5148)] - Fix include path when pointing to Node.js source (Richard Lau) [#1055](https://github.com/nodejs/node-gyp/pull/1055) +* [[`53d8cb967c`](https://github.com/nodejs/node-gyp/commit/53d8cb967c)] - Prefix build targets with /t: on Windows (Natalie Wolfe) [#1164](https://github.com/nodejs/node-gyp/pull/1164) +* [[`53a5f8ff38`](https://github.com/nodejs/node-gyp/commit/53a5f8ff38)] - **gyp**: add support for .mm files to msvs generator (Julien Racle) [#1167](https://github.com/nodejs/node-gyp/pull/1167) +* [[`dd8561e528`](https://github.com/nodejs/node-gyp/commit/dd8561e528)] - **zos**: don't use universal-new-lines mode (John Barboza) [#1451](https://github.com/nodejs/node-gyp/pull/1451) +* [[`e5a69010ed`](https://github.com/nodejs/node-gyp/commit/e5a69010ed)] - **zos**: add search locations for libnode.x (John Barboza) [#1451](https://github.com/nodejs/node-gyp/pull/1451) +* [[`79febace53`](https://github.com/nodejs/node-gyp/commit/79febace53)] - **doc**: update macOS information in README (Josh Parnham) [#1323](https://github.com/nodejs/node-gyp/pull/1323) +* [[`9425448945`](https://github.com/nodejs/node-gyp/commit/9425448945)] - **gyp**: don't print xcodebuild not found errors (Gibson Fahnestock) [#1370](https://github.com/nodejs/node-gyp/pull/1370) +* [[`6f1286f5b2`](https://github.com/nodejs/node-gyp/commit/6f1286f5b2)] - Fix infinite install loop. (Ben Noordhuis) [#1384](https://github.com/nodejs/node-gyp/pull/1384) +* [[`2580b9139e`](https://github.com/nodejs/node-gyp/commit/2580b9139e)] - Update `--nodedir` description in README. 
(Ben Noordhuis) [#1372](https://github.com/nodejs/node-gyp/pull/1372) +* [[`a61360391a`](https://github.com/nodejs/node-gyp/commit/a61360391a)] - Update README with another way to install on windows (JeffAtDeere) [#1352](https://github.com/nodejs/node-gyp/pull/1352) +* [[`47496bf6dc`](https://github.com/nodejs/node-gyp/commit/47496bf6dc)] - Fix IndexError when parsing GYP files. (Ben Noordhuis) [#1267](https://github.com/nodejs/node-gyp/pull/1267) +* [[`b2024dee7b`](https://github.com/nodejs/node-gyp/commit/b2024dee7b)] - **zos**: support platform (John Barboza) [#1276](https://github.com/nodejs/node-gyp/pull/1276) +* [[`90d86512f4`](https://github.com/nodejs/node-gyp/commit/90d86512f4)] - **win**: run PS with `-NoProfile` (Refael Ackermann) [#1292](https://github.com/nodejs/node-gyp/pull/1292) +* [[`2da5f86ef7`](https://github.com/nodejs/node-gyp/commit/2da5f86ef7)] - **doc**: add github PR and Issue templates (Gibson Fahnestock) [#1228](https://github.com/nodejs/node-gyp/pull/1228) +* [[`a46a770d68`](https://github.com/nodejs/node-gyp/commit/a46a770d68)] - **doc**: update proposed DCO and CoC (Mikeal Rogers) [#1229](https://github.com/nodejs/node-gyp/pull/1229) +* [[`7e803d58e0`](https://github.com/nodejs/node-gyp/commit/7e803d58e0)] - **doc**: headerify the Install instructions (Nick Schonning) [#1225](https://github.com/nodejs/node-gyp/pull/1225) +* [[`f27599193a`](https://github.com/nodejs/node-gyp/commit/f27599193a)] - **gyp**: update xml string encoding conversion (Liu Chao) [#1203](https://github.com/nodejs/node-gyp/pull/1203) +* [[`0a07e481f7`](https://github.com/nodejs/node-gyp/commit/0a07e481f7)] - **configure**: don't set ensure if tarball is set (Gibson Fahnestock) [#1220](https://github.com/nodejs/node-gyp/pull/1220) + +## v3.6.3 2018-06-08 + +* [[`90cd2e8da9`](https://github.com/nodejs/node-gyp/commit/90cd2e8da9)] - **gyp**: fix regex to match multi-digit versions (Jonas Hermsmeier) [#1455](https://github.com/nodejs/node-gyp/pull/1455) +* [[`7900122337`](https://github.com/nodejs/node-gyp/commit/7900122337)] - deps: pin `request` version range (Refael Ackerman) [#1300](https://github.com/nodejs/node-gyp/pull/1300) + +## v3.6.2 2017-06-01 + +* [[`72afdd62cd`](https://github.com/nodejs/node-gyp/commit/72afdd62cd)] - **build**: rename copyNodeLib() to doBuild() (Liu Chao) [#1206](https://github.com/nodejs/node-gyp/pull/1206) +* [[`bad903ac70`](https://github.com/nodejs/node-gyp/commit/bad903ac70)] - **win**: more robust parsing of SDK version (Refael Ackermann) [#1198](https://github.com/nodejs/node-gyp/pull/1198) +* [[`241752f381`](https://github.com/nodejs/node-gyp/commit/241752f381)] - Log dist-url. (Ben Noordhuis) [#1170](https://github.com/nodejs/node-gyp/pull/1170) +* [[`386746c7d1`](https://github.com/nodejs/node-gyp/commit/386746c7d1)] - **configure**: use full path in node_lib_file GYP var (Pavel Medvedev) [#964](https://github.com/nodejs/node-gyp/pull/964) +* [[`0913b2dd99`](https://github.com/nodejs/node-gyp/commit/0913b2dd99)] - **build, win**: use target_arch to link with node.lib (Pavel Medvedev) [#964](https://github.com/nodejs/node-gyp/pull/964) +* [[`c307b302f7`](https://github.com/nodejs/node-gyp/commit/c307b302f7)] - **doc**: blorb about setting `npm_config_OPTION_NAME` (Refael Ackermann) [#1185](https://github.com/nodejs/node-gyp/pull/1185) + +## v3.6.1 2017-04-30 + +* [[`49801716c2`](https://github.com/nodejs/node-gyp/commit/49801716c2)] - **test**: fix test-find-python on v0.10.x buildbot. 
(Ben Noordhuis) [#1172](https://github.com/nodejs/node-gyp/pull/1172) +* [[`a83a3801fc`](https://github.com/nodejs/node-gyp/commit/a83a3801fc)] - **test**: fix test/test-configure-python on AIX (Richard Lau) [#1131](https://github.com/nodejs/node-gyp/pull/1131) +* [[`8a767145c9`](https://github.com/nodejs/node-gyp/commit/8a767145c9)] - **gyp**: Revert quote_cmd workaround (Kunal Pathak) [#1153](https://github.com/nodejs/node-gyp/pull/1153) +* [[`c09cf7671e`](https://github.com/nodejs/node-gyp/commit/c09cf7671e)] - **doc**: add a note for using `configure` on Windows (Vse Mozhet Byt) [#1152](https://github.com/nodejs/node-gyp/pull/1152) +* [[`da9cb5f411`](https://github.com/nodejs/node-gyp/commit/da9cb5f411)] - Delete superfluous .patch files. (Ben Noordhuis) [#1122](https://github.com/nodejs/node-gyp/pull/1122) + +## v3.6.0 2017-03-16 + +* [[`ae141e1906`](https://github.com/nodejs/node-gyp/commit/ae141e1906)] - **win**: find and setup for VS2017 (Refael Ackermann) [#1130](https://github.com/nodejs/node-gyp/pull/1130) +* [[`ec5fc36a80`](https://github.com/nodejs/node-gyp/commit/ec5fc36a80)] - Add support to build node.js with chakracore for ARM. (Kunal Pathak) [#873](https://github.com/nodejs/node-gyp/pull/873) +* [[`a04ea3051a`](https://github.com/nodejs/node-gyp/commit/a04ea3051a)] - Add support to build node.js with chakracore. (Kunal Pathak) [#873](https://github.com/nodejs/node-gyp/pull/873) +* [[`93d7fa83c8`](https://github.com/nodejs/node-gyp/commit/93d7fa83c8)] - Upgrade semver dependency. (Ben Noordhuis) [#1107](https://github.com/nodejs/node-gyp/pull/1107) +* [[`ff9a6fadfd`](https://github.com/nodejs/node-gyp/commit/ff9a6fadfd)] - Update link of gyp as Google code is shutting down (Peter Dave Hello) [#1061](https://github.com/nodejs/node-gyp/pull/1061) + +## v3.5.0 2017-01-10 + +* [[`762d19a39e`](https://github.com/nodejs/node-gyp/commit/762d19a39e)] - \[doc\] merge History.md and CHANGELOG.md (Rod Vagg) +* [[`80fc5c3d31`](https://github.com/nodejs/node-gyp/commit/80fc5c3d31)] - Fix deprecated dependency warning (Simone Primarosa) [#1069](https://github.com/nodejs/node-gyp/pull/1069) +* [[`05c44944fd`](https://github.com/nodejs/node-gyp/commit/05c44944fd)] - Open the build file with universal-newlines mode (Guy Margalit) [#1053](https://github.com/nodejs/node-gyp/pull/1053) +* [[`37ae7be114`](https://github.com/nodejs/node-gyp/commit/37ae7be114)] - Try python launcher when stock python is python 3. (Ben Noordhuis) [#992](https://github.com/nodejs/node-gyp/pull/992) +* [[`e3778d9907`](https://github.com/nodejs/node-gyp/commit/e3778d9907)] - Add lots of findPython() tests. (Ben Noordhuis) [#992](https://github.com/nodejs/node-gyp/pull/992) +* [[`afc766adf6`](https://github.com/nodejs/node-gyp/commit/afc766adf6)] - Unset executable bit for .bat files (Pavel Medvedev) [#969](https://github.com/nodejs/node-gyp/pull/969) +* [[`ddac348991`](https://github.com/nodejs/node-gyp/commit/ddac348991)] - Use push on PYTHONPATH and add tests (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990) +* [[`b182a19042`](https://github.com/nodejs/node-gyp/commit/b182a19042)] - ***Revert*** "add "path-array" dep" (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990) +* [[`7c08b85c5a`](https://github.com/nodejs/node-gyp/commit/7c08b85c5a)] - ***Revert*** "**configure**: use "path-array" for PYTHONPATH" (Michael Hart) [#990](https://github.com/nodejs/node-gyp/pull/990) +* [[`9c8d275526`](https://github.com/nodejs/node-gyp/commit/9c8d275526)] - Add --devdir flag. 
(Ben Noordhuis) [#916](https://github.com/nodejs/node-gyp/pull/916) +* [[`f6eab1f9e4`](https://github.com/nodejs/node-gyp/commit/f6eab1f9e4)] - **doc**: add windows-build-tools to readme (Felix Rieseberg) [#970](https://github.com/nodejs/node-gyp/pull/970) + +## v3.4.0 2016-06-28 + +* [[`ce5fd04e94`](https://github.com/nodejs/node-gyp/commit/ce5fd04e94)] - **deps**: update minimatch version (delphiactual) [#961](https://github.com/nodejs/node-gyp/pull/961) +* [[`77383ddd85`](https://github.com/nodejs/node-gyp/commit/77383ddd85)] - Replace fs.accessSync call to fs.statSync (Richard Lau) [#955](https://github.com/nodejs/node-gyp/pull/955) +* [[`0dba4bda57`](https://github.com/nodejs/node-gyp/commit/0dba4bda57)] - **test**: add simple addon test (Richard Lau) [#955](https://github.com/nodejs/node-gyp/pull/955) +* [[`c4344b3889`](https://github.com/nodejs/node-gyp/commit/c4344b3889)] - **doc**: add --target option to README (Gibson Fahnestock) [#958](https://github.com/nodejs/node-gyp/pull/958) +* [[`cc778e9215`](https://github.com/nodejs/node-gyp/commit/cc778e9215)] - Override BUILDING_UV_SHARED, BUILDING_V8_SHARED. (Ben Noordhuis) [#915](https://github.com/nodejs/node-gyp/pull/915) +* [[`af35b2ad32`](https://github.com/nodejs/node-gyp/commit/af35b2ad32)] - Move VC++ Build Tools to Build Tools landing page. (Andrew Pardoe) [#953](https://github.com/nodejs/node-gyp/pull/953) +* [[`f31482e226`](https://github.com/nodejs/node-gyp/commit/f31482e226)] - **win**: work around __pfnDliNotifyHook2 type change (Alexis Campailla) [#952](https://github.com/nodejs/node-gyp/pull/952) +* [[`3df8222fa5`](https://github.com/nodejs/node-gyp/commit/3df8222fa5)] - Allow for npmlog@3.x (Rebecca Turner) [#950](https://github.com/nodejs/node-gyp/pull/950) +* [[`a4fa07b390`](https://github.com/nodejs/node-gyp/commit/a4fa07b390)] - More verbose error on locating msbuild.exe failure. (Mateusz Jaworski) [#930](https://github.com/nodejs/node-gyp/pull/930) +* [[`4ee31329e0`](https://github.com/nodejs/node-gyp/commit/4ee31329e0)] - **doc**: add command options to README.md (Gibson Fahnestock) [#937](https://github.com/nodejs/node-gyp/pull/937) +* [[`c8c7ca86b9`](https://github.com/nodejs/node-gyp/commit/c8c7ca86b9)] - Add --silent option for zero output. (Gibson Fahnestock) [#937](https://github.com/nodejs/node-gyp/pull/937) +* [[`ac29d23a7c`](https://github.com/nodejs/node-gyp/commit/ac29d23a7c)] - Upgrade to glob@7.0.3. 
(Ben Noordhuis) [#943](https://github.com/nodejs/node-gyp/pull/943) +* [[`15fd56be3d`](https://github.com/nodejs/node-gyp/commit/15fd56be3d)] - Enable V8 deprecation warnings for native modules (Matt Loring) [#920](https://github.com/nodejs/node-gyp/pull/920) +* [[`7f1c1b960c`](https://github.com/nodejs/node-gyp/commit/7f1c1b960c)] - **gyp**: improvements for android generator (Robert Chiras) [#935](https://github.com/nodejs/node-gyp/pull/935) +* [[`088082766c`](https://github.com/nodejs/node-gyp/commit/088082766c)] - Update Windows install instructions (Sara Itani) [#867](https://github.com/nodejs/node-gyp/pull/867) +* [[`625c1515f9`](https://github.com/nodejs/node-gyp/commit/625c1515f9)] - **gyp**: inherit CC/CXX for CC/CXX.host (Johan Bergström) [#908](https://github.com/nodejs/node-gyp/pull/908) +* [[`3bcb1720e4`](https://github.com/nodejs/node-gyp/commit/3bcb1720e4)] - Add support for the Python launcher on Windows (Patrick Westerhoff) [#894](https://github.com/nodejs/node-gyp/pull/894) + +## v3.3.1 2016-03-04 + +* [[`a981ef847a`](https://github.com/nodejs/node-gyp/commit/a981ef847a)] - **gyp**: fix android generator (Robert Chiras) [#889](https://github.com/nodejs/node-gyp/pull/889) + +## v3.3.0 2016-02-16 + +* [[`818d854a4d`](https://github.com/nodejs/node-gyp/commit/818d854a4d)] - Introduce NODEJS_ORG_MIRROR and IOJS_ORG_MIRROR (Rod Vagg) [#878](https://github.com/nodejs/node-gyp/pull/878) +* [[`d1e4cc4b62`](https://github.com/nodejs/node-gyp/commit/d1e4cc4b62)] - **(SEMVER-MINOR)** Download headers tarball for ~0.12.10 || ~0.10.42 (Rod Vagg) [#877](https://github.com/nodejs/node-gyp/pull/877) +* [[`6e28ad1bea`](https://github.com/nodejs/node-gyp/commit/6e28ad1bea)] - Allow for npmlog@2.x (Rebecca Turner) [#861](https://github.com/nodejs/node-gyp/pull/861) +* [[`07371e5812`](https://github.com/nodejs/node-gyp/commit/07371e5812)] - Use -fPIC for NetBSD. (Marcin Cieślak) [#856](https://github.com/nodejs/node-gyp/pull/856) +* [[`8c4b0ffa50`](https://github.com/nodejs/node-gyp/commit/8c4b0ffa50)] - **(SEMVER-MINOR)** Add --cafile command line option. (Ben Noordhuis) [#837](https://github.com/nodejs/node-gyp/pull/837) +* [[`b3ad43498e`](https://github.com/nodejs/node-gyp/commit/b3ad43498e)] - **(SEMVER-MINOR)** Make download() function testable. (Ben Noordhuis) [#837](https://github.com/nodejs/node-gyp/pull/837) + +## v3.2.1 2015-12-03 + +* [[`ab89b477c4`](https://github.com/nodejs/node-gyp/commit/ab89b477c4)] - Upgrade gyp to b3cef02. (Ben Noordhuis) [#831](https://github.com/nodejs/node-gyp/pull/831) +* [[`90078ecb17`](https://github.com/nodejs/node-gyp/commit/90078ecb17)] - Define WIN32_LEAN_AND_MEAN conditionally. (Ben Noordhuis) [#824](https://github.com/nodejs/node-gyp/pull/824) + +## v3.2.0 2015-11-25 + +* [[`268f1ca4c7`](https://github.com/nodejs/node-gyp/commit/268f1ca4c7)] - Use result of `which` when searching for python. (Refael Ackermann) [#668](https://github.com/nodejs/node-gyp/pull/668) +* [[`817ed9bd78`](https://github.com/nodejs/node-gyp/commit/817ed9bd78)] - Add test for python executable search logic. (Ben Noordhuis) [#756](https://github.com/nodejs/node-gyp/pull/756) +* [[`0e2dfda1f3`](https://github.com/nodejs/node-gyp/commit/0e2dfda1f3)] - Fix test/test-options when run through `npm test`.
(Ben Noordhuis) [#755](https://github.com/nodejs/node-gyp/pull/755) +* [[`9bfa0876b4`](https://github.com/nodejs/node-gyp/commit/9bfa0876b4)] - Add support for AIX (Michael Dawson) [#753](https://github.com/nodejs/node-gyp/pull/753) +* [[`a8d441a0a2`](https://github.com/nodejs/node-gyp/commit/a8d441a0a2)] - Update README for Windows 10 support. (Jason Williams) [#766](https://github.com/nodejs/node-gyp/pull/766) +* [[`d1d6015276`](https://github.com/nodejs/node-gyp/commit/d1d6015276)] - Update broken links and switch to HTTPS. (andrew morton) + +## v3.1.0 2015-11-14 + +* [[`9049241f91`](https://github.com/nodejs/node-gyp/commit/9049241f91)] - **gyp**: don't use links at all, just copy the files instead (Nathan Zadoks) +* [[`8ef90348d1`](https://github.com/nodejs/node-gyp/commit/8ef90348d1)] - **gyp**: apply https://codereview.chromium.org/11361103/ (Nathan Rajlich) +* [[`a2ed0df84e`](https://github.com/nodejs/node-gyp/commit/a2ed0df84e)] - **gyp**: always install into $PRODUCT_DIR (Nathan Rajlich) +* [[`cc8b2fa83e`](https://github.com/nodejs/node-gyp/commit/cc8b2fa83e)] - Update gyp to b3cef02. (Imran Iqbal) [#781](https://github.com/nodejs/node-gyp/pull/781) +* [[`f5d86eb84e`](https://github.com/nodejs/node-gyp/commit/f5d86eb84e)] - Update to tar@2.0.0. (Edgar Muentes) [#797](https://github.com/nodejs/node-gyp/pull/797) +* [[`2ac7de02c4`](https://github.com/nodejs/node-gyp/commit/2ac7de02c4)] - Fix infinite loop with zero-length options. (Ben Noordhuis) [#745](https://github.com/nodejs/node-gyp/pull/745) +* [[`101bed639b`](https://github.com/nodejs/node-gyp/commit/101bed639b)] - This platform value came from debian package, and now the value (Jérémy Lal) [#738](https://github.com/nodejs/node-gyp/pull/738) + +## v3.0.3 2015-09-14 + +* [[`ad827cda30`](https://github.com/nodejs/node-gyp/commit/ad827cda30)] - tarballUrl global and && when checking for iojs (Lars-Magnus Skog) [#729](https://github.com/nodejs/node-gyp/pull/729) + +## v3.0.2 2015-09-12 + +* [[`6e8c3bf3c6`](https://github.com/nodejs/node-gyp/commit/6e8c3bf3c6)] - add back support for passing additional cmdline args (Rod Vagg) [#723](https://github.com/nodejs/node-gyp/pull/723) +* [[`ff82f2f3b9`](https://github.com/nodejs/node-gyp/commit/ff82f2f3b9)] - fixed broken link in docs to Visual Studio 2013 download (simon-p-r) [#722](https://github.com/nodejs/node-gyp/pull/722) + +## v3.0.1 2015-09-08 + +* [[`846337e36b`](https://github.com/nodejs/node-gyp/commit/846337e36b)] - normalise versions for target == this comparison (Rod Vagg) [#716](https://github.com/nodejs/node-gyp/pull/716) + +## v3.0.0 2015-09-08 + +* [[`9720d0373c`](https://github.com/nodejs/node-gyp/commit/9720d0373c)] - remove node_modules from tree (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711) +* [[`6dcf220db7`](https://github.com/nodejs/node-gyp/commit/6dcf220db7)] - test version major directly, don't use semver.satisfies() (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711) +* [[`938dd18d1c`](https://github.com/nodejs/node-gyp/commit/938dd18d1c)] - refactor for clarity, fix dist-url, add env var dist-url functionality (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711) +* [[`9e9df66a06`](https://github.com/nodejs/node-gyp/commit/9e9df66a06)] - use process.release, make aware of io.js & node v4 differences (Rod Vagg) [#711](https://github.com/nodejs/node-gyp/pull/711) +* [[`1ea7ed01f4`](https://github.com/nodejs/node-gyp/commit/1ea7ed01f4)] - **deps**: update graceful-fs dependency to the latest (Sakthipriyan Vairamani) 
[#714](https://github.com/nodejs/node-gyp/pull/714) +* [[`0fbc387b35`](https://github.com/nodejs/node-gyp/commit/0fbc387b35)] - Update repository URLs. (Ben Noordhuis) [#715](https://github.com/nodejs/node-gyp/pull/715) +* [[`bbedb8868b`](https://github.com/nodejs/node-gyp/commit/bbedb8868b)] - **(SEMVER-MAJOR)** **win**: enable delay-load hook by default (Jeremiah Senkpiel) [#708](https://github.com/nodejs/node-gyp/pull/708) +* [[`85ed107565`](https://github.com/nodejs/node-gyp/commit/85ed107565)] - Merge pull request #664 from othiym23/othiym23/allow-semver-5 (Nathan Rajlich) +* [[`0c720d234c`](https://github.com/nodejs/node-gyp/commit/0c720d234c)] - allow semver@5 (Forrest L Norvell) + +## 2.0.2 / 2015-07-14 + + * Use HTTPS for dist url (#656, @SonicHedgehog) + * Merge pull request #648 from nevosegal/master + * Merge pull request #650 from magic890/patch-1 + * Updated Installation section on README + * Updated link to gyp user documentation + * Fix download error message spelling (#643, @tomxtobin) + * Merge pull request #637 from lygstate/master + * Set NODE_GYP_DIR for addon.gypi to setting absolute path for + src/win_delay_load_hook.c, and fixes of the long relative path issue on Win32. + Fixes #636 (#637, @lygstate). + +## 2.0.1 / 2015-05-28 + + * configure: try/catch the semver range.test() call + * README: update for visual studio 2013 (#510, @samccone) + +## 2.0.0 / 2015-05-24 + + * configure: check for python2 executable by default, fallback to python + * configure: don't clobber existing $PYTHONPATH + * configure: use "path-array" for PYTHONPATH + * gyp: fix for non-acsii userprofile name on Windows + * gyp: always install into $PRODUCT_DIR + * gyp: apply https://codereview.chromium.org/11361103/ + * gyp: don't use links at all, just copy the files instead + * gyp: update gyp to e1c8fcf7 + * Updated README.md with updated Windows build info + * Show URL when a download fails + * package: add a "license" field + * move HMODULE m declaration to top + * Only add "-undefined dynamic_lookup" to loadable_module targets + * win: optionally allow node.exe/iojs.exe to be renamed + * Avoid downloading shasums if using tarPath + * Add target name preprocessor define: `NODE_GYP_MODULE_NAME` + * Show better error message in case of bad network settings diff --git a/node_modules/node-gyp/CONTRIBUTING.md b/node_modules/node-gyp/CONTRIBUTING.md new file mode 100644 index 0000000..c1c50ea --- /dev/null +++ b/node_modules/node-gyp/CONTRIBUTING.md @@ -0,0 +1,34 @@ +# Contributing to node-gyp + +## Code of Conduct + +Please read the +[Code of Conduct](https://github.com/nodejs/admin/blob/master/CODE_OF_CONDUCT.md) +which explains the minimum behavior expectations for node-gyp contributors. + + +## Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. 
+ +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. diff --git a/node_modules/node-gyp/LICENSE b/node_modules/node-gyp/LICENSE new file mode 100644 index 0000000..2ea4dc5 --- /dev/null +++ b/node_modules/node-gyp/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2012 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/node-gyp/README.md b/node_modules/node-gyp/README.md new file mode 100644 index 0000000..71eea0a --- /dev/null +++ b/node_modules/node-gyp/README.md @@ -0,0 +1,256 @@ +# `node-gyp` - Node.js native addon build tool + +[![Build Status](https://github.com/nodejs/node-gyp/workflows/Tests/badge.svg?branch=master)](https://github.com/nodejs/node-gyp/actions?query=workflow%3ATests+branch%3Amaster) +![npm](https://img.shields.io/npm/dm/node-gyp) + +`node-gyp` is a cross-platform command-line tool written in Node.js for +compiling native addon modules for Node.js. It contains a vendored copy of the +[gyp-next](https://github.com/nodejs/gyp-next) project that was previously used +by the Chromium team, extended to support the development of Node.js native addons. + +Note that `node-gyp` is _not_ used to build Node.js itself. + +Multiple target versions of Node.js are supported (i.e. `0.8`, ..., `4`, `5`, `6`, +etc.), regardless of what version of Node.js is actually installed on your system +(`node-gyp` downloads the necessary development files or headers for the target version). + +## Features + + * The same build commands work on any of the supported platforms + * Supports the targeting of different versions of Node.js + +## Installation + +You can install `node-gyp` using `npm`: + +``` bash +npm install -g node-gyp +``` + +Depending on your operating system, you will need to install: + +### On Unix + + * Python v3.6, v3.7, v3.8, or v3.9 + * `make` + * A proper C/C++ compiler toolchain, like [GCC](https://gcc.gnu.org) + +### On macOS + +**ATTENTION**: If your Mac has been _upgraded_ to macOS Catalina (10.15), please read [macOS_Catalina.md](macOS_Catalina.md). + + * Python v3.6, v3.7, v3.8, or v3.9 + * [Xcode](https://developer.apple.com/xcode/download/) + * You also need to install the `XCode Command Line Tools` by running `xcode-select --install`. 
Alternatively, if you already have the full Xcode installed, you can find them under the menu `Xcode -> Open Developer Tool -> More Developer Tools...`. This step will install `clang`, `clang++`, and `make`. + +### On Windows + +Install the current version of Python from the [Microsoft Store package](https://docs.python.org/3/using/windows.html#the-microsoft-store-package). + +Install tools and configuration manually: + * Install Visual C++ Build Environment: [Visual Studio Build Tools](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=BuildTools) + (using "Visual C++ build tools" workload) or [Visual Studio Community](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=Community) + (using the "Desktop development with C++" workload) + * Launch cmd, `npm config set msvs_version 2017` + + If the above steps didn't work for you, please visit [Microsoft's Node.js Guidelines for Windows](https://github.com/Microsoft/nodejs-guidelines/blob/master/windows-environment.md#compiling-native-addon-modules) for additional tips. + + To target native ARM64 Node.js on Windows 10 on ARM, add the components "Visual C++ compilers and libraries for ARM64" and "Visual C++ ATL for ARM64". + +### Configuring Python Dependency + +`node-gyp` requires that you have installed a compatible version of Python, one of: v3.6, v3.7, +v3.8, or v3.9. If you have multiple Python versions installed, you can identify which Python +version `node-gyp` should use in one of the following ways: + +1. by setting the `--python` command-line option, e.g.: + +``` bash +node-gyp --python /path/to/executable/python +``` + +2. If `node-gyp` is called by way of `npm`, *and* you have multiple versions of +Python installed, then you can set `npm`'s 'python' config key to the appropriate +value: + +``` bash +npm config set python /path/to/executable/python +``` + +3. If the `PYTHON` environment variable is set to the path of a Python executable, +then that version will be used, if it is a compatible version. + +4. If the `NODE_GYP_FORCE_PYTHON` environment variable is set to the path of a +Python executable, it will be used instead of any of the other configured or +builtin Python search paths. If it's not a compatible version, no further +searching will be done. + +### Build for Third Party Node.js Runtimes + +When building modules for third party Node.js runtimes like Electron, which have +different build configurations from the official Node.js distribution, you +should use `--dist-url` or `--nodedir` flags to specify the headers of the +runtime to build for. + +Also when `--dist-url` or `--nodedir` flags are passed, node-gyp will use the +`config.gypi` shipped in the headers distribution to generate build +configurations, which is different from the default mode that would use the +`process.config` object of the running Node.js instance. + +Some old versions of Electron shipped malformed `config.gypi` in their headers +distributions, and you might need to pass `--force-process-config` to node-gyp +to work around configuration errors. + +## How to Use + +To compile your native addon, first go to its root directory: + +``` bash +cd my_node_addon +``` + +The next step is to generate the appropriate project build files for the current +platform.
Use `configure` for that: + +``` bash +node-gyp configure +``` + +Auto-detection fails for Visual C++ Build Tools 2015, so `--msvs_version=2015` +needs to be added (not needed when run by npm as configured above): +``` bash +node-gyp configure --msvs_version=2015 +``` + +__Note__: The `configure` step looks for a `binding.gyp` file in the current +directory to process. See below for instructions on creating a `binding.gyp` file. + +Now you will have either a `Makefile` (on Unix platforms) or a `vcxproj` file +(on Windows) in the `build/` directory. Next, invoke the `build` command: + +``` bash +node-gyp build +``` + +Now you have your compiled `.node` bindings file! The compiled bindings end up +in `build/Debug/` or `build/Release/`, depending on the build mode. At this point, +you can require the `.node` file with Node.js and run your tests! + +__Note:__ To create a _Debug_ build of the bindings file, pass the `--debug` (or +`-d`) switch when running either the `configure`, `build` or `rebuild` commands. + +## The `binding.gyp` file + +A `binding.gyp` file describes the configuration to build your module, in a +JSON-like format. This file gets placed in the root of your package, alongside +`package.json`. + +A barebones `gyp` file appropriate for building a Node.js addon could look like: + +```python +{ + "targets": [ + { + "target_name": "binding", + "sources": [ "src/binding.cc" ] + } + ] +} +``` + +## Further reading + +The **[docs](./docs/)** directory contains additional documentation on specific node-gyp topics that may be useful if you are experiencing problems installing or building addons using node-gyp. + +Some additional resources for Node.js native addons and writing `gyp` configuration files: + + * ["Going Native" a nodeschool.io tutorial](http://nodeschool.io/#goingnative) + * ["Hello World" node addon example](https://github.com/nodejs/node/tree/master/test/addons/hello-world) + * [gyp user documentation](https://gyp.gsrc.io/docs/UserDocumentation.md) + * [gyp input format reference](https://gyp.gsrc.io/docs/InputFormatReference.md) + * [*"binding.gyp" files out in the wild* wiki page](./docs/binding.gyp-files-in-the-wild.md) + +## Commands + +`node-gyp` responds to the following commands: + +| **Command** | **Description** +|:--------------|:--------------------------------------------------------------- +| `help` | Shows the help dialog +| `build` | Invokes `make`/`msbuild.exe` and builds the native addon +| `clean` | Removes the `build` directory if it exists +| `configure` | Generates project build files for the current platform +| `rebuild` | Runs `clean`, `configure` and `build` all in a row +| `install` | Installs Node.js header files for the given version +| `list` | Lists the currently installed Node.js header versions +| `remove` | Removes the Node.js header files for the given version + + +## Command Options + +`node-gyp` accepts the following command options: + +| **Command** | **Description** +|:----------------------------------|:------------------------------------------ +| `-j n`, `--jobs n` | Run `make` in parallel. 
The value `max` will use all available CPU cores +| `--target=v6.2.1` | Node.js version to build for (default is `process.version`) +| `--silly`, `--loglevel=silly` | Log all progress to console +| `--verbose`, `--loglevel=verbose` | Log most progress to console +| `--silent`, `--loglevel=silent` | Don't log anything to console +| `-d`, `--debug` | Make Debug build (default is `Release`) +| `--release`, `--no-debug` | Make Release build +| `-C $dir`, `--directory=$dir` | Run command in different directory +| `--make=$make` | Override `make` command (e.g. `gmake`) +| `--thin=yes` | Enable thin static libraries +| `--arch=$arch` | Set target architecture (e.g. ia32) +| `--tarball=$path` | Get headers from a local tarball +| `--devdir=$path` | SDK download directory (default is OS cache directory) +| `--ensure` | Don't reinstall headers if already present +| `--dist-url=$url` | Download header tarball from custom URL +| `--proxy=$url` | Set HTTP(S) proxy for downloading header tarball +| `--noproxy=$urls` | Set urls to ignore proxies when downloading header tarball +| `--cafile=$cafile` | Override default CA chain (to download tarball) +| `--nodedir=$path` | Set the path to the node source code +| `--python=$path` | Set path to the Python binary +| `--msvs_version=$version` | Set Visual Studio version (Windows only) +| `--solution=$solution` | Set Visual Studio Solution version (Windows only) +| `--force-process-config` | Force using runtime's `process.config` object to generate `config.gypi` file + +## Configuration + +### Environment variables + +Use the form `npm_config_OPTION_NAME` for any of the command options listed +above (dashes in option names should be replaced by underscores). + +For example, to set `devdir` equal to `/tmp/.gyp`, you would: + +Run this on Unix: + +```bash +export npm_config_devdir=/tmp/.gyp +``` + +Or this on Windows: + +```console +set npm_config_devdir=c:\temp\.gyp +``` + +### `npm` configuration + +Use the form `OPTION_NAME` for any of the command options listed above. + +For example, to set `devdir` equal to `/tmp/.gyp`, you would run: + +```bash +npm config set [--global] devdir /tmp/.gyp +``` + +**Note:** Configuration set via `npm` will only be used when `node-gyp` +is run via `npm`, not when `node-gyp` is run directly. + +## License + +`node-gyp` is available under the MIT license. See the [LICENSE +file](LICENSE) for details. diff --git a/node_modules/node-gyp/addon.gypi b/node_modules/node-gyp/addon.gypi new file mode 100644 index 0000000..9327b0d --- /dev/null +++ b/node_modules/node-gyp/addon.gypi @@ -0,0 +1,185 @@ +{ + 'variables' : { + 'node_engine_include_dir%': 'deps/v8/include', + 'node_host_binary%': 'node', + 'node_with_ltcg%': 'true', + }, + 'target_defaults': { + 'type': 'loadable_module', + 'win_delay_load_hook': 'true', + 'product_prefix': '', + + 'conditions': [ + [ 'node_engine=="chakracore"', { + 'variables': { + 'node_engine_include_dir%': 'deps/chakrashim/include' + }, + }] + ], + + 'include_dirs': [ + '<(node_root_dir)/include/node', + '<(node_root_dir)/src', + '<(node_root_dir)/deps/openssl/config', + '<(node_root_dir)/deps/openssl/openssl/include', + '<(node_root_dir)/deps/uv/include', + '<(node_root_dir)/deps/zlib', + '<(node_root_dir)/<(node_engine_include_dir)' + ], + 'defines!': [ + 'BUILDING_UV_SHARED=1', # Inherited from common.gypi. + 'BUILDING_V8_SHARED=1', # Inherited from common.gypi.
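+      # The '!' suffix makes this an exclusion list: these inherited defines
+      # are stripped so the 'defines' below can set the USING_*_SHARED variants.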
+ ], + 'defines': [ + 'NODE_GYP_MODULE_NAME=>(_target_name)', + 'USING_UV_SHARED=1', + 'USING_V8_SHARED=1', + # Warn when using deprecated V8 APIs. + 'V8_DEPRECATION_WARNINGS=1' + ], + + 'target_conditions': [ + ['_type=="loadable_module"', { + 'product_extension': 'node', + 'defines': [ + 'BUILDING_NODE_EXTENSION' + ], + 'xcode_settings': { + 'OTHER_LDFLAGS': [ + '-undefined dynamic_lookup' + ], + }, + }], + + ['_type=="static_library"', { + # set to `1` to *disable* the -T thin archive 'ld' flag. + # older linkers don't support this flag. + 'standalone_static_library': '<(standalone_static_library)' + }], + + ['_type!="executable"', { + 'conditions': [ + [ 'OS=="android"', { + 'cflags!': [ '-fPIE' ], + }] + ] + }], + + ['_win_delay_load_hook=="true"', { + # If the addon specifies `'win_delay_load_hook': 'true'` in its + # binding.gyp, link a delay-load hook into the DLL. This hook ensures + # that the addon will work regardless of whether the node/iojs binary + # is named node.exe, iojs.exe, or something else. + 'conditions': [ + [ 'OS=="win"', { + 'defines': [ 'HOST_BINARY=\"<(node_host_binary)<(EXECUTABLE_SUFFIX)\"', ], + 'sources': [ + '<(node_gyp_dir)/src/win_delay_load_hook.cc', + ], + 'msvs_settings': { + 'VCLinkerTool': { + 'DelayLoadDLLs': [ '<(node_host_binary)<(EXECUTABLE_SUFFIX)' ], + # Don't print a linker warning when no imports from either .exe + # are used. + 'AdditionalOptions': [ '/ignore:4199' ], + }, + }, + }], + ], + }], + ], + + 'conditions': [ + [ 'OS=="mac"', { + 'defines': [ + '_DARWIN_USE_64_BIT_INODE=1' + ], + 'xcode_settings': { + 'DYLIB_INSTALL_NAME_BASE': '@rpath' + }, + }], + [ 'OS=="aix"', { + 'ldflags': [ + '-Wl,-bimport:<(node_exp_file)' + ], + }], + [ 'OS=="zos"', { + 'cflags': [ + '-q64', + '-Wc,DLL', + '-qlonglong', + '-qenum=int', + '-qxclang=-fexec-charset=ISO8859-1' + ], + 'defines': [ + '_ALL_SOURCE=1', + 'MAP_FAILED=-1', + '_UNIX03_SOURCE=1' + ], + 'ldflags': [ + '-q64', + '<(node_exp_file)' + ], + }], + [ 'OS=="win"', { + 'conditions': [ + ['node_engine=="chakracore"', { + 'library_dirs': [ '<(node_root_dir)/$(ConfigurationName)' ], + 'libraries': [ '<@(node_engine_libs)' ], + }], + ['node_with_ltcg=="true"', { + 'msvs_settings': { + 'VCCLCompilerTool': { + 'WholeProgramOptimization': 'true' # /GL, whole program optimization, needed for LTCG + }, + 'VCLibrarianTool': { + 'AdditionalOptions': [ + '/LTCG:INCREMENTAL', # incremental link-time code generation + ] + }, + 'VCLinkerTool': { + 'OptimizeReferences': 2, # /OPT:REF + 'EnableCOMDATFolding': 2, # /OPT:ICF + 'LinkIncremental': 1, # disable incremental linking + 'AdditionalOptions': [ + '/LTCG:INCREMENTAL', # incremental link-time code generation + ] + } + } + }] + ], + 'libraries': [ + '-lkernel32.lib', + '-luser32.lib', + '-lgdi32.lib', + '-lwinspool.lib', + '-lcomdlg32.lib', + '-ladvapi32.lib', + '-lshell32.lib', + '-lole32.lib', + '-loleaut32.lib', + '-luuid.lib', + '-lodbc32.lib', + '-lDelayImp.lib', + '-l"<(node_lib_file)"' + ], + 'msvs_disabled_warnings': [ + # warning C4251: 'node::ObjectWrap::handle_' : class 'v8::Persistent' + # needs to have dll-interface to be used by + # clients of class 'node::ObjectWrap' + 4251 + ], + }, { + # OS!="win" + 'defines': [ + '_LARGEFILE_SOURCE', + '_FILE_OFFSET_BITS=64' + ], + }], + [ 'OS in "freebsd openbsd netbsd solaris android" or \ + (OS=="linux" and target_arch!="ia32")', { + 'cflags': [ '-fPIC' ], + }], + ] + } +} diff --git a/node_modules/node-gyp/bin/node-gyp.js b/node_modules/node-gyp/bin/node-gyp.js new file mode 100644 index 
0000000..8652ea2 --- /dev/null +++ b/node_modules/node-gyp/bin/node-gyp.js @@ -0,0 +1,140 @@ +#!/usr/bin/env node + +'use strict' + +process.title = 'node-gyp' + +const envPaths = require('env-paths') +const gyp = require('../') +const log = require('npmlog') +const os = require('os') + +/** + * Process and execute the selected commands. + */ + +const prog = gyp() +var completed = false +prog.parseArgv(process.argv) +prog.devDir = prog.opts.devdir + +var homeDir = os.homedir() +if (prog.devDir) { + prog.devDir = prog.devDir.replace(/^~/, homeDir) +} else if (homeDir) { + prog.devDir = envPaths('node-gyp', { suffix: '' }).cache +} else { + throw new Error( + "node-gyp requires that the user's home directory is specified " + + 'in either of the environmental variables HOME or USERPROFILE. ' + + 'Override with: --devdir /path/to/.node-gyp') +} + +if (prog.todo.length === 0) { + if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) { + console.log('v%s', prog.version) + } else { + console.log('%s', prog.usage()) + } + process.exit(0) +} + +log.info('it worked if it ends with', 'ok') +log.verbose('cli', process.argv) +log.info('using', 'node-gyp@%s', prog.version) +log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch) + +/** + * Change dir if -C/--directory was passed. + */ + +var dir = prog.opts.directory +if (dir) { + var fs = require('fs') + try { + var stat = fs.statSync(dir) + if (stat.isDirectory()) { + log.info('chdir', dir) + process.chdir(dir) + } else { + log.warn('chdir', dir + ' is not a directory') + } + } catch (e) { + if (e.code === 'ENOENT') { + log.warn('chdir', dir + ' is not a directory') + } else { + log.warn('chdir', 'error during chdir() "%s"', e.message) + } + } +} + +function run () { + var command = prog.todo.shift() + if (!command) { + // done! + completed = true + log.info('ok') + return + } + + prog.commands[command.name](command.args, function (err) { + if (err) { + log.error(command.name + ' error') + log.error('stack', err.stack) + errorMessage() + log.error('not ok') + return process.exit(1) + } + if (command.name === 'list') { + var versions = arguments[1] + if (versions.length > 0) { + versions.forEach(function (version) { + console.log(version) + }) + } else { + console.log('No node development files installed. Use `node-gyp install` to install a version.') + } + } else if (arguments.length >= 2) { + console.log.apply(console, [].slice.call(arguments, 1)) + } + + // now run the next command in the queue + process.nextTick(run) + }) +} + +process.on('exit', function (code) { + if (!completed && !code) { + log.error('Completion callback never invoked!') + issueMessage() + process.exit(6) + } +}) + +process.on('uncaughtException', function (err) { + log.error('UNCAUGHT EXCEPTION') + log.error('stack', err.stack) + issueMessage() + process.exit(7) +}) + +function errorMessage () { + // copied from npm's lib/utils/error-handler.js + var os = require('os') + log.error('System', os.type() + ' ' + os.release()) + log.error('command', process.argv + .map(JSON.stringify).join(' ')) + log.error('cwd', process.cwd()) + log.error('node -v', process.version) + log.error('node-gyp -v', 'v' + prog.package.version) +} + +function issueMessage () { + errorMessage() + log.error('', ['Node-gyp failed to build your package.', + 'Try to update npm and/or node-gyp and if it does not help file an issue with the package author.' + ].join('\n')) +} + +// start running the given commands!
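+// run() pulls one command at a time off prog.todo and invokes its handler;
+// on success it re-schedules itself via process.nextTick() until the queue
+// is empty, at which point `completed` is set so the exit handler stays quiet.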
+run() diff --git a/node_modules/node-gyp/docs/Common-issues.md b/node_modules/node-gyp/docs/Common-issues.md new file mode 100644 index 0000000..ae05fe3 --- /dev/null +++ b/node_modules/node-gyp/docs/Common-issues.md @@ -0,0 +1,14 @@ +## Python Issues OSX + +Make sure you are using the native Python version on OSX. If you use a MacPorts or Homebrew version, you may run into problems. + +If you have issues with `execvp`, be sure to check your `$PYTHON` environment variable. If it is not set to the native version, unset it and try again. + +Notes: https://gist.github.com/erichocean/5177582 + +## npm ERR! `node-gyp rebuild` (Windows) +* Install the build tools from [here](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=BuildTools). +Note the version you install, as it is required in the command below, e.g. **2017** +* Launch cmd, run `npm config set msvs_version 2017` +* Close and open a new CMD/terminal, and all is well :100: + diff --git a/node_modules/node-gyp/docs/Error-pre-versions-of-node-cannot-be-installed.md b/node_modules/node-gyp/docs/Error-pre-versions-of-node-cannot-be-installed.md new file mode 100644 index 0000000..c1e1158 --- /dev/null +++ b/node_modules/node-gyp/docs/Error-pre-versions-of-node-cannot-be-installed.md @@ -0,0 +1,94 @@ +When using `node-gyp` you might see an error like this when attempting to compile/install a node.js native addon: + +``` +$ npm install bcrypt +npm http GET https://registry.npmjs.org/bcrypt/0.7.5 +npm http 304 https://registry.npmjs.org/bcrypt/0.7.5 +npm http GET https://registry.npmjs.org/bindings/1.0.0 +npm http 304 https://registry.npmjs.org/bindings/1.0.0 + +> bcrypt@0.7.5 install /home/ubuntu/public/song-swap/node_modules/bcrypt +> node-gyp rebuild + +gyp ERR! configure error +gyp ERR! stack Error: "pre" versions of node cannot be installed, use the --nodedir flag instead +gyp ERR! stack at install (/usr/local/lib/node_modules/npm/node_modules/node-gyp/lib/install.js:69:16) +gyp ERR! stack at Object.self.commands.(anonymous function) [as install] (/usr/local/lib/node_modules/npm/node_modules/node-gyp/lib/node-gyp.js:56:37) +gyp ERR! stack at getNodeDir (/usr/local/lib/node_modules/npm/node_modules/node-gyp/lib/configure.js:219:20) +gyp ERR! stack at /usr/local/lib/node_modules/npm/node_modules/node-gyp/lib/configure.js:105:9 +gyp ERR! stack at ChildProcess.exithandler (child_process.js:630:7) +gyp ERR! stack at ChildProcess.EventEmitter.emit (events.js:99:17) +gyp ERR! stack at maybeClose (child_process.js:730:16) +gyp ERR! stack at Process.ChildProcess._handle.onexit (child_process.js:797:5) +gyp ERR! System Linux 3.5.0-21-generic +gyp ERR! command "node" "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js" "rebuild" +gyp ERR! cwd /home/ubuntu/public/song-swap/node_modules/bcrypt +gyp ERR! node -v v0.11.2-pre +gyp ERR! node-gyp -v v0.9.5 +gyp ERR! not ok +npm ERR! bcrypt@0.7.5 install: `node-gyp rebuild` +npm ERR! `sh "-c" "node-gyp rebuild"` failed with 1 +npm ERR! +npm ERR! Failed at the bcrypt@0.7.5 install script. +npm ERR! This is most likely a problem with the bcrypt package, +npm ERR! not with npm itself. +npm ERR! Tell the author that this fails on your system: +npm ERR! node-gyp rebuild +npm ERR! You can get their info via: +npm ERR! npm owner ls bcrypt +npm ERR! There is likely additional logging output above. + +npm ERR! System Linux 3.5.0-21-generic +npm ERR! command "/usr/local/bin/node" "/usr/local/bin/npm" "install" "bcrypt" +npm ERR! cwd /home/ubuntu/public/song-swap +npm ERR!
node -v v0.11.2-pre +npm ERR! npm -v 1.2.18 +npm ERR! code ELIFECYCLE +npm ERR! +npm ERR! Additional logging details can be found in: +npm ERR! /home/ubuntu/public/song-swap/npm-debug.log +npm ERR! not ok code 0 +``` + +The main error here is: + +``` +Error: "pre" versions of node cannot be installed, use the --nodedir flag instead +``` + +This error is caused when you attempt to compile a native addon using a version of node.js with `-pre` at the end of the version number: + +``` bash +$ node -v +v0.10.4-pre +``` + +## How to avoid (the short answer) + +To avoid this error completely, just use a stable release of node.js, e.g. `v0.10.4`, and __not__ `v0.10.4-pre`. + +## How to fix (the long answer) + +This error happens because `node-gyp` does not know what header files were used to compile your "pre" version of node, and therefore it needs you to specify the node source code directory path using the `--nodedir` flag. + +For example, if I compiled my development ("pre") version of node.js using the source code in `/Users/nrajlich/node`, then I could invoke `node-gyp` like: + +``` bash +$ node-gyp rebuild --nodedir=/Users/nrajlich/node +``` + +Or install a native addon through `npm` like: + +``` bash +$ npm install bcrypt --nodedir=/Users/nrajlich/node +``` + +### Always use `--nodedir` + +__Note:__ This is for advanced users who use `-pre` versions of node more often than tagged releases. + +If you're invoking `node-gyp` through `npm`, then you can leverage `npm`'s configuration system and not have to specify the `--nodedir` flag all the time: + +``` bash +$ npm config set nodedir /Users/nrajlich/node +``` \ No newline at end of file diff --git a/node_modules/node-gyp/docs/Home.md b/node_modules/node-gyp/docs/Home.md new file mode 100644 index 0000000..fe09986 --- /dev/null +++ b/node_modules/node-gyp/docs/Home.md @@ -0,0 +1,7 @@ +Welcome to the node-gyp wiki! + + * [["binding.gyp" files out in the wild]] + * [[Linking to OpenSSL]] + * [[Common Issues]] + * [[Updating npm's bundled node-gyp]] + * [[Error: "pre" versions of node cannot be installed]] diff --git a/node_modules/node-gyp/docs/Linking-to-OpenSSL.md b/node_modules/node-gyp/docs/Linking-to-OpenSSL.md new file mode 100644 index 0000000..ec80929 --- /dev/null +++ b/node_modules/node-gyp/docs/Linking-to-OpenSSL.md @@ -0,0 +1,86 @@ +A handful of native addons require linking to OpenSSL in one way or another. This introduces a small challenge since node will sometimes bundle OpenSSL statically (the default for node >= v0.8.x), or sometimes dynamically link to the system OpenSSL (default for node <= v0.6.x). + +Good native addons should account for both scenarios. It's recommended that you use the `binding.gyp` file provided below as a starting point for any addon that needs to use OpenSSL: + +``` python +{ + 'variables': { + # node v0.6.x doesn't give us its build variables, + # but on Unix it was only possible to use the system OpenSSL library, + # so default the variable to "true", v0.8.x node and up will overwrite it. + 'node_shared_openssl%': 'true' + }, + 'targets': [ + { + 'target_name': 'binding', + 'sources': [ + 'src/binding.cc' + ], + 'conditions': [ + ['node_shared_openssl=="false"', { + # so when "node_shared_openssl" is "false", then OpenSSL has been + # bundled into the node executable. So we need to include the same + # header files that were used when building node.
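+          # (The per-architecture conditions below pick the matching set of
+          # OpenSSL config headers: piii for ia32, k8 for x64, arm for arm.)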
+ 'include_dirs': [ + '<(node_root_dir)/deps/openssl/openssl/include' + ], + "conditions" : [ + ["target_arch=='ia32'", { + "include_dirs": [ "<(node_root_dir)/deps/openssl/config/piii" ] + }], + ["target_arch=='x64'", { + "include_dirs": [ "<(node_root_dir)/deps/openssl/config/k8" ] + }], + ["target_arch=='arm'", { + "include_dirs": [ "<(node_root_dir)/deps/openssl/config/arm" ] + }] + ] + }] + ] + } + ] +} +``` + +This ensures that when OpenSSL is statically linked into `node`, the bundled OpenSSL headers are included, but when the system OpenSSL is in use, only those headers will be used. + +## Windows? + +As you can see, this baseline `binding.gyp` file only accounts for the Unix scenario. Currently on Windows the situation is a little less ideal. On Windows, OpenSSL is _always_ statically compiled into the `node` executable, so ideally it would be possible to use that copy of OpenSSL when building native addons. + +Unfortunately it doesn't seem like that is possible at the moment, as there would need to be tweaks made to the generated `node.lib` file to include the openssl glue functions, or a new `openssl.lib` file would need to be created during the node build. I'm not sure which is the easiest/most feasible. + +In the meantime, one possible solution is using another copy of OpenSSL, which is what [`node-bcrypt`](https://github.com/ncb000gt/node.bcrypt.js) currently does. Adding something like this to your `binding.gyp` file's `"conditions"` block would enable this: + +``` python + [ 'OS=="win"', { + 'conditions': [ + # "openssl_root" is the directory on Windows of the OpenSSL files. + # Check the "target_arch" variable to set good default values for + # both 64-bit and 32-bit builds of the module. + ['target_arch=="x64"', { + 'variables': { + 'openssl_root%': 'C:/OpenSSL-Win64' + }, + }, { + 'variables': { + 'openssl_root%': 'C:/OpenSSL-Win32' + }, + }], + ], + 'libraries': [ + '-l<(openssl_root)/lib/libeay32.lib', + ], + 'include_dirs': [ + '<(openssl_root)/include', + ], + }] +``` + +Now you can direct your users to install OpenSSL on Windows from here (be sure to tell them to install the 64-bit version if they're compiling against a 64-bit version of node): http://slproweb.com/products/Win32OpenSSL.html + +Also note that both `node-gyp` and `npm` allow you to overwrite that default `openssl_root` variable on the command line: + +``` bash +$ node-gyp rebuild --openssl-root="C:\Users\Nathan\Desktop\openssl" +``` \ No newline at end of file diff --git a/node_modules/node-gyp/docs/Updating-npm-bundled-node-gyp.md b/node_modules/node-gyp/docs/Updating-npm-bundled-node-gyp.md new file mode 100644 index 0000000..0777687 --- /dev/null +++ b/node_modules/node-gyp/docs/Updating-npm-bundled-node-gyp.md @@ -0,0 +1,45 @@ +# Updating the npm-bundled version of node-gyp + +[Many issues](https://github.com/nodejs/node-gyp/labels/ERR%21%20node-gyp%20-v%20%3C%3D%20v5.1.0) are opened by users who are +not running a [current version of node-gyp](https://github.com/nodejs/node-gyp/releases). + +`npm` bundles its own, internal, copy of `node-gyp`. This internal copy is independent of any globally installed copy of node-gyp that +may have been installed via `npm install -g node-gyp`. + +Generally, npm's library files are installed inside your global "node_modules", where npm is installed (run `npm prefix` and add `lib/node_modules`, or just `node_modules` for Windows). There are some exceptions to this.
Inside this global `node_modules/` there will be an `npm/` directory and inside this you'll find a `node_modules/node-gyp/` directory. So it may look something like `/usr/local/lib/node_modules/npm/node_modules/node-gyp/`. This is the version of node-gyp that ships with npm. + +When you install a _new_ version of node-gyp outside of npm, it'll go into your global node_modules, but not under the `npm/node_modules`. So that may look like `/usr/local/lib/node_modules/node-gyp/`. It'll have the `node-gyp` executable linked into your `PATH` so running `node-gyp` will use this version. + +The catch is that npm won't use this version unless you tell it to; it'll keep on using its own bundled copy. You need to instruct it to use the new version by setting the `node_gyp` config variable (which goes into your `~/.npmrc`). You do this by running the `npm config set` command as below. Then npm will use the command in the path you supply whenever it needs to build a native addon. + +**Important**: You also need to remember to unset this when you upgrade npm with a newer version of node-gyp, or you will have to manually keep your globally installed node-gyp up to date. See "Undo" below. + +## Linux and macOS +``` +npm install --global node-gyp@latest +npm config set node_gyp $(npm prefix -g)/lib/node_modules/node-gyp/bin/node-gyp.js +``` + +`sudo` may be required for the first command if you get a permission error. + +## Windows + +### Windows Command Prompt +``` +npm install --global node-gyp@latest +for /f "delims=" %P in ('npm prefix -g') do npm config set node_gyp "%P\node_modules\node-gyp\bin\node-gyp.js" +``` + +### PowerShell +``` +npm install --global node-gyp@latest +npm prefix -g | % {npm config set node_gyp "$_\node_modules\node-gyp\bin\node-gyp.js"} +``` + +## Undo +**Beware:** if you don't unset the `node_gyp` config option, npm will continue to use the globally installed version of node-gyp rather than the one it ships with, which may end up being newer. + +``` +npm config delete node_gyp +npm uninstall --global node-gyp +``` diff --git a/node_modules/node-gyp/docs/binding.gyp-files-in-the-wild.md b/node_modules/node-gyp/docs/binding.gyp-files-in-the-wild.md new file mode 100644 index 0000000..c4603dd --- /dev/null +++ b/node_modules/node-gyp/docs/binding.gyp-files-in-the-wild.md @@ -0,0 +1,48 @@ +This page contains links to some examples of existing `binding.gyp` files that other node modules are using. Take a look at them for inspiration.
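+
+For orientation, most of the files linked below share the same basic shape. Here is a minimal sketch (the target name and source path are placeholders, not taken from any particular project):
+
+``` python
+{
+  "targets": [
+    {
+      # Name of the compiled .node module (placeholder).
+      "target_name": "example_addon",
+      # C/C++ source files compiled into the addon (placeholder path).
+      "sources": [ "src/example_addon.cc" ]
+    }
+  ]
+}
+```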
+ +To add to this page, just add the link to the project's `binding.gyp` file below: + + * [ons](https://github.com/XadillaX/aliyun-ons/blob/master/binding.gyp) + * [thmclrx](https://github.com/XadillaX/thmclrx/blob/master/binding.gyp) + * [libxmljs](https://github.com/polotek/libxmljs/blob/master/binding.gyp) + * [node-buffertools](https://github.com/bnoordhuis/node-buffertools/blob/master/binding.gyp) + * [node-canvas](https://github.com/LearnBoost/node-canvas/blob/master/binding.gyp) + * [node-ffi](https://github.com/rbranson/node-ffi/blob/master/binding.gyp) + [libffi](https://github.com/rbranson/node-ffi/blob/master/deps/libffi/libffi.gyp) + * [node-time](https://github.com/TooTallNate/node-time/blob/master/binding.gyp) + * [node-sass](https://github.com/sass/node-sass/blob/master/binding.gyp) + [libsass](https://github.com/sass/node-sass/blob/master/src/libsass.gyp) + * [node-serialport](https://github.com/voodootikigod/node-serialport/blob/master/binding.gyp) + * [node-weak](https://github.com/TooTallNate/node-weak/blob/master/binding.gyp) + * [pty.js](https://github.com/chjj/pty.js/blob/master/binding.gyp) + * [ref](https://github.com/TooTallNate/ref/blob/master/binding.gyp) + * [appjs](https://github.com/milani/appjs/blob/master/binding.gyp) + * [nwm](https://github.com/mixu/nwm/blob/master/binding.gyp) + * [bcrypt](https://github.com/ncb000gt/node.bcrypt.js/blob/master/binding.gyp) + * [nk-mysql](https://github.com/mmod/nodamysql/blob/master/binding.gyp) + * [nk-xrm-installer](https://github.com/mmod/nk-xrm-installer/blob/master/binding.gyp) + [includable.gypi](https://github.com/mmod/nk-xrm-installer/blob/master/includable.gypi) + [unpack.py](https://github.com/mmod/nk-xrm-installer/blob/master/unpack.py) + [disburse.py](https://github.com/mmod/nk-xrm-installer/blob/master/disburse.py) + .py files above provide complete reference for examples of fetching source via http, extracting, and moving files. 
+ * [node-memwatch](https://github.com/lloyd/node-memwatch/blob/master/binding.gyp) + * [node-ip2location](https://github.com/bolgovr/node-ip2location/blob/master/binding.gyp) + * [node-midi](https://github.com/justinlatimer/node-midi/blob/master/binding.gyp) + * [node-sqlite3](https://github.com/developmentseed/node-sqlite3/blob/master/binding.gyp) + [libsqlite3](https://github.com/developmentseed/node-sqlite3/blob/master/deps/sqlite3.gyp) + * [node-zipfile](https://github.com/mapbox/node-zipfile/blob/master/binding.gyp) + * [node-mapnik](https://github.com/mapnik/node-mapnik/blob/master/binding.gyp) + * [node-inotify](https://github.com/c4milo/node-inotify/blob/master/binding.gyp) + * [v8-profiler](https://github.com/c4milo/v8-profiler/blob/master/binding.gyp) + * [airtunes](https://github.com/radioline/node_airtunes/blob/master/binding.gyp) + * [node-fann](https://github.com/c4milo/node-fann/blob/master/binding.gyp) + * [node-talib](https://github.com/oransel/node-talib/blob/master/binding.gyp) + * [node-leveldown](https://github.com/rvagg/node-leveldown/blob/master/binding.gyp) + [leveldb.gyp](https://github.com/rvagg/node-leveldown/blob/master/deps/leveldb/leveldb.gyp) + [snappy.gyp](https://github.com/rvagg/node-leveldown/blob/master/deps/snappy/snappy.gyp) + * [node-expat](https://github.com/astro/node-expat/blob/master/binding.gyp) + [libexpat](https://github.com/astro/node-expat/blob/master/deps/libexpat/libexpat.gyp) + * [node-openvg-canvas](https://github.com/luismreis/node-openvg-canvas/blob/master/binding.gyp) + [node-openvg](https://github.com/luismreis/node-openvg/blob/master/binding.gyp) + * [node-cryptopp](https://github.com/BatikhSouri/node-cryptopp/blob/master/binding.gyp) + * [topcube](https://github.com/creationix/topcube/blob/master/binding.gyp) + * [node-osmium](https://github.com/osmcode/node-osmium/blob/master/binding.gyp) + * [node-osrm](https://github.com/DennisOSRM/node-osrm) + * [node-oracle](https://github.com/joeferner/node-oracle/blob/master/binding.gyp) + * [node-process-list](https://github.com/ReklatsMasters/node-process-list/blob/master/binding.gyp) + * [node-nanomsg](https://github.com/nickdesaulniers/node-nanomsg/blob/master/binding.gyp) + * [Ghostscript4JS](https://github.com/NickNaso/ghostscript4js/blob/master/binding.gyp) + * [nodecv](https://github.com/xudafeng/nodecv/blob/master/binding.gyp) + * [magick-cli](https://github.com/NickNaso/magick-cli/blob/master/binding.gyp) + * [sharp](https://github.com/lovell/sharp/blob/master/binding.gyp) + * [krb5](https://github.com/adaltas/node-krb5/blob/master/binding.gyp) \ No newline at end of file diff --git a/node_modules/node-gyp/gyp/.flake8 b/node_modules/node-gyp/gyp/.flake8 new file mode 100644 index 0000000..ea0c768 --- /dev/null +++ b/node_modules/node-gyp/gyp/.flake8 @@ -0,0 +1,4 @@ +[flake8] +max-complexity = 101 +max-line-length = 88 +extend-ignore = E203 # whitespace before ':' to agree with psf/black diff --git a/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml b/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml new file mode 100644 index 0000000..92303b6 --- /dev/null +++ b/node_modules/node-gyp/gyp/.github/workflows/Python_tests.yml @@ -0,0 +1,30 @@ +# TODO: Enable os: windows-latest +# TODO: Enable pytest --doctest-modules + +name: Python_tests +on: [push, pull_request] +jobs: + Python_tests: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + max-parallel: 8 + matrix: + os: [macos-latest, ubuntu-latest] # , windows-latest] + python-version: [3.6, 3.7, 3.8, 
3.9] + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r requirements_dev.txt + - name: Lint with flake8 + run: flake8 . --ignore=E203,W503 --max-complexity=101 --max-line-length=88 --show-source --statistics + - name: Test with pytest + run: pytest + # - name: Run doctests with pytest + # run: pytest --doctest-modules diff --git a/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml b/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml new file mode 100644 index 0000000..bd7c85f --- /dev/null +++ b/node_modules/node-gyp/gyp/.github/workflows/node-gyp.yml @@ -0,0 +1,42 @@ +name: node-gyp integration + +on: [push, pull_request] + +jobs: + test: + strategy: + fail-fast: false + matrix: + os: [macos-latest, ubuntu-latest, windows-latest] + python: [3.6, 3.9] + + runs-on: ${{ matrix.os }} + steps: + - name: Clone gyp-next + uses: actions/checkout@v2 + with: + path: gyp-next + - name: Clone nodejs/node-gyp + uses: actions/checkout@v2 + with: + repository: nodejs/node-gyp + path: node-gyp + - uses: actions/setup-node@v2 + with: + node-version: 14.x + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install dependencies + run: | + cd node-gyp + npm install --no-progress + - name: Replace gyp in node-gyp + shell: bash + run: | + rm -rf node-gyp/gyp + cp -r gyp-next node-gyp/gyp + - name: Run tests + run: | + cd node-gyp + npm test diff --git a/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml b/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml new file mode 100644 index 0000000..fffe96e --- /dev/null +++ b/node_modules/node-gyp/gyp/.github/workflows/nodejs-windows.yml @@ -0,0 +1,27 @@ +name: Node.js Windows integration + +on: [push, pull_request] + +jobs: + build-windows: + runs-on: windows-latest + steps: + - name: Clone gyp-next + uses: actions/checkout@v2 + with: + path: gyp-next + - name: Clone nodejs/node + uses: actions/checkout@v2 + with: + repository: nodejs/node + path: node + - name: Install deps + run: choco install nasm + - name: Replace gyp in Node.js + run: | + rm -Recurse node/tools/gyp + cp -Recurse gyp-next node/tools/gyp + - name: Build Node.js + run: | + cd node + ./vcbuild.bat diff --git a/node_modules/node-gyp/gyp/.github/workflows/release-please.yml b/node_modules/node-gyp/gyp/.github/workflows/release-please.yml new file mode 100644 index 0000000..288afdb --- /dev/null +++ b/node_modules/node-gyp/gyp/.github/workflows/release-please.yml @@ -0,0 +1,16 @@ +on: + push: + branches: + - main + +name: release-please +jobs: + release-please: + runs-on: ubuntu-latest + steps: + - uses: GoogleCloudPlatform/release-please-action@v2 + with: + token: ${{ secrets.GITHUB_TOKEN }} + release-type: python + package-name: gyp-next + bump-minor-pre-major: Yes diff --git a/node_modules/node-gyp/gyp/AUTHORS b/node_modules/node-gyp/gyp/AUTHORS new file mode 100644 index 0000000..f49a357 --- /dev/null +++ b/node_modules/node-gyp/gyp/AUTHORS @@ -0,0 +1,16 @@ +# Names should be added to this file like so: +# Name or Organization + +Google Inc. <*@google.com> +Bloomberg Finance L.P. <*@bloomberg.net> +IBM Inc. <*@*.ibm.com> +Yandex LLC <*@yandex-team.ru> + +Steven Knight +Ryan Norton +David J. Sankel +Eric N. 
Vander Weele +Tom Freudenberg +Julien Brianceau +Refael Ackermann +Ujjwal Sharma diff --git a/node_modules/node-gyp/gyp/CHANGELOG.md b/node_modules/node-gyp/gyp/CHANGELOG.md new file mode 100644 index 0000000..b7d55ed --- /dev/null +++ b/node_modules/node-gyp/gyp/CHANGELOG.md @@ -0,0 +1,177 @@ +# Changelog + +## [0.10.0](https://www.github.com/nodejs/gyp-next/compare/v0.9.6...v0.10.0) (2021-08-26) + + +### Features + +* **msvs:** add support for Visual Studio 2022 ([#124](https://www.github.com/nodejs/gyp-next/issues/124)) ([4bd9215](https://www.github.com/nodejs/gyp-next/commit/4bd9215c44d300f06e916aec1d6327c22b78272d)) + +### [0.9.6](https://www.github.com/nodejs/gyp-next/compare/v0.9.5...v0.9.6) (2021-08-23) + + +### Bug Fixes + +* align flake8 test ([#122](https://www.github.com/nodejs/gyp-next/issues/122)) ([f1faa8d](https://www.github.com/nodejs/gyp-next/commit/f1faa8d3081e1a47e917ff910892f00dff16cf8a)) +* **msvs:** fix paths again in action command arguments ([#121](https://www.github.com/nodejs/gyp-next/issues/121)) ([7159dfb](https://www.github.com/nodejs/gyp-next/commit/7159dfbc5758c9ec717e215f2c36daf482c846a1)) + +### [0.9.5](https://www.github.com/nodejs/gyp-next/compare/v0.9.4...v0.9.5) (2021-08-18) + + +### Bug Fixes + +* add python 3.6 to node-gyp integration test ([3462d4c](https://www.github.com/nodejs/gyp-next/commit/3462d4ce3c31cce747513dc7ca9760c81d57c60e)) +* revert for windows compatibility ([d078e7d](https://www.github.com/nodejs/gyp-next/commit/d078e7d7ae080ddae243188f6415f940376a7368)) +* support msvs_quote_cmd in ninja generator ([#117](https://www.github.com/nodejs/gyp-next/issues/117)) ([46486ac](https://www.github.com/nodejs/gyp-next/commit/46486ac6e9329529d51061e006a5b39631e46729)) + +### [0.9.4](https://www.github.com/nodejs/gyp-next/compare/v0.9.3...v0.9.4) (2021-08-09) + + +### Bug Fixes + +* .S is an extension for asm file on Windows ([#115](https://www.github.com/nodejs/gyp-next/issues/115)) ([d2fad44](https://www.github.com/nodejs/gyp-next/commit/d2fad44ef3a79ca8900f1307060153ded57053fc)) + +### [0.9.3](https://www.github.com/nodejs/gyp-next/compare/v0.9.2...v0.9.3) (2021-07-07) + + +### Bug Fixes + +* build failure with ninja and Python 3 on Windows ([#113](https://www.github.com/nodejs/gyp-next/issues/113)) ([c172d10](https://www.github.com/nodejs/gyp-next/commit/c172d105deff5db4244e583942215918fa80dd3c)) + +### [0.9.2](https://www.github.com/nodejs/gyp-next/compare/v0.9.1...v0.9.2) (2021-05-21) + + +### Bug Fixes + +* add support of utf8 encoding ([#105](https://www.github.com/nodejs/gyp-next/issues/105)) ([4d0f93c](https://www.github.com/nodejs/gyp-next/commit/4d0f93c249286d1f0c0f665f5fe7346119f98cf1)) + +### [0.9.1](https://www.github.com/nodejs/gyp-next/compare/v0.9.0...v0.9.1) (2021-05-14) + + +### Bug Fixes + +* py lint ([3b6a8ee](https://www.github.com/nodejs/gyp-next/commit/3b6a8ee7a66193a8a6867eba9e1d2b70bdf04402)) + +## [0.9.0](https://www.github.com/nodejs/gyp-next/compare/v0.8.1...v0.9.0) (2021-05-13) + + +### Features + +* use LDFLAGS_host for host toolset ([#98](https://www.github.com/nodejs/gyp-next/issues/98)) ([bea5c7b](https://www.github.com/nodejs/gyp-next/commit/bea5c7bd67d6ad32acbdce79767a5481c70675a2)) + + +### Bug Fixes + +* msvs.py: remove overindentation ([#102](https://www.github.com/nodejs/gyp-next/issues/102)) ([3f83e99](https://www.github.com/nodejs/gyp-next/commit/3f83e99056d004d9579ceb786e06b624ddc36529)) +* update gyp.el to change case to cl-case ([#93](https://www.github.com/nodejs/gyp-next/issues/93)) 
([13d5b66](https://www.github.com/nodejs/gyp-next/commit/13d5b66aab35985af9c2fb1174fdc6e1c1407ecc)) + +### [0.8.1](https://www.github.com/nodejs/gyp-next/compare/v0.8.0...v0.8.1) (2021-02-18) + + +### Bug Fixes + +* update shebang lines from python to python3 ([#94](https://www.github.com/nodejs/gyp-next/issues/94)) ([a1b0d41](https://www.github.com/nodejs/gyp-next/commit/a1b0d4171a8049a4ab7a614202063dec332f2df4)) + +## [0.8.0](https://www.github.com/nodejs/gyp-next/compare/v0.7.0...v0.8.0) (2021-01-15) + + +### ⚠ BREAKING CHANGES + +* remove support for Python 2 + +### Bug Fixes + +* revert posix build job ([#86](https://www.github.com/nodejs/gyp-next/issues/86)) ([39dc34f](https://www.github.com/nodejs/gyp-next/commit/39dc34f0799c074624005fb9bbccf6e028607f9d)) + + +### gyp + +* Remove support for Python 2 ([#88](https://www.github.com/nodejs/gyp-next/issues/88)) ([22e4654](https://www.github.com/nodejs/gyp-next/commit/22e465426fd892403c95534229af819a99c3f8dc)) + +## [0.7.0](https://www.github.com/nodejs/gyp-next/compare/v0.6.2...v0.7.0) (2020-12-17) + + +### ⚠ BREAKING CHANGES + +* **msvs:** On Windows, arguments passed to the "action" commands are no longer transformed to replace slashes with backslashes. + +### Features + +* **xcode:** --cross-compiling overrides arch-specific settings ([973bae0](https://www.github.com/nodejs/gyp-next/commit/973bae0b7b08be7b680ecae9565fbd04b3e0787d)) + + +### Bug Fixes + +* **msvs:** do not fix paths in action command arguments ([fc22f83](https://www.github.com/nodejs/gyp-next/commit/fc22f8335e2016da4aae4f4233074bd651d2faea)) +* cmake on python 3 ([fd61f5f](https://www.github.com/nodejs/gyp-next/commit/fd61f5faa5275ec8fc98e3c7868c0dd46f109540)) +* ValueError: invalid mode: 'rU' while trying to load binding.gyp ([d0504e6](https://www.github.com/nodejs/gyp-next/commit/d0504e6700ce48f44957a4d5891b142a60be946f)) +* xcode cmake parsing ([eefe8d1](https://www.github.com/nodejs/gyp-next/commit/eefe8d10e99863bc4ac7e2ed32facd608d400d4b)) + +### [0.6.2](https://www.github.com/nodejs/gyp-next/compare/v0.6.1...v0.6.2) (2020-10-16) + + +### Bug Fixes + +* do not rewrite absolute paths to avoid long paths ([#74](https://www.github.com/nodejs/gyp-next/issues/74)) ([c2ccc1a](https://www.github.com/nodejs/gyp-next/commit/c2ccc1a81f7f94433a94f4d01a2e820db4c4331a)) +* only include MARMASM when toolset is target ([5a2794a](https://www.github.com/nodejs/gyp-next/commit/5a2794aefb58f0c00404ff042b61740bc8b8d5cd)) + +### [0.6.1](https://github.com/nodejs/gyp-next/compare/v0.6.0...v0.6.1) (2020-10-14) + + +### Bug Fixes + +* Correctly rename object files for absolute paths in MSVS generator. + +## [0.6.0](https://github.com/nodejs/gyp-next/compare/v0.5.0...v0.6.0) (2020-10-13) + + +### Features + +* The Makefile generator will now output shared libraries directly to the product directory on all platforms (previously only macOS). + +## [0.5.0](https://github.com/nodejs/gyp-next/compare/v0.4.0...v0.5.0) (2020-09-30) + + +### Features + +* Extended compile_commands_json generator to consider more file extensions than just `c` and `cc`. `cpp` and `cxx` are now supported. +* Source files with duplicate basenames are now supported. + +### Removed + +* The `--no-duplicate-basename-check` option was removed. +* The `msvs_enable_marmasm` configuration option was removed in favor of auto-inclusion of the "marmasm" sections for Windows on ARM. 
+ +## [0.4.0](https://github.com/nodejs/gyp-next/compare/v0.3.0...v0.4.0) (2020-07-14) + + +### Features + +* Added support for passing arbitrary architectures to Xcode builds, enables `arm64` builds. + +### Bug Fixes + +* Fixed a bug on Solaris where copying archives failed. + +## [0.3.0](https://github.com/nodejs/gyp-next/compare/v0.2.1...v0.3.0) (2020-06-06) + + +### Features + +* Added support for MSVC cross-compilation. This allows compilation on x64 for a Windows ARM target. + +### Bug Fixes + +* Fixed XCode CLT version detection on macOS Catalina. + +### [0.2.1](https://github.com/nodejs/gyp-next/compare/v0.2.0...v0.2.1) (2020-05-05) + + +### Bug Fixes + +* Relicensed to Node.js contributors. +* Fixed Windows bug introduced in v0.2.0. + +## [0.2.0](https://github.com/nodejs/gyp-next/releases/tag/v0.2.0) (2020-04-06) + +This is the first release of this project, based on https://chromium.googlesource.com/external/gyp with changes made over the years in Node.js and node-gyp. diff --git a/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md b/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..d724027 --- /dev/null +++ b/node_modules/node-gyp/gyp/CODE_OF_CONDUCT.md @@ -0,0 +1,4 @@ +# Code of Conduct + +* [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md) +* [Node.js Moderation Policy](https://github.com/nodejs/admin/blob/HEAD/Moderation-Policy.md) diff --git a/node_modules/node-gyp/gyp/CONTRIBUTING.md b/node_modules/node-gyp/gyp/CONTRIBUTING.md new file mode 100644 index 0000000..1a0bcde --- /dev/null +++ b/node_modules/node-gyp/gyp/CONTRIBUTING.md @@ -0,0 +1,32 @@ +# Contributing to gyp-next + +## Code of Conduct + +This project is bound to the [Node.js Code of Conduct](https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md). + + +## Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. diff --git a/node_modules/node-gyp/gyp/LICENSE b/node_modules/node-gyp/gyp/LICENSE new file mode 100644 index 0000000..c6944c5 --- /dev/null +++ b/node_modules/node-gyp/gyp/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2020 Node.js contributors. All rights reserved. +Copyright (c) 2009 Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/node-gyp/gyp/README.md b/node_modules/node-gyp/gyp/README.md new file mode 100644 index 0000000..9ffc2b2 --- /dev/null +++ b/node_modules/node-gyp/gyp/README.md @@ -0,0 +1,7 @@ +GYP can Generate Your Projects. +=================================== + +Documents are available at [gyp.gsrc.io](https://gyp.gsrc.io), or you can check out the ```md-pages``` branch to read those documents offline. + +__gyp-next__ is [released](https://github.com/nodejs/gyp-next/releases) to the [__Python Packaging Index__](https://pypi.org/project/gyp-next) (PyPI) and can be installed with the command: +* `python3 -m pip install gyp-next` diff --git a/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc b/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc new file mode 100644 index 0000000..8bca510 --- /dev/null +++ b/node_modules/node-gyp/gyp/data/win/large-pdb-shim.cc @@ -0,0 +1,12 @@ +// Copyright (c) 2013 Google Inc. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// This file is used to generate an empty .pdb -- with a 4KB pagesize -- that is +// then used during the final link for modules that have large PDBs. Otherwise, +// the linker will generate a pdb with a page size of 1KB, which imposes a limit +// of 1GB on the .pdb. By generating an initial empty .pdb with the compiler +// (rather than the linker), this limit is avoided. With this in place PDBs may +// grow to 2GB. +// +// This file is referenced by the msvs_large_pdb mechanism in MSVSUtil.py. diff --git a/node_modules/node-gyp/gyp/gyp b/node_modules/node-gyp/gyp/gyp new file mode 100644 index 0000000..1b8b9bd --- /dev/null +++ b/node_modules/node-gyp/gyp/gyp @@ -0,0 +1,8 @@ +#!/bin/sh +# Copyright 2013 The Chromium Authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +set -e +base=$(dirname "$0") +exec python "${base}/gyp_main.py" "$@" diff --git a/node_modules/node-gyp/gyp/gyp.bat b/node_modules/node-gyp/gyp/gyp.bat new file mode 100644 index 0000000..ad797c3 --- /dev/null +++ b/node_modules/node-gyp/gyp/gyp.bat @@ -0,0 +1,5 @@ +@rem Copyright (c) 2009 Google Inc. All rights reserved. +@rem Use of this source code is governed by a BSD-style license that can be +@rem found in the LICENSE file.
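+@rem %~dp0 expands to the directory containing this batch file, so the line
+@rem below forwards all command-line arguments (%*) to gyp_main.py using
+@rem whatever `python` is found on PATH.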
+ +@python "%~dp0gyp_main.py" %* diff --git a/node_modules/node-gyp/gyp/gyp_main.py b/node_modules/node-gyp/gyp/gyp_main.py new file mode 100644 index 0000000..f23dcdf --- /dev/null +++ b/node_modules/node-gyp/gyp/gyp_main.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python3 + +# Copyright (c) 2009 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import os +import sys +import subprocess + + +def IsCygwin(): + # Function copied from pylib/gyp/common.py + try: + out = subprocess.Popen( + "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT + ) + stdout, _ = out.communicate() + return "CYGWIN" in stdout.decode("utf-8") + except Exception: + return False + + +def UnixifyPath(path): + try: + if not IsCygwin(): + return path + out = subprocess.Popen( + ["cygpath", "-u", path], stdout=subprocess.PIPE, stderr=subprocess.STDOUT + ) + stdout, _ = out.communicate() + return stdout.decode("utf-8") + except Exception: + return path + + +# Make sure we're using the version of pylib in this repo, not one installed +# elsewhere on the system. Also convert to Unix style path on Cygwin systems, +# else the 'gyp' library will not be found. +path = UnixifyPath(sys.argv[0]) +sys.path.insert(0, os.path.join(os.path.dirname(path), "pylib")) +import gyp # noqa: E402 + +if __name__ == "__main__": + sys.exit(gyp.script_main()) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py new file mode 100644 index 0000000..d6b1897 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py @@ -0,0 +1,367 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""New implementation of Visual Studio project generation.""" + +import hashlib +import os +import random +from operator import attrgetter + +import gyp.common + + +def cmp(x, y): + return (x > y) - (x < y) + + +# Initialize random number generator +random.seed() + +# GUIDs for project types +ENTRY_TYPE_GUIDS = { + "project": "{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}", + "folder": "{2150E333-8FDC-42A3-9474-1A3956D46DE8}", +} + +# ------------------------------------------------------------------------------ +# Helper functions + + +def MakeGuid(name, seed="msvs_new"): + """Returns a GUID for the specified target name. + + Args: + name: Target name. + seed: Seed for MD5 hash. + Returns: + A GUID-like string calculated from the name and seed. + + This generates something which looks like a GUID, but depends only on the + name and seed. This means the same name/seed will always generate the same + GUID, so that projects and solutions which refer to each other can explicitly + determine the GUID to refer to. It also means that the GUID will + not change when the project for a target is rebuilt. + """ + # Calculate an MD5 signature for the seed and name. + d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper() + # Convert most of the signature to GUID form (discard the rest) + guid = ( + "{" + + d[:8] + + "-" + + d[8:12] + + "-" + + d[12:16] + + "-" + + d[16:20] + + "-" + + d[20:32] + + "}" + ) + return guid + + +# ------------------------------------------------------------------------------ + + +class MSVSSolutionEntry: + def __cmp__(self, other): + # Sort by name then guid (so things are in order on vs2008).
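+ # Note: cmp() is the module-level shim defined near the top of this
+ # file; Python 3 removed the builtin, and the shim reproduces it as
+ # (x > y) - (x < y), returning -1, 0, or 1.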
+ return cmp((self.name, self.get_guid()), (other.name, other.get_guid())) + + +class MSVSFolder(MSVSSolutionEntry): + """Folder in a Visual Studio project or solution.""" + + def __init__(self, path, name=None, entries=None, guid=None, items=None): + """Initializes the folder. + + Args: + path: Full path to the folder. + name: Name of the folder. + entries: List of folder entries to nest inside this folder. May contain + Folder or Project objects. May be None, if the folder is empty. + guid: GUID to use for folder, if not None. + items: List of solution items to include in the folder project. May be + None, if the folder does not directly contain items. + """ + if name: + self.name = name + else: + # Use last layer. + self.name = os.path.basename(path) + + self.path = path + self.guid = guid + + # Copy passed lists (or set to empty lists) + self.entries = sorted(entries or [], key=attrgetter("path")) + self.items = list(items or []) + + self.entry_type_guid = ENTRY_TYPE_GUIDS["folder"] + + def get_guid(self): + if self.guid is None: + # Use consistent guids for folders (so things don't regenerate). + self.guid = MakeGuid(self.path, seed="msvs_folder") + return self.guid + + +# ------------------------------------------------------------------------------ + + +class MSVSProject(MSVSSolutionEntry): + """Visual Studio project.""" + + def __init__( + self, + path, + name=None, + dependencies=None, + guid=None, + spec=None, + build_file=None, + config_platform_overrides=None, + fixpath_prefix=None, + ): + """Initializes the project. + + Args: + path: Absolute path to the project file. + name: Name of project. If None, the name will be the same as the base + name of the project file. + dependencies: List of other Project objects this project is dependent + upon, if not None. + guid: GUID to use for project, if not None. + spec: Dictionary specifying how to build this project. + build_file: Filename of the .gyp file that the vcproj file comes from. + config_platform_overrides: Optional dict of configuration platforms to + use in place of the default for this target. + fixpath_prefix: The path used to adjust the behavior of _fixpath. + """ + self.path = path + self.guid = guid + self.spec = spec + self.build_file = build_file + # Use project filename if name not specified + self.name = name or os.path.splitext(os.path.basename(path))[0] + + # Copy passed lists (or set to empty lists) + self.dependencies = list(dependencies or []) + + self.entry_type_guid = ENTRY_TYPE_GUIDS["project"] + + if config_platform_overrides: + self.config_platform_overrides = config_platform_overrides + else: + self.config_platform_overrides = {} + self.fixpath_prefix = fixpath_prefix + self.msbuild_toolset = None + + def set_dependencies(self, dependencies): + self.dependencies = list(dependencies or []) + + def get_guid(self): + if self.guid is None: + # Set GUID from path + # TODO(rspangler): This is fragile. + # 1. We can't just use the project filename sans path, since there could + # be multiple projects with the same base name (for example, + # foo/unittest.vcproj and bar/unittest.vcproj). + # 2. The path needs to be relative to $SOURCE_ROOT, so that the project + # GUID is the same whether it's included from base/base.sln or + # foo/bar/baz/baz.sln. + # 3. The GUID needs to be the same each time this builder is invoked, so + # that we don't need to rebuild the solution when the project changes. + # 4. We should be able to handle pre-built project files by reading the + # GUID from the files.
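+ # MakeGuid is deterministic for a given (name, seed) pair, so the GUID
+ # computed here stays stable across regenerations as long as the
+ # project name does not change.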
+ self.guid = MakeGuid(self.name) + return self.guid + + def set_msbuild_toolset(self, msbuild_toolset): + self.msbuild_toolset = msbuild_toolset + + +# ------------------------------------------------------------------------------ + + +class MSVSSolution: + """Visual Studio solution.""" + + def __init__( + self, path, version, entries=None, variants=None, websiteProperties=True + ): + """Initializes the solution. + + Args: + path: Path to solution file. + version: Format version to emit. + entries: List of entries in solution. May contain Folder or Project + objects. May be None, if the folder is empty. + variants: List of build variant strings. If none, a default list will + be used. + websiteProperties: Flag to decide if the website properties section + is generated. + """ + self.path = path + self.websiteProperties = websiteProperties + self.version = version + + # Copy passed lists (or set to empty lists) + self.entries = list(entries or []) + + if variants: + # Copy passed list + self.variants = variants[:] + else: + # Use default + self.variants = ["Debug|Win32", "Release|Win32"] + # TODO(rspangler): Need to be able to handle a mapping of solution config + # to project config. Should we be able to handle variants being a dict, + # or add a separate variant_map variable? If it's a dict, we can't + # guarantee the order of variants since dict keys aren't ordered. + + # TODO(rspangler): Automatically write to disk for now; should delay until + # node-evaluation time. + self.Write() + + def Write(self, writer=gyp.common.WriteOnDiff): + """Writes the solution file to disk. + + Raises: + IndexError: An entry appears multiple times. + """ + # Walk the entry tree and collect all the folders and projects. + all_entries = set() + entries_to_check = self.entries[:] + while entries_to_check: + e = entries_to_check.pop(0) + + # If this entry has been visited, nothing to do. + if e in all_entries: + continue + + all_entries.add(e) + + # If this is a folder, check its entries too. + if isinstance(e, MSVSFolder): + entries_to_check += e.entries + + all_entries = sorted(all_entries, key=attrgetter("path")) + + # Open file and print header + f = writer(self.path) + f.write( + "Microsoft Visual Studio Solution File, " + "Format Version %s\r\n" % self.version.SolutionVersion() + ) + f.write("# %s\r\n" % self.version.Description()) + + # Project entries + sln_root = os.path.split(self.path)[0] + for e in all_entries: + relative_path = gyp.common.RelativePath(e.path, sln_root) + # msbuild does not accept an empty folder_name. + # use '.' in case relative_path is empty. + folder_name = relative_path.replace("/", "\\") or "." 
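+ # Each entry is written as a solution "Project" line of the form:
+ #   Project("{type guid}") = "entry name", "relative\path", "{entry guid}"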
+ f.write( + 'Project("%s") = "%s", "%s", "%s"\r\n' + % ( + e.entry_type_guid, # Entry type GUID + e.name, # Folder name + folder_name, # Folder name (again) + e.get_guid(), # Entry GUID + ) + ) + + # TODO(rspangler): Need a way to configure this stuff + if self.websiteProperties: + f.write( + "\tProjectSection(WebsiteProperties) = preProject\r\n" + '\t\tDebug.AspNetCompiler.Debug = "True"\r\n' + '\t\tRelease.AspNetCompiler.Debug = "False"\r\n' + "\tEndProjectSection\r\n" + ) + + if isinstance(e, MSVSFolder): + if e.items: + f.write("\tProjectSection(SolutionItems) = preProject\r\n") + for i in e.items: + f.write(f"\t\t{i} = {i}\r\n") + f.write("\tEndProjectSection\r\n") + + if isinstance(e, MSVSProject): + if e.dependencies: + f.write("\tProjectSection(ProjectDependencies) = postProject\r\n") + for d in e.dependencies: + f.write(f"\t\t{d.get_guid()} = {d.get_guid()}\r\n") + f.write("\tEndProjectSection\r\n") + + f.write("EndProject\r\n") + + # Global section + f.write("Global\r\n") + + # Configurations (variants) + f.write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n") + for v in self.variants: + f.write(f"\t\t{v} = {v}\r\n") + f.write("\tEndGlobalSection\r\n") + + # Sort config guids for easier diffing of solution changes. + config_guids = [] + config_guids_overrides = {} + for e in all_entries: + if isinstance(e, MSVSProject): + config_guids.append(e.get_guid()) + config_guids_overrides[e.get_guid()] = e.config_platform_overrides + config_guids.sort() + + f.write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n") + for g in config_guids: + for v in self.variants: + nv = config_guids_overrides[g].get(v, v) + # Pick which project configuration to build for this solution + # configuration. + f.write( + "\t\t%s.%s.ActiveCfg = %s\r\n" + % ( + g, # Project GUID + v, # Solution build configuration + nv, # Project build config for that solution config + ) + ) + + # Enable project in this solution configuration. + f.write( + "\t\t%s.%s.Build.0 = %s\r\n" + % ( + g, # Project GUID + v, # Solution build configuration + nv, # Project build config for that solution config + ) + ) + f.write("\tEndGlobalSection\r\n") + + # TODO(rspangler): Should be able to configure this stuff too (though I've + # never seen this be any different) + f.write("\tGlobalSection(SolutionProperties) = preSolution\r\n") + f.write("\t\tHideSolutionNode = FALSE\r\n") + f.write("\tEndGlobalSection\r\n") + + # Folder mappings + # Omit this section if there are no folders + if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]): + f.write("\tGlobalSection(NestedProjects) = preSolution\r\n") + for e in all_entries: + if not isinstance(e, MSVSFolder): + continue # Does not apply to projects, only folders + for subentry in e.entries: + f.write(f"\t\t{subentry.get_guid()} = {e.get_guid()}\r\n") + f.write("\tEndGlobalSection\r\n") + + f.write("EndGlobal\r\n") + + f.close() diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py new file mode 100644 index 0000000..f0cfabe --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py @@ -0,0 +1,206 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +"""Visual Studio project reader/writer.""" + +import gyp.easy_xml as easy_xml + +# ------------------------------------------------------------------------------ + + +class Tool: + """Visual Studio tool.""" + + def __init__(self, name, attrs=None): + """Initializes the tool. + + Args: + name: Tool name. + attrs: Dict of tool attributes; may be None. + """ + self._attrs = attrs or {} + self._attrs["Name"] = name + + def _GetSpecification(self): + """Creates an element for the tool. + + Returns: + A new xml.dom.Element for the tool. + """ + return ["Tool", self._attrs] + + +class Filter: + """Visual Studio filter - that is, a virtual folder.""" + + def __init__(self, name, contents=None): + """Initializes the folder. + + Args: + name: Filter (folder) name. + contents: List of filenames and/or Filter objects contained. + """ + self.name = name + self.contents = list(contents or []) + + +# ------------------------------------------------------------------------------ + + +class Writer: + """Visual Studio XML project writer.""" + + def __init__(self, project_path, version, name, guid=None, platforms=None): + """Initializes the project. + + Args: + project_path: Path to the project file. + version: Format version to emit. + name: Name of the project. + guid: GUID to use for project, if not None. + platforms: Array of string, the supported platforms. If null, ['Win32'] + """ + self.project_path = project_path + self.version = version + self.name = name + self.guid = guid + + # Default to Win32 for platforms. + if not platforms: + platforms = ["Win32"] + + # Initialize the specifications of the various sections. + self.platform_section = ["Platforms"] + for platform in platforms: + self.platform_section.append(["Platform", {"Name": platform}]) + self.tool_files_section = ["ToolFiles"] + self.configurations_section = ["Configurations"] + self.files_section = ["Files"] + + # Keep a dict keyed on filename to speed up access. + self.files_dict = dict() + + def AddToolFile(self, path): + """Adds a tool file to the project. + + Args: + path: Relative path from project to tool file. + """ + self.tool_files_section.append(["ToolFile", {"RelativePath": path}]) + + def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools): + """Returns the specification for a configuration. + + Args: + config_type: Type of configuration node. + config_name: Configuration name. + attrs: Dict of configuration attributes; may be None. + tools: List of tools (strings or Tool objects); may be None. + Returns: + """ + # Handle defaults + if not attrs: + attrs = {} + if not tools: + tools = [] + + # Add configuration node and its attributes + node_attrs = attrs.copy() + node_attrs["Name"] = config_name + specification = [config_type, node_attrs] + + # Add tool nodes and their attributes + if tools: + for t in tools: + if isinstance(t, Tool): + specification.append(t._GetSpecification()) + else: + specification.append(Tool(t)._GetSpecification()) + return specification + + def AddConfig(self, name, attrs=None, tools=None): + """Adds a configuration to the project. + + Args: + name: Configuration name. + attrs: Dict of configuration attributes; may be None. + tools: List of tools (strings or Tool objects); may be None. + """ + spec = self._GetSpecForConfiguration("Configuration", name, attrs, tools) + self.configurations_section.append(spec) + + def _AddFilesToNode(self, parent, files): + """Adds files and/or filters to the parent node. 
+ + Args: + parent: Destination node + files: A list of Filter objects and/or relative paths to files. + + Will call itself recursively, if the files list contains Filter objects. + """ + for f in files: + if isinstance(f, Filter): + node = ["Filter", {"Name": f.name}] + self._AddFilesToNode(node, f.contents) + else: + node = ["File", {"RelativePath": f}] + self.files_dict[f] = node + parent.append(node) + + def AddFiles(self, files): + """Adds files to the project. + + Args: + files: A list of Filter objects and/or relative paths to files. + + This makes a copy of the file/filter tree at the time of this call. If you + later add files to a Filter object which was passed into a previous call + to AddFiles(), it will not be reflected in this project. + """ + self._AddFilesToNode(self.files_section, files) + # TODO(rspangler) This also doesn't handle adding files to an existing + # filter. That is, it doesn't merge the trees. + + def AddFileConfig(self, path, config, attrs=None, tools=None): + """Adds a configuration to a file. + + Args: + path: Relative path to the file. + config: Name of configuration to add. + attrs: Dict of configuration attributes; may be None. + tools: List of tools (strings or Tool objects); may be None. + + Raises: + ValueError: Relative path does not match any file added via AddFiles(). + """ + # Find the file node with the right relative path + parent = self.files_dict.get(path) + if not parent: + raise ValueError('AddFileConfig: file "%s" not in project.' % path) + + # Add the config to the file node + spec = self._GetSpecForConfiguration("FileConfiguration", config, attrs, tools) + parent.append(spec) + + def WriteIfChanged(self): + """Writes the project file.""" + # First create XML content definition + content = [ + "VisualStudioProject", + { + "ProjectType": "Visual C++", + "Version": self.version.ProjectVersion(), + "Name": self.name, + "ProjectGUID": self.guid, + "RootNamespace": self.name, + "Keyword": "Win32Proj", + }, + self.platform_section, + self.tool_files_section, + self.configurations_section, + ["References"], # empty section + self.files_section, + ["Globals"], # empty section + ] + easy_xml.WriteXmlIfChanged(content, self.project_path, encoding="Windows-1252") diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py new file mode 100644 index 0000000..e89a971 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py @@ -0,0 +1,1270 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +r"""Code to validate and convert settings of the Microsoft build tools. + +This file contains code to validate and convert settings of the Microsoft +build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(), +and ValidateMSBuildSettings() are the entry points. + +This file was created by comparing the projects created by Visual Studio 2008 +and Visual Studio 2010 for all available settings through the user interface. +The MSBuild schemas were also considered. They are typically found in the +MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild +""" + +import re +import sys + +# Dictionaries of settings validators. The key is the tool name, the value is +# a dictionary mapping setting names to validation functions. +_msvs_validators = {} +_msbuild_validators = {} + + +# A dictionary of settings converters. 
The key is the tool name, the value is +# a dictionary mapping setting names to conversion functions. +_msvs_to_msbuild_converters = {} + + +# Tool name mapping from MSVS to MSBuild. +_msbuild_name_of_tool = {} + + +class _Tool: + """Represents a tool used by MSVS or MSBuild. + + Attributes: + msvs_name: The name of the tool in MSVS. + msbuild_name: The name of the tool in MSBuild. + """ + + def __init__(self, msvs_name, msbuild_name): + self.msvs_name = msvs_name + self.msbuild_name = msbuild_name + + +def _AddTool(tool): + """Adds a tool to the four dictionaries used to process settings. + + This only defines the tool. Each setting also needs to be added. + + Args: + tool: The _Tool object to be added. + """ + _msvs_validators[tool.msvs_name] = {} + _msbuild_validators[tool.msbuild_name] = {} + _msvs_to_msbuild_converters[tool.msvs_name] = {} + _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name + + +def _GetMSBuildToolSettings(msbuild_settings, tool): + """Returns an MSBuild tool dictionary. Creates it if needed.""" + return msbuild_settings.setdefault(tool.msbuild_name, {}) + + +class _Type: + """Type of settings (Base class).""" + + def ValidateMSVS(self, value): + """Verifies that the value is legal for MSVS. + + Args: + value: the value to check for this type. + + Raises: + ValueError if value is not valid for MSVS. + """ + + def ValidateMSBuild(self, value): + """Verifies that the value is legal for MSBuild. + + Args: + value: the value to check for this type. + + Raises: + ValueError if value is not valid for MSBuild. + """ + + def ConvertToMSBuild(self, value): + """Returns the MSBuild equivalent of the MSVS value given. + + Args: + value: the MSVS value to convert. + + Returns: + the MSBuild equivalent. + + Raises: + ValueError if value is not valid. + """ + return value + + +class _String(_Type): + """A setting that's just a string.""" + + def ValidateMSVS(self, value): + if not isinstance(value, str): + raise ValueError("expected string; got %r" % value) + + def ValidateMSBuild(self, value): + if not isinstance(value, str): + raise ValueError("expected string; got %r" % value) + + def ConvertToMSBuild(self, value): + # Convert the macros + return ConvertVCMacrosToMSBuild(value) + + +class _StringList(_Type): + """A setting that's a list of strings.""" + + def ValidateMSVS(self, value): + if not isinstance(value, (list, str)): + raise ValueError("expected string list; got %r" % value) + + def ValidateMSBuild(self, value): + if not isinstance(value, (list, str)): + raise ValueError("expected string list; got %r" % value) + + def ConvertToMSBuild(self, value): + # Convert the macros + if isinstance(value, list): + return [ConvertVCMacrosToMSBuild(i) for i in value] + else: + return ConvertVCMacrosToMSBuild(value) + + +class _Boolean(_Type): + """Boolean settings, can have the values 'false' or 'true'.""" + + def _Validate(self, value): + if value != "true" and value != "false": + raise ValueError("expected bool; got %r" % value) + + def ValidateMSVS(self, value): + self._Validate(value) + + def ValidateMSBuild(self, value): + self._Validate(value) + + def ConvertToMSBuild(self, value): + self._Validate(value) + return value + + +class _Integer(_Type): + """Integer settings.""" + + def __init__(self, msbuild_base=10): + _Type.__init__(self) + self._msbuild_base = msbuild_base + + def ValidateMSVS(self, value): + # Try to convert, this will raise ValueError if invalid.
+ self.ConvertToMSBuild(value) + + def ValidateMSBuild(self, value): + # Try to convert, this will raise ValueError if invalid. + int(value, self._msbuild_base) + + def ConvertToMSBuild(self, value): + msbuild_format = (self._msbuild_base == 10) and "%d" or "0x%04x" + return msbuild_format % int(value) + + +class _Enumeration(_Type): + """Type of settings that is an enumeration. + + In MSVS, the values are indexes like '0', '1', and '2'. + MSBuild uses text labels that are more representative, like 'Win32'. + + Constructor args: + label_list: an array of MSBuild labels that correspond to the MSVS index. + In the rare cases where MSVS has skipped an index value, None is + used in the array to indicate the unused spot. + new: an array of labels that are new to MSBuild. + """ + + def __init__(self, label_list, new=None): + _Type.__init__(self) + self._label_list = label_list + self._msbuild_values = {value for value in label_list if value is not None} + if new is not None: + self._msbuild_values.update(new) + + def ValidateMSVS(self, value): + # Try to convert. It will raise an exception if not valid. + self.ConvertToMSBuild(value) + + def ValidateMSBuild(self, value): + if value not in self._msbuild_values: + raise ValueError("unrecognized enumerated value %s" % value) + + def ConvertToMSBuild(self, value): + index = int(value) + if index < 0 or index >= len(self._label_list): + raise ValueError( + "index value (%d) not in expected range [0, %d)" + % (index, len(self._label_list)) + ) + label = self._label_list[index] + if label is None: + raise ValueError("converted value for %s not specified." % value) + return label + + +# Instantiate the various generic types. +_boolean = _Boolean() +_integer = _Integer() +# For now, we don't do any special validation on these types: +_string = _String() +_file_name = _String() +_folder_name = _String() +_file_list = _StringList() +_folder_list = _StringList() +_string_list = _StringList() +# Some boolean settings went from numerical values to boolean. The +# mapping is 0: default, 1: false, 2: true. +_newly_boolean = _Enumeration(["", "false", "true"]) + + +def _Same(tool, name, setting_type): + """Defines a setting that has the same name in MSVS and MSBuild. + + Args: + tool: a dictionary that gives the names of the tool for MSVS and MSBuild. + name: the name of the setting. + setting_type: the type of this setting. + """ + _Renamed(tool, name, name, setting_type) + + +def _Renamed(tool, msvs_name, msbuild_name, setting_type): + """Defines a setting for which the name has changed. + + Args: + tool: a dictionary that gives the names of the tool for MSVS and MSBuild. + msvs_name: the name of the MSVS setting. + msbuild_name: the name of the MSBuild setting. + setting_type: the type of this setting. 
+ """ + + def _Translate(value, msbuild_settings): + msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) + msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value) + + _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS + _msbuild_validators[tool.msbuild_name][msbuild_name] = setting_type.ValidateMSBuild + _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate + + +def _Moved(tool, settings_name, msbuild_tool_name, setting_type): + _MovedAndRenamed( + tool, settings_name, msbuild_tool_name, settings_name, setting_type + ) + + +def _MovedAndRenamed( + tool, msvs_settings_name, msbuild_tool_name, msbuild_settings_name, setting_type +): + """Defines a setting that may have moved to a new section. + + Args: + tool: a dictionary that gives the names of the tool for MSVS and MSBuild. + msvs_settings_name: the MSVS name of the setting. + msbuild_tool_name: the name of the MSBuild tool to place the setting under. + msbuild_settings_name: the MSBuild name of the setting. + setting_type: the type of this setting. + """ + + def _Translate(value, msbuild_settings): + tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {}) + tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value) + + _msvs_validators[tool.msvs_name][msvs_settings_name] = setting_type.ValidateMSVS + validator = setting_type.ValidateMSBuild + _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator + _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate + + +def _MSVSOnly(tool, name, setting_type): + """Defines a setting that is only found in MSVS. + + Args: + tool: a dictionary that gives the names of the tool for MSVS and MSBuild. + name: the name of the setting. + setting_type: the type of this setting. + """ + + def _Translate(unused_value, unused_msbuild_settings): + # Since this is for MSVS only settings, no translation will happen. + pass + + _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS + _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate + + +def _MSBuildOnly(tool, name, setting_type): + """Defines a setting that is only found in MSBuild. + + Args: + tool: a dictionary that gives the names of the tool for MSVS and MSBuild. + name: the name of the setting. + setting_type: the type of this setting. + """ + + def _Translate(value, msbuild_settings): + # Let msbuild-only properties get translated as-is from msvs_settings. + tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {}) + tool_settings[name] = value + + _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild + _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate + + +def _ConvertedToAdditionalOption(tool, msvs_name, flag): + """Defines a setting that's handled via a command line option in MSBuild. + + Args: + tool: a dictionary that gives the names of the tool for MSVS and MSBuild. 
+ msvs_name: the name of the MSVS setting that if 'true' becomes a flag + flag: the flag to insert at the end of the AdditionalOptions + """ + + def _Translate(value, msbuild_settings): + if value == "true": + tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) + if "AdditionalOptions" in tool_settings: + new_flags = "{} {}".format(tool_settings["AdditionalOptions"], flag) + else: + new_flags = flag + tool_settings["AdditionalOptions"] = new_flags + + _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS + _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate + + +def _CustomGeneratePreprocessedFile(tool, msvs_name): + def _Translate(value, msbuild_settings): + tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) + if value == "0": + tool_settings["PreprocessToFile"] = "false" + tool_settings["PreprocessSuppressLineNumbers"] = "false" + elif value == "1": # /P + tool_settings["PreprocessToFile"] = "true" + tool_settings["PreprocessSuppressLineNumbers"] = "false" + elif value == "2": # /EP /P + tool_settings["PreprocessToFile"] = "true" + tool_settings["PreprocessSuppressLineNumbers"] = "true" + else: + raise ValueError("value must be one of [0, 1, 2]; got %s" % value) + + # Create a bogus validator that looks for '0', '1', or '2' + msvs_validator = _Enumeration(["a", "b", "c"]).ValidateMSVS + _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator + msbuild_validator = _boolean.ValidateMSBuild + msbuild_tool_validators = _msbuild_validators[tool.msbuild_name] + msbuild_tool_validators["PreprocessToFile"] = msbuild_validator + msbuild_tool_validators["PreprocessSuppressLineNumbers"] = msbuild_validator + _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate + + +fix_vc_macro_slashes_regex_list = ("IntDir", "OutDir") +fix_vc_macro_slashes_regex = re.compile( + r"(\$\((?:%s)\))(?:[\\/]+)" % "|".join(fix_vc_macro_slashes_regex_list) +) + +# Regular expression to detect keys that were generated by exclusion lists +_EXCLUDED_SUFFIX_RE = re.compile("^(.*)_excluded$") + + +def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr): + """Verify that 'setting' is valid if it is generated from an exclusion list. + + If the setting appears to be generated from an exclusion list, the root name + is checked. + + Args: + setting: A string that is the setting name to validate + settings: A dictionary where the keys are valid settings + error_msg: The message to emit in the event of error + stderr: The stream receiving the error messages. + """ + # This may be unrecognized because it's an exclusion list. If the + # setting name has the _excluded suffix, then check the root name. + unrecognized = True + m = re.match(_EXCLUDED_SUFFIX_RE, setting) + if m: + root_setting = m.group(1) + unrecognized = root_setting not in settings + + if unrecognized: + # We don't know this setting. Give a warning. + print(error_msg, file=stderr) + + +def FixVCMacroSlashes(s): + """Replace macros which have excessive following slashes. + + These macros are known to have a built-in trailing slash. Furthermore, many + scripts hiccup on processing paths with extra slashes in the middle. + + This list is probably not exhaustive. Add as needed. + """ + if "$" in s: + s = fix_vc_macro_slashes_regex.sub(r"\1", s) + return s + + +def ConvertVCMacrosToMSBuild(s): + """Convert the MSVS macros found in the string to the MSBuild equivalent. + + This list is probably not exhaustive. Add as needed. 
+ """ + if "$" in s: + replace_map = { + "$(ConfigurationName)": "$(Configuration)", + "$(InputDir)": "%(RelativeDir)", + "$(InputExt)": "%(Extension)", + "$(InputFileName)": "%(Filename)%(Extension)", + "$(InputName)": "%(Filename)", + "$(InputPath)": "%(Identity)", + "$(ParentName)": "$(ProjectFileName)", + "$(PlatformName)": "$(Platform)", + "$(SafeInputName)": "%(Filename)", + } + for old, new in replace_map.items(): + s = s.replace(old, new) + s = FixVCMacroSlashes(s) + return s + + +def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): + """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+). + + Args: + msvs_settings: A dictionary. The key is the tool name. The values are + themselves dictionaries of settings and their values. + stderr: The stream receiving the error messages. + + Returns: + A dictionary of MSBuild settings. The key is either the MSBuild tool name + or the empty string (for the global settings). The values are themselves + dictionaries of settings and their values. + """ + msbuild_settings = {} + for msvs_tool_name, msvs_tool_settings in msvs_settings.items(): + if msvs_tool_name in _msvs_to_msbuild_converters: + msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name] + for msvs_setting, msvs_value in msvs_tool_settings.items(): + if msvs_setting in msvs_tool: + # Invoke the translation function. + try: + msvs_tool[msvs_setting](msvs_value, msbuild_settings) + except ValueError as e: + print( + "Warning: while converting %s/%s to MSBuild, " + "%s" % (msvs_tool_name, msvs_setting, e), + file=stderr, + ) + else: + _ValidateExclusionSetting( + msvs_setting, + msvs_tool, + ( + "Warning: unrecognized setting %s/%s " + "while converting to MSBuild." + % (msvs_tool_name, msvs_setting) + ), + stderr, + ) + else: + print( + "Warning: unrecognized tool %s while converting to " + "MSBuild." % msvs_tool_name, + file=stderr, + ) + return msbuild_settings + + +def ValidateMSVSSettings(settings, stderr=sys.stderr): + """Validates that the names of the settings are valid for MSVS. + + Args: + settings: A dictionary. The key is the tool name. The values are + themselves dictionaries of settings and their values. + stderr: The stream receiving the error messages. + """ + _ValidateSettings(_msvs_validators, settings, stderr) + + +def ValidateMSBuildSettings(settings, stderr=sys.stderr): + """Validates that the names of the settings are valid for MSBuild. + + Args: + settings: A dictionary. The key is the tool name. The values are + themselves dictionaries of settings and their values. + stderr: The stream receiving the error messages. + """ + _ValidateSettings(_msbuild_validators, settings, stderr) + + +def _ValidateSettings(validators, settings, stderr): + """Validates that the settings are valid for MSBuild or MSVS. + + We currently only validate the names of the settings, not their values. + + Args: + validators: A dictionary of tools and their validators. + settings: A dictionary. The key is the tool name. The values are + themselves dictionaries of settings and their values. + stderr: The stream receiving the error messages. 
+ """ + for tool_name in settings: + if tool_name in validators: + tool_validators = validators[tool_name] + for setting, value in settings[tool_name].items(): + if setting in tool_validators: + try: + tool_validators[setting](value) + except ValueError as e: + print( + f"Warning: for {tool_name}/{setting}, {e}", + file=stderr, + ) + else: + _ValidateExclusionSetting( + setting, + tool_validators, + (f"Warning: unrecognized setting {tool_name}/{setting}"), + stderr, + ) + + else: + print("Warning: unrecognized tool %s" % (tool_name), file=stderr) + + +# MSVS and MBuild names of the tools. +_compile = _Tool("VCCLCompilerTool", "ClCompile") +_link = _Tool("VCLinkerTool", "Link") +_midl = _Tool("VCMIDLTool", "Midl") +_rc = _Tool("VCResourceCompilerTool", "ResourceCompile") +_lib = _Tool("VCLibrarianTool", "Lib") +_manifest = _Tool("VCManifestTool", "Manifest") +_masm = _Tool("MASM", "MASM") +_armasm = _Tool("ARMASM", "ARMASM") + + +_AddTool(_compile) +_AddTool(_link) +_AddTool(_midl) +_AddTool(_rc) +_AddTool(_lib) +_AddTool(_manifest) +_AddTool(_masm) +_AddTool(_armasm) +# Add sections only found in the MSBuild settings. +_msbuild_validators[""] = {} +_msbuild_validators["ProjectReference"] = {} +_msbuild_validators["ManifestResourceCompile"] = {} + +# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and +# ClCompile in MSBuild. +# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for +# the schema of the MSBuild ClCompile settings. + +# Options that have the same name in MSVS and MSBuild +_Same(_compile, "AdditionalIncludeDirectories", _folder_list) # /I +_Same(_compile, "AdditionalOptions", _string_list) +_Same(_compile, "AdditionalUsingDirectories", _folder_list) # /AI +_Same(_compile, "AssemblerListingLocation", _file_name) # /Fa +_Same(_compile, "BrowseInformationFile", _file_name) +_Same(_compile, "BufferSecurityCheck", _boolean) # /GS +_Same(_compile, "DisableLanguageExtensions", _boolean) # /Za +_Same(_compile, "DisableSpecificWarnings", _string_list) # /wd +_Same(_compile, "EnableFiberSafeOptimizations", _boolean) # /GT +_Same(_compile, "EnablePREfast", _boolean) # /analyze Visible='false' +_Same(_compile, "ExpandAttributedSource", _boolean) # /Fx +_Same(_compile, "FloatingPointExceptions", _boolean) # /fp:except +_Same(_compile, "ForceConformanceInForLoopScope", _boolean) # /Zc:forScope +_Same(_compile, "ForcedIncludeFiles", _file_list) # /FI +_Same(_compile, "ForcedUsingFiles", _file_list) # /FU +_Same(_compile, "GenerateXMLDocumentationFiles", _boolean) # /doc +_Same(_compile, "IgnoreStandardIncludePath", _boolean) # /X +_Same(_compile, "MinimalRebuild", _boolean) # /Gm +_Same(_compile, "OmitDefaultLibName", _boolean) # /Zl +_Same(_compile, "OmitFramePointers", _boolean) # /Oy +_Same(_compile, "PreprocessorDefinitions", _string_list) # /D +_Same(_compile, "ProgramDataBaseFileName", _file_name) # /Fd +_Same(_compile, "RuntimeTypeInfo", _boolean) # /GR +_Same(_compile, "ShowIncludes", _boolean) # /showIncludes +_Same(_compile, "SmallerTypeCheck", _boolean) # /RTCc +_Same(_compile, "StringPooling", _boolean) # /GF +_Same(_compile, "SuppressStartupBanner", _boolean) # /nologo +_Same(_compile, "TreatWChar_tAsBuiltInType", _boolean) # /Zc:wchar_t +_Same(_compile, "UndefineAllPreprocessorDefinitions", _boolean) # /u +_Same(_compile, "UndefinePreprocessorDefinitions", _string_list) # /U +_Same(_compile, "UseFullPaths", _boolean) # /FC +_Same(_compile, "WholeProgramOptimization", _boolean) # /GL +_Same(_compile, "XMLDocumentationFileName", _file_name) 
+_Same(_compile, "CompileAsWinRT", _boolean) # /ZW + +_Same( + _compile, + "AssemblerOutput", + _Enumeration( + [ + "NoListing", + "AssemblyCode", # /FA + "All", # /FAcs + "AssemblyAndMachineCode", # /FAc + "AssemblyAndSourceCode", + ] + ), +) # /FAs +_Same( + _compile, + "BasicRuntimeChecks", + _Enumeration( + [ + "Default", + "StackFrameRuntimeCheck", # /RTCs + "UninitializedLocalUsageCheck", # /RTCu + "EnableFastChecks", + ] + ), +) # /RTC1 +_Same( + _compile, "BrowseInformation", _Enumeration(["false", "true", "true"]) # /FR +) # /Fr +_Same( + _compile, + "CallingConvention", + _Enumeration(["Cdecl", "FastCall", "StdCall", "VectorCall"]), # /Gd # /Gr # /Gz +) # /Gv +_Same( + _compile, + "CompileAs", + _Enumeration(["Default", "CompileAsC", "CompileAsCpp"]), # /TC +) # /TP +_Same( + _compile, + "DebugInformationFormat", + _Enumeration( + [ + "", # Disabled + "OldStyle", # /Z7 + None, + "ProgramDatabase", # /Zi + "EditAndContinue", + ] + ), +) # /ZI +_Same( + _compile, + "EnableEnhancedInstructionSet", + _Enumeration( + [ + "NotSet", + "StreamingSIMDExtensions", # /arch:SSE + "StreamingSIMDExtensions2", # /arch:SSE2 + "AdvancedVectorExtensions", # /arch:AVX (vs2012+) + "NoExtensions", # /arch:IA32 (vs2012+) + # This one only exists in the new msbuild format. + "AdvancedVectorExtensions2", # /arch:AVX2 (vs2013r2+) + ] + ), +) +_Same( + _compile, + "ErrorReporting", + _Enumeration( + [ + "None", # /errorReport:none + "Prompt", # /errorReport:prompt + "Queue", + ], # /errorReport:queue + new=["Send"], + ), +) # /errorReport:send" +_Same( + _compile, + "ExceptionHandling", + _Enumeration(["false", "Sync", "Async"], new=["SyncCThrow"]), # /EHsc # /EHa +) # /EHs +_Same( + _compile, "FavorSizeOrSpeed", _Enumeration(["Neither", "Speed", "Size"]) # /Ot +) # /Os +_Same( + _compile, + "FloatingPointModel", + _Enumeration(["Precise", "Strict", "Fast"]), # /fp:precise # /fp:strict +) # /fp:fast +_Same( + _compile, + "InlineFunctionExpansion", + _Enumeration( + ["Default", "OnlyExplicitInline", "AnySuitable"], # /Ob1 # /Ob2 + new=["Disabled"], + ), +) # /Ob0 +_Same( + _compile, + "Optimization", + _Enumeration(["Disabled", "MinSpace", "MaxSpeed", "Full"]), # /Od # /O1 # /O2 +) # /Ox +_Same( + _compile, + "RuntimeLibrary", + _Enumeration( + [ + "MultiThreaded", # /MT + "MultiThreadedDebug", # /MTd + "MultiThreadedDLL", # /MD + "MultiThreadedDebugDLL", + ] + ), +) # /MDd +_Same( + _compile, + "StructMemberAlignment", + _Enumeration( + [ + "Default", + "1Byte", # /Zp1 + "2Bytes", # /Zp2 + "4Bytes", # /Zp4 + "8Bytes", # /Zp8 + "16Bytes", + ] + ), +) # /Zp16 +_Same( + _compile, + "WarningLevel", + _Enumeration( + [ + "TurnOffAllWarnings", # /W0 + "Level1", # /W1 + "Level2", # /W2 + "Level3", # /W3 + "Level4", + ], # /W4 + new=["EnableAllWarnings"], + ), +) # /Wall + +# Options found in MSVS that have been renamed in MSBuild. 
+_Renamed( + _compile, "EnableFunctionLevelLinking", "FunctionLevelLinking", _boolean +) # /Gy +_Renamed(_compile, "EnableIntrinsicFunctions", "IntrinsicFunctions", _boolean) # /Oi +_Renamed(_compile, "KeepComments", "PreprocessKeepComments", _boolean) # /C +_Renamed(_compile, "ObjectFile", "ObjectFileName", _file_name) # /Fo +_Renamed(_compile, "OpenMP", "OpenMPSupport", _boolean) # /openmp +_Renamed( + _compile, "PrecompiledHeaderThrough", "PrecompiledHeaderFile", _file_name +) # Used with /Yc and /Yu +_Renamed( + _compile, "PrecompiledHeaderFile", "PrecompiledHeaderOutputFile", _file_name +) # /Fp +_Renamed( + _compile, + "UsePrecompiledHeader", + "PrecompiledHeader", + _Enumeration( + ["NotUsing", "Create", "Use"] # VS recognized '' for this value too. # /Yc + ), +) # /Yu +_Renamed(_compile, "WarnAsError", "TreatWarningAsError", _boolean) # /WX + +_ConvertedToAdditionalOption(_compile, "DefaultCharIsUnsigned", "/J") + +# MSVS options not found in MSBuild. +_MSVSOnly(_compile, "Detect64BitPortabilityProblems", _boolean) +_MSVSOnly(_compile, "UseUnicodeResponseFiles", _boolean) + +# MSBuild options not found in MSVS. +_MSBuildOnly(_compile, "BuildingInIDE", _boolean) +_MSBuildOnly( + _compile, "CompileAsManaged", _Enumeration([], new=["false", "true"]) +) # /clr +_MSBuildOnly(_compile, "CreateHotpatchableImage", _boolean) # /hotpatch +_MSBuildOnly(_compile, "MultiProcessorCompilation", _boolean) # /MP +_MSBuildOnly(_compile, "PreprocessOutputPath", _string) # /Fi +_MSBuildOnly(_compile, "ProcessorNumber", _integer) # the number of processors +_MSBuildOnly(_compile, "TrackerLogDirectory", _folder_name) +_MSBuildOnly(_compile, "TreatSpecificWarningsAsErrors", _string_list) # /we +_MSBuildOnly(_compile, "UseUnicodeForAssemblerListing", _boolean) # /FAu + +# Defines a setting that needs very customized processing +_CustomGeneratePreprocessedFile(_compile, "GeneratePreprocessedFile") + + +# Directives for converting MSVS VCLinkerTool to MSBuild Link. +# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for +# the schema of the MSBuild Link settings. 
+ +# Options that have the same name in MSVS and MSBuild +_Same(_link, "AdditionalDependencies", _file_list) +_Same(_link, "AdditionalLibraryDirectories", _folder_list) # /LIBPATH +# /MANIFESTDEPENDENCY: +_Same(_link, "AdditionalManifestDependencies", _file_list) +_Same(_link, "AdditionalOptions", _string_list) +_Same(_link, "AddModuleNamesToAssembly", _file_list) # /ASSEMBLYMODULE +_Same(_link, "AllowIsolation", _boolean) # /ALLOWISOLATION +_Same(_link, "AssemblyLinkResource", _file_list) # /ASSEMBLYLINKRESOURCE +_Same(_link, "BaseAddress", _string) # /BASE +_Same(_link, "CLRUnmanagedCodeCheck", _boolean) # /CLRUNMANAGEDCODECHECK +_Same(_link, "DelayLoadDLLs", _file_list) # /DELAYLOAD +_Same(_link, "DelaySign", _boolean) # /DELAYSIGN +_Same(_link, "EmbedManagedResourceFile", _file_list) # /ASSEMBLYRESOURCE +_Same(_link, "EnableUAC", _boolean) # /MANIFESTUAC +_Same(_link, "EntryPointSymbol", _string) # /ENTRY +_Same(_link, "ForceSymbolReferences", _file_list) # /INCLUDE +_Same(_link, "FunctionOrder", _file_name) # /ORDER +_Same(_link, "GenerateDebugInformation", _boolean) # /DEBUG +_Same(_link, "GenerateMapFile", _boolean) # /MAP +_Same(_link, "HeapCommitSize", _string) +_Same(_link, "HeapReserveSize", _string) # /HEAP +_Same(_link, "IgnoreAllDefaultLibraries", _boolean) # /NODEFAULTLIB +_Same(_link, "IgnoreEmbeddedIDL", _boolean) # /IGNOREIDL +_Same(_link, "ImportLibrary", _file_name) # /IMPLIB +_Same(_link, "KeyContainer", _file_name) # /KEYCONTAINER +_Same(_link, "KeyFile", _file_name) # /KEYFILE +_Same(_link, "ManifestFile", _file_name) # /ManifestFile +_Same(_link, "MapExports", _boolean) # /MAPINFO:EXPORTS +_Same(_link, "MapFileName", _file_name) +_Same(_link, "MergedIDLBaseFileName", _file_name) # /IDLOUT +_Same(_link, "MergeSections", _string) # /MERGE +_Same(_link, "MidlCommandFile", _file_name) # /MIDL +_Same(_link, "ModuleDefinitionFile", _file_name) # /DEF +_Same(_link, "OutputFile", _file_name) # /OUT +_Same(_link, "PerUserRedirection", _boolean) +_Same(_link, "Profile", _boolean) # /PROFILE +_Same(_link, "ProfileGuidedDatabase", _file_name) # /PGD +_Same(_link, "ProgramDatabaseFile", _file_name) # /PDB +_Same(_link, "RegisterOutput", _boolean) +_Same(_link, "SetChecksum", _boolean) # /RELEASE +_Same(_link, "StackCommitSize", _string) +_Same(_link, "StackReserveSize", _string) # /STACK +_Same(_link, "StripPrivateSymbols", _file_name) # /PDBSTRIPPED +_Same(_link, "SupportUnloadOfDelayLoadedDLL", _boolean) # /DELAY:UNLOAD +_Same(_link, "SuppressStartupBanner", _boolean) # /NOLOGO +_Same(_link, "SwapRunFromCD", _boolean) # /SWAPRUN:CD +_Same(_link, "TurnOffAssemblyGeneration", _boolean) # /NOASSEMBLY +_Same(_link, "TypeLibraryFile", _file_name) # /TLBOUT +_Same(_link, "TypeLibraryResourceID", _integer) # /TLBID +_Same(_link, "UACUIAccess", _boolean) # /uiAccess='true' +_Same(_link, "Version", _string) # /VERSION + +_Same(_link, "EnableCOMDATFolding", _newly_boolean) # /OPT:ICF +_Same(_link, "FixedBaseAddress", _newly_boolean) # /FIXED +_Same(_link, "LargeAddressAware", _newly_boolean) # /LARGEADDRESSAWARE +_Same(_link, "OptimizeReferences", _newly_boolean) # /OPT:REF +_Same(_link, "RandomizedBaseAddress", _newly_boolean) # /DYNAMICBASE +_Same(_link, "TerminalServerAware", _newly_boolean) # /TSAWARE + +_subsystem_enumeration = _Enumeration( + [ + "NotSet", + "Console", # /SUBSYSTEM:CONSOLE + "Windows", # /SUBSYSTEM:WINDOWS + "Native", # /SUBSYSTEM:NATIVE + "EFI Application", # /SUBSYSTEM:EFI_APPLICATION + "EFI Boot Service Driver", # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER + "EFI ROM", 
# /SUBSYSTEM:EFI_ROM + "EFI Runtime", # /SUBSYSTEM:EFI_RUNTIME_DRIVER + "WindowsCE", + ], # /SUBSYSTEM:WINDOWSCE + new=["POSIX"], +) # /SUBSYSTEM:POSIX + +_target_machine_enumeration = _Enumeration( + [ + "NotSet", + "MachineX86", # /MACHINE:X86 + None, + "MachineARM", # /MACHINE:ARM + "MachineEBC", # /MACHINE:EBC + "MachineIA64", # /MACHINE:IA64 + None, + "MachineMIPS", # /MACHINE:MIPS + "MachineMIPS16", # /MACHINE:MIPS16 + "MachineMIPSFPU", # /MACHINE:MIPSFPU + "MachineMIPSFPU16", # /MACHINE:MIPSFPU16 + None, + None, + None, + "MachineSH4", # /MACHINE:SH4 + None, + "MachineTHUMB", # /MACHINE:THUMB + "MachineX64", + ] +) # /MACHINE:X64 + +_Same( + _link, "AssemblyDebug", _Enumeration(["", "true", "false"]) # /ASSEMBLYDEBUG +) # /ASSEMBLYDEBUG:DISABLE +_Same( + _link, + "CLRImageType", + _Enumeration( + [ + "Default", + "ForceIJWImage", # /CLRIMAGETYPE:IJW + "ForcePureILImage", # /Switch="CLRIMAGETYPE:PURE + "ForceSafeILImage", + ] + ), +) # /Switch="CLRIMAGETYPE:SAFE +_Same( + _link, + "CLRThreadAttribute", + _Enumeration( + [ + "DefaultThreadingAttribute", # /CLRTHREADATTRIBUTE:NONE + "MTAThreadingAttribute", # /CLRTHREADATTRIBUTE:MTA + "STAThreadingAttribute", + ] + ), +) # /CLRTHREADATTRIBUTE:STA +_Same( + _link, + "DataExecutionPrevention", + _Enumeration(["", "false", "true"]), # /NXCOMPAT:NO +) # /NXCOMPAT +_Same( + _link, + "Driver", + _Enumeration(["NotSet", "Driver", "UpOnly", "WDM"]), # /Driver # /DRIVER:UPONLY +) # /DRIVER:WDM +_Same( + _link, + "LinkTimeCodeGeneration", + _Enumeration( + [ + "Default", + "UseLinkTimeCodeGeneration", # /LTCG + "PGInstrument", # /LTCG:PGInstrument + "PGOptimization", # /LTCG:PGOptimize + "PGUpdate", + ] + ), +) # /LTCG:PGUpdate +_Same( + _link, + "ShowProgress", + _Enumeration( + ["NotSet", "LinkVerbose", "LinkVerboseLib"], # /VERBOSE # /VERBOSE:Lib + new=[ + "LinkVerboseICF", # /VERBOSE:ICF + "LinkVerboseREF", # /VERBOSE:REF + "LinkVerboseSAFESEH", # /VERBOSE:SAFESEH + "LinkVerboseCLR", + ], + ), +) # /VERBOSE:CLR +_Same(_link, "SubSystem", _subsystem_enumeration) +_Same(_link, "TargetMachine", _target_machine_enumeration) +_Same( + _link, + "UACExecutionLevel", + _Enumeration( + [ + "AsInvoker", # /level='asInvoker' + "HighestAvailable", # /level='highestAvailable' + "RequireAdministrator", + ] + ), +) # /level='requireAdministrator' +_Same(_link, "MinimumRequiredVersion", _string) +_Same(_link, "TreatLinkerWarningAsErrors", _boolean) # /WX + + +# Options found in MSVS that have been renamed in MSBuild. +_Renamed( + _link, + "ErrorReporting", + "LinkErrorReporting", + _Enumeration( + [ + "NoErrorReport", # /ERRORREPORT:NONE + "PromptImmediately", # /ERRORREPORT:PROMPT + "QueueForNextLogin", + ], # /ERRORREPORT:QUEUE + new=["SendErrorReport"], + ), +) # /ERRORREPORT:SEND +_Renamed( + _link, "IgnoreDefaultLibraryNames", "IgnoreSpecificDefaultLibraries", _file_list +) # /NODEFAULTLIB +_Renamed(_link, "ResourceOnlyDLL", "NoEntryPoint", _boolean) # /NOENTRY +_Renamed(_link, "SwapRunFromNet", "SwapRunFromNET", _boolean) # /SWAPRUN:NET + +_Moved(_link, "GenerateManifest", "", _boolean) +_Moved(_link, "IgnoreImportLibrary", "", _boolean) +_Moved(_link, "LinkIncremental", "", _newly_boolean) +_Moved(_link, "LinkLibraryDependencies", "ProjectReference", _boolean) +_Moved(_link, "UseLibraryDependencyInputs", "ProjectReference", _boolean) + +# MSVS options not found in MSBuild. +_MSVSOnly(_link, "OptimizeForWindows98", _newly_boolean) +_MSVSOnly(_link, "UseUnicodeResponseFiles", _boolean) + +# MSBuild options not found in MSVS. 
+_MSBuildOnly(_link, "BuildingInIDE", _boolean) +_MSBuildOnly(_link, "ImageHasSafeExceptionHandlers", _boolean) # /SAFESEH +_MSBuildOnly(_link, "LinkDLL", _boolean) # /DLL Visible='false' +_MSBuildOnly(_link, "LinkStatus", _boolean) # /LTCG:STATUS +_MSBuildOnly(_link, "PreventDllBinding", _boolean) # /ALLOWBIND +_MSBuildOnly(_link, "SupportNobindOfDelayLoadedDLL", _boolean) # /DELAY:NOBIND +_MSBuildOnly(_link, "TrackerLogDirectory", _folder_name) +_MSBuildOnly(_link, "MSDOSStubFileName", _file_name) # /STUB Visible='false' +_MSBuildOnly(_link, "SectionAlignment", _integer) # /ALIGN +_MSBuildOnly(_link, "SpecifySectionAttributes", _string) # /SECTION +_MSBuildOnly( + _link, + "ForceFileOutput", + _Enumeration( + [], + new=[ + "Enabled", # /FORCE + # /FORCE:MULTIPLE + "MultiplyDefinedSymbolOnly", + "UndefinedSymbolOnly", + ], + ), +) # /FORCE:UNRESOLVED +_MSBuildOnly( + _link, + "CreateHotPatchableImage", + _Enumeration( + [], + new=[ + "Enabled", # /FUNCTIONPADMIN + "X86Image", # /FUNCTIONPADMIN:5 + "X64Image", # /FUNCTIONPADMIN:6 + "ItaniumImage", + ], + ), +) # /FUNCTIONPADMIN:16 +_MSBuildOnly( + _link, + "CLRSupportLastError", + _Enumeration( + [], + new=[ + "Enabled", # /CLRSupportLastError + "Disabled", # /CLRSupportLastError:NO + # /CLRSupportLastError:SYSTEMDLL + "SystemDlls", + ], + ), +) + + +# Directives for converting VCResourceCompilerTool to ResourceCompile. +# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for +# the schema of the MSBuild ResourceCompile settings. + +_Same(_rc, "AdditionalOptions", _string_list) +_Same(_rc, "AdditionalIncludeDirectories", _folder_list) # /I +_Same(_rc, "Culture", _Integer(msbuild_base=16)) +_Same(_rc, "IgnoreStandardIncludePath", _boolean) # /X +_Same(_rc, "PreprocessorDefinitions", _string_list) # /D +_Same(_rc, "ResourceOutputFileName", _string) # /fo +_Same(_rc, "ShowProgress", _boolean) # /v +# There is no UI in VisualStudio 2008 to set the following properties. +# However they are found in CL and other tools. Include them here for +# completeness, as they are very likely to have the same usage pattern. +_Same(_rc, "SuppressStartupBanner", _boolean) # /nologo +_Same(_rc, "UndefinePreprocessorDefinitions", _string_list) # /u + +# MSBuild options not found in MSVS. +_MSBuildOnly(_rc, "NullTerminateStrings", _boolean) # /n +_MSBuildOnly(_rc, "TrackerLogDirectory", _folder_name) + + +# Directives for converting VCMIDLTool to Midl. +# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for +# the schema of the MSBuild Midl settings. 
+
+_Same(_midl, "AdditionalIncludeDirectories", _folder_list)  # /I
+_Same(_midl, "AdditionalOptions", _string_list)
+_Same(_midl, "CPreprocessOptions", _string)  # /cpp_opt
+_Same(_midl, "ErrorCheckAllocations", _boolean)  # /error allocation
+_Same(_midl, "ErrorCheckBounds", _boolean)  # /error bounds_check
+_Same(_midl, "ErrorCheckEnumRange", _boolean)  # /error enum
+_Same(_midl, "ErrorCheckRefPointers", _boolean)  # /error ref
+_Same(_midl, "ErrorCheckStubData", _boolean)  # /error stub_data
+_Same(_midl, "GenerateStublessProxies", _boolean)  # /Oicf
+_Same(_midl, "GenerateTypeLibrary", _boolean)
+_Same(_midl, "HeaderFileName", _file_name)  # /h
+_Same(_midl, "IgnoreStandardIncludePath", _boolean)  # /no_def_idir
+_Same(_midl, "InterfaceIdentifierFileName", _file_name)  # /iid
+_Same(_midl, "MkTypLibCompatible", _boolean)  # /mktyplib203
+_Same(_midl, "OutputDirectory", _string)  # /out
+_Same(_midl, "PreprocessorDefinitions", _string_list)  # /D
+_Same(_midl, "ProxyFileName", _file_name)  # /proxy
+_Same(_midl, "RedirectOutputAndErrors", _file_name)  # /o
+_Same(_midl, "SuppressStartupBanner", _boolean)  # /nologo
+_Same(_midl, "TypeLibraryName", _file_name)  # /tlb
+_Same(_midl, "UndefinePreprocessorDefinitions", _string_list)  # /U
+_Same(_midl, "WarnAsError", _boolean)  # /WX
+
+_Same(
+    _midl,
+    "DefaultCharType",
+    _Enumeration(["Unsigned", "Signed", "Ascii"]),  # /char unsigned  # /char signed
+)  # /char ascii7
+_Same(
+    _midl,
+    "TargetEnvironment",
+    _Enumeration(
+        [
+            "NotSet",
+            "Win32",  # /env win32
+            "Itanium",  # /env ia64
+            "X64",  # /env x64
+            "ARM64",  # /env arm64
+        ]
+    ),
+)
+_Same(
+    _midl,
+    "EnableErrorChecks",
+    _Enumeration(["EnableCustom", "None", "All"]),  # /error none
+)  # /error all
+_Same(
+    _midl,
+    "StructMemberAlignment",
+    _Enumeration(["NotSet", "1", "2", "4", "8"]),  # Zp1  # Zp2  # Zp4
+)  # Zp8
+_Same(
+    _midl,
+    "WarningLevel",
+    _Enumeration(["0", "1", "2", "3", "4"]),  # /W0  # /W1  # /W2  # /W3
+)  # /W4
+
+_Renamed(_midl, "DLLDataFileName", "DllDataFileName", _file_name)  # /dlldata
+_Renamed(_midl, "ValidateParameters", "ValidateAllParameters", _boolean)  # /robust
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_midl, "ApplicationConfigurationMode", _boolean)  # /app_config
+_MSBuildOnly(_midl, "ClientStubFile", _file_name)  # /cstub
+_MSBuildOnly(
+    _midl, "GenerateClientFiles", _Enumeration([], new=["Stub", "None"])  # /client stub
+)  # /client none
+_MSBuildOnly(
+    _midl, "GenerateServerFiles", _Enumeration([], new=["Stub", "None"])  # /server stub
+)  # /server none
+_MSBuildOnly(_midl, "LocaleID", _integer)  # /lcid DECIMAL
+_MSBuildOnly(_midl, "ServerStubFile", _file_name)  # /sstub
+_MSBuildOnly(_midl, "SuppressCompilerWarnings", _boolean)  # /no_warn
+_MSBuildOnly(_midl, "TrackerLogDirectory", _folder_name)
+_MSBuildOnly(
+    _midl, "TypeLibFormat", _Enumeration([], new=["NewFormat", "OldFormat"])  # /newtlb
+)  # /oldtlb
+
+
+# Directives for converting VCLibrarianTool to Lib.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for
+# the schema of the MSBuild Lib settings.
+
+_Same(_lib, "AdditionalDependencies", _file_list)
+_Same(_lib, "AdditionalLibraryDirectories", _folder_list)  # /LIBPATH
+_Same(_lib, "AdditionalOptions", _string_list)
+_Same(_lib, "ExportNamedFunctions", _string_list)  # /EXPORT
+_Same(_lib, "ForceSymbolReferences", _string)  # /INCLUDE
+_Same(_lib, "IgnoreAllDefaultLibraries", _boolean)  # /NODEFAULTLIB
+_Same(_lib, "IgnoreSpecificDefaultLibraries", _file_list)  # /NODEFAULTLIB
+_Same(_lib, "ModuleDefinitionFile", _file_name)  # /DEF
+_Same(_lib, "OutputFile", _file_name)  # /OUT
+_Same(_lib, "SuppressStartupBanner", _boolean)  # /NOLOGO
+_Same(_lib, "UseUnicodeResponseFiles", _boolean)
+_Same(_lib, "LinkTimeCodeGeneration", _boolean)  # /LTCG
+_Same(_lib, "TargetMachine", _target_machine_enumeration)
+
+# TODO(jeanluc) _link defines the same value that gets moved to
+# ProjectReference. We may want to validate that they are consistent.
+_Moved(_lib, "LinkLibraryDependencies", "ProjectReference", _boolean)
+
+_MSBuildOnly(_lib, "DisplayLibrary", _string)  # /LIST Visible='false'
+_MSBuildOnly(
+    _lib,
+    "ErrorReporting",
+    _Enumeration(
+        [],
+        new=[
+            "PromptImmediately",  # /ERRORREPORT:PROMPT
+            "QueueForNextLogin",  # /ERRORREPORT:QUEUE
+            "SendErrorReport",  # /ERRORREPORT:SEND
+            "NoErrorReport",
+        ],
+    ),
+)  # /ERRORREPORT:NONE
+_MSBuildOnly(_lib, "MinimumRequiredVersion", _string)
+_MSBuildOnly(_lib, "Name", _file_name)  # /NAME
+_MSBuildOnly(_lib, "RemoveObjects", _file_list)  # /REMOVE
+_MSBuildOnly(_lib, "SubSystem", _subsystem_enumeration)
+_MSBuildOnly(_lib, "TrackerLogDirectory", _folder_name)
+_MSBuildOnly(_lib, "TreatLibWarningAsErrors", _boolean)  # /WX
+_MSBuildOnly(_lib, "Verbose", _boolean)
+
+
+# Directives for converting VCManifestTool to Mt.
+# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
+# the schema of the MSBuild Manifest settings.
+
+# Options that have the same name in MSVS and MSBuild
+_Same(_manifest, "AdditionalManifestFiles", _file_list)  # /manifest
+_Same(_manifest, "AdditionalOptions", _string_list)
+_Same(_manifest, "AssemblyIdentity", _string)  # /identity:
+_Same(_manifest, "ComponentFileName", _file_name)  # /dll
+_Same(_manifest, "GenerateCatalogFiles", _boolean)  # /makecdfs
+_Same(_manifest, "InputResourceManifests", _string)  # /inputresource
+_Same(_manifest, "OutputManifestFile", _file_name)  # /out
+_Same(_manifest, "RegistrarScriptFile", _file_name)  # /rgs
+_Same(_manifest, "ReplacementsFile", _file_name)  # /replacements
+_Same(_manifest, "SuppressStartupBanner", _boolean)  # /nologo
+_Same(_manifest, "TypeLibraryFile", _file_name)  # /tlb:
+_Same(_manifest, "UpdateFileHashes", _boolean)  # /hashupdate
+_Same(_manifest, "UpdateFileHashesSearchPath", _file_name)
+_Same(_manifest, "VerboseOutput", _boolean)  # /verbose
+
+# Options that have moved location.
+_MovedAndRenamed(
+    _manifest,
+    "ManifestResourceFile",
+    "ManifestResourceCompile",
+    "ResourceOutputFileName",
+    _file_name,
+)
+_Moved(_manifest, "EmbedManifest", "", _boolean)
+
+# MSVS options not found in MSBuild.
+_MSVSOnly(_manifest, "DependencyInformationFile", _file_name)
+_MSVSOnly(_manifest, "UseFAT32Workaround", _boolean)
+_MSVSOnly(_manifest, "UseUnicodeResponseFiles", _boolean)
+
+# MSBuild options not found in MSVS.
+_MSBuildOnly(_manifest, "EnableDPIAwareness", _boolean)
+_MSBuildOnly(_manifest, "GenerateCategoryTags", _boolean)  # /category
+_MSBuildOnly(
+    _manifest, "ManifestFromManagedAssembly", _file_name
+)  # /managedassemblyname
+_MSBuildOnly(_manifest, "OutputResourceManifests", _string)  # /outputresource
+_MSBuildOnly(_manifest, "SuppressDependencyElement", _boolean)  # /nodependency
+_MSBuildOnly(_manifest, "TrackerLogDirectory", _folder_name)
+
+
+# Directives for MASM.
+# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
+# MSBuild MASM settings.
+
+# Options that have the same name in MSVS and MSBuild.
+_Same(_masm, "UseSafeExceptionHandlers", _boolean)  # /safeseh
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
new file mode 100644
index 0000000..6ca0968
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
@@ -0,0 +1,1547 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the MSVSSettings.py file."""
+
+import unittest
+import gyp.MSVSSettings as MSVSSettings
+
+from io import StringIO
+
+
+class TestSequenceFunctions(unittest.TestCase):
+    def setUp(self):
+        self.stderr = StringIO()
+
+    def _ExpectedWarnings(self, expected):
+        """Compares recorded lines to expected warnings."""
+        self.stderr.seek(0)
+        actual = self.stderr.read().split("\n")
+        actual = [line for line in actual if line]
+        self.assertEqual(sorted(expected), sorted(actual))
+
+    def testValidateMSVSSettings_tool_names(self):
+        """Tests that only MSVS tool names are allowed."""
+        MSVSSettings.ValidateMSVSSettings(
+            {
+                "VCCLCompilerTool": {},
+                "VCLinkerTool": {},
+                "VCMIDLTool": {},
+                "foo": {},
+                "VCResourceCompilerTool": {},
+                "VCLibrarianTool": {},
+                "VCManifestTool": {},
+                "ClCompile": {},
+            },
+            self.stderr,
+        )
+        self._ExpectedWarnings(
+            ["Warning: unrecognized tool foo", "Warning: unrecognized tool ClCompile"]
+        )
+
+    def testValidateMSVSSettings_settings(self):
+        """Tests that invalid MSVS settings produce warnings."""
+        MSVSSettings.ValidateMSVSSettings(
+            {
+                "VCCLCompilerTool": {
+                    "AdditionalIncludeDirectories": "folder1;folder2",
+                    "AdditionalOptions": ["string1", "string2"],
+                    "AdditionalUsingDirectories": "folder1;folder2",
+                    "AssemblerListingLocation": "a_file_name",
+                    "AssemblerOutput": "0",
+                    "BasicRuntimeChecks": "5",
+                    "BrowseInformation": "fdkslj",
+                    "BrowseInformationFile": "a_file_name",
+                    "BufferSecurityCheck": "true",
+                    "CallingConvention": "-1",
+                    "CompileAs": "1",
+                    "DebugInformationFormat": "2",
+                    "DefaultCharIsUnsigned": "true",
+                    "Detect64BitPortabilityProblems": "true",
+                    "DisableLanguageExtensions": "true",
+                    "DisableSpecificWarnings": "string1;string2",
+                    "EnableEnhancedInstructionSet": "1",
+                    "EnableFiberSafeOptimizations": "true",
+                    "EnableFunctionLevelLinking": "true",
+                    "EnableIntrinsicFunctions": "true",
+                    "EnablePREfast": "true",
+                    "Enableprefast": "bogus",
+                    "ErrorReporting": "1",
+                    "ExceptionHandling": "1",
+                    "ExpandAttributedSource": "true",
+                    "FavorSizeOrSpeed": "1",
+                    "FloatingPointExceptions": "true",
+                    "FloatingPointModel": "1",
+                    "ForceConformanceInForLoopScope": "true",
+                    "ForcedIncludeFiles": "file1;file2",
+                    "ForcedUsingFiles": "file1;file2",
+                    "GeneratePreprocessedFile": "1",
+                    "GenerateXMLDocumentationFiles": "true",
+                    "IgnoreStandardIncludePath": "true",
+                    "InlineFunctionExpansion": "1",
+                    "KeepComments":
"true", + "MinimalRebuild": "true", + "ObjectFile": "a_file_name", + "OmitDefaultLibName": "true", + "OmitFramePointers": "true", + "OpenMP": "true", + "Optimization": "1", + "PrecompiledHeaderFile": "a_file_name", + "PrecompiledHeaderThrough": "a_file_name", + "PreprocessorDefinitions": "string1;string2", + "ProgramDataBaseFileName": "a_file_name", + "RuntimeLibrary": "1", + "RuntimeTypeInfo": "true", + "ShowIncludes": "true", + "SmallerTypeCheck": "true", + "StringPooling": "true", + "StructMemberAlignment": "1", + "SuppressStartupBanner": "true", + "TreatWChar_tAsBuiltInType": "true", + "UndefineAllPreprocessorDefinitions": "true", + "UndefinePreprocessorDefinitions": "string1;string2", + "UseFullPaths": "true", + "UsePrecompiledHeader": "1", + "UseUnicodeResponseFiles": "true", + "WarnAsError": "true", + "WarningLevel": "1", + "WholeProgramOptimization": "true", + "XMLDocumentationFileName": "a_file_name", + "ZZXYZ": "bogus", + }, + "VCLinkerTool": { + "AdditionalDependencies": "file1;file2", + "AdditionalDependencies_excluded": "file3", + "AdditionalLibraryDirectories": "folder1;folder2", + "AdditionalManifestDependencies": "file1;file2", + "AdditionalOptions": "a string1", + "AddModuleNamesToAssembly": "file1;file2", + "AllowIsolation": "true", + "AssemblyDebug": "2", + "AssemblyLinkResource": "file1;file2", + "BaseAddress": "a string1", + "CLRImageType": "2", + "CLRThreadAttribute": "2", + "CLRUnmanagedCodeCheck": "true", + "DataExecutionPrevention": "2", + "DelayLoadDLLs": "file1;file2", + "DelaySign": "true", + "Driver": "2", + "EmbedManagedResourceFile": "file1;file2", + "EnableCOMDATFolding": "2", + "EnableUAC": "true", + "EntryPointSymbol": "a string1", + "ErrorReporting": "2", + "FixedBaseAddress": "2", + "ForceSymbolReferences": "file1;file2", + "FunctionOrder": "a_file_name", + "GenerateDebugInformation": "true", + "GenerateManifest": "true", + "GenerateMapFile": "true", + "HeapCommitSize": "a string1", + "HeapReserveSize": "a string1", + "IgnoreAllDefaultLibraries": "true", + "IgnoreDefaultLibraryNames": "file1;file2", + "IgnoreEmbeddedIDL": "true", + "IgnoreImportLibrary": "true", + "ImportLibrary": "a_file_name", + "KeyContainer": "a_file_name", + "KeyFile": "a_file_name", + "LargeAddressAware": "2", + "LinkIncremental": "2", + "LinkLibraryDependencies": "true", + "LinkTimeCodeGeneration": "2", + "ManifestFile": "a_file_name", + "MapExports": "true", + "MapFileName": "a_file_name", + "MergedIDLBaseFileName": "a_file_name", + "MergeSections": "a string1", + "MidlCommandFile": "a_file_name", + "ModuleDefinitionFile": "a_file_name", + "OptimizeForWindows98": "1", + "OptimizeReferences": "2", + "OutputFile": "a_file_name", + "PerUserRedirection": "true", + "Profile": "true", + "ProfileGuidedDatabase": "a_file_name", + "ProgramDatabaseFile": "a_file_name", + "RandomizedBaseAddress": "2", + "RegisterOutput": "true", + "ResourceOnlyDLL": "true", + "SetChecksum": "true", + "ShowProgress": "2", + "StackCommitSize": "a string1", + "StackReserveSize": "a string1", + "StripPrivateSymbols": "a_file_name", + "SubSystem": "2", + "SupportUnloadOfDelayLoadedDLL": "true", + "SuppressStartupBanner": "true", + "SwapRunFromCD": "true", + "SwapRunFromNet": "true", + "TargetMachine": "2", + "TerminalServerAware": "2", + "TurnOffAssemblyGeneration": "true", + "TypeLibraryFile": "a_file_name", + "TypeLibraryResourceID": "33", + "UACExecutionLevel": "2", + "UACUIAccess": "true", + "UseLibraryDependencyInputs": "true", + "UseUnicodeResponseFiles": "true", + "Version": "a string1", + }, + 
"VCMIDLTool": { + "AdditionalIncludeDirectories": "folder1;folder2", + "AdditionalOptions": "a string1", + "CPreprocessOptions": "a string1", + "DefaultCharType": "1", + "DLLDataFileName": "a_file_name", + "EnableErrorChecks": "1", + "ErrorCheckAllocations": "true", + "ErrorCheckBounds": "true", + "ErrorCheckEnumRange": "true", + "ErrorCheckRefPointers": "true", + "ErrorCheckStubData": "true", + "GenerateStublessProxies": "true", + "GenerateTypeLibrary": "true", + "HeaderFileName": "a_file_name", + "IgnoreStandardIncludePath": "true", + "InterfaceIdentifierFileName": "a_file_name", + "MkTypLibCompatible": "true", + "notgood": "bogus", + "OutputDirectory": "a string1", + "PreprocessorDefinitions": "string1;string2", + "ProxyFileName": "a_file_name", + "RedirectOutputAndErrors": "a_file_name", + "StructMemberAlignment": "1", + "SuppressStartupBanner": "true", + "TargetEnvironment": "1", + "TypeLibraryName": "a_file_name", + "UndefinePreprocessorDefinitions": "string1;string2", + "ValidateParameters": "true", + "WarnAsError": "true", + "WarningLevel": "1", + }, + "VCResourceCompilerTool": { + "AdditionalOptions": "a string1", + "AdditionalIncludeDirectories": "folder1;folder2", + "Culture": "1003", + "IgnoreStandardIncludePath": "true", + "notgood2": "bogus", + "PreprocessorDefinitions": "string1;string2", + "ResourceOutputFileName": "a string1", + "ShowProgress": "true", + "SuppressStartupBanner": "true", + "UndefinePreprocessorDefinitions": "string1;string2", + }, + "VCLibrarianTool": { + "AdditionalDependencies": "file1;file2", + "AdditionalLibraryDirectories": "folder1;folder2", + "AdditionalOptions": "a string1", + "ExportNamedFunctions": "string1;string2", + "ForceSymbolReferences": "a string1", + "IgnoreAllDefaultLibraries": "true", + "IgnoreSpecificDefaultLibraries": "file1;file2", + "LinkLibraryDependencies": "true", + "ModuleDefinitionFile": "a_file_name", + "OutputFile": "a_file_name", + "SuppressStartupBanner": "true", + "UseUnicodeResponseFiles": "true", + }, + "VCManifestTool": { + "AdditionalManifestFiles": "file1;file2", + "AdditionalOptions": "a string1", + "AssemblyIdentity": "a string1", + "ComponentFileName": "a_file_name", + "DependencyInformationFile": "a_file_name", + "GenerateCatalogFiles": "true", + "InputResourceManifests": "a string1", + "ManifestResourceFile": "a_file_name", + "OutputManifestFile": "a_file_name", + "RegistrarScriptFile": "a_file_name", + "ReplacementsFile": "a_file_name", + "SuppressStartupBanner": "true", + "TypeLibraryFile": "a_file_name", + "UpdateFileHashes": "truel", + "UpdateFileHashesSearchPath": "a_file_name", + "UseFAT32Workaround": "true", + "UseUnicodeResponseFiles": "true", + "VerboseOutput": "true", + }, + }, + self.stderr, + ) + self._ExpectedWarnings( + [ + "Warning: for VCCLCompilerTool/BasicRuntimeChecks, " + "index value (5) not in expected range [0, 4)", + "Warning: for VCCLCompilerTool/BrowseInformation, " + "invalid literal for int() with base 10: 'fdkslj'", + "Warning: for VCCLCompilerTool/CallingConvention, " + "index value (-1) not in expected range [0, 4)", + "Warning: for VCCLCompilerTool/DebugInformationFormat, " + "converted value for 2 not specified.", + "Warning: unrecognized setting VCCLCompilerTool/Enableprefast", + "Warning: unrecognized setting VCCLCompilerTool/ZZXYZ", + "Warning: for VCLinkerTool/TargetMachine, " + "converted value for 2 not specified.", + "Warning: unrecognized setting VCMIDLTool/notgood", + "Warning: unrecognized setting VCResourceCompilerTool/notgood2", + "Warning: for 
VCManifestTool/UpdateFileHashes, "
+                "expected bool; got 'truel'"
+                "",
+            ]
+        )
+
+    def testValidateMSBuildSettings_settings(self):
+        """Tests that invalid MSBuild settings produce warnings."""
+        MSVSSettings.ValidateMSBuildSettings(
+            {
+                "ClCompile": {
+                    "AdditionalIncludeDirectories": "folder1;folder2",
+                    "AdditionalOptions": ["string1", "string2"],
+                    "AdditionalUsingDirectories": "folder1;folder2",
+                    "AssemblerListingLocation": "a_file_name",
+                    "AssemblerOutput": "NoListing",
+                    "BasicRuntimeChecks": "StackFrameRuntimeCheck",
+                    "BrowseInformation": "false",
+                    "BrowseInformationFile": "a_file_name",
+                    "BufferSecurityCheck": "true",
+                    "BuildingInIDE": "true",
+                    "CallingConvention": "Cdecl",
+                    "CompileAs": "CompileAsC",
+                    "CompileAsManaged": "true",
+                    "CreateHotpatchableImage": "true",
+                    "DebugInformationFormat": "ProgramDatabase",
+                    "DisableLanguageExtensions": "true",
+                    "DisableSpecificWarnings": "string1;string2",
+                    "EnableEnhancedInstructionSet": "StreamingSIMDExtensions",
+                    "EnableFiberSafeOptimizations": "true",
+                    "EnablePREfast": "true",
+                    "Enableprefast": "bogus",
+                    "ErrorReporting": "Prompt",
+                    "ExceptionHandling": "SyncCThrow",
+                    "ExpandAttributedSource": "true",
+                    "FavorSizeOrSpeed": "Neither",
+                    "FloatingPointExceptions": "true",
+                    "FloatingPointModel": "Precise",
+                    "ForceConformanceInForLoopScope": "true",
+                    "ForcedIncludeFiles": "file1;file2",
+                    "ForcedUsingFiles": "file1;file2",
+                    "FunctionLevelLinking": "false",
+                    "GenerateXMLDocumentationFiles": "true",
+                    "IgnoreStandardIncludePath": "true",
+                    "InlineFunctionExpansion": "OnlyExplicitInline",
+                    "IntrinsicFunctions": "false",
+                    "MinimalRebuild": "true",
+                    "MultiProcessorCompilation": "true",
+                    "ObjectFileName": "a_file_name",
+                    "OmitDefaultLibName": "true",
+                    "OmitFramePointers": "true",
+                    "OpenMPSupport": "true",
+                    "Optimization": "Disabled",
+                    "PrecompiledHeader": "NotUsing",
+                    "PrecompiledHeaderFile": "a_file_name",
+                    "PrecompiledHeaderOutputFile": "a_file_name",
+                    "PreprocessKeepComments": "true",
+                    "PreprocessorDefinitions": "string1;string2",
+                    "PreprocessOutputPath": "a string1",
+                    "PreprocessSuppressLineNumbers": "false",
+                    "PreprocessToFile": "false",
+                    "ProcessorNumber": "33",
+                    "ProgramDataBaseFileName": "a_file_name",
+                    "RuntimeLibrary": "MultiThreaded",
+                    "RuntimeTypeInfo": "true",
+                    "ShowIncludes": "true",
+                    "SmallerTypeCheck": "true",
+                    "StringPooling": "true",
+                    "StructMemberAlignment": "1Byte",
+                    "SuppressStartupBanner": "true",
+                    "TrackerLogDirectory": "a_folder",
+                    "TreatSpecificWarningsAsErrors": "string1;string2",
+                    "TreatWarningAsError": "true",
+                    "TreatWChar_tAsBuiltInType": "true",
+                    "UndefineAllPreprocessorDefinitions": "true",
+                    "UndefinePreprocessorDefinitions": "string1;string2",
+                    "UseFullPaths": "true",
+                    "UseUnicodeForAssemblerListing": "true",
+                    "WarningLevel": "TurnOffAllWarnings",
+                    "WholeProgramOptimization": "true",
+                    "XMLDocumentationFileName": "a_file_name",
+                    "ZZXYZ": "bogus",
+                },
+                "Link": {
+                    "AdditionalDependencies": "file1;file2",
+                    "AdditionalLibraryDirectories": "folder1;folder2",
+                    "AdditionalManifestDependencies": "file1;file2",
+                    "AdditionalOptions": "a string1",
+                    "AddModuleNamesToAssembly": "file1;file2",
+                    "AllowIsolation": "true",
+                    "AssemblyDebug": "",
+                    "AssemblyLinkResource": "file1;file2",
+                    "BaseAddress": "a string1",
+                    "BuildingInIDE": "true",
+                    "CLRImageType": "ForceIJWImage",
+                    "CLRSupportLastError": "Enabled",
+                    "CLRThreadAttribute": "MTAThreadingAttribute",
+                    "CLRUnmanagedCodeCheck": "true",
+                    "CreateHotPatchableImage": "X86Image",
+                    "DataExecutionPrevention": "false",
+                    
"DelayLoadDLLs": "file1;file2", + "DelaySign": "true", + "Driver": "NotSet", + "EmbedManagedResourceFile": "file1;file2", + "EnableCOMDATFolding": "false", + "EnableUAC": "true", + "EntryPointSymbol": "a string1", + "FixedBaseAddress": "false", + "ForceFileOutput": "Enabled", + "ForceSymbolReferences": "file1;file2", + "FunctionOrder": "a_file_name", + "GenerateDebugInformation": "true", + "GenerateMapFile": "true", + "HeapCommitSize": "a string1", + "HeapReserveSize": "a string1", + "IgnoreAllDefaultLibraries": "true", + "IgnoreEmbeddedIDL": "true", + "IgnoreSpecificDefaultLibraries": "a_file_list", + "ImageHasSafeExceptionHandlers": "true", + "ImportLibrary": "a_file_name", + "KeyContainer": "a_file_name", + "KeyFile": "a_file_name", + "LargeAddressAware": "false", + "LinkDLL": "true", + "LinkErrorReporting": "SendErrorReport", + "LinkStatus": "true", + "LinkTimeCodeGeneration": "UseLinkTimeCodeGeneration", + "ManifestFile": "a_file_name", + "MapExports": "true", + "MapFileName": "a_file_name", + "MergedIDLBaseFileName": "a_file_name", + "MergeSections": "a string1", + "MidlCommandFile": "a_file_name", + "MinimumRequiredVersion": "a string1", + "ModuleDefinitionFile": "a_file_name", + "MSDOSStubFileName": "a_file_name", + "NoEntryPoint": "true", + "OptimizeReferences": "false", + "OutputFile": "a_file_name", + "PerUserRedirection": "true", + "PreventDllBinding": "true", + "Profile": "true", + "ProfileGuidedDatabase": "a_file_name", + "ProgramDatabaseFile": "a_file_name", + "RandomizedBaseAddress": "false", + "RegisterOutput": "true", + "SectionAlignment": "33", + "SetChecksum": "true", + "ShowProgress": "LinkVerboseREF", + "SpecifySectionAttributes": "a string1", + "StackCommitSize": "a string1", + "StackReserveSize": "a string1", + "StripPrivateSymbols": "a_file_name", + "SubSystem": "Console", + "SupportNobindOfDelayLoadedDLL": "true", + "SupportUnloadOfDelayLoadedDLL": "true", + "SuppressStartupBanner": "true", + "SwapRunFromCD": "true", + "SwapRunFromNET": "true", + "TargetMachine": "MachineX86", + "TerminalServerAware": "false", + "TrackerLogDirectory": "a_folder", + "TreatLinkerWarningAsErrors": "true", + "TurnOffAssemblyGeneration": "true", + "TypeLibraryFile": "a_file_name", + "TypeLibraryResourceID": "33", + "UACExecutionLevel": "AsInvoker", + "UACUIAccess": "true", + "Version": "a string1", + }, + "ResourceCompile": { + "AdditionalIncludeDirectories": "folder1;folder2", + "AdditionalOptions": "a string1", + "Culture": "0x236", + "IgnoreStandardIncludePath": "true", + "NullTerminateStrings": "true", + "PreprocessorDefinitions": "string1;string2", + "ResourceOutputFileName": "a string1", + "ShowProgress": "true", + "SuppressStartupBanner": "true", + "TrackerLogDirectory": "a_folder", + "UndefinePreprocessorDefinitions": "string1;string2", + }, + "Midl": { + "AdditionalIncludeDirectories": "folder1;folder2", + "AdditionalOptions": "a string1", + "ApplicationConfigurationMode": "true", + "ClientStubFile": "a_file_name", + "CPreprocessOptions": "a string1", + "DefaultCharType": "Signed", + "DllDataFileName": "a_file_name", + "EnableErrorChecks": "EnableCustom", + "ErrorCheckAllocations": "true", + "ErrorCheckBounds": "true", + "ErrorCheckEnumRange": "true", + "ErrorCheckRefPointers": "true", + "ErrorCheckStubData": "true", + "GenerateClientFiles": "Stub", + "GenerateServerFiles": "None", + "GenerateStublessProxies": "true", + "GenerateTypeLibrary": "true", + "HeaderFileName": "a_file_name", + "IgnoreStandardIncludePath": "true", + "InterfaceIdentifierFileName": "a_file_name", + 
"LocaleID": "33", + "MkTypLibCompatible": "true", + "OutputDirectory": "a string1", + "PreprocessorDefinitions": "string1;string2", + "ProxyFileName": "a_file_name", + "RedirectOutputAndErrors": "a_file_name", + "ServerStubFile": "a_file_name", + "StructMemberAlignment": "NotSet", + "SuppressCompilerWarnings": "true", + "SuppressStartupBanner": "true", + "TargetEnvironment": "Itanium", + "TrackerLogDirectory": "a_folder", + "TypeLibFormat": "NewFormat", + "TypeLibraryName": "a_file_name", + "UndefinePreprocessorDefinitions": "string1;string2", + "ValidateAllParameters": "true", + "WarnAsError": "true", + "WarningLevel": "1", + }, + "Lib": { + "AdditionalDependencies": "file1;file2", + "AdditionalLibraryDirectories": "folder1;folder2", + "AdditionalOptions": "a string1", + "DisplayLibrary": "a string1", + "ErrorReporting": "PromptImmediately", + "ExportNamedFunctions": "string1;string2", + "ForceSymbolReferences": "a string1", + "IgnoreAllDefaultLibraries": "true", + "IgnoreSpecificDefaultLibraries": "file1;file2", + "LinkTimeCodeGeneration": "true", + "MinimumRequiredVersion": "a string1", + "ModuleDefinitionFile": "a_file_name", + "Name": "a_file_name", + "OutputFile": "a_file_name", + "RemoveObjects": "file1;file2", + "SubSystem": "Console", + "SuppressStartupBanner": "true", + "TargetMachine": "MachineX86i", + "TrackerLogDirectory": "a_folder", + "TreatLibWarningAsErrors": "true", + "UseUnicodeResponseFiles": "true", + "Verbose": "true", + }, + "Manifest": { + "AdditionalManifestFiles": "file1;file2", + "AdditionalOptions": "a string1", + "AssemblyIdentity": "a string1", + "ComponentFileName": "a_file_name", + "EnableDPIAwareness": "fal", + "GenerateCatalogFiles": "truel", + "GenerateCategoryTags": "true", + "InputResourceManifests": "a string1", + "ManifestFromManagedAssembly": "a_file_name", + "notgood3": "bogus", + "OutputManifestFile": "a_file_name", + "OutputResourceManifests": "a string1", + "RegistrarScriptFile": "a_file_name", + "ReplacementsFile": "a_file_name", + "SuppressDependencyElement": "true", + "SuppressStartupBanner": "true", + "TrackerLogDirectory": "a_folder", + "TypeLibraryFile": "a_file_name", + "UpdateFileHashes": "true", + "UpdateFileHashesSearchPath": "a_file_name", + "VerboseOutput": "true", + }, + "ProjectReference": { + "LinkLibraryDependencies": "true", + "UseLibraryDependencyInputs": "true", + }, + "ManifestResourceCompile": {"ResourceOutputFileName": "a_file_name"}, + "": { + "EmbedManifest": "true", + "GenerateManifest": "true", + "IgnoreImportLibrary": "true", + "LinkIncremental": "false", + }, + }, + self.stderr, + ) + self._ExpectedWarnings( + [ + "Warning: unrecognized setting ClCompile/Enableprefast", + "Warning: unrecognized setting ClCompile/ZZXYZ", + "Warning: unrecognized setting Manifest/notgood3", + "Warning: for Manifest/GenerateCatalogFiles, " + "expected bool; got 'truel'", + "Warning: for Lib/TargetMachine, unrecognized enumerated value " + "MachineX86i", + "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'", + ] + ) + + def testConvertToMSBuildSettings_empty(self): + """Tests an empty conversion.""" + msvs_settings = {} + expected_msbuild_settings = {} + actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( + msvs_settings, self.stderr + ) + self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) + self._ExpectedWarnings([]) + + def testConvertToMSBuildSettings_minimal(self): + """Tests a minimal conversion.""" + msvs_settings = { + "VCCLCompilerTool": { + "AdditionalIncludeDirectories": "dir1", + 
"AdditionalOptions": "/foo", + "BasicRuntimeChecks": "0", + }, + "VCLinkerTool": { + "LinkTimeCodeGeneration": "1", + "ErrorReporting": "1", + "DataExecutionPrevention": "2", + }, + } + expected_msbuild_settings = { + "ClCompile": { + "AdditionalIncludeDirectories": "dir1", + "AdditionalOptions": "/foo", + "BasicRuntimeChecks": "Default", + }, + "Link": { + "LinkTimeCodeGeneration": "UseLinkTimeCodeGeneration", + "LinkErrorReporting": "PromptImmediately", + "DataExecutionPrevention": "true", + }, + } + actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( + msvs_settings, self.stderr + ) + self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) + self._ExpectedWarnings([]) + + def testConvertToMSBuildSettings_warnings(self): + """Tests conversion that generates warnings.""" + msvs_settings = { + "VCCLCompilerTool": { + "AdditionalIncludeDirectories": "1", + "AdditionalOptions": "2", + # These are incorrect values: + "BasicRuntimeChecks": "12", + "BrowseInformation": "21", + "UsePrecompiledHeader": "13", + "GeneratePreprocessedFile": "14", + }, + "VCLinkerTool": { + # These are incorrect values: + "Driver": "10", + "LinkTimeCodeGeneration": "31", + "ErrorReporting": "21", + "FixedBaseAddress": "6", + }, + "VCResourceCompilerTool": { + # Custom + "Culture": "1003" + }, + } + expected_msbuild_settings = { + "ClCompile": { + "AdditionalIncludeDirectories": "1", + "AdditionalOptions": "2", + }, + "Link": {}, + "ResourceCompile": { + # Custom + "Culture": "0x03eb" + }, + } + actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( + msvs_settings, self.stderr + ) + self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) + self._ExpectedWarnings( + [ + "Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to " + "MSBuild, index value (12) not in expected range [0, 4)", + "Warning: while converting VCCLCompilerTool/BrowseInformation to " + "MSBuild, index value (21) not in expected range [0, 3)", + "Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to " + "MSBuild, index value (13) not in expected range [0, 3)", + "Warning: while converting " + "VCCLCompilerTool/GeneratePreprocessedFile to " + "MSBuild, value must be one of [0, 1, 2]; got 14", + "Warning: while converting VCLinkerTool/Driver to " + "MSBuild, index value (10) not in expected range [0, 4)", + "Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to " + "MSBuild, index value (31) not in expected range [0, 5)", + "Warning: while converting VCLinkerTool/ErrorReporting to " + "MSBuild, index value (21) not in expected range [0, 3)", + "Warning: while converting VCLinkerTool/FixedBaseAddress to " + "MSBuild, index value (6) not in expected range [0, 3)", + ] + ) + + def testConvertToMSBuildSettings_full_synthetic(self): + """Tests conversion of all the MSBuild settings.""" + msvs_settings = { + "VCCLCompilerTool": { + "AdditionalIncludeDirectories": "folder1;folder2;folder3", + "AdditionalOptions": "a_string", + "AdditionalUsingDirectories": "folder1;folder2;folder3", + "AssemblerListingLocation": "a_file_name", + "AssemblerOutput": "0", + "BasicRuntimeChecks": "1", + "BrowseInformation": "2", + "BrowseInformationFile": "a_file_name", + "BufferSecurityCheck": "true", + "CallingConvention": "0", + "CompileAs": "1", + "DebugInformationFormat": "4", + "DefaultCharIsUnsigned": "true", + "Detect64BitPortabilityProblems": "true", + "DisableLanguageExtensions": "true", + "DisableSpecificWarnings": "d1;d2;d3", + "EnableEnhancedInstructionSet": "0", + 
"EnableFiberSafeOptimizations": "true", + "EnableFunctionLevelLinking": "true", + "EnableIntrinsicFunctions": "true", + "EnablePREfast": "true", + "ErrorReporting": "1", + "ExceptionHandling": "2", + "ExpandAttributedSource": "true", + "FavorSizeOrSpeed": "0", + "FloatingPointExceptions": "true", + "FloatingPointModel": "1", + "ForceConformanceInForLoopScope": "true", + "ForcedIncludeFiles": "file1;file2;file3", + "ForcedUsingFiles": "file1;file2;file3", + "GeneratePreprocessedFile": "1", + "GenerateXMLDocumentationFiles": "true", + "IgnoreStandardIncludePath": "true", + "InlineFunctionExpansion": "2", + "KeepComments": "true", + "MinimalRebuild": "true", + "ObjectFile": "a_file_name", + "OmitDefaultLibName": "true", + "OmitFramePointers": "true", + "OpenMP": "true", + "Optimization": "3", + "PrecompiledHeaderFile": "a_file_name", + "PrecompiledHeaderThrough": "a_file_name", + "PreprocessorDefinitions": "d1;d2;d3", + "ProgramDataBaseFileName": "a_file_name", + "RuntimeLibrary": "0", + "RuntimeTypeInfo": "true", + "ShowIncludes": "true", + "SmallerTypeCheck": "true", + "StringPooling": "true", + "StructMemberAlignment": "1", + "SuppressStartupBanner": "true", + "TreatWChar_tAsBuiltInType": "true", + "UndefineAllPreprocessorDefinitions": "true", + "UndefinePreprocessorDefinitions": "d1;d2;d3", + "UseFullPaths": "true", + "UsePrecompiledHeader": "1", + "UseUnicodeResponseFiles": "true", + "WarnAsError": "true", + "WarningLevel": "2", + "WholeProgramOptimization": "true", + "XMLDocumentationFileName": "a_file_name", + }, + "VCLinkerTool": { + "AdditionalDependencies": "file1;file2;file3", + "AdditionalLibraryDirectories": "folder1;folder2;folder3", + "AdditionalLibraryDirectories_excluded": "folder1;folder2;folder3", + "AdditionalManifestDependencies": "file1;file2;file3", + "AdditionalOptions": "a_string", + "AddModuleNamesToAssembly": "file1;file2;file3", + "AllowIsolation": "true", + "AssemblyDebug": "0", + "AssemblyLinkResource": "file1;file2;file3", + "BaseAddress": "a_string", + "CLRImageType": "1", + "CLRThreadAttribute": "2", + "CLRUnmanagedCodeCheck": "true", + "DataExecutionPrevention": "0", + "DelayLoadDLLs": "file1;file2;file3", + "DelaySign": "true", + "Driver": "1", + "EmbedManagedResourceFile": "file1;file2;file3", + "EnableCOMDATFolding": "0", + "EnableUAC": "true", + "EntryPointSymbol": "a_string", + "ErrorReporting": "0", + "FixedBaseAddress": "1", + "ForceSymbolReferences": "file1;file2;file3", + "FunctionOrder": "a_file_name", + "GenerateDebugInformation": "true", + "GenerateManifest": "true", + "GenerateMapFile": "true", + "HeapCommitSize": "a_string", + "HeapReserveSize": "a_string", + "IgnoreAllDefaultLibraries": "true", + "IgnoreDefaultLibraryNames": "file1;file2;file3", + "IgnoreEmbeddedIDL": "true", + "IgnoreImportLibrary": "true", + "ImportLibrary": "a_file_name", + "KeyContainer": "a_file_name", + "KeyFile": "a_file_name", + "LargeAddressAware": "2", + "LinkIncremental": "1", + "LinkLibraryDependencies": "true", + "LinkTimeCodeGeneration": "2", + "ManifestFile": "a_file_name", + "MapExports": "true", + "MapFileName": "a_file_name", + "MergedIDLBaseFileName": "a_file_name", + "MergeSections": "a_string", + "MidlCommandFile": "a_file_name", + "ModuleDefinitionFile": "a_file_name", + "OptimizeForWindows98": "1", + "OptimizeReferences": "0", + "OutputFile": "a_file_name", + "PerUserRedirection": "true", + "Profile": "true", + "ProfileGuidedDatabase": "a_file_name", + "ProgramDatabaseFile": "a_file_name", + "RandomizedBaseAddress": "1", + "RegisterOutput": "true", + 
"ResourceOnlyDLL": "true", + "SetChecksum": "true", + "ShowProgress": "0", + "StackCommitSize": "a_string", + "StackReserveSize": "a_string", + "StripPrivateSymbols": "a_file_name", + "SubSystem": "2", + "SupportUnloadOfDelayLoadedDLL": "true", + "SuppressStartupBanner": "true", + "SwapRunFromCD": "true", + "SwapRunFromNet": "true", + "TargetMachine": "3", + "TerminalServerAware": "2", + "TurnOffAssemblyGeneration": "true", + "TypeLibraryFile": "a_file_name", + "TypeLibraryResourceID": "33", + "UACExecutionLevel": "1", + "UACUIAccess": "true", + "UseLibraryDependencyInputs": "false", + "UseUnicodeResponseFiles": "true", + "Version": "a_string", + }, + "VCResourceCompilerTool": { + "AdditionalIncludeDirectories": "folder1;folder2;folder3", + "AdditionalOptions": "a_string", + "Culture": "1003", + "IgnoreStandardIncludePath": "true", + "PreprocessorDefinitions": "d1;d2;d3", + "ResourceOutputFileName": "a_string", + "ShowProgress": "true", + "SuppressStartupBanner": "true", + "UndefinePreprocessorDefinitions": "d1;d2;d3", + }, + "VCMIDLTool": { + "AdditionalIncludeDirectories": "folder1;folder2;folder3", + "AdditionalOptions": "a_string", + "CPreprocessOptions": "a_string", + "DefaultCharType": "0", + "DLLDataFileName": "a_file_name", + "EnableErrorChecks": "2", + "ErrorCheckAllocations": "true", + "ErrorCheckBounds": "true", + "ErrorCheckEnumRange": "true", + "ErrorCheckRefPointers": "true", + "ErrorCheckStubData": "true", + "GenerateStublessProxies": "true", + "GenerateTypeLibrary": "true", + "HeaderFileName": "a_file_name", + "IgnoreStandardIncludePath": "true", + "InterfaceIdentifierFileName": "a_file_name", + "MkTypLibCompatible": "true", + "OutputDirectory": "a_string", + "PreprocessorDefinitions": "d1;d2;d3", + "ProxyFileName": "a_file_name", + "RedirectOutputAndErrors": "a_file_name", + "StructMemberAlignment": "3", + "SuppressStartupBanner": "true", + "TargetEnvironment": "1", + "TypeLibraryName": "a_file_name", + "UndefinePreprocessorDefinitions": "d1;d2;d3", + "ValidateParameters": "true", + "WarnAsError": "true", + "WarningLevel": "4", + }, + "VCLibrarianTool": { + "AdditionalDependencies": "file1;file2;file3", + "AdditionalLibraryDirectories": "folder1;folder2;folder3", + "AdditionalLibraryDirectories_excluded": "folder1;folder2;folder3", + "AdditionalOptions": "a_string", + "ExportNamedFunctions": "d1;d2;d3", + "ForceSymbolReferences": "a_string", + "IgnoreAllDefaultLibraries": "true", + "IgnoreSpecificDefaultLibraries": "file1;file2;file3", + "LinkLibraryDependencies": "true", + "ModuleDefinitionFile": "a_file_name", + "OutputFile": "a_file_name", + "SuppressStartupBanner": "true", + "UseUnicodeResponseFiles": "true", + }, + "VCManifestTool": { + "AdditionalManifestFiles": "file1;file2;file3", + "AdditionalOptions": "a_string", + "AssemblyIdentity": "a_string", + "ComponentFileName": "a_file_name", + "DependencyInformationFile": "a_file_name", + "EmbedManifest": "true", + "GenerateCatalogFiles": "true", + "InputResourceManifests": "a_string", + "ManifestResourceFile": "my_name", + "OutputManifestFile": "a_file_name", + "RegistrarScriptFile": "a_file_name", + "ReplacementsFile": "a_file_name", + "SuppressStartupBanner": "true", + "TypeLibraryFile": "a_file_name", + "UpdateFileHashes": "true", + "UpdateFileHashesSearchPath": "a_file_name", + "UseFAT32Workaround": "true", + "UseUnicodeResponseFiles": "true", + "VerboseOutput": "true", + }, + } + expected_msbuild_settings = { + "ClCompile": { + "AdditionalIncludeDirectories": "folder1;folder2;folder3", + "AdditionalOptions": 
"a_string /J", + "AdditionalUsingDirectories": "folder1;folder2;folder3", + "AssemblerListingLocation": "a_file_name", + "AssemblerOutput": "NoListing", + "BasicRuntimeChecks": "StackFrameRuntimeCheck", + "BrowseInformation": "true", + "BrowseInformationFile": "a_file_name", + "BufferSecurityCheck": "true", + "CallingConvention": "Cdecl", + "CompileAs": "CompileAsC", + "DebugInformationFormat": "EditAndContinue", + "DisableLanguageExtensions": "true", + "DisableSpecificWarnings": "d1;d2;d3", + "EnableEnhancedInstructionSet": "NotSet", + "EnableFiberSafeOptimizations": "true", + "EnablePREfast": "true", + "ErrorReporting": "Prompt", + "ExceptionHandling": "Async", + "ExpandAttributedSource": "true", + "FavorSizeOrSpeed": "Neither", + "FloatingPointExceptions": "true", + "FloatingPointModel": "Strict", + "ForceConformanceInForLoopScope": "true", + "ForcedIncludeFiles": "file1;file2;file3", + "ForcedUsingFiles": "file1;file2;file3", + "FunctionLevelLinking": "true", + "GenerateXMLDocumentationFiles": "true", + "IgnoreStandardIncludePath": "true", + "InlineFunctionExpansion": "AnySuitable", + "IntrinsicFunctions": "true", + "MinimalRebuild": "true", + "ObjectFileName": "a_file_name", + "OmitDefaultLibName": "true", + "OmitFramePointers": "true", + "OpenMPSupport": "true", + "Optimization": "Full", + "PrecompiledHeader": "Create", + "PrecompiledHeaderFile": "a_file_name", + "PrecompiledHeaderOutputFile": "a_file_name", + "PreprocessKeepComments": "true", + "PreprocessorDefinitions": "d1;d2;d3", + "PreprocessSuppressLineNumbers": "false", + "PreprocessToFile": "true", + "ProgramDataBaseFileName": "a_file_name", + "RuntimeLibrary": "MultiThreaded", + "RuntimeTypeInfo": "true", + "ShowIncludes": "true", + "SmallerTypeCheck": "true", + "StringPooling": "true", + "StructMemberAlignment": "1Byte", + "SuppressStartupBanner": "true", + "TreatWarningAsError": "true", + "TreatWChar_tAsBuiltInType": "true", + "UndefineAllPreprocessorDefinitions": "true", + "UndefinePreprocessorDefinitions": "d1;d2;d3", + "UseFullPaths": "true", + "WarningLevel": "Level2", + "WholeProgramOptimization": "true", + "XMLDocumentationFileName": "a_file_name", + }, + "Link": { + "AdditionalDependencies": "file1;file2;file3", + "AdditionalLibraryDirectories": "folder1;folder2;folder3", + "AdditionalManifestDependencies": "file1;file2;file3", + "AdditionalOptions": "a_string", + "AddModuleNamesToAssembly": "file1;file2;file3", + "AllowIsolation": "true", + "AssemblyDebug": "", + "AssemblyLinkResource": "file1;file2;file3", + "BaseAddress": "a_string", + "CLRImageType": "ForceIJWImage", + "CLRThreadAttribute": "STAThreadingAttribute", + "CLRUnmanagedCodeCheck": "true", + "DataExecutionPrevention": "", + "DelayLoadDLLs": "file1;file2;file3", + "DelaySign": "true", + "Driver": "Driver", + "EmbedManagedResourceFile": "file1;file2;file3", + "EnableCOMDATFolding": "", + "EnableUAC": "true", + "EntryPointSymbol": "a_string", + "FixedBaseAddress": "false", + "ForceSymbolReferences": "file1;file2;file3", + "FunctionOrder": "a_file_name", + "GenerateDebugInformation": "true", + "GenerateMapFile": "true", + "HeapCommitSize": "a_string", + "HeapReserveSize": "a_string", + "IgnoreAllDefaultLibraries": "true", + "IgnoreEmbeddedIDL": "true", + "IgnoreSpecificDefaultLibraries": "file1;file2;file3", + "ImportLibrary": "a_file_name", + "KeyContainer": "a_file_name", + "KeyFile": "a_file_name", + "LargeAddressAware": "true", + "LinkErrorReporting": "NoErrorReport", + "LinkTimeCodeGeneration": "PGInstrument", + "ManifestFile": "a_file_name", + 
"MapExports": "true", + "MapFileName": "a_file_name", + "MergedIDLBaseFileName": "a_file_name", + "MergeSections": "a_string", + "MidlCommandFile": "a_file_name", + "ModuleDefinitionFile": "a_file_name", + "NoEntryPoint": "true", + "OptimizeReferences": "", + "OutputFile": "a_file_name", + "PerUserRedirection": "true", + "Profile": "true", + "ProfileGuidedDatabase": "a_file_name", + "ProgramDatabaseFile": "a_file_name", + "RandomizedBaseAddress": "false", + "RegisterOutput": "true", + "SetChecksum": "true", + "ShowProgress": "NotSet", + "StackCommitSize": "a_string", + "StackReserveSize": "a_string", + "StripPrivateSymbols": "a_file_name", + "SubSystem": "Windows", + "SupportUnloadOfDelayLoadedDLL": "true", + "SuppressStartupBanner": "true", + "SwapRunFromCD": "true", + "SwapRunFromNET": "true", + "TargetMachine": "MachineARM", + "TerminalServerAware": "true", + "TurnOffAssemblyGeneration": "true", + "TypeLibraryFile": "a_file_name", + "TypeLibraryResourceID": "33", + "UACExecutionLevel": "HighestAvailable", + "UACUIAccess": "true", + "Version": "a_string", + }, + "ResourceCompile": { + "AdditionalIncludeDirectories": "folder1;folder2;folder3", + "AdditionalOptions": "a_string", + "Culture": "0x03eb", + "IgnoreStandardIncludePath": "true", + "PreprocessorDefinitions": "d1;d2;d3", + "ResourceOutputFileName": "a_string", + "ShowProgress": "true", + "SuppressStartupBanner": "true", + "UndefinePreprocessorDefinitions": "d1;d2;d3", + }, + "Midl": { + "AdditionalIncludeDirectories": "folder1;folder2;folder3", + "AdditionalOptions": "a_string", + "CPreprocessOptions": "a_string", + "DefaultCharType": "Unsigned", + "DllDataFileName": "a_file_name", + "EnableErrorChecks": "All", + "ErrorCheckAllocations": "true", + "ErrorCheckBounds": "true", + "ErrorCheckEnumRange": "true", + "ErrorCheckRefPointers": "true", + "ErrorCheckStubData": "true", + "GenerateStublessProxies": "true", + "GenerateTypeLibrary": "true", + "HeaderFileName": "a_file_name", + "IgnoreStandardIncludePath": "true", + "InterfaceIdentifierFileName": "a_file_name", + "MkTypLibCompatible": "true", + "OutputDirectory": "a_string", + "PreprocessorDefinitions": "d1;d2;d3", + "ProxyFileName": "a_file_name", + "RedirectOutputAndErrors": "a_file_name", + "StructMemberAlignment": "4", + "SuppressStartupBanner": "true", + "TargetEnvironment": "Win32", + "TypeLibraryName": "a_file_name", + "UndefinePreprocessorDefinitions": "d1;d2;d3", + "ValidateAllParameters": "true", + "WarnAsError": "true", + "WarningLevel": "4", + }, + "Lib": { + "AdditionalDependencies": "file1;file2;file3", + "AdditionalLibraryDirectories": "folder1;folder2;folder3", + "AdditionalOptions": "a_string", + "ExportNamedFunctions": "d1;d2;d3", + "ForceSymbolReferences": "a_string", + "IgnoreAllDefaultLibraries": "true", + "IgnoreSpecificDefaultLibraries": "file1;file2;file3", + "ModuleDefinitionFile": "a_file_name", + "OutputFile": "a_file_name", + "SuppressStartupBanner": "true", + "UseUnicodeResponseFiles": "true", + }, + "Manifest": { + "AdditionalManifestFiles": "file1;file2;file3", + "AdditionalOptions": "a_string", + "AssemblyIdentity": "a_string", + "ComponentFileName": "a_file_name", + "GenerateCatalogFiles": "true", + "InputResourceManifests": "a_string", + "OutputManifestFile": "a_file_name", + "RegistrarScriptFile": "a_file_name", + "ReplacementsFile": "a_file_name", + "SuppressStartupBanner": "true", + "TypeLibraryFile": "a_file_name", + "UpdateFileHashes": "true", + "UpdateFileHashesSearchPath": "a_file_name", + "VerboseOutput": "true", + }, + 
"ManifestResourceCompile": {"ResourceOutputFileName": "my_name"}, + "ProjectReference": { + "LinkLibraryDependencies": "true", + "UseLibraryDependencyInputs": "false", + }, + "": { + "EmbedManifest": "true", + "GenerateManifest": "true", + "IgnoreImportLibrary": "true", + "LinkIncremental": "false", + }, + } + self.maxDiff = 9999 # on failure display a long diff + actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( + msvs_settings, self.stderr + ) + self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) + self._ExpectedWarnings([]) + + def testConvertToMSBuildSettings_actual(self): + """Tests the conversion of an actual project. + + A VS2008 project with most of the options defined was created through the + VS2008 IDE. It was then converted to VS2010. The tool settings found in + the .vcproj and .vcxproj files were converted to the two dictionaries + msvs_settings and expected_msbuild_settings. + + Note that for many settings, the VS2010 converter adds macros like + %(AdditionalIncludeDirectories) to make sure than inherited values are + included. Since the Gyp projects we generate do not use inheritance, + we removed these macros. They were: + ClCompile: + AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)' + AdditionalOptions: ' %(AdditionalOptions)' + AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)' + DisableSpecificWarnings: ';%(DisableSpecificWarnings)', + ForcedIncludeFiles: ';%(ForcedIncludeFiles)', + ForcedUsingFiles: ';%(ForcedUsingFiles)', + PreprocessorDefinitions: ';%(PreprocessorDefinitions)', + UndefinePreprocessorDefinitions: + ';%(UndefinePreprocessorDefinitions)', + Link: + AdditionalDependencies: ';%(AdditionalDependencies)', + AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)', + AdditionalManifestDependencies: + ';%(AdditionalManifestDependencies)', + AdditionalOptions: ' %(AdditionalOptions)', + AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)', + AssemblyLinkResource: ';%(AssemblyLinkResource)', + DelayLoadDLLs: ';%(DelayLoadDLLs)', + EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)', + ForceSymbolReferences: ';%(ForceSymbolReferences)', + IgnoreSpecificDefaultLibraries: + ';%(IgnoreSpecificDefaultLibraries)', + ResourceCompile: + AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)', + AdditionalOptions: ' %(AdditionalOptions)', + PreprocessorDefinitions: ';%(PreprocessorDefinitions)', + Manifest: + AdditionalManifestFiles: ';%(AdditionalManifestFiles)', + AdditionalOptions: ' %(AdditionalOptions)', + InputResourceManifests: ';%(InputResourceManifests)', + """ + msvs_settings = { + "VCCLCompilerTool": { + "AdditionalIncludeDirectories": "dir1", + "AdditionalOptions": "/more", + "AdditionalUsingDirectories": "test", + "AssemblerListingLocation": "$(IntDir)\\a", + "AssemblerOutput": "1", + "BasicRuntimeChecks": "3", + "BrowseInformation": "1", + "BrowseInformationFile": "$(IntDir)\\e", + "BufferSecurityCheck": "false", + "CallingConvention": "1", + "CompileAs": "1", + "DebugInformationFormat": "4", + "DefaultCharIsUnsigned": "true", + "Detect64BitPortabilityProblems": "true", + "DisableLanguageExtensions": "true", + "DisableSpecificWarnings": "abc", + "EnableEnhancedInstructionSet": "1", + "EnableFiberSafeOptimizations": "true", + "EnableFunctionLevelLinking": "true", + "EnableIntrinsicFunctions": "true", + "EnablePREfast": "true", + "ErrorReporting": "2", + "ExceptionHandling": "2", + "ExpandAttributedSource": "true", + "FavorSizeOrSpeed": "2", + "FloatingPointExceptions": 
"true", + "FloatingPointModel": "1", + "ForceConformanceInForLoopScope": "false", + "ForcedIncludeFiles": "def", + "ForcedUsingFiles": "ge", + "GeneratePreprocessedFile": "2", + "GenerateXMLDocumentationFiles": "true", + "IgnoreStandardIncludePath": "true", + "InlineFunctionExpansion": "1", + "KeepComments": "true", + "MinimalRebuild": "true", + "ObjectFile": "$(IntDir)\\b", + "OmitDefaultLibName": "true", + "OmitFramePointers": "true", + "OpenMP": "true", + "Optimization": "3", + "PrecompiledHeaderFile": "$(IntDir)\\$(TargetName).pche", + "PrecompiledHeaderThrough": "StdAfx.hd", + "PreprocessorDefinitions": "WIN32;_DEBUG;_CONSOLE", + "ProgramDataBaseFileName": "$(IntDir)\\vc90b.pdb", + "RuntimeLibrary": "3", + "RuntimeTypeInfo": "false", + "ShowIncludes": "true", + "SmallerTypeCheck": "true", + "StringPooling": "true", + "StructMemberAlignment": "3", + "SuppressStartupBanner": "false", + "TreatWChar_tAsBuiltInType": "false", + "UndefineAllPreprocessorDefinitions": "true", + "UndefinePreprocessorDefinitions": "wer", + "UseFullPaths": "true", + "UsePrecompiledHeader": "0", + "UseUnicodeResponseFiles": "false", + "WarnAsError": "true", + "WarningLevel": "3", + "WholeProgramOptimization": "true", + "XMLDocumentationFileName": "$(IntDir)\\c", + }, + "VCLinkerTool": { + "AdditionalDependencies": "zx", + "AdditionalLibraryDirectories": "asd", + "AdditionalManifestDependencies": "s2", + "AdditionalOptions": "/mor2", + "AddModuleNamesToAssembly": "d1", + "AllowIsolation": "false", + "AssemblyDebug": "1", + "AssemblyLinkResource": "d5", + "BaseAddress": "23423", + "CLRImageType": "3", + "CLRThreadAttribute": "1", + "CLRUnmanagedCodeCheck": "true", + "DataExecutionPrevention": "0", + "DelayLoadDLLs": "d4", + "DelaySign": "true", + "Driver": "2", + "EmbedManagedResourceFile": "d2", + "EnableCOMDATFolding": "1", + "EnableUAC": "false", + "EntryPointSymbol": "f5", + "ErrorReporting": "2", + "FixedBaseAddress": "1", + "ForceSymbolReferences": "d3", + "FunctionOrder": "fssdfsd", + "GenerateDebugInformation": "true", + "GenerateManifest": "false", + "GenerateMapFile": "true", + "HeapCommitSize": "13", + "HeapReserveSize": "12", + "IgnoreAllDefaultLibraries": "true", + "IgnoreDefaultLibraryNames": "flob;flok", + "IgnoreEmbeddedIDL": "true", + "IgnoreImportLibrary": "true", + "ImportLibrary": "f4", + "KeyContainer": "f7", + "KeyFile": "f6", + "LargeAddressAware": "2", + "LinkIncremental": "0", + "LinkLibraryDependencies": "false", + "LinkTimeCodeGeneration": "1", + "ManifestFile": "$(IntDir)\\$(TargetFileName).2intermediate.manifest", + "MapExports": "true", + "MapFileName": "d5", + "MergedIDLBaseFileName": "f2", + "MergeSections": "f5", + "MidlCommandFile": "f1", + "ModuleDefinitionFile": "sdsd", + "OptimizeForWindows98": "2", + "OptimizeReferences": "2", + "OutputFile": "$(OutDir)\\$(ProjectName)2.exe", + "PerUserRedirection": "true", + "Profile": "true", + "ProfileGuidedDatabase": "$(TargetDir)$(TargetName).pgdd", + "ProgramDatabaseFile": "Flob.pdb", + "RandomizedBaseAddress": "1", + "RegisterOutput": "true", + "ResourceOnlyDLL": "true", + "SetChecksum": "false", + "ShowProgress": "1", + "StackCommitSize": "15", + "StackReserveSize": "14", + "StripPrivateSymbols": "d3", + "SubSystem": "1", + "SupportUnloadOfDelayLoadedDLL": "true", + "SuppressStartupBanner": "false", + "SwapRunFromCD": "true", + "SwapRunFromNet": "true", + "TargetMachine": "1", + "TerminalServerAware": "1", + "TurnOffAssemblyGeneration": "true", + "TypeLibraryFile": "f3", + "TypeLibraryResourceID": "12", + "UACExecutionLevel": "2", + 
"UACUIAccess": "true", + "UseLibraryDependencyInputs": "true", + "UseUnicodeResponseFiles": "false", + "Version": "333", + }, + "VCResourceCompilerTool": { + "AdditionalIncludeDirectories": "f3", + "AdditionalOptions": "/more3", + "Culture": "3084", + "IgnoreStandardIncludePath": "true", + "PreprocessorDefinitions": "_UNICODE;UNICODE2", + "ResourceOutputFileName": "$(IntDir)/$(InputName)3.res", + "ShowProgress": "true", + }, + "VCManifestTool": { + "AdditionalManifestFiles": "sfsdfsd", + "AdditionalOptions": "afdsdafsd", + "AssemblyIdentity": "sddfdsadfsa", + "ComponentFileName": "fsdfds", + "DependencyInformationFile": "$(IntDir)\\mt.depdfd", + "EmbedManifest": "false", + "GenerateCatalogFiles": "true", + "InputResourceManifests": "asfsfdafs", + "ManifestResourceFile": + "$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf", + "OutputManifestFile": "$(TargetPath).manifestdfs", + "RegistrarScriptFile": "sdfsfd", + "ReplacementsFile": "sdffsd", + "SuppressStartupBanner": "false", + "TypeLibraryFile": "sfsd", + "UpdateFileHashes": "true", + "UpdateFileHashesSearchPath": "sfsd", + "UseFAT32Workaround": "true", + "UseUnicodeResponseFiles": "false", + "VerboseOutput": "true", + }, + } + expected_msbuild_settings = { + "ClCompile": { + "AdditionalIncludeDirectories": "dir1", + "AdditionalOptions": "/more /J", + "AdditionalUsingDirectories": "test", + "AssemblerListingLocation": "$(IntDir)a", + "AssemblerOutput": "AssemblyCode", + "BasicRuntimeChecks": "EnableFastChecks", + "BrowseInformation": "true", + "BrowseInformationFile": "$(IntDir)e", + "BufferSecurityCheck": "false", + "CallingConvention": "FastCall", + "CompileAs": "CompileAsC", + "DebugInformationFormat": "EditAndContinue", + "DisableLanguageExtensions": "true", + "DisableSpecificWarnings": "abc", + "EnableEnhancedInstructionSet": "StreamingSIMDExtensions", + "EnableFiberSafeOptimizations": "true", + "EnablePREfast": "true", + "ErrorReporting": "Queue", + "ExceptionHandling": "Async", + "ExpandAttributedSource": "true", + "FavorSizeOrSpeed": "Size", + "FloatingPointExceptions": "true", + "FloatingPointModel": "Strict", + "ForceConformanceInForLoopScope": "false", + "ForcedIncludeFiles": "def", + "ForcedUsingFiles": "ge", + "FunctionLevelLinking": "true", + "GenerateXMLDocumentationFiles": "true", + "IgnoreStandardIncludePath": "true", + "InlineFunctionExpansion": "OnlyExplicitInline", + "IntrinsicFunctions": "true", + "MinimalRebuild": "true", + "ObjectFileName": "$(IntDir)b", + "OmitDefaultLibName": "true", + "OmitFramePointers": "true", + "OpenMPSupport": "true", + "Optimization": "Full", + "PrecompiledHeader": "NotUsing", # Actual conversion gives '' + "PrecompiledHeaderFile": "StdAfx.hd", + "PrecompiledHeaderOutputFile": "$(IntDir)$(TargetName).pche", + "PreprocessKeepComments": "true", + "PreprocessorDefinitions": "WIN32;_DEBUG;_CONSOLE", + "PreprocessSuppressLineNumbers": "true", + "PreprocessToFile": "true", + "ProgramDataBaseFileName": "$(IntDir)vc90b.pdb", + "RuntimeLibrary": "MultiThreadedDebugDLL", + "RuntimeTypeInfo": "false", + "ShowIncludes": "true", + "SmallerTypeCheck": "true", + "StringPooling": "true", + "StructMemberAlignment": "4Bytes", + "SuppressStartupBanner": "false", + "TreatWarningAsError": "true", + "TreatWChar_tAsBuiltInType": "false", + "UndefineAllPreprocessorDefinitions": "true", + "UndefinePreprocessorDefinitions": "wer", + "UseFullPaths": "true", + "WarningLevel": "Level3", + "WholeProgramOptimization": "true", + "XMLDocumentationFileName": "$(IntDir)c", + }, + "Link": { + "AdditionalDependencies": 
"zx", + "AdditionalLibraryDirectories": "asd", + "AdditionalManifestDependencies": "s2", + "AdditionalOptions": "/mor2", + "AddModuleNamesToAssembly": "d1", + "AllowIsolation": "false", + "AssemblyDebug": "true", + "AssemblyLinkResource": "d5", + "BaseAddress": "23423", + "CLRImageType": "ForceSafeILImage", + "CLRThreadAttribute": "MTAThreadingAttribute", + "CLRUnmanagedCodeCheck": "true", + "DataExecutionPrevention": "", + "DelayLoadDLLs": "d4", + "DelaySign": "true", + "Driver": "UpOnly", + "EmbedManagedResourceFile": "d2", + "EnableCOMDATFolding": "false", + "EnableUAC": "false", + "EntryPointSymbol": "f5", + "FixedBaseAddress": "false", + "ForceSymbolReferences": "d3", + "FunctionOrder": "fssdfsd", + "GenerateDebugInformation": "true", + "GenerateMapFile": "true", + "HeapCommitSize": "13", + "HeapReserveSize": "12", + "IgnoreAllDefaultLibraries": "true", + "IgnoreEmbeddedIDL": "true", + "IgnoreSpecificDefaultLibraries": "flob;flok", + "ImportLibrary": "f4", + "KeyContainer": "f7", + "KeyFile": "f6", + "LargeAddressAware": "true", + "LinkErrorReporting": "QueueForNextLogin", + "LinkTimeCodeGeneration": "UseLinkTimeCodeGeneration", + "ManifestFile": "$(IntDir)$(TargetFileName).2intermediate.manifest", + "MapExports": "true", + "MapFileName": "d5", + "MergedIDLBaseFileName": "f2", + "MergeSections": "f5", + "MidlCommandFile": "f1", + "ModuleDefinitionFile": "sdsd", + "NoEntryPoint": "true", + "OptimizeReferences": "true", + "OutputFile": "$(OutDir)$(ProjectName)2.exe", + "PerUserRedirection": "true", + "Profile": "true", + "ProfileGuidedDatabase": "$(TargetDir)$(TargetName).pgdd", + "ProgramDatabaseFile": "Flob.pdb", + "RandomizedBaseAddress": "false", + "RegisterOutput": "true", + "SetChecksum": "false", + "ShowProgress": "LinkVerbose", + "StackCommitSize": "15", + "StackReserveSize": "14", + "StripPrivateSymbols": "d3", + "SubSystem": "Console", + "SupportUnloadOfDelayLoadedDLL": "true", + "SuppressStartupBanner": "false", + "SwapRunFromCD": "true", + "SwapRunFromNET": "true", + "TargetMachine": "MachineX86", + "TerminalServerAware": "false", + "TurnOffAssemblyGeneration": "true", + "TypeLibraryFile": "f3", + "TypeLibraryResourceID": "12", + "UACExecutionLevel": "RequireAdministrator", + "UACUIAccess": "true", + "Version": "333", + }, + "ResourceCompile": { + "AdditionalIncludeDirectories": "f3", + "AdditionalOptions": "/more3", + "Culture": "0x0c0c", + "IgnoreStandardIncludePath": "true", + "PreprocessorDefinitions": "_UNICODE;UNICODE2", + "ResourceOutputFileName": "$(IntDir)%(Filename)3.res", + "ShowProgress": "true", + }, + "Manifest": { + "AdditionalManifestFiles": "sfsdfsd", + "AdditionalOptions": "afdsdafsd", + "AssemblyIdentity": "sddfdsadfsa", + "ComponentFileName": "fsdfds", + "GenerateCatalogFiles": "true", + "InputResourceManifests": "asfsfdafs", + "OutputManifestFile": "$(TargetPath).manifestdfs", + "RegistrarScriptFile": "sdfsfd", + "ReplacementsFile": "sdffsd", + "SuppressStartupBanner": "false", + "TypeLibraryFile": "sfsd", + "UpdateFileHashes": "true", + "UpdateFileHashesSearchPath": "sfsd", + "VerboseOutput": "true", + }, + "ProjectReference": { + "LinkLibraryDependencies": "false", + "UseLibraryDependencyInputs": "true", + }, + "": { + "EmbedManifest": "false", + "GenerateManifest": "false", + "IgnoreImportLibrary": "true", + "LinkIncremental": "", + }, + "ManifestResourceCompile": { + "ResourceOutputFileName": + "$(IntDir)$(TargetFileName).embed.manifest.resfdsf" + }, + } + self.maxDiff = 9999 # on failure display a long diff + actual_msbuild_settings = 
MSVSSettings.ConvertToMSBuildSettings(
+            msvs_settings, self.stderr
+        )
+        self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
+        self._ExpectedWarnings([])
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
new file mode 100644
index 0000000..2e5c811
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
@@ -0,0 +1,59 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Visual Studio tool file writer."""
+
+import gyp.easy_xml as easy_xml
+
+
+class Writer:
+    """Visual Studio XML tool file writer."""
+
+    def __init__(self, tool_file_path, name):
+        """Initializes the tool file.
+
+        Args:
+          tool_file_path: Path to the tool file.
+          name: Name of the tool file.
+        """
+        self.tool_file_path = tool_file_path
+        self.name = name
+        self.rules_section = ["Rules"]
+
+    def AddCustomBuildRule(
+        self, name, cmd, description, additional_dependencies, outputs, extensions
+    ):
+        """Adds a rule to the tool file.
+
+        Args:
+          name: Name of the rule.
+          description: Description of the rule.
+          cmd: Command line of the rule.
+          additional_dependencies: other files which may trigger the rule.
+          outputs: outputs of the rule.
+          extensions: extensions handled by the rule.
+        """
+        rule = [
+            "CustomBuildRule",
+            {
+                "Name": name,
+                "ExecutionDescription": description,
+                "CommandLine": cmd,
+                "Outputs": ";".join(outputs),
+                "FileExtensions": ";".join(extensions),
+                "AdditionalDependencies": ";".join(additional_dependencies),
+            },
+        ]
+        self.rules_section.append(rule)
+
+    def WriteIfChanged(self):
+        """Writes the tool file."""
+        content = [
+            "VisualStudioToolFile",
+            {"Version": "8.00", "Name": self.name},
+            self.rules_section,
+        ]
+        easy_xml.WriteXmlIfChanged(
+            content, self.tool_file_path, encoding="Windows-1252"
+        )
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
new file mode 100644
index 0000000..e580c00
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
@@ -0,0 +1,153 @@
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Visual Studio user preferences file writer."""
+
+import os
+import re
+import socket  # for gethostname
+
+import gyp.easy_xml as easy_xml
+
+
+# ------------------------------------------------------------------------------
+
+
+def _FindCommandInPath(command):
+    """If there are no slashes in the command given, this function
+    searches the PATH env to find the given command, and converts it
+    to an absolute path.  We have to do this because MSVS is looking
+    for an actual file to launch a debugger on, not just a command
+    line.  Note that this happens at GYP time, so anything needing to
+    be built needs to have a full path."""
+    if "/" in command or "\\" in command:
+        # If the command already has path elements (either relative or
+        # absolute), then assume it is constructed properly.
+        return command
+    else:
+        # Search through the path list and find an existing file that
+        # we can access.
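+        # A minimal illustration (hypothetical paths): given the command
+        # "devenv.exe" and a PATH entry "C:\VS\Common7\IDE" containing an
+        # executable file of that name, this returns
+        # "C:\VS\Common7\IDE\devenv.exe"; if no PATH entry matches, the
+        # bare command is returned unchanged below.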
+        paths = os.environ.get("PATH", "").split(os.pathsep)
+        for path in paths:
+            item = os.path.join(path, command)
+            if os.path.isfile(item) and os.access(item, os.X_OK):
+                return item
+    return command
+
+
+def _QuoteWin32CommandLineArgs(args):
+    new_args = []
+    for arg in args:
+        # Replace all double-quotes with double-double-quotes to escape
+        # them for cmd shell, and then quote the whole thing if there
+        # are any.
+        if arg.find('"') != -1:
+            arg = '""'.join(arg.split('"'))
+            arg = '"%s"' % arg
+
+        # Otherwise, if there are any spaces, quote the whole arg.
+        elif re.search(r"[ \t\n]", arg):
+            arg = '"%s"' % arg
+        new_args.append(arg)
+    return new_args
+
+
+class Writer:
+    """Visual Studio XML user file writer."""
+
+    def __init__(self, user_file_path, version, name):
+        """Initializes the user file.
+
+        Args:
+          user_file_path: Path to the user file.
+          version: Version info.
+          name: Name of the user file.
+        """
+        self.user_file_path = user_file_path
+        self.version = version
+        self.name = name
+        self.configurations = {}
+
+    def AddConfig(self, name):
+        """Adds a configuration to the project.
+
+        Args:
+          name: Configuration name.
+        """
+        self.configurations[name] = ["Configuration", {"Name": name}]
+
+    def AddDebugSettings(
+        self, config_name, command, environment={}, working_directory=""
+    ):
+        """Adds a DebugSettings node to the user file for a particular config.
+
+        Args:
+          config_name: name of the configuration to add the settings to.
+          command: command line to run.  First element in the list is the
+            executable.  All elements of the command will be quoted if
+            necessary.
+          environment: dictionary of environment variables to set. (optional)
+          working_directory: working directory for the command. (optional)
+        """
+        command = _QuoteWin32CommandLineArgs(command)
+
+        abs_command = _FindCommandInPath(command[0])
+
+        if environment and isinstance(environment, dict):
+            env_list = [f'{key}="{val}"' for (key, val) in environment.items()]
+            environment = " ".join(env_list)
+        else:
+            environment = ""
+
+        n_cmd = [
+            "DebugSettings",
+            {
+                "Command": abs_command,
+                "WorkingDirectory": working_directory,
+                "CommandArguments": " ".join(command[1:]),
+                "RemoteMachine": socket.gethostname(),
+                "Environment": environment,
+                "EnvironmentMerge": "true",
+                # Currently these are all "dummy" values that we're just setting
+                # in the default manner that MSVS does it.  We could use some of
+                # these to add additional capabilities, I suppose, but they might
+                # not have parity with other platforms then.
+                "Attach": "false",
+                "DebuggerType": "3",  # 'auto' debugger
+                "Remote": "1",
+                "RemoteCommand": "",
+                "HttpUrl": "",
+                "PDBPath": "",
+                "SQLDebugging": "",
+                "DebuggerFlavor": "0",
+                "MPIRunCommand": "",
+                "MPIRunArguments": "",
+                "MPIRunWorkingDirectory": "",
+                "ApplicationCommand": "",
+                "ApplicationArguments": "",
+                "ShimCommand": "",
+                "MPIAcceptMode": "",
+                "MPIAcceptFilter": "",
+            },
+        ]
+
+        # Find the config, and add it if it doesn't exist.
+        if config_name not in self.configurations:
+            self.AddConfig(config_name)
+
+        # Add the DebugSettings onto the appropriate config.
+        self.configurations[config_name].append(n_cmd)
+
+    def WriteIfChanged(self):
+        """Writes the user file."""
+        configs = ["Configurations"]
+        for config, spec in sorted(self.configurations.items()):
+            configs.append(spec)
+
+        content = [
+            "VisualStudioUserFile",
+            {"Version": self.version.ProjectVersion(), "Name": self.name},
+            configs,
+        ]
+        easy_xml.WriteXmlIfChanged(
+            content, self.user_file_path, encoding="Windows-1252"
+        )
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
new file mode 100644
index 0000000..36bb782
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
@@ -0,0 +1,271 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions shared amongst the Windows generators."""
+
+import copy
+import os
+
+
+# A dictionary mapping supported target types to extensions.
+TARGET_TYPE_EXT = {
+    "executable": "exe",
+    "loadable_module": "dll",
+    "shared_library": "dll",
+    "static_library": "lib",
+    "windows_driver": "sys",
+}
+
+
+def _GetLargePdbShimCcPath():
+    """Returns the path of the large_pdb_shim.cc file."""
+    this_dir = os.path.abspath(os.path.dirname(__file__))
+    src_dir = os.path.abspath(os.path.join(this_dir, "..", ".."))
+    win_data_dir = os.path.join(src_dir, "data", "win")
+    large_pdb_shim_cc = os.path.join(win_data_dir, "large-pdb-shim.cc")
+    return large_pdb_shim_cc
+
+
+def _DeepCopySomeKeys(in_dict, keys):
+    """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
+
+    Arguments:
+      in_dict: The dictionary to copy.
+      keys: The keys to be copied. If a key is in this list and doesn't exist in
+        |in_dict| this is not an error.
+    Returns:
+      The partially deep-copied dictionary.
+    """
+    d = {}
+    for key in keys:
+        if key not in in_dict:
+            continue
+        d[key] = copy.deepcopy(in_dict[key])
+    return d
+
+
+def _SuffixName(name, suffix):
+    """Add a suffix to the end of a target.
+
+    Arguments:
+      name: name of the target (foo#target)
+      suffix: the suffix to be added
+    Returns:
+      Target name with suffix added (foo_suffix#target)
+    """
+    parts = name.rsplit("#", 1)
+    parts[0] = f"{parts[0]}_{suffix}"
+    return "#".join(parts)
+
+
+def _ShardName(name, number):
+    """Add a shard number to the end of a target.
+
+    Arguments:
+      name: name of the target (foo#target)
+      number: shard number
+    Returns:
+      Target name with shard added (foo_1#target)
+    """
+    return _SuffixName(name, str(number))
+
+
+def ShardTargets(target_list, target_dicts):
+    """Shard some targets apart to work around the linker's limits.
+
+    Arguments:
+      target_list: List of target pairs: 'base/base.gyp:base'.
+      target_dicts: Dict of target properties keyed on target pair.
+    Returns:
+      Tuple of the new sharded versions of the inputs.
+    """
+    # Gather the targets to shard, and into how many pieces.
+    targets_to_shard = {}
+    for t in target_dicts:
+        shards = int(target_dicts[t].get("msvs_shard", 0))
+        if shards:
+            targets_to_shard[t] = shards
+    # Shard target_list.
+    new_target_list = []
+    for t in target_list:
+        if t in targets_to_shard:
+            for i in range(targets_to_shard[t]):
+                new_target_list.append(_ShardName(t, i))
+        else:
+            new_target_list.append(t)
+    # Shard target_dict.
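+    # Example (derived from the stride logic below): "foo#target" with
+    # msvs_shard=2 and sources ["a.cc", "b.cc", "c.cc"] is split into
+    # "foo_0#target" (sources ["a.cc", "c.cc"]) and "foo_1#target"
+    # (sources ["b.cc"]); shard i takes every shards-th source starting
+    # at index i.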
+ new_target_dicts = {} + for t in target_dicts: + if t in targets_to_shard: + for i in range(targets_to_shard[t]): + name = _ShardName(t, i) + new_target_dicts[name] = copy.copy(target_dicts[t]) + new_target_dicts[name]["target_name"] = _ShardName( + new_target_dicts[name]["target_name"], i + ) + sources = new_target_dicts[name].get("sources", []) + new_sources = [] + for pos in range(i, len(sources), targets_to_shard[t]): + new_sources.append(sources[pos]) + new_target_dicts[name]["sources"] = new_sources + else: + new_target_dicts[t] = target_dicts[t] + # Shard dependencies. + for t in sorted(new_target_dicts): + for deptype in ("dependencies", "dependencies_original"): + dependencies = copy.copy(new_target_dicts[t].get(deptype, [])) + new_dependencies = [] + for d in dependencies: + if d in targets_to_shard: + for i in range(targets_to_shard[d]): + new_dependencies.append(_ShardName(d, i)) + else: + new_dependencies.append(d) + new_target_dicts[t][deptype] = new_dependencies + + return (new_target_list, new_target_dicts) + + +def _GetPdbPath(target_dict, config_name, vars): + """Returns the path to the PDB file that will be generated by a given + configuration. + + The lookup proceeds as follows: + - Look for an explicit path in the VCLinkerTool configuration block. + - Look for an 'msvs_large_pdb_path' variable. + - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is + specified. + - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'. + + Arguments: + target_dict: The target dictionary to be searched. + config_name: The name of the configuration of interest. + vars: A dictionary of common GYP variables with generator-specific values. + Returns: + The path of the corresponding PDB file. + """ + config = target_dict["configurations"][config_name] + msvs = config.setdefault("msvs_settings", {}) + + linker = msvs.get("VCLinkerTool", {}) + + pdb_path = linker.get("ProgramDatabaseFile") + if pdb_path: + return pdb_path + + variables = target_dict.get("variables", {}) + pdb_path = variables.get("msvs_large_pdb_path", None) + if pdb_path: + return pdb_path + + pdb_base = target_dict.get("product_name", target_dict["target_name"]) + pdb_base = "{}.{}.pdb".format(pdb_base, TARGET_TYPE_EXT[target_dict["type"]]) + pdb_path = vars["PRODUCT_DIR"] + "/" + pdb_base + + return pdb_path + + +def InsertLargePdbShims(target_list, target_dicts, vars): + """Insert a shim target that forces the linker to use 4KB pagesize PDBs. + + This is a workaround for targets with PDBs greater than 1GB in size, the + limit for the 1KB pagesize PDBs created by the linker by default. + + Arguments: + target_list: List of target pairs: 'base/base.gyp:base'. + target_dicts: Dict of target properties keyed on target pair. + vars: A dictionary of common GYP variables with generator-specific values. + Returns: + Tuple of the shimmed version of the inputs. + """ + # Determine which targets need shimming. + targets_to_shim = [] + for t in target_dicts: + target_dict = target_dicts[t] + + # We only want to shim targets that have msvs_large_pdb enabled. + if not int(target_dict.get("msvs_large_pdb", 0)): + continue + # This is intended for executable, shared_library and loadable_module + # targets where every configuration is set up to produce a PDB output. + # If any of these conditions is not true then the shim logic will fail + # below. 
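+        # The net effect for a shimmed target "foo#target" is the chain
+        # foo#target -> foo_large_pdb_shim#target ->
+        # foo_large_pdb_copy#target, built up step by step below.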
+        targets_to_shim.append(t)
+
+    large_pdb_shim_cc = _GetLargePdbShimCcPath()
+
+    for t in targets_to_shim:
+        target_dict = target_dicts[t]
+        target_name = target_dict.get("target_name")
+
+        base_dict = _DeepCopySomeKeys(
+            target_dict, ["configurations", "default_configuration", "toolset"]
+        )
+
+        # This is the dict for copying the source file (part of the GYP tree)
+        # to the intermediate directory of the project. This is necessary because
+        # we can't always build a relative path to the shim source file (on Windows
+        # GYP and the project may be on different drives), and Ninja hates absolute
+        # paths (it ends up generating the .obj and .obj.d alongside the source
+        # file, polluting GYP's tree).
+        copy_suffix = "large_pdb_copy"
+        copy_target_name = target_name + "_" + copy_suffix
+        full_copy_target_name = _SuffixName(t, copy_suffix)
+        shim_cc_basename = os.path.basename(large_pdb_shim_cc)
+        shim_cc_dir = vars["SHARED_INTERMEDIATE_DIR"] + "/" + copy_target_name
+        shim_cc_path = shim_cc_dir + "/" + shim_cc_basename
+        copy_dict = copy.deepcopy(base_dict)
+        copy_dict["target_name"] = copy_target_name
+        copy_dict["type"] = "none"
+        copy_dict["sources"] = [large_pdb_shim_cc]
+        copy_dict["copies"] = [
+            {"destination": shim_cc_dir, "files": [large_pdb_shim_cc]}
+        ]
+
+        # This is the dict for the PDB generating shim target. It depends on the
+        # copy target.
+        shim_suffix = "large_pdb_shim"
+        shim_target_name = target_name + "_" + shim_suffix
+        full_shim_target_name = _SuffixName(t, shim_suffix)
+        shim_dict = copy.deepcopy(base_dict)
+        shim_dict["target_name"] = shim_target_name
+        shim_dict["type"] = "static_library"
+        shim_dict["sources"] = [shim_cc_path]
+        shim_dict["dependencies"] = [full_copy_target_name]
+
+        # Set up the shim to output its PDB to the same location as the final linker
+        # target.
+        for config_name, config in shim_dict.get("configurations").items():
+            pdb_path = _GetPdbPath(target_dict, config_name, vars)
+
+            # A few keys that we don't want to propagate.
+            for key in ["msvs_precompiled_header", "msvs_precompiled_source", "test"]:
+                config.pop(key, None)
+
+            msvs = config.setdefault("msvs_settings", {})
+
+            # Update the compiler directives in the shim target.
+            compiler = msvs.setdefault("VCCLCompilerTool", {})
+            compiler["DebugInformationFormat"] = "3"
+            compiler["ProgramDataBaseFileName"] = pdb_path
+
+            # Set the explicit PDB path in the appropriate configuration of the
+            # original target.
+            config = target_dict["configurations"][config_name]
+            msvs = config.setdefault("msvs_settings", {})
+            linker = msvs.setdefault("VCLinkerTool", {})
+            linker["GenerateDebugInformation"] = "true"
+            linker["ProgramDatabaseFile"] = pdb_path
+
+        # Add the new targets. They must go to the beginning of the list so that
+        # the dependency generation works as expected in ninja.
+        target_list.insert(0, full_copy_target_name)
+        target_list.insert(0, full_shim_target_name)
+        target_dicts[full_copy_target_name] = copy_dict
+        target_dicts[full_shim_target_name] = shim_dict
+
+        # Update the original target to depend on the shim target.
+        target_dict.setdefault("dependencies", []).append(full_shim_target_name)
+
+    return (target_list, target_dicts)
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
new file mode 100644
index 0000000..8d7f21e
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
@@ -0,0 +1,574 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Handle version information related to Visual Studio."""
+
+import errno
+import os
+import re
+import subprocess
+import sys
+import glob
+
+
+def JoinPath(*args):
+    return os.path.normpath(os.path.join(*args))
+
+
+class VisualStudioVersion:
+    """Information regarding a version of Visual Studio."""
+
+    def __init__(
+        self,
+        short_name,
+        description,
+        solution_version,
+        project_version,
+        flat_sln,
+        uses_vcxproj,
+        path,
+        sdk_based,
+        default_toolset=None,
+        compatible_sdks=None,
+    ):
+        self.short_name = short_name
+        self.description = description
+        self.solution_version = solution_version
+        self.project_version = project_version
+        self.flat_sln = flat_sln
+        self.uses_vcxproj = uses_vcxproj
+        self.path = path
+        self.sdk_based = sdk_based
+        self.default_toolset = default_toolset
+        compatible_sdks = compatible_sdks or []
+        compatible_sdks.sort(key=lambda v: float(v.replace("v", "")), reverse=True)
+        self.compatible_sdks = compatible_sdks
+
+    def ShortName(self):
+        return self.short_name
+
+    def Description(self):
+        """Get the full description of the version."""
+        return self.description
+
+    def SolutionVersion(self):
+        """Get the version number of the sln files."""
+        return self.solution_version
+
+    def ProjectVersion(self):
+        """Get the version number of the vcproj or vcxproj files."""
+        return self.project_version
+
+    def FlatSolution(self):
+        return self.flat_sln
+
+    def UsesVcxproj(self):
+        """Returns true if this version uses a vcxproj file."""
+        return self.uses_vcxproj
+
+    def ProjectExtension(self):
+        """Returns the file extension for the project."""
+        return self.uses_vcxproj and ".vcxproj" or ".vcproj"
+
+    def Path(self):
+        """Returns the path to Visual Studio installation."""
+        return self.path
+
+    def ToolPath(self, tool):
+        """Returns the path to a given compiler tool."""
+        return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
+
+    def DefaultToolset(self):
+        """Returns the msbuild toolset version that will be used in the absence
+        of a user override."""
+        return self.default_toolset
+
+    def _SetupScriptInternal(self, target_arch):
+        """Returns a command (with arguments) to be used to set up the
+        environment."""
+        assert target_arch in ("x86", "x64"), "target_arch not supported"
+        # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
+        # depot_tools build tools and should run SetEnv.Cmd to set up the
+        # environment. The check for WindowsSDKDir alone is not sufficient because
+        # this is set by running vcvarsall.bat.
+        sdk_dir = os.environ.get("WindowsSDKDir", "")
+        setup_path = JoinPath(sdk_dir, "Bin", "SetEnv.Cmd")
+        if self.sdk_based and sdk_dir and os.path.exists(setup_path):
+            return [setup_path, "/" + target_arch]
+
+        is_host_arch_x64 = (
+            os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64"
+            or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64"
+        )
+
+        # For VS2017 (and newer) it's fairly easy
+        if self.short_name >= "2017":
+            script_path = JoinPath(
+                self.path, "VC", "Auxiliary", "Build", "vcvarsall.bat"
+            )
+
+            # Always use a native executable, cross-compiling if necessary.
+            host_arch = "amd64" if is_host_arch_x64 else "x86"
+            msvc_target_arch = "amd64" if target_arch == "x64" else "x86"
+            arg = host_arch
+            if host_arch != msvc_target_arch:
+                arg += "_" + msvc_target_arch
+
+            return [script_path, arg]
+
+        # We try to find the best version of the env setup batch.
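+        # For example (hypothetical install path): targeting x64 with a
+        # non-Express edition on a 64-bit host yields
+        # [r"...\VC\vcvarsall.bat", "amd64"], while an Express edition
+        # falls back to the "x86_amd64" cross compiler.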
+ vcvarsall = JoinPath(self.path, "VC", "vcvarsall.bat") + if target_arch == "x86": + if ( + self.short_name >= "2013" + and self.short_name[-1] != "e" + and is_host_arch_x64 + ): + # VS2013 and later, non-Express have a x64-x86 cross that we want + # to prefer. + return [vcvarsall, "amd64_x86"] + else: + # Otherwise, the standard x86 compiler. We don't use VC/vcvarsall.bat + # for x86 because vcvarsall calls vcvars32, which it can only find if + # VS??COMNTOOLS is set, which isn't guaranteed. + return [JoinPath(self.path, "Common7", "Tools", "vsvars32.bat")] + elif target_arch == "x64": + arg = "x86_amd64" + # Use the 64-on-64 compiler if we're not using an express edition and + # we're running on a 64bit OS. + if self.short_name[-1] != "e" and is_host_arch_x64: + arg = "amd64" + return [vcvarsall, arg] + + def SetupScript(self, target_arch): + script_data = self._SetupScriptInternal(target_arch) + script_path = script_data[0] + if not os.path.exists(script_path): + raise Exception( + "%s is missing - make sure VC++ tools are installed." % script_path + ) + return script_data + + +def _RegistryQueryBase(sysdir, key, value): + """Use reg.exe to read a particular key. + + While ideally we might use the win32 module, we would like gyp to be + python neutral, so for instance cygwin python lacks this module. + + Arguments: + sysdir: The system subdirectory to attempt to launch reg.exe from. + key: The registry key to read from. + value: The particular value to read. + Return: + stdout from reg.exe, or None for failure. + """ + # Skip if not on Windows or Python Win32 setup issue + if sys.platform not in ("win32", "cygwin"): + return None + # Setup params to pass to and attempt to launch reg.exe + cmd = [os.path.join(os.environ.get("WINDIR", ""), sysdir, "reg.exe"), "query", key] + if value: + cmd.extend(["/v", value]) + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid + # Note that the error text may be in [1] in some cases + text = p.communicate()[0].decode("utf-8") + # Check return code from reg.exe; officially 0==success and 1==error + if p.returncode: + return None + return text + + +def _RegistryQuery(key, value=None): + r"""Use reg.exe to read a particular key through _RegistryQueryBase. + + First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If + that fails, it falls back to System32. Sysnative is available on Vista and + up and available on Windows Server 2003 and XP through KB patch 942589. Note + that Sysnative will always fail if using 64-bit python due to it being a + virtual directory and System32 will work correctly in the first place. + + KB 942589 - http://support.microsoft.com/kb/942589/en-us. + + Arguments: + key: The registry key. + value: The particular registry value to read (optional). + Return: + stdout from reg.exe, or None for failure. + """ + text = None + try: + text = _RegistryQueryBase("Sysnative", key, value) + except OSError as e: + if e.errno == errno.ENOENT: + text = _RegistryQueryBase("System32", key, value) + else: + raise + return text + + +def _RegistryGetValueUsingWinReg(key, value): + """Use the _winreg module to obtain the value of a registry key. + + Args: + key: The registry key. + value: The particular registry value to read. + Return: + contents of the registry key's value, or None on failure. Throws + ImportError if winreg is unavailable. 
+ """ + from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx + try: + root, subkey = key.split("\\", 1) + assert root == "HKLM" # Only need HKLM for now. + with OpenKey(HKEY_LOCAL_MACHINE, subkey) as hkey: + return QueryValueEx(hkey, value)[0] + except OSError: + return None + + +def _RegistryGetValue(key, value): + """Use _winreg or reg.exe to obtain the value of a registry key. + + Using _winreg is preferable because it solves an issue on some corporate + environments where access to reg.exe is locked down. However, we still need + to fallback to reg.exe for the case where the _winreg module is not available + (for example in cygwin python). + + Args: + key: The registry key. + value: The particular registry value to read. + Return: + contents of the registry key's value, or None on failure. + """ + try: + return _RegistryGetValueUsingWinReg(key, value) + except ImportError: + pass + + # Fallback to reg.exe if we fail to import _winreg. + text = _RegistryQuery(key, value) + if not text: + return None + # Extract value. + match = re.search(r"REG_\w+\s+([^\r]+)\r\n", text) + if not match: + return None + return match.group(1) + + +def _CreateVersion(name, path, sdk_based=False): + """Sets up MSVS project generation. + + Setup is based off the GYP_MSVS_VERSION environment variable or whatever is + autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is + passed in that doesn't match a value in versions python will throw a error. + """ + if path: + path = os.path.normpath(path) + versions = { + "2022": VisualStudioVersion( + "2022", + "Visual Studio 2022", + solution_version="12.00", + project_version="17.0", + flat_sln=False, + uses_vcxproj=True, + path=path, + sdk_based=sdk_based, + default_toolset="v143", + compatible_sdks=["v8.1", "v10.0"], + ), + "2019": VisualStudioVersion( + "2019", + "Visual Studio 2019", + solution_version="12.00", + project_version="16.0", + flat_sln=False, + uses_vcxproj=True, + path=path, + sdk_based=sdk_based, + default_toolset="v142", + compatible_sdks=["v8.1", "v10.0"], + ), + "2017": VisualStudioVersion( + "2017", + "Visual Studio 2017", + solution_version="12.00", + project_version="15.0", + flat_sln=False, + uses_vcxproj=True, + path=path, + sdk_based=sdk_based, + default_toolset="v141", + compatible_sdks=["v8.1", "v10.0"], + ), + "2015": VisualStudioVersion( + "2015", + "Visual Studio 2015", + solution_version="12.00", + project_version="14.0", + flat_sln=False, + uses_vcxproj=True, + path=path, + sdk_based=sdk_based, + default_toolset="v140", + ), + "2013": VisualStudioVersion( + "2013", + "Visual Studio 2013", + solution_version="13.00", + project_version="12.0", + flat_sln=False, + uses_vcxproj=True, + path=path, + sdk_based=sdk_based, + default_toolset="v120", + ), + "2013e": VisualStudioVersion( + "2013e", + "Visual Studio 2013", + solution_version="13.00", + project_version="12.0", + flat_sln=True, + uses_vcxproj=True, + path=path, + sdk_based=sdk_based, + default_toolset="v120", + ), + "2012": VisualStudioVersion( + "2012", + "Visual Studio 2012", + solution_version="12.00", + project_version="4.0", + flat_sln=False, + uses_vcxproj=True, + path=path, + sdk_based=sdk_based, + default_toolset="v110", + ), + "2012e": VisualStudioVersion( + "2012e", + "Visual Studio 2012", + solution_version="12.00", + project_version="4.0", + flat_sln=True, + uses_vcxproj=True, + path=path, + sdk_based=sdk_based, + default_toolset="v110", + ), + "2010": VisualStudioVersion( + "2010", + "Visual Studio 2010", + solution_version="11.00", + 
project_version="4.0", + flat_sln=False, + uses_vcxproj=True, + path=path, + sdk_based=sdk_based, + ), + "2010e": VisualStudioVersion( + "2010e", + "Visual C++ Express 2010", + solution_version="11.00", + project_version="4.0", + flat_sln=True, + uses_vcxproj=True, + path=path, + sdk_based=sdk_based, + ), + "2008": VisualStudioVersion( + "2008", + "Visual Studio 2008", + solution_version="10.00", + project_version="9.00", + flat_sln=False, + uses_vcxproj=False, + path=path, + sdk_based=sdk_based, + ), + "2008e": VisualStudioVersion( + "2008e", + "Visual Studio 2008", + solution_version="10.00", + project_version="9.00", + flat_sln=True, + uses_vcxproj=False, + path=path, + sdk_based=sdk_based, + ), + "2005": VisualStudioVersion( + "2005", + "Visual Studio 2005", + solution_version="9.00", + project_version="8.00", + flat_sln=False, + uses_vcxproj=False, + path=path, + sdk_based=sdk_based, + ), + "2005e": VisualStudioVersion( + "2005e", + "Visual Studio 2005", + solution_version="9.00", + project_version="8.00", + flat_sln=True, + uses_vcxproj=False, + path=path, + sdk_based=sdk_based, + ), + } + return versions[str(name)] + + +def _ConvertToCygpath(path): + """Convert to cygwin path if we are using cygwin.""" + if sys.platform == "cygwin": + p = subprocess.Popen(["cygpath", path], stdout=subprocess.PIPE) + path = p.communicate()[0].decode("utf-8").strip() + return path + + +def _DetectVisualStudioVersions(versions_to_check, force_express): + """Collect the list of installed visual studio versions. + + Returns: + A list of visual studio versions installed in descending order of + usage preference. + Base this on the registry and a quick check if devenv.exe exists. + Possibilities are: + 2005(e) - Visual Studio 2005 (8) + 2008(e) - Visual Studio 2008 (9) + 2010(e) - Visual Studio 2010 (10) + 2012(e) - Visual Studio 2012 (11) + 2013(e) - Visual Studio 2013 (12) + 2015 - Visual Studio 2015 (14) + 2017 - Visual Studio 2017 (15) + 2019 - Visual Studio 2019 (16) + 2022 - Visual Studio 2022 (17) + Where (e) is e for express editions of MSVS and blank otherwise. + """ + version_to_year = { + "8.0": "2005", + "9.0": "2008", + "10.0": "2010", + "11.0": "2012", + "12.0": "2013", + "14.0": "2015", + "15.0": "2017", + "16.0": "2019", + "17.0": "2022", + } + versions = [] + for version in versions_to_check: + # Old method of searching for which VS version is installed + # We don't use the 2010-encouraged-way because we also want to get the + # path to the binaries, which it doesn't offer. + keys = [ + r"HKLM\Software\Microsoft\VisualStudio\%s" % version, + r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s" % version, + r"HKLM\Software\Microsoft\VCExpress\%s" % version, + r"HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s" % version, + ] + for index in range(len(keys)): + path = _RegistryGetValue(keys[index], "InstallDir") + if not path: + continue + path = _ConvertToCygpath(path) + # Check for full. + full_path = os.path.join(path, "devenv.exe") + express_path = os.path.join(path, "*express.exe") + if not force_express and os.path.exists(full_path): + # Add this one. + versions.append( + _CreateVersion( + version_to_year[version], os.path.join(path, "..", "..") + ) + ) + # Check for express. + elif glob.glob(express_path): + # Add this one. + versions.append( + _CreateVersion( + version_to_year[version] + "e", os.path.join(path, "..", "..") + ) + ) + + # The old method above does not work when only SDK is installed. 
+ keys = [ + r"HKLM\Software\Microsoft\VisualStudio\SxS\VC7", + r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7", + r"HKLM\Software\Microsoft\VisualStudio\SxS\VS7", + r"HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VS7", + ] + for index in range(len(keys)): + path = _RegistryGetValue(keys[index], version) + if not path: + continue + path = _ConvertToCygpath(path) + if version == "15.0": + if os.path.exists(path): + versions.append(_CreateVersion("2017", path)) + elif version != "14.0": # There is no Express edition for 2015. + versions.append( + _CreateVersion( + version_to_year[version] + "e", + os.path.join(path, ".."), + sdk_based=True, + ) + ) + + return versions + + +def SelectVisualStudioVersion(version="auto", allow_fallback=True): + """Select which version of Visual Studio projects to generate. + + Arguments: + version: Hook to allow caller to force a particular version (vs auto). + Returns: + An object representing a visual studio project format version. + """ + # In auto mode, check environment variable for override. + if version == "auto": + version = os.environ.get("GYP_MSVS_VERSION", "auto") + version_map = { + "auto": ("17.0", "16.0", "15.0", "14.0", "12.0", "10.0", "9.0", "8.0", "11.0"), + "2005": ("8.0",), + "2005e": ("8.0",), + "2008": ("9.0",), + "2008e": ("9.0",), + "2010": ("10.0",), + "2010e": ("10.0",), + "2012": ("11.0",), + "2012e": ("11.0",), + "2013": ("12.0",), + "2013e": ("12.0",), + "2015": ("14.0",), + "2017": ("15.0",), + "2019": ("16.0",), + "2022": ("17.0",), + } + override_path = os.environ.get("GYP_MSVS_OVERRIDE_PATH") + if override_path: + msvs_version = os.environ.get("GYP_MSVS_VERSION") + if not msvs_version: + raise ValueError( + "GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be " + "set to a particular version (e.g. 2010e)." + ) + return _CreateVersion(msvs_version, override_path, sdk_based=True) + version = str(version) + versions = _DetectVisualStudioVersions(version_map[version], "e" in version) + if not versions: + if not allow_fallback: + raise ValueError("Could not locate Visual Studio installation.") + if version == "auto": + # Default to 2005 if we couldn't find anything + return _CreateVersion("2005", None) + else: + return _CreateVersion(version, None) + return versions[0] diff --git a/node_modules/node-gyp/gyp/pylib/gyp/__init__.py b/node_modules/node-gyp/gyp/pylib/gyp/__init__.py new file mode 100644 index 0000000..6790ef9 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/__init__.py @@ -0,0 +1,666 @@ +#!/usr/bin/env python3 + +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import copy +import gyp.input +import argparse +import os.path +import re +import shlex +import sys +import traceback +from gyp.common import GypError + +# Default debug modes for GYP +debug = {} + +# List of "official" debug modes, but you can use anything you like. 
+DEBUG_GENERAL = "general" +DEBUG_VARIABLES = "variables" +DEBUG_INCLUDES = "includes" + + +def DebugOutput(mode, message, *args): + if "all" in gyp.debug or mode in gyp.debug: + ctx = ("unknown", 0, "unknown") + try: + f = traceback.extract_stack(limit=2) + if f: + ctx = f[0][:3] + except Exception: + pass + if args: + message %= args + print( + "%s:%s:%d:%s %s" + % (mode.upper(), os.path.basename(ctx[0]), ctx[1], ctx[2], message) + ) + + +def FindBuildFiles(): + extension = ".gyp" + files = os.listdir(os.getcwd()) + build_files = [] + for file in files: + if file.endswith(extension): + build_files.append(file) + return build_files + + +def Load( + build_files, + format, + default_variables={}, + includes=[], + depth=".", + params=None, + check=False, + circular_check=True, +): + """ + Loads one or more specified build files. + default_variables and includes will be copied before use. + Returns the generator for the specified format and the + data returned by loading the specified build files. + """ + if params is None: + params = {} + + if "-" in format: + format, params["flavor"] = format.split("-", 1) + + default_variables = copy.copy(default_variables) + + # Default variables provided by this program and its modules should be + # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace, + # avoiding collisions with user and automatic variables. + default_variables["GENERATOR"] = format + default_variables["GENERATOR_FLAVOR"] = params.get("flavor", "") + + # Format can be a custom python file, or by default the name of a module + # within gyp.generator. + if format.endswith(".py"): + generator_name = os.path.splitext(format)[0] + path, generator_name = os.path.split(generator_name) + + # Make sure the path to the custom generator is in sys.path + # Don't worry about removing it once we are done. Keeping the path + # to each generator that is used in sys.path is likely harmless and + # arguably a good idea. + path = os.path.abspath(path) + if path not in sys.path: + sys.path.insert(0, path) + else: + generator_name = "gyp.generator." + format + + # These parameters are passed in order (as opposed to by key) + # because ActivePython cannot handle key parameters to __import__. + generator = __import__(generator_name, globals(), locals(), generator_name) + for (key, val) in generator.generator_default_variables.items(): + default_variables.setdefault(key, val) + + # Give the generator the opportunity to set additional variables based on + # the params it will receive in the output phase. + if getattr(generator, "CalculateVariables", None): + generator.CalculateVariables(default_variables, params) + + # Give the generator the opportunity to set generator_input_info based on + # the params it will receive in the output phase. + if getattr(generator, "CalculateGeneratorInputInfo", None): + generator.CalculateGeneratorInputInfo(params) + + # Fetch the generator specific info that gets fed to input, we use getattr + # so we can default things and the generators only have to provide what + # they need. 
+ generator_input_info = { + "non_configuration_keys": getattr( + generator, "generator_additional_non_configuration_keys", [] + ), + "path_sections": getattr(generator, "generator_additional_path_sections", []), + "extra_sources_for_rules": getattr( + generator, "generator_extra_sources_for_rules", [] + ), + "generator_supports_multiple_toolsets": getattr( + generator, "generator_supports_multiple_toolsets", False + ), + "generator_wants_static_library_dependencies_adjusted": getattr( + generator, "generator_wants_static_library_dependencies_adjusted", True + ), + "generator_wants_sorted_dependencies": getattr( + generator, "generator_wants_sorted_dependencies", False + ), + "generator_filelist_paths": getattr( + generator, "generator_filelist_paths", None + ), + } + + # Process the input specific to this generator. + result = gyp.input.Load( + build_files, + default_variables, + includes[:], + depth, + generator_input_info, + check, + circular_check, + params["parallel"], + params["root_targets"], + ) + return [generator] + result + + +def NameValueListToDict(name_value_list): + """ + Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary + of the pairs. If a string is simply NAME, then the value in the dictionary + is set to True. If VALUE can be converted to an integer, it is. + """ + result = {} + for item in name_value_list: + tokens = item.split("=", 1) + if len(tokens) == 2: + # If we can make it an int, use that, otherwise, use the string. + try: + token_value = int(tokens[1]) + except ValueError: + token_value = tokens[1] + # Set the variable to the supplied value. + result[tokens[0]] = token_value + else: + # No value supplied, treat it as a boolean and set it. + result[tokens[0]] = True + return result + + +def ShlexEnv(env_name): + flags = os.environ.get(env_name, []) + if flags: + flags = shlex.split(flags) + return flags + + +def FormatOpt(opt, value): + if opt.startswith("--"): + return f"{opt}={value}" + return opt + value + + +def RegenerateAppendFlag(flag, values, predicate, env_name, options): + """Regenerate a list of command line flags, for an option of action='append'. + + The |env_name|, if given, is checked in the environment and used to generate + an initial list of options, then the options that were specified on the + command line (given in |values|) are appended. This matches the handling of + environment variables and command line flags where command line flags override + the environment, while not requiring the environment to be set when the flags + are used again. + """ + flags = [] + if options.use_environment and env_name: + for flag_value in ShlexEnv(env_name): + value = FormatOpt(flag, predicate(flag_value)) + if value in flags: + flags.remove(value) + flags.append(value) + if values: + for flag_value in values: + flags.append(FormatOpt(flag, predicate(flag_value))) + return flags + + +def RegenerateFlags(options): + """Given a parsed options object, and taking the environment variables into + account, returns a list of flags that should regenerate an equivalent options + object (even in the absence of the environment variables.) + + Any path options will be normalized relative to depth. + + The format flag is not included, as it is assumed the calling generator will + set that as appropriate. 
+ """ + + def FixPath(path): + path = gyp.common.FixIfRelativePath(path, options.depth) + if not path: + return os.path.curdir + return path + + def Noop(value): + return value + + # We always want to ignore the environment when regenerating, to avoid + # duplicate or changed flags in the environment at the time of regeneration. + flags = ["--ignore-environment"] + for name, metadata in options._regeneration_metadata.items(): + opt = metadata["opt"] + value = getattr(options, name) + value_predicate = metadata["type"] == "path" and FixPath or Noop + action = metadata["action"] + env_name = metadata["env_name"] + if action == "append": + flags.extend( + RegenerateAppendFlag(opt, value, value_predicate, env_name, options) + ) + elif action in ("store", None): # None is a synonym for 'store'. + if value: + flags.append(FormatOpt(opt, value_predicate(value))) + elif options.use_environment and env_name and os.environ.get(env_name): + flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name)))) + elif action in ("store_true", "store_false"): + if (action == "store_true" and value) or ( + action == "store_false" and not value + ): + flags.append(opt) + elif options.use_environment and env_name: + print( + "Warning: environment regeneration unimplemented " + "for %s flag %r env_name %r" % (action, opt, env_name), + file=sys.stderr, + ) + else: + print( + "Warning: regeneration unimplemented for action %r " + "flag %r" % (action, opt), + file=sys.stderr, + ) + + return flags + + +class RegeneratableOptionParser(argparse.ArgumentParser): + def __init__(self, usage): + self.__regeneratable_options = {} + argparse.ArgumentParser.__init__(self, usage=usage) + + def add_argument(self, *args, **kw): + """Add an option to the parser. + + This accepts the same arguments as ArgumentParser.add_argument, plus the + following: + regenerate: can be set to False to prevent this option from being included + in regeneration. + env_name: name of environment variable that additional values for this + option come from. + type: adds type='path', to tell the regenerator that the values of + this option need to be made relative to options.depth + """ + env_name = kw.pop("env_name", None) + if "dest" in kw and kw.pop("regenerate", True): + dest = kw["dest"] + + # The path type is needed for regenerating, for optparse we can just treat + # it as a string. + type = kw.get("type") + if type == "path": + kw["type"] = str + + self.__regeneratable_options[dest] = { + "action": kw.get("action"), + "type": type, + "env_name": env_name, + "opt": args[0], + } + + argparse.ArgumentParser.add_argument(self, *args, **kw) + + def parse_args(self, *args): + values, args = argparse.ArgumentParser.parse_known_args(self, *args) + values._regeneration_metadata = self.__regeneratable_options + return values, args + + +def gyp_main(args): + my_name = os.path.basename(sys.argv[0]) + usage = "usage: %(prog)s [options ...] 
[build_file ...]" + + parser = RegeneratableOptionParser(usage=usage.replace("%s", "%(prog)s")) + parser.add_argument( + "--build", + dest="configs", + action="append", + help="configuration for build after project generation", + ) + parser.add_argument( + "--check", dest="check", action="store_true", help="check format of gyp files" + ) + parser.add_argument( + "--config-dir", + dest="config_dir", + action="store", + env_name="GYP_CONFIG_DIR", + default=None, + help="The location for configuration files like " "include.gypi.", + ) + parser.add_argument( + "-d", + "--debug", + dest="debug", + metavar="DEBUGMODE", + action="append", + default=[], + help="turn on a debugging " + 'mode for debugging GYP. Supported modes are "variables", ' + '"includes" and "general" or "all" for all of them.', + ) + parser.add_argument( + "-D", + dest="defines", + action="append", + metavar="VAR=VAL", + env_name="GYP_DEFINES", + help="sets variable VAR to value VAL", + ) + parser.add_argument( + "--depth", + dest="depth", + metavar="PATH", + type="path", + help="set DEPTH gyp variable to a relative path to PATH", + ) + parser.add_argument( + "-f", + "--format", + dest="formats", + action="append", + env_name="GYP_GENERATORS", + regenerate=False, + help="output formats to generate", + ) + parser.add_argument( + "-G", + dest="generator_flags", + action="append", + default=[], + metavar="FLAG=VAL", + env_name="GYP_GENERATOR_FLAGS", + help="sets generator flag FLAG to VAL", + ) + parser.add_argument( + "--generator-output", + dest="generator_output", + action="store", + default=None, + metavar="DIR", + type="path", + env_name="GYP_GENERATOR_OUTPUT", + help="puts generated build files under DIR", + ) + parser.add_argument( + "--ignore-environment", + dest="use_environment", + action="store_false", + default=True, + regenerate=False, + help="do not read options from environment variables", + ) + parser.add_argument( + "-I", + "--include", + dest="includes", + action="append", + metavar="INCLUDE", + type="path", + help="files to include in all loaded .gyp files", + ) + # --no-circular-check disables the check for circular relationships between + # .gyp files. These relationships should not exist, but they've only been + # observed to be harmful with the Xcode generator. Chromium's .gyp files + # currently have some circular relationships on non-Mac platforms, so this + # option allows the strict behavior to be used on Macs and the lenient + # behavior to be used elsewhere. + # TODO(mark): Remove this option when http://crbug.com/35878 is fixed. 
+ parser.add_argument( + "--no-circular-check", + dest="circular_check", + action="store_false", + default=True, + regenerate=False, + help="don't check for circular relationships between files", + ) + parser.add_argument( + "--no-parallel", + action="store_true", + default=False, + help="Disable multiprocessing", + ) + parser.add_argument( + "-S", + "--suffix", + dest="suffix", + default="", + help="suffix to add to generated files", + ) + parser.add_argument( + "--toplevel-dir", + dest="toplevel_dir", + action="store", + default=None, + metavar="DIR", + type="path", + help="directory to use as the root of the source tree", + ) + parser.add_argument( + "-R", + "--root-target", + dest="root_targets", + action="append", + metavar="TARGET", + help="include only TARGET and its deep dependencies", + ) + + options, build_files_arg = parser.parse_args(args) + build_files = build_files_arg + + # Set up the configuration directory (defaults to ~/.gyp) + if not options.config_dir: + home = None + home_dot_gyp = None + if options.use_environment: + home_dot_gyp = os.environ.get("GYP_CONFIG_DIR", None) + if home_dot_gyp: + home_dot_gyp = os.path.expanduser(home_dot_gyp) + + if not home_dot_gyp: + home_vars = ["HOME"] + if sys.platform in ("cygwin", "win32"): + home_vars.append("USERPROFILE") + for home_var in home_vars: + home = os.getenv(home_var) + if home: + home_dot_gyp = os.path.join(home, ".gyp") + if not os.path.exists(home_dot_gyp): + home_dot_gyp = None + else: + break + else: + home_dot_gyp = os.path.expanduser(options.config_dir) + + if home_dot_gyp and not os.path.exists(home_dot_gyp): + home_dot_gyp = None + + if not options.formats: + # If no format was given on the command line, then check the env variable. + generate_formats = [] + if options.use_environment: + generate_formats = os.environ.get("GYP_GENERATORS", []) + if generate_formats: + generate_formats = re.split(r"[\s,]", generate_formats) + if generate_formats: + options.formats = generate_formats + else: + # Nothing in the variable, default based on platform. + if sys.platform == "darwin": + options.formats = ["xcode"] + elif sys.platform in ("win32", "cygwin"): + options.formats = ["msvs"] + else: + options.formats = ["make"] + + if not options.generator_output and options.use_environment: + g_o = os.environ.get("GYP_GENERATOR_OUTPUT") + if g_o: + options.generator_output = g_o + + options.parallel = not options.no_parallel + + for mode in options.debug: + gyp.debug[mode] = 1 + + # Do an extra check to avoid work when we're not debugging. + if DEBUG_GENERAL in gyp.debug: + DebugOutput(DEBUG_GENERAL, "running with these options:") + for option, value in sorted(options.__dict__.items()): + if option[0] == "_": + continue + if isinstance(value, str): + DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value) + else: + DebugOutput(DEBUG_GENERAL, " %s: %s", option, value) + + if not build_files: + build_files = FindBuildFiles() + if not build_files: + raise GypError((usage + "\n\n%s: error: no build_file") % (my_name, my_name)) + + # TODO(mark): Chromium-specific hack! + # For Chromium, the gyp "depth" variable should always be a relative path + # to Chromium's top-level "src" directory. If no depth variable was set + # on the command line, try to find a "src" directory by looking at the + # absolute path to each build file's directory. The first "src" component + # found will be treated as though it were the path used for --depth. 
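+    # (Illustrative example: a build file at /home/me/chromium/src/foo/foo.gyp
+    # would yield a depth of /home/me/chromium/src; the path is hypothetical.)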
+    if not options.depth:
+        for build_file in build_files:
+            build_file_dir = os.path.abspath(os.path.dirname(build_file))
+            build_file_dir_components = build_file_dir.split(os.path.sep)
+            components_len = len(build_file_dir_components)
+            for index in range(components_len - 1, -1, -1):
+                if build_file_dir_components[index] == "src":
+                    options.depth = os.path.sep.join(build_file_dir_components)
+                    break
+                del build_file_dir_components[index]
+
+            # If the inner loop found something, break without advancing to another
+            # build file.
+            if options.depth:
+                break
+
+        if not options.depth:
+            raise GypError(
+                "Could not automatically locate src directory. This is "
+                "a temporary Chromium feature that will be removed. Use "
+                "--depth as a workaround."
+            )
+
+    # If toplevel-dir is not set, we assume that depth is the root of our source
+    # tree.
+    if not options.toplevel_dir:
+        options.toplevel_dir = options.depth
+
+    # -D on the command line sets variable defaults - D isn't just for define,
+    # it's for default. Perhaps there should be a way to force (-F?) a
+    # variable's value so that it can't be overridden by anything else.
+    cmdline_default_variables = {}
+    defines = []
+    if options.use_environment:
+        defines += ShlexEnv("GYP_DEFINES")
+    if options.defines:
+        defines += options.defines
+    cmdline_default_variables = NameValueListToDict(defines)
+    if DEBUG_GENERAL in gyp.debug:
+        DebugOutput(
+            DEBUG_GENERAL, "cmdline_default_variables: %s", cmdline_default_variables
+        )
+
+    # Set up includes.
+    includes = []
+
+    # If ~/.gyp/include.gypi exists, it'll be forcibly included into every
+    # .gyp file that's loaded, before anything else is included.
+    if home_dot_gyp:
+        default_include = os.path.join(home_dot_gyp, "include.gypi")
+        if os.path.exists(default_include):
+            print("Using overrides found in " + default_include)
+            includes.append(default_include)
+
+    # Command-line --include files come after the default include.
+    if options.includes:
+        includes.extend(options.includes)
+
+    # Generator flags should be prefixed with the target generator since they
+    # are global across all generator runs.
+    gen_flags = []
+    if options.use_environment:
+        gen_flags += ShlexEnv("GYP_GENERATOR_FLAGS")
+    if options.generator_flags:
+        gen_flags += options.generator_flags
+    generator_flags = NameValueListToDict(gen_flags)
+    if DEBUG_GENERAL in gyp.debug:
+        DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
+
+    # Generate all requested formats (use a set in case we got one format request
+    # twice)
+    for format in set(options.formats):
+        params = {
+            "options": options,
+            "build_files": build_files,
+            "generator_flags": generator_flags,
+            "cwd": os.getcwd(),
+            "build_files_arg": build_files_arg,
+            "gyp_binary": sys.argv[0],
+            "home_dot_gyp": home_dot_gyp,
+            "parallel": options.parallel,
+            "root_targets": options.root_targets,
+            "target_arch": cmdline_default_variables.get("target_arch", ""),
+        }
+
+        # Start with the default variables from the command line.
+        [generator, flat_list, targets, data] = Load(
+            build_files,
+            format,
+            cmdline_default_variables,
+            includes,
+            options.depth,
+            params,
+            options.check,
+            options.circular_check,
+        )
+
+        # TODO(mark): Pass |data| for now because the generator needs a list of
+        # build files that came in. In the future, maybe it should just accept
+        # a list, and not the whole data dict.
+        # NOTE: flat_list is the flattened dependency graph specifying the order
+        # that targets may be built. 
Build systems that operate serially or that + # need to have dependencies defined before dependents reference them should + # generate targets in the order specified in flat_list. + generator.GenerateOutput(flat_list, targets, data, params) + + if options.configs: + valid_configs = targets[flat_list[0]]["configurations"] + for conf in options.configs: + if conf not in valid_configs: + raise GypError("Invalid config specified via --build: %s" % conf) + generator.PerformBuild(data, options.configs, params) + + # Done + return 0 + + +def main(args): + try: + return gyp_main(args) + except GypError as e: + sys.stderr.write("gyp: %s\n" % e) + return 1 + + +# NOTE: setuptools generated console_scripts calls function with no arguments +def script_main(): + return main(sys.argv[1:]) + + +if __name__ == "__main__": + sys.exit(script_main()) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/common.py b/node_modules/node-gyp/gyp/pylib/gyp/common.py new file mode 100644 index 0000000..9213fcc --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/common.py @@ -0,0 +1,654 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +import errno +import filecmp +import os.path +import re +import tempfile +import sys +import subprocess + +from collections.abc import MutableSet + + +# A minimal memoizing decorator. It'll blow up if the args aren't immutable, +# among other "problems". +class memoize: + def __init__(self, func): + self.func = func + self.cache = {} + + def __call__(self, *args): + try: + return self.cache[args] + except KeyError: + result = self.func(*args) + self.cache[args] = result + return result + + +class GypError(Exception): + """Error class representing an error, which is to be presented + to the user. The main entry point will catch and display this. + """ + + pass + + +def ExceptionAppend(e, msg): + """Append a message to the given exception's message.""" + if not e.args: + e.args = (msg,) + elif len(e.args) == 1: + e.args = (str(e.args[0]) + " " + msg,) + else: + e.args = (str(e.args[0]) + " " + msg,) + e.args[1:] + + +def FindQualifiedTargets(target, qualified_list): + """ + Given a list of qualified targets, return the qualified targets for the + specified |target|. + """ + return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target] + + +def ParseQualifiedTarget(target): + # Splits a qualified target into a build file, target name and toolset. + + # NOTE: rsplit is used to disambiguate the Windows drive letter separator. + target_split = target.rsplit(":", 1) + if len(target_split) == 2: + [build_file, target] = target_split + else: + build_file = None + + target_split = target.rsplit("#", 1) + if len(target_split) == 2: + [target, toolset] = target_split + else: + toolset = None + + return [build_file, target, toolset] + + +def ResolveTarget(build_file, target, toolset): + # This function resolves a target into a canonical form: + # - a fully defined build file, either absolute or relative to the current + # directory + # - a target name + # - a toolset + # + # build_file is the file relative to which 'target' is defined. + # target is the qualified target. + # toolset is the default toolset for that target. + [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target) + + if parsed_build_file: + if build_file: + # If a relative path, parsed_build_file is relative to the directory + # containing build_file. 
If build_file is not in the current directory,
+            # parsed_build_file is not a usable path as-is. Resolve it by
+            # interpreting it as relative to build_file. If parsed_build_file is
+            # absolute, it is usable as a path regardless of the current directory,
+            # and os.path.join will return it as-is.
+            build_file = os.path.normpath(
+                os.path.join(os.path.dirname(build_file), parsed_build_file)
+            )
+            # Further, make it relative to cwd (to handle cases like ../cwd).
+            if not os.path.isabs(build_file):
+                build_file = RelativePath(build_file, ".")
+        else:
+            build_file = parsed_build_file
+
+    if parsed_toolset:
+        toolset = parsed_toolset
+
+    return [build_file, target, toolset]
+
+
+def BuildFile(fully_qualified_target):
+    # Extracts the build file from the fully qualified target.
+    return ParseQualifiedTarget(fully_qualified_target)[0]
+
+
+def GetEnvironFallback(var_list, default):
+    """Look up a key in the environment, with fallback to secondary keys
+    and finally falling back to a default value."""
+    for var in var_list:
+        if var in os.environ:
+            return os.environ[var]
+    return default
+
+
+def QualifiedTarget(build_file, target, toolset):
+    # "Qualified" means the file that a target was defined in and the target
+    # name, separated by a colon, suffixed by a # and the toolset name:
+    # /path/to/file.gyp:target_name#toolset
+    fully_qualified = build_file + ":" + target
+    if toolset:
+        fully_qualified = fully_qualified + "#" + toolset
+    return fully_qualified
+
+
+@memoize
+def RelativePath(path, relative_to, follow_path_symlink=True):
+    # Assuming both |path| and |relative_to| are relative to the current
+    # directory, returns a relative path that identifies path relative to
+    # relative_to.
+    # If |follow_path_symlink| is true (default) and |path| is a symlink, then
+    # this method returns a path to the real file represented by |path|. If it is
+    # false, this method returns a path to the symlink. If |path| is not a
+    # symlink, this option has no effect.
+
+    # Convert to normalized (and therefore absolute) paths.
+    if follow_path_symlink:
+        path = os.path.realpath(path)
+    else:
+        path = os.path.abspath(path)
+    relative_to = os.path.realpath(relative_to)
+
+    # On Windows, we can't create a relative path to a different drive, so just
+    # use the absolute path.
+    if sys.platform == "win32":
+        if (
+            os.path.splitdrive(path)[0].lower()
+            != os.path.splitdrive(relative_to)[0].lower()
+        ):
+            return path
+
+    # Split the paths into components.
+    path_split = path.split(os.path.sep)
+    relative_to_split = relative_to.split(os.path.sep)
+
+    # Determine how much of the prefix the two paths share.
+    prefix_len = len(os.path.commonprefix([path_split, relative_to_split]))
+
+    # Put enough ".." components to back up out of relative_to to the common
+    # prefix, and then append the part of path_split after the common prefix.
+    relative_split = [os.path.pardir] * (
+        len(relative_to_split) - prefix_len
+    ) + path_split[prefix_len:]
+
+    if len(relative_split) == 0:
+        # The paths were the same.
+        return ""
+
+    # Turn it back into a string and we're done.
+    return os.path.join(*relative_split)
+
+
+@memoize
+def InvertRelativePath(path, toplevel_dir=None):
+    """Given a path like foo/bar that is relative to toplevel_dir, return
+    the inverse relative path back to the toplevel_dir.
+
+    E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
+    should always produce the empty string, unless the path contains symlinks.
+    """
+    if not path:
+        return path
+    toplevel_dir = "." 
if toplevel_dir is None else toplevel_dir + return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path)) + + +def FixIfRelativePath(path, relative_to): + # Like RelativePath but returns |path| unchanged if it is absolute. + if os.path.isabs(path): + return path + return RelativePath(path, relative_to) + + +def UnrelativePath(path, relative_to): + # Assuming that |relative_to| is relative to the current directory, and |path| + # is a path relative to the dirname of |relative_to|, returns a path that + # identifies |path| relative to the current directory. + rel_dir = os.path.dirname(relative_to) + return os.path.normpath(os.path.join(rel_dir, path)) + + +# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at +# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02 +# and the documentation for various shells. + +# _quote is a pattern that should match any argument that needs to be quoted +# with double-quotes by EncodePOSIXShellArgument. It matches the following +# characters appearing anywhere in an argument: +# \t, \n, space parameter separators +# # comments +# $ expansions (quoted to always expand within one argument) +# % called out by IEEE 1003.1 XCU.2.2 +# & job control +# ' quoting +# (, ) subshell execution +# *, ?, [ pathname expansion +# ; command delimiter +# <, >, | redirection +# = assignment +# {, } brace expansion (bash) +# ~ tilde expansion +# It also matches the empty string, because "" (or '') is the only way to +# represent an empty string literal argument to a POSIX shell. +# +# This does not match the characters in _escape, because those need to be +# backslash-escaped regardless of whether they appear in a double-quoted +# string. +_quote = re.compile("[\t\n #$%&'()*;<=>?[{|}~]|^$") + +# _escape is a pattern that should match any character that needs to be +# escaped with a backslash, whether or not the argument matched the _quote +# pattern. _escape is used with re.sub to backslash anything in _escape's +# first match group, hence the (parentheses) in the regular expression. +# +# _escape matches the following characters appearing anywhere in an argument: +# " to prevent POSIX shells from interpreting this character for quoting +# \ to prevent POSIX shells from interpreting this character for escaping +# ` to prevent POSIX shells from interpreting this character for command +# substitution +# Missing from this list is $, because the desired behavior of +# EncodePOSIXShellArgument is to permit parameter (variable) expansion. +# +# Also missing from this list is !, which bash will interpret as the history +# expansion character when history is enabled. bash does not enable history +# by default in non-interactive shells, so this is not thought to be a problem. +# ! was omitted from this list because bash interprets "\!" as a literal string +# including the backslash character (avoiding history expansion but retaining +# the backslash), which would not be correct for argument encoding. Handling +# this case properly would also be problematic because bash allows the history +# character to be changed with the histchars shell variable. Fortunately, +# as history is not enabled in non-interactive shells and +# EncodePOSIXShellArgument is only expected to encode for non-interactive +# shells, there is no room for error here by ignoring !. +_escape = re.compile(r'(["\\`])') + + +def EncodePOSIXShellArgument(argument): + """Encodes |argument| suitably for consumption by POSIX shells. 
+ + argument may be quoted and escaped as necessary to ensure that POSIX shells + treat the returned value as a literal representing the argument passed to + this function. Parameter (variable) expansions beginning with $ are allowed + to remain intact without escaping the $, to allow the argument to contain + references to variables to be expanded by the shell. + """ + + if not isinstance(argument, str): + argument = str(argument) + + if _quote.search(argument): + quote = '"' + else: + quote = "" + + encoded = quote + re.sub(_escape, r"\\\1", argument) + quote + + return encoded + + +def EncodePOSIXShellList(list): + """Encodes |list| suitably for consumption by POSIX shells. + + Returns EncodePOSIXShellArgument for each item in list, and joins them + together using the space character as an argument separator. + """ + + encoded_arguments = [] + for argument in list: + encoded_arguments.append(EncodePOSIXShellArgument(argument)) + return " ".join(encoded_arguments) + + +def DeepDependencyTargets(target_dicts, roots): + """Returns the recursive list of target dependencies.""" + dependencies = set() + pending = set(roots) + while pending: + # Pluck out one. + r = pending.pop() + # Skip if visited already. + if r in dependencies: + continue + # Add it. + dependencies.add(r) + # Add its children. + spec = target_dicts[r] + pending.update(set(spec.get("dependencies", []))) + pending.update(set(spec.get("dependencies_original", []))) + return list(dependencies - set(roots)) + + +def BuildFileTargets(target_list, build_file): + """From a target_list, returns the subset from the specified build_file. + """ + return [p for p in target_list if BuildFile(p) == build_file] + + +def AllTargets(target_list, target_dicts, build_file): + """Returns all targets (direct and dependencies) for the specified build_file. + """ + bftargets = BuildFileTargets(target_list, build_file) + deptargets = DeepDependencyTargets(target_dicts, bftargets) + return bftargets + deptargets + + +def WriteOnDiff(filename): + """Write to a file only if the new contents differ. + + Arguments: + filename: name of the file to potentially write to. + Returns: + A file like object which will write to temporary file and only overwrite + the target if it differs (on close). + """ + + class Writer: + """Wrapper around file which only covers the target if it differs.""" + + def __init__(self): + # On Cygwin remove the "dir" argument + # `C:` prefixed paths are treated as relative, + # consequently ending up with current dir "/cygdrive/c/..." + # being prefixed to those, which was + # obviously a non-existent path, + # for example: "/cygdrive/c//C:\". + # For more details see: + # https://docs.python.org/2/library/tempfile.html#tempfile.mkstemp + base_temp_dir = "" if IsCygwin() else os.path.dirname(filename) + # Pick temporary file. + tmp_fd, self.tmp_path = tempfile.mkstemp( + suffix=".tmp", + prefix=os.path.split(filename)[1] + ".gyp.", + dir=base_temp_dir, + ) + try: + self.tmp_file = os.fdopen(tmp_fd, "wb") + except Exception: + # Don't leave turds behind. + os.unlink(self.tmp_path) + raise + + def __getattr__(self, attrname): + # Delegate everything else to self.tmp_file + return getattr(self.tmp_file, attrname) + + def close(self): + try: + # Close tmp file. + self.tmp_file.close() + # Determine if different. 
+ same = False + try: + same = filecmp.cmp(self.tmp_path, filename, False) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + if same: + # The new file is identical to the old one, just get rid of the new + # one. + os.unlink(self.tmp_path) + else: + # The new file is different from the old one, + # or there is no old one. + # Rename the new file to the permanent name. + # + # tempfile.mkstemp uses an overly restrictive mode, resulting in a + # file that can only be read by the owner, regardless of the umask. + # There's no reason to not respect the umask here, + # which means that an extra hoop is required + # to fetch it and reset the new file's mode. + # + # No way to get the umask without setting a new one? Set a safe one + # and then set it back to the old value. + umask = os.umask(0o77) + os.umask(umask) + os.chmod(self.tmp_path, 0o666 & ~umask) + if sys.platform == "win32" and os.path.exists(filename): + # NOTE: on windows (but not cygwin) rename will not replace an + # existing file, so it must be preceded with a remove. + # Sadly there is no way to make the switch atomic. + os.remove(filename) + os.rename(self.tmp_path, filename) + except Exception: + # Don't leave turds behind. + os.unlink(self.tmp_path) + raise + + def write(self, s): + self.tmp_file.write(s.encode("utf-8")) + + return Writer() + + +def EnsureDirExists(path): + """Make sure the directory for |path| exists.""" + try: + os.makedirs(os.path.dirname(path)) + except OSError: + pass + + +def GetFlavor(params): + """Returns |params.flavor| if it's set, the system's default flavor else.""" + flavors = { + "cygwin": "win", + "win32": "win", + "darwin": "mac", + } + + if "flavor" in params: + return params["flavor"] + if sys.platform in flavors: + return flavors[sys.platform] + if sys.platform.startswith("sunos"): + return "solaris" + if sys.platform.startswith(("dragonfly", "freebsd")): + return "freebsd" + if sys.platform.startswith("openbsd"): + return "openbsd" + if sys.platform.startswith("netbsd"): + return "netbsd" + if sys.platform.startswith("aix"): + return "aix" + if sys.platform.startswith(("os390", "zos")): + return "zos" + + return "linux" + + +def CopyTool(flavor, out_path, generator_flags={}): + """Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it + to |out_path|.""" + # aix and solaris just need flock emulation. mac and win use more complicated + # support scripts. + prefix = {"aix": "flock", "solaris": "flock", "mac": "mac", "win": "win"}.get( + flavor, None + ) + if not prefix: + return + + # Slurp input file. + source_path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "%s_tool.py" % prefix + ) + with open(source_path) as source_file: + source = source_file.readlines() + + # Set custom header flags. + header = "# Generated by gyp. Do not edit.\n" + mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None) + if flavor == "mac" and mac_toolchain_dir: + header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" % mac_toolchain_dir + + # Add header and write it out. + tool_path = os.path.join(out_path, "gyp-%s-tool" % prefix) + with open(tool_path, "w") as tool_file: + tool_file.write("".join([source[0], header] + source[1:])) + + # Make file executable. + os.chmod(tool_path, 0o755) + + +# From Alex Martelli, +# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560 +# ASPN: Python Cookbook: Remove duplicates from a sequence +# First comment, dated 2001/10/13. +# (Also in the printed Python Cookbook.) 
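+# (Illustrative usage of the recipe below: uniquer([1, 2, 1, 3]) returns
+# [1, 2, 3], and uniquer(["A", "a", "b"], idfun=str.lower) returns ["A", "b"].)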
+
+
+def uniquer(seq, idfun=lambda x: x):
+    seen = {}
+    result = []
+    for item in seq:
+        marker = idfun(item)
+        if marker in seen:
+            continue
+        seen[marker] = 1
+        result.append(item)
+    return result
+
+
+# Based on http://code.activestate.com/recipes/576694/.
+class OrderedSet(MutableSet):
+    def __init__(self, iterable=None):
+        self.end = end = []
+        end += [None, end, end]  # sentinel node for doubly linked list
+        self.map = {}  # key --> [key, prev, next]
+        if iterable is not None:
+            self |= iterable
+
+    def __len__(self):
+        return len(self.map)
+
+    def __contains__(self, key):
+        return key in self.map
+
+    def add(self, key):
+        if key not in self.map:
+            end = self.end
+            curr = end[1]
+            curr[2] = end[1] = self.map[key] = [key, curr, end]
+
+    def discard(self, key):
+        if key in self.map:
+            key, prev_item, next_item = self.map.pop(key)
+            prev_item[2] = next_item
+            next_item[1] = prev_item
+
+    def __iter__(self):
+        end = self.end
+        curr = end[2]
+        while curr is not end:
+            yield curr[0]
+            curr = curr[2]
+
+    def __reversed__(self):
+        end = self.end
+        curr = end[1]
+        while curr is not end:
+            yield curr[0]
+            curr = curr[1]
+
+    # The second argument is an addition that causes a pylint warning.
+    def pop(self, last=True):  # pylint: disable=W0221
+        if not self:
+            raise KeyError("set is empty")
+        key = self.end[1][0] if last else self.end[2][0]
+        self.discard(key)
+        return key
+
+    def __repr__(self):
+        if not self:
+            return f"{self.__class__.__name__}()"
+        return f"{self.__class__.__name__}({list(self)!r})"
+
+    def __eq__(self, other):
+        if isinstance(other, OrderedSet):
+            return len(self) == len(other) and list(self) == list(other)
+        return set(self) == set(other)
+
+    # Extensions to the recipe.
+    def update(self, iterable):
+        for i in iterable:
+            if i not in self:
+                self.add(i)
+
+
+class CycleError(Exception):
+    """An exception raised when an unexpected cycle is detected."""
+
+    def __init__(self, nodes):
+        self.nodes = nodes
+
+    def __str__(self):
+        return "CycleError: cycle involving: " + str(self.nodes)
+
+
+def TopologicallySorted(graph, get_edges):
+    r"""Topologically sort based on a user provided edge definition.
+
+    Args:
+      graph: A list of node names.
+      get_edges: A function mapping from node name to a hashable collection
+                 of node names which this node has outgoing edges to.
+    Returns:
+      A list containing all of the nodes in graph in topological order.
+      It is assumed that calling get_edges once for each node and caching is
+      cheaper than repeatedly calling get_edges.
+    Raises:
+      CycleError in the event of a cycle.
+    Example:
+      graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
+      def GetEdges(node):
+        return re.findall(r'\$\(([^)]*)\)', graph[node])
+      print(TopologicallySorted(graph.keys(), GetEdges))
+      ==>
+      ['a', 'c', 'b']
+    """
+    get_edges = memoize(get_edges)
+    visited = set()
+    visiting = set()
+    ordered_nodes = []
+
+    def Visit(node):
+        if node in visiting:
+            raise CycleError(visiting)
+        if node in visited:
+            return
+        visited.add(node)
+        visiting.add(node)
+        for neighbor in get_edges(node):
+            Visit(neighbor)
+        visiting.remove(node)
+        ordered_nodes.insert(0, node)
+
+    for node in sorted(graph):
+        Visit(node)
+    return ordered_nodes
+
+
+def CrossCompileRequested():
+    # TODO: figure out how to not build extra host objects in the
+    # non-cross-compile case when this is enabled, and enable unconditionally. 
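+    # (Illustrative: GYP_CROSSCOMPILE=1, or any of the host/target toolchain
+    # overrides checked below, e.g. CC_target=arm-linux-gnueabihf-gcc (a
+    # hypothetical compiler name), makes this truthy.)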
+    return (
+        os.environ.get("GYP_CROSSCOMPILE")
+        or os.environ.get("AR_host")
+        or os.environ.get("CC_host")
+        or os.environ.get("CXX_host")
+        or os.environ.get("AR_target")
+        or os.environ.get("CC_target")
+        or os.environ.get("CXX_target")
+    )
+
+
+def IsCygwin():
+    try:
+        out = subprocess.Popen(
+            "uname", stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+        )
+        stdout = out.communicate()[0].decode("utf-8")
+        return "CYGWIN" in str(stdout)
+    except Exception:
+        return False
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/common_test.py b/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
new file mode 100644
index 0000000..0534408
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the common.py file."""
+
+import gyp.common
+import unittest
+import sys
+
+
+class TestTopologicallySorted(unittest.TestCase):
+    def test_Valid(self):
+        """Test that sorting works on a valid graph with one possible order."""
+        graph = {
+            "a": ["b", "c"],
+            "b": [],
+            "c": ["d"],
+            "d": ["b"],
+        }
+
+        def GetEdge(node):
+            return tuple(graph[node])
+
+        self.assertEqual(
+            gyp.common.TopologicallySorted(graph.keys(), GetEdge), ["a", "c", "d", "b"]
+        )
+
+    def test_Cycle(self):
+        """Test that an exception is thrown on a cyclic graph."""
+        graph = {
+            "a": ["b"],
+            "b": ["c"],
+            "c": ["d"],
+            "d": ["a"],
+        }
+
+        def GetEdge(node):
+            return tuple(graph[node])
+
+        self.assertRaises(
+            gyp.common.CycleError, gyp.common.TopologicallySorted, graph.keys(), GetEdge
+        )
+
+
+class TestGetFlavor(unittest.TestCase):
+    """Test that gyp.common.GetFlavor works as intended"""
+
+    original_platform = ""
+
+    def setUp(self):
+        self.original_platform = sys.platform
+
+    def tearDown(self):
+        sys.platform = self.original_platform
+
+    def assertFlavor(self, expected, argument, param):
+        sys.platform = argument
+        self.assertEqual(expected, gyp.common.GetFlavor(param))
+
+    def test_platform_default(self):
+        self.assertFlavor("freebsd", "freebsd9", {})
+        self.assertFlavor("freebsd", "freebsd10", {})
+        self.assertFlavor("openbsd", "openbsd5", {})
+        self.assertFlavor("solaris", "sunos5", {})
+        self.assertFlavor("solaris", "sunos", {})
+        self.assertFlavor("linux", "linux2", {})
+        self.assertFlavor("linux", "linux3", {})
+        self.assertFlavor("linux", "linux", {})
+
+    def test_param(self):
+        self.assertFlavor("foobar", "linux2", {"flavor": "foobar"})
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
new file mode 100644
index 0000000..bda1a47
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
@@ -0,0 +1,165 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+import re
+import os
+import locale
+from functools import reduce
+
+
+def XmlToString(content, encoding="utf-8", pretty=False):
+    """ Converts the structured content into an XML string.
+
+    Visual Studio files have a lot of pre-defined structures. This function makes
+    it easy to represent these structures as Python data structures, instead of
+    having to create a lot of function calls.
+
+    Each XML element of the content is represented as a list composed of:
+    1. 
The name of the element, a string,
+    2. The attributes of the element, a dictionary (optional), and
+    3+. The content of the element, if any. Strings are simple text nodes and
+        lists are child elements.
+
+    Example 1:
+        <test/>
+    becomes
+        ['test']
+
+    Example 2:
+        <myelement a='value1' b='value2'>
+           <childtype>This is</childtype>
+           <childtype>it!</childtype>
+        </myelement>
+
+    becomes
+        ['myelement', {'a':'value1', 'b':'value2'},
+           ['childtype', 'This is'],
+           ['childtype', 'it!'],
+        ]
+
+    Args:
+      content: The structured content to be converted.
+      encoding: The encoding to report on the first XML line.
+      pretty: True if we want pretty printing with indents and new lines.
+
+    Returns:
+      The XML content as a string.
+    """
+    # We create a huge list of all the elements of the file.
+    xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
+    if pretty:
+        xml_parts.append("\n")
+    _ConstructContentList(xml_parts, content, pretty)
+
+    # Convert it to a string
+    return "".join(xml_parts)
+
+
+def _ConstructContentList(xml_parts, specification, pretty, level=0):
+    """ Appends the XML parts corresponding to the specification.
+
+    Args:
+      xml_parts: A list of XML parts to be appended to.
+      specification: The specification of the element. See EasyXml docs.
+      pretty: True if we want pretty printing with indents and new lines.
+      level: Indentation level.
+    """
+    # The first item in a specification is the name of the element.
+    if pretty:
+        indentation = "  " * level
+        new_line = "\n"
+    else:
+        indentation = ""
+        new_line = ""
+    name = specification[0]
+    if not isinstance(name, str):
+        raise Exception(
+            "The first item of an EasyXml specification should be "
+            "a string. Specification was " + str(specification)
+        )
+    xml_parts.append(indentation + "<" + name)
+
+    # Optionally in second position is a dictionary of the attributes.
+    rest = specification[1:]
+    if rest and isinstance(rest[0], dict):
+        for at, val in sorted(rest[0].items()):
+            xml_parts.append(f' {at}="{_XmlEscape(val, attr=True)}"')
+        rest = rest[1:]
+    if rest:
+        xml_parts.append(">")
+        all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
+        multi_line = not all_strings
+        if multi_line and new_line:
+            xml_parts.append(new_line)
+        for child_spec in rest:
+            # If it's a string, append a text node.
+            # Otherwise recurse over that child definition
+            if isinstance(child_spec, str):
+                xml_parts.append(_XmlEscape(child_spec))
+            else:
+                _ConstructContentList(xml_parts, child_spec, pretty, level + 1)
+        if multi_line and indentation:
+            xml_parts.append(indentation)
+        xml_parts.append(f"</{name}>{new_line}")
+    else:
+        xml_parts.append("/>%s" % new_line)
+
+
+def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False,
+                      win32=(sys.platform == "win32")):
+    """ Writes the XML content to disk, touching the file only if it has changed.
+
+    Args:
+      content: The structured content to be written.
+      path: Location of the file.
+      encoding: The encoding to report on the first line of the XML file.
+      pretty: True if we want pretty printing with indents and new lines. 
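+      win32: True when generating for Windows; "\n" line endings are then
+        rewritten as "\r\n" (unless the host already uses CRLF).
+
+    Example (illustrative; the file name is hypothetical):
+      WriteXmlIfChanged(['test'], 'out.xml', pretty=True) writes
+      '<?xml version="1.0" encoding="utf-8"?>\n<test/>\n' to out.xml only if
+      the file does not already contain exactly that text.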
+    """
+    xml_string = XmlToString(content, encoding, pretty)
+    if win32 and os.linesep != "\r\n":
+        xml_string = xml_string.replace("\n", "\r\n")
+
+    default_encoding = locale.getdefaultlocale()[1]
+    if default_encoding and default_encoding.upper() != encoding.upper():
+        xml_string = xml_string.encode(encoding)
+
+    # Get the old content
+    try:
+        with open(path) as file:
+            existing = file.read()
+    except OSError:
+        existing = None
+
+    # It has changed, write it
+    if existing != xml_string:
+        with open(path, "wb") as file:
+            file.write(xml_string)
+
+
+_xml_escape_map = {
+    '"': "&quot;",
+    "'": "&apos;",
+    "<": "&lt;",
+    ">": "&gt;",
+    "&": "&amp;",
+    "\n": "&#xA;",
+    "\r": "&#xD;",
+}
+
+
+_xml_escape_re = re.compile("(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
+
+
+def _XmlEscape(value, attr=False):
+    """ Escape a string for inclusion in XML."""
+
+    def replace(match):
+        m = match.string[match.start() : match.end()]
+        # don't replace single quotes in attrs
+        if attr and m == "'":
+            return m
+        return _xml_escape_map[m]
+
+    return _xml_escape_re.sub(replace, value)
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
new file mode 100644
index 0000000..342f693
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python3

+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Unit tests for the easy_xml.py file. """
+
+import gyp.easy_xml as easy_xml
+import unittest
+
+from io import StringIO
+
+
+class TestSequenceFunctions(unittest.TestCase):
+    def setUp(self):
+        self.stderr = StringIO()
+
+    def test_EasyXml_simple(self):
+        self.assertEqual(
+            easy_xml.XmlToString(["test"]),
+            '<?xml version="1.0" encoding="utf-8"?><test/>',
+        )
+
+        self.assertEqual(
+            easy_xml.XmlToString(["test"], encoding="Windows-1252"),
+            '<?xml version="1.0" encoding="Windows-1252"?><test/>',
+        )
+
+    def test_EasyXml_simple_with_attributes(self):
+        self.assertEqual(
+            easy_xml.XmlToString(["test2", {"a": "value1", "b": "value2"}]),
+            '<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>',
+        )
+
+    def test_EasyXml_escaping(self):
+        original = "<test>'\"\r&\nfoo"
+        converted = "&lt;test&gt;'&quot;&#xD;&amp;&#xA;foo"
+        converted_apos = converted.replace("'", "&apos;")
+        self.assertEqual(
+            easy_xml.XmlToString(["test3", {"a": original}, original]),
+            '<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>'
+            % (converted, converted_apos),
+        )
+
+    def test_EasyXml_pretty(self):
+        self.assertEqual(
+            easy_xml.XmlToString(
+                ["test3", ["GrandParent", ["Parent1", ["Child"]], ["Parent2"]]],
+                pretty=True,
+            ),
+            '<?xml version="1.0" encoding="utf-8"?>\n'
+            "<test3>\n"
+            "  <GrandParent>\n"
+            "    <Parent1>\n"
+            "      <Child/>\n"
+            "    </Parent1>\n"
+            "    <Parent2/>\n"
+            "  </GrandParent>\n"
+            "</test3>\n",
+        )
+
+    def test_EasyXml_complex(self):
+        # We want to create:
+        target = (
+            '<?xml version="1.0" encoding="utf-8"?>'
+            "<Project>"
+            '<PropertyGroup Label="Globals">'
+            "<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>"
+            "<Keyword>Win32Proj</Keyword>"
+            "<RootNamespace>automated_ui_tests</RootNamespace>"
+            "</PropertyGroup>"
+            '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
+            "<PropertyGroup "
+            "Condition=\"'$(Configuration)|$(Platform)'=='Debug|Win32'\" "
+            'Label="Configuration">'
+            "<ConfigurationType>Application</ConfigurationType>"
+            "<CharacterSet>Unicode</CharacterSet>"
+            "</PropertyGroup>"
+            "</Project>"
+        )
+
+        xml = easy_xml.XmlToString(
+            [
+                "Project",
+                [
+                    "PropertyGroup",
+                    {"Label": "Globals"},
+                    ["ProjectGuid", "{D2250C20-3A94-4FB9-AF73-11BC5B73884B}"],
+                    ["Keyword", "Win32Proj"],
+                    ["RootNamespace", "automated_ui_tests"],
+                ],
+                ["Import", {"Project": "$(VCTargetsPath)\\Microsoft.Cpp.props"}],
+                [
+                    "PropertyGroup",
+                    {
+                        "Condition": "'$(Configuration)|$(Platform)'=='Debug|Win32'",
+                        "Label": "Configuration",
+                    },
+                    ["ConfigurationType", "Application"],
+                    ["CharacterSet", "Unicode"],
+                ],
+            ]
+        )
+        self.assertEqual(xml, target)
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py
new file mode 100644
index 0000000..1cb9815 
--- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python3 +# Copyright (c) 2011 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""These functions are executed via gyp-flock-tool when using the Makefile +generator. Used on systems that don't have a built-in flock.""" + +import fcntl +import os +import struct +import subprocess +import sys + + +def main(args): + executor = FlockTool() + executor.Dispatch(args) + + +class FlockTool: + """This class emulates the 'flock' command.""" + + def Dispatch(self, args): + """Dispatches a string command to a method.""" + if len(args) < 1: + raise Exception("Not enough arguments") + + method = "Exec%s" % self._CommandifyName(args[0]) + getattr(self, method)(*args[1:]) + + def _CommandifyName(self, name_string): + """Transforms a tool name like copy-info-plist to CopyInfoPlist""" + return name_string.title().replace("-", "") + + def ExecFlock(self, lockfile, *cmd_list): + """Emulates the most basic behavior of Linux's flock(1).""" + # Rely on exception handling to report errors. + # Note that the stock python on SunOS has a bug + # where fcntl.flock(fd, LOCK_EX) always fails + # with EBADF, that's why we use this F_SETLK + # hack instead. + fd = os.open(lockfile, os.O_WRONLY | os.O_NOCTTY | os.O_CREAT, 0o666) + if sys.platform.startswith("aix"): + # Python on AIX is compiled with LARGEFILE support, which changes the + # struct size. + op = struct.pack("hhIllqq", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) + else: + op = struct.pack("hhllhhl", fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) + fcntl.fcntl(fd, fcntl.F_SETLK, op) + return subprocess.call(cmd_list) + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py new file mode 100644 index 0000000..f15df00 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py @@ -0,0 +1,808 @@ +# Copyright (c) 2014 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +This script is intended for use as a GYP_GENERATOR. It takes as input (by way of +the generator flag config_path) the path of a json file that dictates the files +and targets to search for. The following keys are supported: +files: list of paths (relative) of the files to search for. +test_targets: unqualified target names to search for. Any target in this list +that depends upon a file in |files| is output regardless of the type of target +or chain of dependencies. +additional_compile_targets: Unqualified targets to search for in addition to +test_targets. Targets in the combined list that depend upon a file in |files| +are not necessarily output. For example, if the target is of type none then the +target is not output (but one of the descendants of the target will be). + +The following is output: +error: only supplied if there is an error. +compile_targets: minimal set of targets that directly or indirectly (for + targets of type none) depend on the files in |files| and is one of the + supplied targets or a target that one of the supplied targets depends on. 
The expectation is this set of targets is passed into a build step. This list
+always contains the output of test_targets as well.
+test_targets: set of targets from the supplied |test_targets| that either
+directly or indirectly depend upon a file in |files|. This list is useful
+if additional processing needs to be done for certain targets after the
+build, such as running tests.
+status: outputs one of three values: none of the supplied files were found,
+one of the include files changed so that it should be assumed everything
+changed (in this case test_targets and compile_targets are not output) or at
+least one file was found.
+invalid_targets: list of supplied targets that were not found.
+
+Example:
+Consider a graph like the following:
+    A     D
+   / \
+  B   C
+A depends upon both B and C, A is of type none and B and C are executables.
+D is an executable, has no dependencies and nothing depends on it.
+If |additional_compile_targets| = ["A"], |test_targets| = ["B", "C"] and
+files = ["b.cc", "d.cc"] (B depends upon b.cc and D depends upon d.cc), then
+the following is output:
+|compile_targets| = ["B"] B must be built as it depends upon the changed file
+b.cc and the supplied target A depends upon it. A is not output as a
+build_target as it is of type none with no rules and actions.
+|test_targets| = ["B"] B directly depends upon the changed file b.cc.
+
+Even though the file d.cc, which D depends upon, has changed, D is not output
+as it was not supplied by way of |additional_compile_targets| or |test_targets|.
+
+If the generator flag analyzer_output_path is specified, output is written
+there. Otherwise output is written to stdout.
+
+In Gyp the "all" target is shorthand for the root targets in the files passed
+to gyp. For example, if file "a.gyp" contains targets "a1" and
+"a2", and file "b.gyp" contains targets "b1" and "b2" and "a2" has a dependency
+on "b2" and gyp is supplied "a.gyp" then "all" consists of "a1" and "a2".
+Notice that "b1" and "b2" are not in the "all" target as "b.gyp" was not
+directly supplied to gyp. OTOH if both "a.gyp" and "b.gyp" are supplied to gyp
+then the "all" target includes "b1" and "b2".
+"""
+
+
+import gyp.common
+import json
+import os
+import posixpath
+
+debug = False
+
+found_dependency_string = "Found dependency"
+no_dependency_string = "No dependencies"
+# Status when it should be assumed that everything has changed.
+all_changed_string = "Found dependency (all)"
+
+# MatchStatus is used to indicate if and how a target depends upon the supplied
+# sources.
+# The target's sources contain one of the supplied paths.
+MATCH_STATUS_MATCHES = 1
+# The target has a dependency on another target that contains one of the
+# supplied paths.
+MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
+# The target's sources weren't in the supplied paths and none of the target's
+# dependencies depend upon a target that matched.
+MATCH_STATUS_DOESNT_MATCH = 3
+# The target doesn't contain the source, but the dependent targets have not
+# yet been visited to determine a more specific status.
+MATCH_STATUS_TBD = 4
+
+generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
+
+generator_wants_static_library_dependencies_adjusted = False
+
+generator_default_variables = {}
+for dirname in [
+    "INTERMEDIATE_DIR",
+    "SHARED_INTERMEDIATE_DIR",
+    "PRODUCT_DIR",
+    "LIB_DIR",
+    "SHARED_LIB_DIR",
+]:
+    generator_default_variables[dirname] = "!!!"
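+
+# (Illustrative sketch of the input described in the module docstring above.
+# The file named by the config_path generator flag might contain, for a
+# hypothetical project:
+#
+#     {
+#         "files": ["foo/foo.cc"],
+#         "test_targets": ["foo_unittests"],
+#         "additional_compile_targets": ["all"]
+#     }
+#
+# All names here are hypothetical; the supported keys are those listed in the
+# docstring.)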
+ +for unused in [ + "RULE_INPUT_PATH", + "RULE_INPUT_ROOT", + "RULE_INPUT_NAME", + "RULE_INPUT_DIRNAME", + "RULE_INPUT_EXT", + "EXECUTABLE_PREFIX", + "EXECUTABLE_SUFFIX", + "STATIC_LIB_PREFIX", + "STATIC_LIB_SUFFIX", + "SHARED_LIB_PREFIX", + "SHARED_LIB_SUFFIX", + "CONFIGURATION_NAME", +]: + generator_default_variables[unused] = "" + + +def _ToGypPath(path): + """Converts a path to the format used by gyp.""" + if os.sep == "\\" and os.altsep == "/": + return path.replace("\\", "/") + return path + + +def _ResolveParent(path, base_path_components): + """Resolves |path|, which starts with at least one '../'. Returns an empty + string if the path shouldn't be considered. See _AddSources() for a + description of |base_path_components|.""" + depth = 0 + while path.startswith("../"): + depth += 1 + path = path[3:] + # Relative includes may go outside the source tree. For example, an action may + # have inputs in /usr/include, which are not in the source tree. + if depth > len(base_path_components): + return "" + if depth == len(base_path_components): + return path + return ( + "/".join(base_path_components[0 : len(base_path_components) - depth]) + + "/" + + path + ) + + +def _AddSources(sources, base_path, base_path_components, result): + """Extracts valid sources from |sources| and adds them to |result|. Each + source file is relative to |base_path|, but may contain '..'. To make + resolving '..' easier |base_path_components| contains each of the + directories in |base_path|. Additionally each source may contain variables. + Such sources are ignored as it is assumed dependencies on them are expressed + and tracked in some other means.""" + # NOTE: gyp paths are always posix style. + for source in sources: + if not len(source) or source.startswith("!!!") or source.startswith("$"): + continue + # variable expansion may lead to //. + org_source = source + source = source[0] + source[1:].replace("//", "/") + if source.startswith("../"): + source = _ResolveParent(source, base_path_components) + if len(source): + result.append(source) + continue + result.append(base_path + source) + if debug: + print("AddSource", org_source, result[len(result) - 1]) + + +def _ExtractSourcesFromAction(action, base_path, base_path_components, results): + if "inputs" in action: + _AddSources(action["inputs"], base_path, base_path_components, results) + + +def _ToLocalPath(toplevel_dir, path): + """Converts |path| to a path relative to |toplevel_dir|.""" + if path == toplevel_dir: + return "" + if path.startswith(toplevel_dir + "/"): + return path[len(toplevel_dir) + len("/") :] + return path + + +def _ExtractSources(target, target_dict, toplevel_dir): + # |target| is either absolute or relative and in the format of the OS. Gyp + # source paths are always posix. Convert |target| to a posix path relative to + # |toplevel_dir_|. This is done to make it easy to build source paths. + base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target))) + base_path_components = base_path.split("/") + + # Add a trailing '/' so that _AddSources() can easily build paths. + if len(base_path): + base_path += "/" + + if debug: + print("ExtractSources", target, base_path) + + results = [] + if "sources" in target_dict: + _AddSources(target_dict["sources"], base_path, base_path_components, results) + # Include the inputs from any actions. Any changes to these affect the + # resulting output. 
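+    # (Illustrative: with base_path "src/", an action input of "../tool.py"
+    # resolves to "tool.py", while "data/in.txt" resolves to "src/data/in.txt";
+    # both file names are hypothetical.)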
+ if "actions" in target_dict: + for action in target_dict["actions"]: + _ExtractSourcesFromAction(action, base_path, base_path_components, results) + if "rules" in target_dict: + for rule in target_dict["rules"]: + _ExtractSourcesFromAction(rule, base_path, base_path_components, results) + + return results + + +class Target: + """Holds information about a particular target: + deps: set of Targets this Target depends upon. This is not recursive, only the + direct dependent Targets. + match_status: one of the MatchStatus values. + back_deps: set of Targets that have a dependency on this Target. + visited: used during iteration to indicate whether we've visited this target. + This is used for two iterations, once in building the set of Targets and + again in _GetBuildTargets(). + name: fully qualified name of the target. + requires_build: True if the target type is such that it needs to be built. + See _DoesTargetTypeRequireBuild for details. + added_to_compile_targets: used when determining if the target was added to the + set of targets that needs to be built. + in_roots: true if this target is a descendant of one of the root nodes. + is_executable: true if the type of target is executable. + is_static_library: true if the type of target is static_library. + is_or_has_linked_ancestor: true if the target does a link (eg executable), or + if there is a target in back_deps that does a link.""" + + def __init__(self, name): + self.deps = set() + self.match_status = MATCH_STATUS_TBD + self.back_deps = set() + self.name = name + # TODO(sky): I don't like hanging this off Target. This state is specific + # to certain functions and should be isolated there. + self.visited = False + self.requires_build = False + self.added_to_compile_targets = False + self.in_roots = False + self.is_executable = False + self.is_static_library = False + self.is_or_has_linked_ancestor = False + + +class Config: + """Details what we're looking for + files: set of files to search for + targets: see file description for details.""" + + def __init__(self): + self.files = [] + self.targets = set() + self.additional_compile_target_names = set() + self.test_target_names = set() + + def Init(self, params): + """Initializes Config. This is a separate method as it raises an exception + if there is a parse error.""" + generator_flags = params.get("generator_flags", {}) + config_path = generator_flags.get("config_path", None) + if not config_path: + return + try: + f = open(config_path) + config = json.load(f) + f.close() + except OSError: + raise Exception("Unable to open file " + config_path) + except ValueError as e: + raise Exception("Unable to parse config file " + config_path + str(e)) + if not isinstance(config, dict): + raise Exception("config_path must be a JSON file containing a dictionary") + self.files = config.get("files", []) + self.additional_compile_target_names = set( + config.get("additional_compile_targets", []) + ) + self.test_target_names = set(config.get("test_targets", [])) + + +def _WasBuildFileModified(build_file, data, files, toplevel_dir): + """Returns true if the build file |build_file| is either in |files| or + one of the files included by |build_file| is in |files|. |toplevel_dir| is + the root of the source tree.""" + if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files: + if debug: + print("gyp file modified", build_file) + return True + + # First element of included_files is the file itself. 
+ if len(data[build_file]["included_files"]) <= 1: + return False + + for include_file in data[build_file]["included_files"][1:]: + # |included_files| are relative to the directory of the |build_file|. + rel_include_file = _ToGypPath( + gyp.common.UnrelativePath(include_file, build_file) + ) + if _ToLocalPath(toplevel_dir, rel_include_file) in files: + if debug: + print( + "included gyp file modified, gyp_file=", + build_file, + "included file=", + rel_include_file, + ) + return True + return False + + +def _GetOrCreateTargetByName(targets, target_name): + """Creates or returns the Target at targets[target_name]. If there is no + Target for |target_name| one is created. Returns a tuple of whether a new + Target was created and the Target.""" + if target_name in targets: + return False, targets[target_name] + target = Target(target_name) + targets[target_name] = target + return True, target + + +def _DoesTargetTypeRequireBuild(target_dict): + """Returns true if the target type is such that it needs to be built.""" + # If a 'none' target has rules or actions we assume it requires a build. + return bool( + target_dict["type"] != "none" + or target_dict.get("actions") + or target_dict.get("rules") + ) + + +def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build_files): + """Returns a tuple of the following: + . A dictionary mapping from fully qualified name to Target. + . A list of the targets that have a source file in |files|. + . Targets that constitute the 'all' target. See description at top of file + for details on the 'all' target. + This sets the |match_status| of the targets that contain any of the source + files in |files| to MATCH_STATUS_MATCHES. + |toplevel_dir| is the root of the source tree.""" + # Maps from target name to Target. + name_to_target = {} + + # Targets that matched. + matching_targets = [] + + # Queue of targets to visit. + targets_to_visit = target_list[:] + + # Maps from build file to a boolean indicating whether the build file is in + # |files|. + build_file_in_files = {} + + # Root targets across all files. + roots = set() + + # Set of Targets in |build_files|. + build_file_targets = set() + + while len(targets_to_visit) > 0: + target_name = targets_to_visit.pop() + created_target, target = _GetOrCreateTargetByName(name_to_target, target_name) + if created_target: + roots.add(target) + elif target.visited: + continue + + target.visited = True + target.requires_build = _DoesTargetTypeRequireBuild(target_dicts[target_name]) + target_type = target_dicts[target_name]["type"] + target.is_executable = target_type == "executable" + target.is_static_library = target_type == "static_library" + target.is_or_has_linked_ancestor = ( + target_type == "executable" or target_type == "shared_library" + ) + + build_file = gyp.common.ParseQualifiedTarget(target_name)[0] + if build_file not in build_file_in_files: + build_file_in_files[build_file] = _WasBuildFileModified( + build_file, data, files, toplevel_dir + ) + + if build_file in build_files: + build_file_targets.add(target) + + # If a build file (or any of its included files) is modified we assume all + # targets in the file are modified. 
+        if build_file_in_files[build_file]:
+            print("matching target from modified build file", target_name)
+            target.match_status = MATCH_STATUS_MATCHES
+            matching_targets.append(target)
+        else:
+            sources = _ExtractSources(
+                target_name, target_dicts[target_name], toplevel_dir
+            )
+            for source in sources:
+                if _ToGypPath(os.path.normpath(source)) in files:
+                    print("target", target_name, "matches", source)
+                    target.match_status = MATCH_STATUS_MATCHES
+                    matching_targets.append(target)
+                    break
+
+        # Add dependencies to visit as well as updating back pointers for deps.
+        for dep in target_dicts[target_name].get("dependencies", []):
+            targets_to_visit.append(dep)
+
+            created_dep_target, dep_target = _GetOrCreateTargetByName(
+                name_to_target, dep
+            )
+            if not created_dep_target:
+                roots.discard(dep_target)
+
+            target.deps.add(dep_target)
+            dep_target.back_deps.add(target)
+
+    return name_to_target, matching_targets, roots & build_file_targets
+
+
+def _GetUnqualifiedToTargetMapping(all_targets, to_find):
+    """Returns a tuple of the following:
+    . mapping (dictionary) from unqualified name to Target for all the
+      Targets in |to_find|.
+    . any target names not found. If this is empty all targets were found."""
+    result = {}
+    if not to_find:
+        return {}, []
+    to_find = set(to_find)
+    for target_name in all_targets.keys():
+        extracted = gyp.common.ParseQualifiedTarget(target_name)
+        if len(extracted) > 1 and extracted[1] in to_find:
+            to_find.remove(extracted[1])
+            result[extracted[1]] = all_targets[target_name]
+            if not to_find:
+                return result, []
+    return result, [x for x in to_find]
+
+
+def _DoesTargetDependOnMatchingTargets(target):
+    """Returns true if |target| or any of its dependencies is one of the
+    targets containing the files supplied as input to analyzer. This updates
+    |match_status| of the Targets as it recurses.
+    target: the Target to look for."""
+    if target.match_status == MATCH_STATUS_DOESNT_MATCH:
+        return False
+    if (
+        target.match_status == MATCH_STATUS_MATCHES
+        or target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY
+    ):
+        return True
+    for dep in target.deps:
+        if _DoesTargetDependOnMatchingTargets(dep):
+            target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
+            print("\t", target.name, "matches by dep", dep.name)
+            return True
+    target.match_status = MATCH_STATUS_DOESNT_MATCH
+    return False
+
+
+def _GetTargetsDependingOnMatchingTargets(possible_targets):
+    """Returns the list of Targets in |possible_targets| that depend (either
+    directly or indirectly) on at least one of the targets containing the files
+    supplied as input to analyzer.
+    possible_targets: targets to search from."""
+    found = []
+    print("Targets that matched by dependency:")
+    for target in possible_targets:
+        if _DoesTargetDependOnMatchingTargets(target):
+            found.append(target)
+    return found
+
+
+def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
+    """Recurses through all targets that depend on |target|, adding all targets
+    that need to be built (and are in |roots|) to |result|.
+    roots: set of root targets.
+    add_if_no_ancestor: If true and there are no ancestors of |target| then add
+    |target| to |result|. |target| must still be in |roots|.
+ result: targets that need to be built are added here.""" + if target.visited: + return + + target.visited = True + target.in_roots = target in roots + + for back_dep_target in target.back_deps: + _AddCompileTargets(back_dep_target, roots, False, result) + target.added_to_compile_targets |= back_dep_target.added_to_compile_targets + target.in_roots |= back_dep_target.in_roots + target.is_or_has_linked_ancestor |= back_dep_target.is_or_has_linked_ancestor + + # Always add 'executable' targets. Even though they may be built by other + # targets that depend upon them it makes detection of what is going to be + # built easier. + # And always add static_libraries that have no dependencies on them from + # linkables. This is necessary as the other dependencies on them may be + # static libraries themselves, which are not compile time dependencies. + if target.in_roots and ( + target.is_executable + or ( + not target.added_to_compile_targets + and (add_if_no_ancestor or target.requires_build) + ) + or ( + target.is_static_library + and add_if_no_ancestor + and not target.is_or_has_linked_ancestor + ) + ): + print( + "\t\tadding to compile targets", + target.name, + "executable", + target.is_executable, + "added_to_compile_targets", + target.added_to_compile_targets, + "add_if_no_ancestor", + add_if_no_ancestor, + "requires_build", + target.requires_build, + "is_static_library", + target.is_static_library, + "is_or_has_linked_ancestor", + target.is_or_has_linked_ancestor, + ) + result.add(target) + target.added_to_compile_targets = True + + +def _GetCompileTargets(matching_targets, supplied_targets): + """Returns the set of Targets that require a build. + matching_targets: targets that changed and need to be built. + supplied_targets: set of targets supplied to analyzer to search from.""" + result = set() + for target in matching_targets: + print("finding compile targets for match", target.name) + _AddCompileTargets(target, supplied_targets, True, result) + return result + + +def _WriteOutput(params, **values): + """Writes the output, either to stdout or a file is specified.""" + if "error" in values: + print("Error:", values["error"]) + if "status" in values: + print(values["status"]) + if "targets" in values: + values["targets"].sort() + print("Supplied targets that depend on changed files:") + for target in values["targets"]: + print("\t", target) + if "invalid_targets" in values: + values["invalid_targets"].sort() + print("The following targets were not found:") + for target in values["invalid_targets"]: + print("\t", target) + if "build_targets" in values: + values["build_targets"].sort() + print("Targets that require a build:") + for target in values["build_targets"]: + print("\t", target) + if "compile_targets" in values: + values["compile_targets"].sort() + print("Targets that need to be built:") + for target in values["compile_targets"]: + print("\t", target) + if "test_targets" in values: + values["test_targets"].sort() + print("Test targets:") + for target in values["test_targets"]: + print("\t", target) + + output_path = params.get("generator_flags", {}).get("analyzer_output_path", None) + if not output_path: + print(json.dumps(values)) + return + try: + f = open(output_path, "w") + f.write(json.dumps(values) + "\n") + f.close() + except OSError as e: + print("Error writing to output file", output_path, str(e)) + + +def _WasGypIncludeFileModified(params, files): + """Returns true if one of the files in |files| is in the set of included + files.""" + if params["options"].includes: + 
for include in params["options"].includes: + if _ToGypPath(os.path.normpath(include)) in files: + print("Include file modified, assuming all changed", include) + return True + return False + + +def _NamesNotIn(names, mapping): + """Returns a list of the values in |names| that are not in |mapping|.""" + return [name for name in names if name not in mapping] + + +def _LookupTargets(names, mapping): + """Returns a list of the mapping[name] for each value in |names| that is in + |mapping|.""" + return [mapping[name] for name in names if name in mapping] + + +def CalculateVariables(default_variables, params): + """Calculate additional variables for use in the build (called by gyp).""" + flavor = gyp.common.GetFlavor(params) + if flavor == "mac": + default_variables.setdefault("OS", "mac") + elif flavor == "win": + default_variables.setdefault("OS", "win") + gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) + else: + operating_system = flavor + if flavor == "android": + operating_system = "linux" # Keep this legacy behavior for now. + default_variables.setdefault("OS", operating_system) + + +class TargetCalculator: + """Calculates the matching test_targets and matching compile_targets.""" + + def __init__( + self, + files, + additional_compile_target_names, + test_target_names, + data, + target_list, + target_dicts, + toplevel_dir, + build_files, + ): + self._additional_compile_target_names = set(additional_compile_target_names) + self._test_target_names = set(test_target_names) + ( + self._name_to_target, + self._changed_targets, + self._root_targets, + ) = _GenerateTargets( + data, target_list, target_dicts, toplevel_dir, frozenset(files), build_files + ) + ( + self._unqualified_mapping, + self.invalid_targets, + ) = _GetUnqualifiedToTargetMapping( + self._name_to_target, self._supplied_target_names_no_all() + ) + + def _supplied_target_names(self): + return self._additional_compile_target_names | self._test_target_names + + def _supplied_target_names_no_all(self): + """Returns the supplied test targets without 'all'.""" + result = self._supplied_target_names() + result.discard("all") + return result + + def is_build_impacted(self): + """Returns true if the supplied files impact the build at all.""" + return self._changed_targets + + def find_matching_test_target_names(self): + """Returns the set of output test targets.""" + assert self.is_build_impacted() + # Find the test targets first. 'all' is special cased to mean all the + # root targets. To deal with all the supplied |test_targets| are expanded + # to include the root targets during lookup. If any of the root targets + # match, we remove it and replace it with 'all'. 
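+        # Illustrative example (hypothetical names): supplying test_targets of
+        # ["all", "foo_unittests"] searches the root targets plus
+        # foo_unittests, and 'all' is reported if any root target matched.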
+        test_target_names_no_all = set(self._test_target_names)
+        test_target_names_no_all.discard("all")
+        test_targets_no_all = _LookupTargets(
+            test_target_names_no_all, self._unqualified_mapping
+        )
+        test_target_names_contains_all = "all" in self._test_target_names
+        if test_target_names_contains_all:
+            test_targets = [
+                x for x in (set(test_targets_no_all) | set(self._root_targets))
+            ]
+        else:
+            test_targets = [x for x in test_targets_no_all]
+        print("supplied test_targets")
+        for target_name in self._test_target_names:
+            print("\t", target_name)
+        print("found test_targets")
+        for target in test_targets:
+            print("\t", target.name)
+        print("searching for matching test targets")
+        matching_test_targets = _GetTargetsDependingOnMatchingTargets(test_targets)
+        matching_test_targets_contains_all = test_target_names_contains_all and set(
+            matching_test_targets
+        ) & set(self._root_targets)
+        if matching_test_targets_contains_all:
+            # Remove any of the targets for 'all' that were not explicitly
+            # supplied; 'all' is subsequently added to the matching names below.
+            matching_test_targets = [
+                x for x in (set(matching_test_targets) & set(test_targets_no_all))
+            ]
+        print("matched test_targets")
+        for target in matching_test_targets:
+            print("\t", target.name)
+        matching_target_names = [
+            gyp.common.ParseQualifiedTarget(target.name)[1]
+            for target in matching_test_targets
+        ]
+        if matching_test_targets_contains_all:
+            matching_target_names.append("all")
+            print("\tall")
+        return matching_target_names
+
+    def find_matching_compile_target_names(self):
+        """Returns the set of output compile targets."""
+        assert self.is_build_impacted()
+        # Compile targets are found by searching up from changed targets.
+        # Reset the visited status for _GetBuildTargets.
+        for target in self._name_to_target.values():
+            target.visited = False
+
+        supplied_targets = _LookupTargets(
+            self._supplied_target_names_no_all(), self._unqualified_mapping
+        )
+        if "all" in self._supplied_target_names():
+            supplied_targets = [
+                x for x in (set(supplied_targets) | set(self._root_targets))
+            ]
+        print("Supplied test_targets & compile_targets")
+        for target in supplied_targets:
+            print("\t", target.name)
+        print("Finding compile targets")
+        compile_targets = _GetCompileTargets(self._changed_targets, supplied_targets)
+        return [
+            gyp.common.ParseQualifiedTarget(target.name)[1]
+            for target in compile_targets
+        ]
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+    """Called by gyp as the final stage.
Outputs results.""" + config = Config() + try: + config.Init(params) + + if not config.files: + raise Exception( + "Must specify files to analyze via config_path generator " "flag" + ) + + toplevel_dir = _ToGypPath(os.path.abspath(params["options"].toplevel_dir)) + if debug: + print("toplevel_dir", toplevel_dir) + + if _WasGypIncludeFileModified(params, config.files): + result_dict = { + "status": all_changed_string, + "test_targets": list(config.test_target_names), + "compile_targets": list( + config.additional_compile_target_names | config.test_target_names + ), + } + _WriteOutput(params, **result_dict) + return + + calculator = TargetCalculator( + config.files, + config.additional_compile_target_names, + config.test_target_names, + data, + target_list, + target_dicts, + toplevel_dir, + params["build_files"], + ) + if not calculator.is_build_impacted(): + result_dict = { + "status": no_dependency_string, + "test_targets": [], + "compile_targets": [], + } + if calculator.invalid_targets: + result_dict["invalid_targets"] = calculator.invalid_targets + _WriteOutput(params, **result_dict) + return + + test_target_names = calculator.find_matching_test_target_names() + compile_target_names = calculator.find_matching_compile_target_names() + found_at_least_one_target = compile_target_names or test_target_names + result_dict = { + "test_targets": test_target_names, + "status": found_dependency_string + if found_at_least_one_target + else no_dependency_string, + "compile_targets": list(set(compile_target_names) | set(test_target_names)), + } + if calculator.invalid_targets: + result_dict["invalid_targets"] = calculator.invalid_targets + _WriteOutput(params, **result_dict) + + except Exception as e: + _WriteOutput(params, error=str(e)) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py new file mode 100644 index 0000000..cdf1a48 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py @@ -0,0 +1,1173 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Notes: +# +# This generates makefiles suitable for inclusion into the Android build system +# via an Android.mk file. It is based on make.py, the standard makefile +# generator. +# +# The code below generates a separate .mk file for each target, but +# all are sourced by the top-level GypAndroid.mk. This means that all +# variables in .mk-files clobber one another, and furthermore that any +# variables set potentially clash with other Android build system variables. +# Try to avoid setting global variables where possible. + + +import gyp +import gyp.common +import gyp.generator.make as make # Reuse global functions from make backend. +import os +import re +import subprocess + +generator_default_variables = { + "OS": "android", + "EXECUTABLE_PREFIX": "", + "EXECUTABLE_SUFFIX": "", + "STATIC_LIB_PREFIX": "lib", + "SHARED_LIB_PREFIX": "lib", + "STATIC_LIB_SUFFIX": ".a", + "SHARED_LIB_SUFFIX": ".so", + "INTERMEDIATE_DIR": "$(gyp_intermediate_dir)", + "SHARED_INTERMEDIATE_DIR": "$(gyp_shared_intermediate_dir)", + "PRODUCT_DIR": "$(gyp_shared_intermediate_dir)", + "SHARED_LIB_DIR": "$(builddir)/lib.$(TOOLSET)", + "LIB_DIR": "$(obj).$(TOOLSET)", + "RULE_INPUT_ROOT": "%(INPUT_ROOT)s", # This gets expanded by Python. + "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s", # This gets expanded by Python. 
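+    # The remaining RULE_INPUT_* values below expand via make functions and
+    # automatic variables (e.g. $<) at build time rather than at gyp time.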
+ "RULE_INPUT_PATH": "$(RULE_SOURCES)", + "RULE_INPUT_EXT": "$(suffix $<)", + "RULE_INPUT_NAME": "$(notdir $<)", + "CONFIGURATION_NAME": "$(GYP_CONFIGURATION)", +} + +# Make supports multiple toolsets +generator_supports_multiple_toolsets = True + + +# Generator-specific gyp specs. +generator_additional_non_configuration_keys = [ + # Boolean to declare that this target does not want its name mangled. + "android_unmangled_name", + # Map of android build system variables to set. + "aosp_build_settings", +] +generator_additional_path_sections = [] +generator_extra_sources_for_rules = [] + + +ALL_MODULES_FOOTER = """\ +# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from +# all the included sub-makefiles. This is just here to clarify. +gyp_all_modules: +""" + +header = """\ +# This file is generated by gyp; do not edit. + +""" + +# Map gyp target types to Android module classes. +MODULE_CLASSES = { + "static_library": "STATIC_LIBRARIES", + "shared_library": "SHARED_LIBRARIES", + "executable": "EXECUTABLES", +} + + +def IsCPPExtension(ext): + return make.COMPILABLE_EXTENSIONS.get(ext) == "cxx" + + +def Sourceify(path): + """Convert a path to its source directory form. The Android backend does not + support options.generator_output, so this function is a noop.""" + return path + + +# Map from qualified target to path to output. +# For Android, the target of these maps is a tuple ('static', 'modulename'), +# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string, +# since we link by module. +target_outputs = {} +# Map from qualified target to any linkable output. A subset +# of target_outputs. E.g. when mybinary depends on liba, we want to +# include liba in the linker line; when otherbinary depends on +# mybinary, we just want to build mybinary first. +target_link_deps = {} + + +class AndroidMkWriter: + """AndroidMkWriter packages up the writing of one target-specific Android.mk. + + Its only real entry point is Write(), and is mostly used for namespacing. + """ + + def __init__(self, android_top_dir): + self.android_top_dir = android_top_dir + + def Write( + self, + qualified_target, + relative_target, + base_path, + output_filename, + spec, + configs, + part_of_all, + write_alias_target, + sdk_version, + ): + """The main entry point: writes a .mk file for a single target. + + Arguments: + qualified_target: target we're generating + relative_target: qualified target name relative to the root + base_path: path relative to source root we're building in, used to resolve + target-relative paths + output_filename: output .mk file name to write + spec, configs: gyp info + part_of_all: flag indicating this target is part of 'all' + write_alias_target: flag indicating whether to create short aliases for + this target + sdk_version: what to emit for LOCAL_SDK_VERSION in output + """ + gyp.common.EnsureDirExists(output_filename) + + self.fp = open(output_filename, "w") + + self.fp.write(header) + + self.qualified_target = qualified_target + self.relative_target = relative_target + self.path = base_path + self.target = spec["target_name"] + self.type = spec["type"] + self.toolset = spec["toolset"] + + deps, link_deps = self.ComputeDeps(spec) + + # Some of the generation below can add extra output, sources, or + # link dependencies. All of the out params of the functions that + # follow use names like extra_foo. 
+ extra_outputs = [] + extra_sources = [] + + self.android_class = MODULE_CLASSES.get(self.type, "GYP") + self.android_module = self.ComputeAndroidModule(spec) + (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec) + self.output = self.output_binary = self.ComputeOutput(spec) + + # Standard header. + self.WriteLn("include $(CLEAR_VARS)\n") + + # Module class and name. + self.WriteLn("LOCAL_MODULE_CLASS := " + self.android_class) + self.WriteLn("LOCAL_MODULE := " + self.android_module) + # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE. + # The library module classes fail if the stem is set. ComputeOutputParts + # makes sure that stem == modulename in these cases. + if self.android_stem != self.android_module: + self.WriteLn("LOCAL_MODULE_STEM := " + self.android_stem) + self.WriteLn("LOCAL_MODULE_SUFFIX := " + self.android_suffix) + if self.toolset == "host": + self.WriteLn("LOCAL_IS_HOST_MODULE := true") + self.WriteLn("LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)") + elif sdk_version > 0: + self.WriteLn( + "LOCAL_MODULE_TARGET_ARCH := " "$(TARGET_$(GYP_VAR_PREFIX)ARCH)" + ) + self.WriteLn("LOCAL_SDK_VERSION := %s" % sdk_version) + + # Grab output directories; needed for Actions and Rules. + if self.toolset == "host": + self.WriteLn( + "gyp_intermediate_dir := " + "$(call local-intermediates-dir,,$(GYP_HOST_VAR_PREFIX))" + ) + else: + self.WriteLn( + "gyp_intermediate_dir := " + "$(call local-intermediates-dir,,$(GYP_VAR_PREFIX))" + ) + self.WriteLn( + "gyp_shared_intermediate_dir := " + "$(call intermediates-dir-for,GYP,shared,,,$(GYP_VAR_PREFIX))" + ) + self.WriteLn() + + # List files this target depends on so that actions/rules/copies/sources + # can depend on the list. + # TODO: doesn't pull in things through transitive link deps; needed? + target_dependencies = [x[1] for x in deps if x[0] == "path"] + self.WriteLn("# Make sure our deps are built first.") + self.WriteList( + target_dependencies, "GYP_TARGET_DEPENDENCIES", local_pathify=True + ) + + # Actions must come first, since they can generate more OBJs for use below. + if "actions" in spec: + self.WriteActions(spec["actions"], extra_sources, extra_outputs) + + # Rules must be early like actions. + if "rules" in spec: + self.WriteRules(spec["rules"], extra_sources, extra_outputs) + + if "copies" in spec: + self.WriteCopies(spec["copies"], extra_outputs) + + # GYP generated outputs. + self.WriteList(extra_outputs, "GYP_GENERATED_OUTPUTS", local_pathify=True) + + # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend + # on both our dependency targets and our generated files. + self.WriteLn("# Make sure our deps and generated files are built first.") + self.WriteLn( + "LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) " + "$(GYP_GENERATED_OUTPUTS)" + ) + self.WriteLn() + + # Sources. + if spec.get("sources", []) or extra_sources: + self.WriteSources(spec, configs, extra_sources) + + self.WriteTarget( + spec, configs, deps, link_deps, part_of_all, write_alias_target + ) + + # Update global list of target outputs, used in dependency tracking. + target_outputs[qualified_target] = ("path", self.output_binary) + + # Update global list of link dependencies. 
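+        # Record how dependents should link against this target; only static
+        # and shared libraries get an entry in target_link_deps.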
+ if self.type == "static_library": + target_link_deps[qualified_target] = ("static", self.android_module) + elif self.type == "shared_library": + target_link_deps[qualified_target] = ("shared", self.android_module) + + self.fp.close() + return self.android_module + + def WriteActions(self, actions, extra_sources, extra_outputs): + """Write Makefile code for any 'actions' from the gyp input. + + extra_sources: a list that will be filled in with newly generated source + files, if any + extra_outputs: a list that will be filled in with any outputs of these + actions (used to make other pieces dependent on these + actions) + """ + for action in actions: + name = make.StringToMakefileVariable( + "{}_{}".format(self.relative_target, action["action_name"]) + ) + self.WriteLn('### Rules for action "%s":' % action["action_name"]) + inputs = action["inputs"] + outputs = action["outputs"] + + # Build up a list of outputs. + # Collect the output dirs we'll need. + dirs = set() + for out in outputs: + if not out.startswith("$"): + print( + 'WARNING: Action for target "%s" writes output to local path ' + '"%s".' % (self.target, out) + ) + dir = os.path.split(out)[0] + if dir: + dirs.add(dir) + if int(action.get("process_outputs_as_sources", False)): + extra_sources += outputs + + # Prepare the actual command. + command = gyp.common.EncodePOSIXShellList(action["action"]) + if "message" in action: + quiet_cmd = "Gyp action: %s ($@)" % action["message"] + else: + quiet_cmd = "Gyp action: %s ($@)" % name + if len(dirs) > 0: + command = "mkdir -p %s" % " ".join(dirs) + "; " + command + + cd_action = "cd $(gyp_local_path)/%s; " % self.path + command = cd_action + command + + # The makefile rules are all relative to the top dir, but the gyp actions + # are defined relative to their containing dir. This replaces the gyp_* + # variables for the action rule with an absolute version so that the + # output goes in the right place. + # Only write the gyp_* rules for the "primary" output (:1); + # it's superfluous for the "extra outputs", and this avoids accidentally + # writing duplicate dummy rules for those outputs. + main_output = make.QuoteSpaces(self.LocalPathify(outputs[0])) + self.WriteLn("%s: gyp_local_path := $(LOCAL_PATH)" % main_output) + self.WriteLn("%s: gyp_var_prefix := $(GYP_VAR_PREFIX)" % main_output) + self.WriteLn( + "%s: gyp_intermediate_dir := " + "$(abspath $(gyp_intermediate_dir))" % main_output + ) + self.WriteLn( + "%s: gyp_shared_intermediate_dir := " + "$(abspath $(gyp_shared_intermediate_dir))" % main_output + ) + + # Android's envsetup.sh adds a number of directories to the path including + # the built host binary directory. This causes actions/rules invoked by + # gyp to sometimes use these instead of system versions, e.g. bison. + # The built host binaries may not be suitable, and can cause errors. + # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable + # set by envsetup. + self.WriteLn( + "%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))" + % main_output + ) + + # Don't allow spaces in input/output filenames, but make an exception for + # filenames which start with '$(' since it's okay for there to be spaces + # inside of make function/macro invocations. 
+            for input in inputs:
+                if not input.startswith("$(") and " " in input:
+                    raise gyp.common.GypError(
+                        'Action input filename "%s" in target %s contains a space'
+                        % (input, self.target)
+                    )
+            for output in outputs:
+                if not output.startswith("$(") and " " in output:
+                    raise gyp.common.GypError(
+                        'Action output filename "%s" in target %s contains a space'
+                        % (output, self.target)
+                    )
+
+            self.WriteLn(
+                "%s: %s $(GYP_TARGET_DEPENDENCIES)"
+                % (main_output, " ".join(map(self.LocalPathify, inputs)))
+            )
+            self.WriteLn('\t@echo "%s"' % quiet_cmd)
+            self.WriteLn("\t$(hide)%s\n" % command)
+            for output in outputs[1:]:
+                # Make each output depend on the main output, with an empty command
+                # to force make to notice that the mtime has changed.
+                self.WriteLn(f"{self.LocalPathify(output)}: {main_output} ;")
+
+            extra_outputs += outputs
+            self.WriteLn()
+
+        self.WriteLn()
+
+    def WriteRules(self, rules, extra_sources, extra_outputs):
+        """Write Makefile code for any 'rules' from the gyp input.
+
+        extra_sources: a list that will be filled in with newly generated source
+          files, if any
+        extra_outputs: a list that will be filled in with any outputs of these
+          rules (used to make other pieces dependent on these rules)
+        """
+        if len(rules) == 0:
+            return
+
+        for rule in rules:
+            if len(rule.get("rule_sources", [])) == 0:
+                continue
+            name = make.StringToMakefileVariable(
+                "{}_{}".format(self.relative_target, rule["rule_name"])
+            )
+            self.WriteLn('\n### Generated for rule "%s":' % name)
+            self.WriteLn('# "%s":' % rule)
+
+            inputs = rule.get("inputs")
+            for rule_source in rule.get("rule_sources", []):
+                (rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
+                (rule_source_root, rule_source_ext) = os.path.splitext(
+                    rule_source_basename
+                )
+
+                outputs = [
+                    self.ExpandInputRoot(out, rule_source_root, rule_source_dirname)
+                    for out in rule["outputs"]
+                ]
+
+                dirs = set()
+                for out in outputs:
+                    if not out.startswith("$"):
+                        print(
+                            "WARNING: Rule for target %s writes output to local path %s"
+                            % (self.target, out)
+                        )
+                    dir = os.path.dirname(out)
+                    if dir:
+                        dirs.add(dir)
+                extra_outputs += outputs
+                if int(rule.get("process_outputs_as_sources", False)):
+                    extra_sources.extend(outputs)
+
+                components = []
+                for component in rule["action"]:
+                    component = self.ExpandInputRoot(
+                        component, rule_source_root, rule_source_dirname
+                    )
+                    if "$(RULE_SOURCES)" in component:
+                        component = component.replace("$(RULE_SOURCES)", rule_source)
+                    components.append(component)
+
+                command = gyp.common.EncodePOSIXShellList(components)
+                cd_action = "cd $(gyp_local_path)/%s; " % self.path
+                command = cd_action + command
+                if dirs:
+                    command = "mkdir -p %s" % " ".join(dirs) + "; " + command
+
+                # We set up a rule to build the first output, and then set up
+                # a rule for each additional output to depend on the first.
+                # Materialize a list here: map() returns a one-shot iterator on
+                # Python 3, which cannot be indexed or sliced below.
+                outputs = [self.LocalPathify(output) for output in outputs]
+                main_output = outputs[0]
+                self.WriteLn("%s: gyp_local_path := $(LOCAL_PATH)" % main_output)
+                self.WriteLn("%s: gyp_var_prefix := $(GYP_VAR_PREFIX)" % main_output)
+                self.WriteLn(
+                    "%s: gyp_intermediate_dir := "
+                    "$(abspath $(gyp_intermediate_dir))" % main_output
+                )
+                self.WriteLn(
+                    "%s: gyp_shared_intermediate_dir := "
+                    "$(abspath $(gyp_shared_intermediate_dir))" % main_output
+                )
+
+                # See explanation in WriteActions.
+ self.WriteLn( + "%s: export PATH := " + "$(subst $(ANDROID_BUILD_PATHS),,$(PATH))" % main_output + ) + + main_output_deps = self.LocalPathify(rule_source) + if inputs: + main_output_deps += " " + main_output_deps += " ".join([self.LocalPathify(f) for f in inputs]) + + self.WriteLn( + "%s: %s $(GYP_TARGET_DEPENDENCIES)" + % (main_output, main_output_deps) + ) + self.WriteLn("\t%s\n" % command) + for output in outputs[1:]: + # Make each output depend on the main output, with an empty command + # to force make to notice that the mtime has changed. + self.WriteLn(f"{output}: {main_output} ;") + self.WriteLn() + + self.WriteLn() + + def WriteCopies(self, copies, extra_outputs): + """Write Makefile code for any 'copies' from the gyp input. + + extra_outputs: a list that will be filled in with any outputs of this action + (used to make other pieces dependent on this action) + """ + self.WriteLn("### Generated for copy rule.") + + variable = make.StringToMakefileVariable(self.relative_target + "_copies") + outputs = [] + for copy in copies: + for path in copy["files"]: + # The Android build system does not allow generation of files into the + # source tree. The destination should start with a variable, which will + # typically be $(gyp_intermediate_dir) or + # $(gyp_shared_intermediate_dir). Note that we can't use an assertion + # because some of the gyp tests depend on this. + if not copy["destination"].startswith("$"): + print( + "WARNING: Copy rule for target %s writes output to " + "local path %s" % (self.target, copy["destination"]) + ) + + # LocalPathify() calls normpath, stripping trailing slashes. + path = Sourceify(self.LocalPathify(path)) + filename = os.path.split(path)[1] + output = Sourceify( + self.LocalPathify(os.path.join(copy["destination"], filename)) + ) + + self.WriteLn(f"{output}: {path} $(GYP_TARGET_DEPENDENCIES) | $(ACP)") + self.WriteLn("\t@echo Copying: $@") + self.WriteLn("\t$(hide) mkdir -p $(dir $@)") + self.WriteLn("\t$(hide) $(ACP) -rpf $< $@") + self.WriteLn() + outputs.append(output) + self.WriteLn( + "{} = {}".format(variable, " ".join(map(make.QuoteSpaces, outputs))) + ) + extra_outputs.append("$(%s)" % variable) + self.WriteLn() + + def WriteSourceFlags(self, spec, configs): + """Write out the flags and include paths used to compile source files for + the current target. + + Args: + spec, configs: input from gyp. 
+ """ + for configname, config in sorted(configs.items()): + extracted_includes = [] + + self.WriteLn("\n# Flags passed to both C and C++ files.") + cflags, includes_from_cflags = self.ExtractIncludesFromCFlags( + config.get("cflags", []) + config.get("cflags_c", []) + ) + extracted_includes.extend(includes_from_cflags) + self.WriteList(cflags, "MY_CFLAGS_%s" % configname) + + self.WriteList( + config.get("defines"), + "MY_DEFS_%s" % configname, + prefix="-D", + quoter=make.EscapeCppDefine, + ) + + self.WriteLn("\n# Include paths placed before CFLAGS/CPPFLAGS") + includes = list(config.get("include_dirs", [])) + includes.extend(extracted_includes) + includes = map(Sourceify, map(self.LocalPathify, includes)) + includes = self.NormalizeIncludePaths(includes) + self.WriteList(includes, "LOCAL_C_INCLUDES_%s" % configname) + + self.WriteLn("\n# Flags passed to only C++ (and not C) files.") + self.WriteList(config.get("cflags_cc"), "LOCAL_CPPFLAGS_%s" % configname) + + self.WriteLn( + "\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) " + "$(MY_DEFS_$(GYP_CONFIGURATION))" + ) + # Undefine ANDROID for host modules + # TODO: the source code should not use macro ANDROID to tell if it's host + # or target module. + if self.toolset == "host": + self.WriteLn("# Undefine ANDROID for host modules") + self.WriteLn("LOCAL_CFLAGS += -UANDROID") + self.WriteLn( + "LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) " + "$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))" + ) + self.WriteLn("LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))") + # Android uses separate flags for assembly file invocations, but gyp expects + # the same CFLAGS to be applied: + self.WriteLn("LOCAL_ASFLAGS := $(LOCAL_CFLAGS)") + + def WriteSources(self, spec, configs, extra_sources): + """Write Makefile code for any 'sources' from the gyp input. + These are source files necessary to build the current target. + We need to handle shared_intermediate directory source files as + a special case by copying them to the intermediate directory and + treating them as a generated sources. Otherwise the Android build + rules won't pick them up. + + Args: + spec, configs: input from gyp. + extra_sources: Sources generated from Actions or Rules. + """ + sources = filter(make.Compilable, spec.get("sources", [])) + generated_not_sources = [x for x in extra_sources if not make.Compilable(x)] + extra_sources = filter(make.Compilable, extra_sources) + + # Determine and output the C++ extension used by these sources. + # We simply find the first C++ file and use that extension. + all_sources = sources + extra_sources + local_cpp_extension = ".cpp" + for source in all_sources: + (root, ext) = os.path.splitext(source) + if IsCPPExtension(ext): + local_cpp_extension = ext + break + if local_cpp_extension != ".cpp": + self.WriteLn("LOCAL_CPP_EXTENSION := %s" % local_cpp_extension) + + # We need to move any non-generated sources that are coming from the + # shared intermediate directory out of LOCAL_SRC_FILES and put them + # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files + # that don't match our local_cpp_extension, since Android will only + # generate Makefile rules for a single LOCAL_CPP_EXTENSION. 
+ local_files = [] + for source in sources: + (root, ext) = os.path.splitext(source) + if "$(gyp_shared_intermediate_dir)" in source: + extra_sources.append(source) + elif "$(gyp_intermediate_dir)" in source: + extra_sources.append(source) + elif IsCPPExtension(ext) and ext != local_cpp_extension: + extra_sources.append(source) + else: + local_files.append(os.path.normpath(os.path.join(self.path, source))) + + # For any generated source, if it is coming from the shared intermediate + # directory then we add a Make rule to copy them to the local intermediate + # directory first. This is because the Android LOCAL_GENERATED_SOURCES + # must be in the local module intermediate directory for the compile rules + # to work properly. If the file has the wrong C++ extension, then we add + # a rule to copy that to intermediates and use the new version. + final_generated_sources = [] + # If a source file gets copied, we still need to add the original source + # directory as header search path, for GCC searches headers in the + # directory that contains the source file by default. + origin_src_dirs = [] + for source in extra_sources: + local_file = source + if "$(gyp_intermediate_dir)/" not in local_file: + basename = os.path.basename(local_file) + local_file = "$(gyp_intermediate_dir)/" + basename + (root, ext) = os.path.splitext(local_file) + if IsCPPExtension(ext) and ext != local_cpp_extension: + local_file = root + local_cpp_extension + if local_file != source: + self.WriteLn(f"{local_file}: {self.LocalPathify(source)}") + self.WriteLn("\tmkdir -p $(@D); cp $< $@") + origin_src_dirs.append(os.path.dirname(source)) + final_generated_sources.append(local_file) + + # We add back in all of the non-compilable stuff to make sure that the + # make rules have dependencies on them. + final_generated_sources.extend(generated_not_sources) + self.WriteList(final_generated_sources, "LOCAL_GENERATED_SOURCES") + + origin_src_dirs = gyp.common.uniquer(origin_src_dirs) + origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs)) + self.WriteList(origin_src_dirs, "GYP_COPIED_SOURCE_ORIGIN_DIRS") + + self.WriteList(local_files, "LOCAL_SRC_FILES") + + # Write out the flags used to compile the source; this must be done last + # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path. + self.WriteSourceFlags(spec, configs) + + def ComputeAndroidModule(self, spec): + """Return the Android module name used for a gyp spec. + + We use the complete qualified target name to avoid collisions between + duplicate targets in different directories. We also add a suffix to + distinguish gyp-generated module names. + """ + + if int(spec.get("android_unmangled_name", 0)): + assert self.type != "shared_library" or self.target.startswith("lib") + return self.target + + if self.type == "shared_library": + # For reasons of convention, the Android build system requires that all + # shared library modules are named 'libfoo' when generating -l flags. + prefix = "lib_" + else: + prefix = "" + + if spec["toolset"] == "host": + suffix = "_$(TARGET_$(GYP_VAR_PREFIX)ARCH)_host_gyp" + else: + suffix = "_gyp" + + if self.path: + middle = make.StringToMakefileVariable(f"{self.path}_{self.target}") + else: + middle = make.StringToMakefileVariable(self.target) + + return "".join([prefix, middle, suffix]) + + def ComputeOutputParts(self, spec): + """Return the 'output basename' of a gyp spec, split into filename + ext. 
+
+        Android libraries must be named the same thing as their module name,
+        otherwise the linker can't find them, so product_name and so on must be
+        ignored if we are building a library, and the "lib" prepending is
+        not done for Android.
+        """
+        assert self.type != "loadable_module"  # TODO: not supported?
+
+        target = spec["target_name"]
+        target_prefix = ""
+        target_ext = ""
+        if self.type == "static_library":
+            target = self.ComputeAndroidModule(spec)
+            target_ext = ".a"
+        elif self.type == "shared_library":
+            target = self.ComputeAndroidModule(spec)
+            target_ext = ".so"
+        elif self.type == "none":
+            target_ext = ".stamp"
+        elif self.type != "executable":
+            print(
+                "ERROR: What output file should be generated?",
+                "type",
+                self.type,
+                "target",
+                target,
+            )
+
+        if self.type != "static_library" and self.type != "shared_library":
+            target_prefix = spec.get("product_prefix", target_prefix)
+            target = spec.get("product_name", target)
+            product_ext = spec.get("product_extension")
+            if product_ext:
+                target_ext = "." + product_ext
+
+        target_stem = target_prefix + target
+        return (target_stem, target_ext)
+
+    def ComputeOutputBasename(self, spec):
+        """Return the 'output basename' of a gyp spec.
+
+        E.g., the loadable module 'foobar' in directory 'baz' will produce
+        'libfoobar.so'
+        """
+        return "".join(self.ComputeOutputParts(spec))
+
+    def ComputeOutput(self, spec):
+        """Return the 'output' (full output path) of a gyp spec.
+
+        E.g., the loadable module 'foobar' in directory 'baz' will produce
+        '$(obj)/baz/libfoobar.so'
+        """
+        if self.type == "executable":
+            # We install host executables into shared_intermediate_dir so they can be
+            # run by gyp rules that refer to PRODUCT_DIR.
+            path = "$(gyp_shared_intermediate_dir)"
+        elif self.type == "shared_library":
+            if self.toolset == "host":
+                path = "$($(GYP_HOST_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)"
+            else:
+                path = "$($(GYP_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)"
+        else:
+            # Other targets just get built into their intermediate dir.
+            if self.toolset == "host":
+                path = (
+                    "$(call intermediates-dir-for,%s,%s,true,,"
+                    "$(GYP_HOST_VAR_PREFIX))"
+                    % (self.android_class, self.android_module)
+                )
+            else:
+                path = "$(call intermediates-dir-for,{},{},,,$(GYP_VAR_PREFIX))".format(
+                    self.android_class,
+                    self.android_module,
+                )
+
+        assert spec.get("product_dir") is None  # TODO: not supported?
+        return os.path.join(path, self.ComputeOutputBasename(spec))
+
+    def NormalizeIncludePaths(self, include_paths):
+        """Normalize include_paths.
+        Convert absolute paths to relative to the Android top directory.
+
+        Args:
+          include_paths: A list of unprocessed include paths.
+        Returns:
+          A list of normalized include paths.
+        """
+        normalized = []
+        for path in include_paths:
+            if path[0] == "/":
+                path = gyp.common.RelativePath(path, self.android_top_dir)
+            normalized.append(path)
+        return normalized
+
+    def ExtractIncludesFromCFlags(self, cflags):
+        """Extract include paths ("-I...") out of cflags.
+
+        Args:
+          cflags: A list of compiler flags, which may be mixed with "-I.."
+        Returns:
+          A tuple of lists: (clean_cflags, include_paths). "-I.." is trimmed.
+        """
+        clean_cflags = []
+        include_paths = []
+        for flag in cflags:
+            if flag.startswith("-I"):
+                include_paths.append(flag[2:])
+            else:
+                clean_cflags.append(flag)
+
+        return (clean_cflags, include_paths)
+
+    def FilterLibraries(self, libraries):
+        """Filter the 'libraries' key to separate things that shouldn't be ldflags.
+ + Library entries that look like filenames should be converted to android + module names instead of being passed to the linker as flags. + + Args: + libraries: the value of spec.get('libraries') + Returns: + A tuple (static_lib_modules, dynamic_lib_modules, ldflags) + """ + static_lib_modules = [] + dynamic_lib_modules = [] + ldflags = [] + for libs in libraries: + # Libs can have multiple words. + for lib in libs.split(): + # Filter the system libraries, which are added by default by the Android + # build system. + if ( + lib == "-lc" + or lib == "-lstdc++" + or lib == "-lm" + or lib.endswith("libgcc.a") + ): + continue + match = re.search(r"([^/]+)\.a$", lib) + if match: + static_lib_modules.append(match.group(1)) + continue + match = re.search(r"([^/]+)\.so$", lib) + if match: + dynamic_lib_modules.append(match.group(1)) + continue + if lib.startswith("-l"): + ldflags.append(lib) + return (static_lib_modules, dynamic_lib_modules, ldflags) + + def ComputeDeps(self, spec): + """Compute the dependencies of a gyp spec. + + Returns a tuple (deps, link_deps), where each is a list of + filenames that will need to be put in front of make for either + building (deps) or linking (link_deps). + """ + deps = [] + link_deps = [] + if "dependencies" in spec: + deps.extend( + [ + target_outputs[dep] + for dep in spec["dependencies"] + if target_outputs[dep] + ] + ) + for dep in spec["dependencies"]: + if dep in target_link_deps: + link_deps.append(target_link_deps[dep]) + deps.extend(link_deps) + return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) + + def WriteTargetFlags(self, spec, configs, link_deps): + """Write Makefile code to specify the link flags and library dependencies. + + spec, configs: input from gyp. + link_deps: link dependency list; see ComputeDeps() + """ + # Libraries (i.e. -lfoo) + # These must be included even for static libraries as some of them provide + # implicit include paths through the build system. + libraries = gyp.common.uniquer(spec.get("libraries", [])) + static_libs, dynamic_libs, ldflags_libs = self.FilterLibraries(libraries) + + if self.type != "static_library": + for configname, config in sorted(configs.items()): + ldflags = list(config.get("ldflags", [])) + self.WriteLn("") + self.WriteList(ldflags, "LOCAL_LDFLAGS_%s" % configname) + self.WriteList(ldflags_libs, "LOCAL_GYP_LIBS") + self.WriteLn( + "LOCAL_LDFLAGS := $(LOCAL_LDFLAGS_$(GYP_CONFIGURATION)) " + "$(LOCAL_GYP_LIBS)" + ) + + # Link dependencies (i.e. other gyp targets this target depends on) + # These need not be included for static libraries as within the gyp build + # we do not use the implicit include path mechanism. + if self.type != "static_library": + static_link_deps = [x[1] for x in link_deps if x[0] == "static"] + shared_link_deps = [x[1] for x in link_deps if x[0] == "shared"] + else: + static_link_deps = [] + shared_link_deps = [] + + # Only write the lists if they are non-empty. + if static_libs or static_link_deps: + self.WriteLn("") + self.WriteList(static_libs + static_link_deps, "LOCAL_STATIC_LIBRARIES") + self.WriteLn("# Enable grouping to fix circular references") + self.WriteLn("LOCAL_GROUP_STATIC_LIBRARIES := true") + if dynamic_libs or shared_link_deps: + self.WriteLn("") + self.WriteList(dynamic_libs + shared_link_deps, "LOCAL_SHARED_LIBRARIES") + + def WriteTarget( + self, spec, configs, deps, link_deps, part_of_all, write_alias_target + ): + """Write Makefile code to produce the final target of the gyp spec. + + spec, configs: input from gyp. 
+ deps, link_deps: dependency lists; see ComputeDeps() + part_of_all: flag indicating this target is part of 'all' + write_alias_target: flag indicating whether to create short aliases for this + target + """ + self.WriteLn("### Rules for final target.") + + if self.type != "none": + self.WriteTargetFlags(spec, configs, link_deps) + + settings = spec.get("aosp_build_settings", {}) + if settings: + self.WriteLn("### Set directly by aosp_build_settings.") + for k, v in settings.items(): + if isinstance(v, list): + self.WriteList(v, k) + else: + self.WriteLn(f"{k} := {make.QuoteIfNecessary(v)}") + self.WriteLn("") + + # Add to the set of targets which represent the gyp 'all' target. We use the + # name 'gyp_all_modules' as the Android build system doesn't allow the use + # of the Make target 'all' and because 'all_modules' is the equivalent of + # the Make target 'all' on Android. + if part_of_all and write_alias_target: + self.WriteLn('# Add target alias to "gyp_all_modules" target.') + self.WriteLn(".PHONY: gyp_all_modules") + self.WriteLn("gyp_all_modules: %s" % self.android_module) + self.WriteLn("") + + # Add an alias from the gyp target name to the Android module name. This + # simplifies manual builds of the target, and is required by the test + # framework. + if self.target != self.android_module and write_alias_target: + self.WriteLn("# Alias gyp target name.") + self.WriteLn(".PHONY: %s" % self.target) + self.WriteLn(f"{self.target}: {self.android_module}") + self.WriteLn("") + + # Add the command to trigger build of the target type depending + # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY + # NOTE: This has to come last! + modifier = "" + if self.toolset == "host": + modifier = "HOST_" + if self.type == "static_library": + self.WriteLn("include $(BUILD_%sSTATIC_LIBRARY)" % modifier) + elif self.type == "shared_library": + self.WriteLn("LOCAL_PRELINK_MODULE := false") + self.WriteLn("include $(BUILD_%sSHARED_LIBRARY)" % modifier) + elif self.type == "executable": + self.WriteLn("LOCAL_CXX_STL := libc++_static") + # Executables are for build and test purposes only, so they're installed + # to a directory that doesn't get included in the system image. + self.WriteLn("LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)") + self.WriteLn("include $(BUILD_%sEXECUTABLE)" % modifier) + else: + self.WriteLn("LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp") + self.WriteLn("LOCAL_UNINSTALLABLE_MODULE := true") + if self.toolset == "target": + self.WriteLn("LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_VAR_PREFIX)") + else: + self.WriteLn("LOCAL_2ND_ARCH_VAR_PREFIX := $(GYP_HOST_VAR_PREFIX)") + self.WriteLn() + self.WriteLn("include $(BUILD_SYSTEM)/base_rules.mk") + self.WriteLn() + self.WriteLn("$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)") + self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"') + self.WriteLn("\t$(hide) mkdir -p $(dir $@)") + self.WriteLn("\t$(hide) touch $@") + self.WriteLn() + self.WriteLn("LOCAL_2ND_ARCH_VAR_PREFIX :=") + + def WriteList( + self, + value_list, + variable=None, + prefix="", + quoter=make.QuoteIfNecessary, + local_pathify=False, + ): + """Write a variable definition that is a list of values. + + E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out + foo = blaha blahb + but in a pretty-printed style. 
+ """ + values = "" + if value_list: + value_list = [quoter(prefix + value) for value in value_list] + if local_pathify: + value_list = [self.LocalPathify(value) for value in value_list] + values = " \\\n\t" + " \\\n\t".join(value_list) + self.fp.write(f"{variable} :={values}\n\n") + + def WriteLn(self, text=""): + self.fp.write(text + "\n") + + def LocalPathify(self, path): + """Convert a subdirectory-relative path into a normalized path which starts + with the make variable $(LOCAL_PATH) (i.e. the top of the project tree). + Absolute paths, or paths that contain variables, are just normalized.""" + if "$(" in path or os.path.isabs(path): + # path is not a file in the project tree in this case, but calling + # normpath is still important for trimming trailing slashes. + return os.path.normpath(path) + local_path = os.path.join("$(LOCAL_PATH)", self.path, path) + local_path = os.path.normpath(local_path) + # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH) + # - i.e. that the resulting path is still inside the project tree. The + # path may legitimately have ended up containing just $(LOCAL_PATH), though, + # so we don't look for a slash. + assert local_path.startswith( + "$(LOCAL_PATH)" + ), f"Path {path} attempts to escape from gyp path {self.path} !)" + return local_path + + def ExpandInputRoot(self, template, expansion, dirname): + if "%(INPUT_ROOT)s" not in template and "%(INPUT_DIRNAME)s" not in template: + return template + path = template % { + "INPUT_ROOT": expansion, + "INPUT_DIRNAME": dirname, + } + return os.path.normpath(path) + + +def PerformBuild(data, configurations, params): + # The android backend only supports the default configuration. + options = params["options"] + makefile = os.path.abspath(os.path.join(options.toplevel_dir, "GypAndroid.mk")) + env = dict(os.environ) + env["ONE_SHOT_MAKEFILE"] = makefile + arguments = ["make", "-C", os.environ["ANDROID_BUILD_TOP"], "gyp_all_modules"] + print("Building: %s" % arguments) + subprocess.check_call(arguments, env=env) + + +def GenerateOutput(target_list, target_dicts, data, params): + options = params["options"] + generator_flags = params.get("generator_flags", {}) + limit_to_target_all = generator_flags.get("limit_to_target_all", False) + write_alias_targets = generator_flags.get("write_alias_targets", True) + sdk_version = generator_flags.get("aosp_sdk_version", 0) + android_top_dir = os.environ.get("ANDROID_BUILD_TOP") + assert android_top_dir, "$ANDROID_BUILD_TOP not set; you need to run lunch." + + def CalculateMakefilePath(build_file, base_name): + """Determine where to write a Makefile for a given gyp file.""" + # Paths in gyp files are relative to the .gyp file, but we want + # paths relative to the source root for the master makefile. Grab + # the path of the .gyp file as the base to relativize against. + # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". + base_path = gyp.common.RelativePath(os.path.dirname(build_file), options.depth) + # We write the file in the base_path directory. + output_file = os.path.join(options.depth, base_path, base_name) + assert ( + not options.generator_output + ), "The Android backend does not support options.generator_output." + base_path = gyp.common.RelativePath( + os.path.dirname(build_file), options.toplevel_dir + ) + return base_path, output_file + + # TODO: search for the first non-'Default' target. This can go + # away when we add verification that all targets have the + # necessary configurations. 
+ default_configuration = None + for target in target_list: + spec = target_dicts[target] + if spec["default_configuration"] != "Default": + default_configuration = spec["default_configuration"] + break + if not default_configuration: + default_configuration = "Default" + + makefile_name = "GypAndroid" + options.suffix + ".mk" + makefile_path = os.path.join(options.toplevel_dir, makefile_name) + assert ( + not options.generator_output + ), "The Android backend does not support options.generator_output." + gyp.common.EnsureDirExists(makefile_path) + root_makefile = open(makefile_path, "w") + + root_makefile.write(header) + + # We set LOCAL_PATH just once, here, to the top of the project tree. This + # allows all the other paths we use to be relative to the Android.mk file, + # as the Android build system expects. + root_makefile.write("\nLOCAL_PATH := $(call my-dir)\n") + + # Find the list of targets that derive from the gyp file(s) being built. + needed_targets = set() + for build_file in params["build_files"]: + for target in gyp.common.AllTargets(target_list, target_dicts, build_file): + needed_targets.add(target) + + build_files = set() + include_list = set() + android_modules = {} + for qualified_target in target_list: + build_file, target, toolset = gyp.common.ParseQualifiedTarget(qualified_target) + relative_build_file = gyp.common.RelativePath(build_file, options.toplevel_dir) + build_files.add(relative_build_file) + included_files = data[build_file]["included_files"] + for included_file in included_files: + # The included_files entries are relative to the dir of the build file + # that included them, so we have to undo that and then make them relative + # to the root dir. + relative_include_file = gyp.common.RelativePath( + gyp.common.UnrelativePath(included_file, build_file), + options.toplevel_dir, + ) + abs_include_file = os.path.abspath(relative_include_file) + # If the include file is from the ~/.gyp dir, we should use absolute path + # so that relocating the src dir doesn't break the path. + if params["home_dot_gyp"] and abs_include_file.startswith( + params["home_dot_gyp"] + ): + build_files.add(abs_include_file) + else: + build_files.add(relative_include_file) + + base_path, output_file = CalculateMakefilePath( + build_file, target + "." + toolset + options.suffix + ".mk" + ) + + spec = target_dicts[qualified_target] + configs = spec["configurations"] + + part_of_all = qualified_target in needed_targets + if limit_to_target_all and not part_of_all: + continue + + relative_target = gyp.common.QualifiedTarget( + relative_build_file, target, toolset + ) + writer = AndroidMkWriter(android_top_dir) + android_module = writer.Write( + qualified_target, + relative_target, + base_path, + output_file, + spec, + configs, + part_of_all=part_of_all, + write_alias_target=write_alias_targets, + sdk_version=sdk_version, + ) + if android_module in android_modules: + print( + "ERROR: Android module names must be unique. The following " + "targets both generate Android module name %s.\n %s\n %s" + % (android_module, android_modules[android_module], qualified_target) + ) + return + android_modules[android_module] = qualified_target + + # Our root_makefile lives at the source root. Compute the relative path + # from there to the output_file for including. 
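+        # e.g. (illustrative) an output_file of <top>/foo/bar.target.mk becomes
+        # the include path "foo/bar.target.mk" relative to GypAndroid.mk.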
+ mkfile_rel_path = gyp.common.RelativePath( + output_file, os.path.dirname(makefile_path) + ) + include_list.add(mkfile_rel_path) + + root_makefile.write("GYP_CONFIGURATION ?= %s\n" % default_configuration) + root_makefile.write("GYP_VAR_PREFIX ?=\n") + root_makefile.write("GYP_HOST_VAR_PREFIX ?=\n") + root_makefile.write("GYP_HOST_MULTILIB ?= first\n") + + # Write out the sorted list of includes. + root_makefile.write("\n") + for include_file in sorted(include_list): + root_makefile.write("include $(LOCAL_PATH)/" + include_file + "\n") + root_makefile.write("\n") + + if write_alias_targets: + root_makefile.write(ALL_MODULES_FOOTER) + + root_makefile.close() diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py new file mode 100644 index 0000000..c95d184 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py @@ -0,0 +1,1321 @@ +# Copyright (c) 2013 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""cmake output module + +This module is under development and should be considered experimental. + +This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is +created for each configuration. + +This module's original purpose was to support editing in IDEs like KDevelop +which use CMake for project management. It is also possible to use CMake to +generate projects for other IDEs such as eclipse cdt and code::blocks. QtCreator +will convert the CMakeLists.txt to a code::blocks cbp for the editor to read, +but build using CMake. As a result QtCreator editor is unaware of compiler +defines. The generated CMakeLists.txt can also be used to build on Linux. There +is currently no support for building on platforms other than Linux. + +The generated CMakeLists.txt should properly compile all projects. However, +there is a mismatch between gyp and cmake with regard to linking. All attempts +are made to work around this, but CMake sometimes sees -Wl,--start-group as a +library and incorrectly repeats it. As a result the output of this generator +should not be relied on for building. + +When using with kdevelop, use version 4.4+. Previous versions of kdevelop will +not be able to find the header file directories described in the generated +CMakeLists.txt file. 
+""" + + +import multiprocessing +import os +import signal +import subprocess +import gyp.common +import gyp.xcode_emulation + +_maketrans = str.maketrans + +generator_default_variables = { + "EXECUTABLE_PREFIX": "", + "EXECUTABLE_SUFFIX": "", + "STATIC_LIB_PREFIX": "lib", + "STATIC_LIB_SUFFIX": ".a", + "SHARED_LIB_PREFIX": "lib", + "SHARED_LIB_SUFFIX": ".so", + "SHARED_LIB_DIR": "${builddir}/lib.${TOOLSET}", + "LIB_DIR": "${obj}.${TOOLSET}", + "INTERMEDIATE_DIR": "${obj}.${TOOLSET}/${TARGET}/geni", + "SHARED_INTERMEDIATE_DIR": "${obj}/gen", + "PRODUCT_DIR": "${builddir}", + "RULE_INPUT_PATH": "${RULE_INPUT_PATH}", + "RULE_INPUT_DIRNAME": "${RULE_INPUT_DIRNAME}", + "RULE_INPUT_NAME": "${RULE_INPUT_NAME}", + "RULE_INPUT_ROOT": "${RULE_INPUT_ROOT}", + "RULE_INPUT_EXT": "${RULE_INPUT_EXT}", + "CONFIGURATION_NAME": "${configuration}", +} + +FULL_PATH_VARS = ("${CMAKE_CURRENT_LIST_DIR}", "${builddir}", "${obj}") + +generator_supports_multiple_toolsets = True +generator_wants_static_library_dependencies_adjusted = True + +COMPILABLE_EXTENSIONS = { + ".c": "cc", + ".cc": "cxx", + ".cpp": "cxx", + ".cxx": "cxx", + ".s": "s", # cc + ".S": "s", # cc +} + + +def RemovePrefix(a, prefix): + """Returns 'a' without 'prefix' if it starts with 'prefix'.""" + return a[len(prefix) :] if a.startswith(prefix) else a + + +def CalculateVariables(default_variables, params): + """Calculate additional variables for use in the build (called by gyp).""" + default_variables.setdefault("OS", gyp.common.GetFlavor(params)) + + +def Compilable(filename): + """Return true if the file is compilable (should be in OBJS).""" + return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS) + + +def Linkable(filename): + """Return true if the file is linkable (should be on the link line).""" + return filename.endswith(".o") + + +def NormjoinPathForceCMakeSource(base_path, rel_path): + """Resolves rel_path against base_path and returns the result. + + If rel_path is an absolute path it is returned unchanged. + Otherwise it is resolved against base_path and normalized. + If the result is a relative path, it is forced to be relative to the + CMakeLists.txt. + """ + if os.path.isabs(rel_path): + return rel_path + if any([rel_path.startswith(var) for var in FULL_PATH_VARS]): + return rel_path + # TODO: do we need to check base_path for absolute variables as well? + return os.path.join( + "${CMAKE_CURRENT_LIST_DIR}", os.path.normpath(os.path.join(base_path, rel_path)) + ) + + +def NormjoinPath(base_path, rel_path): + """Resolves rel_path against base_path and returns the result. + TODO: what is this really used for? + If rel_path begins with '$' it is returned unchanged. + Otherwise it is resolved against base_path if relative, then normalized. + """ + if rel_path.startswith("$") and not rel_path.startswith("${configuration}"): + return rel_path + return os.path.normpath(os.path.join(base_path, rel_path)) + + +def CMakeStringEscape(a): + """Escapes the string 'a' for use inside a CMake string. 
+ + This means escaping + '\' otherwise it may be seen as modifying the next character + '"' otherwise it will end the string + ';' otherwise the string becomes a list + + The following do not need to be escaped + '#' when the lexer is in string state, this does not start a comment + + The following are yet unknown + '$' generator variables (like ${obj}) must not be escaped, + but text $ should be escaped + what is wanted is to know which $ come from generator variables + """ + return a.replace("\\", "\\\\").replace(";", "\\;").replace('"', '\\"') + + +def SetFileProperty(output, source_name, property_name, values, sep): + """Given a set of source file, sets the given property on them.""" + output.write("set_source_files_properties(") + output.write(source_name) + output.write(" PROPERTIES ") + output.write(property_name) + output.write(' "') + for value in values: + output.write(CMakeStringEscape(value)) + output.write(sep) + output.write('")\n') + + +def SetFilesProperty(output, variable, property_name, values, sep): + """Given a set of source files, sets the given property on them.""" + output.write("set_source_files_properties(") + WriteVariable(output, variable) + output.write(" PROPERTIES ") + output.write(property_name) + output.write(' "') + for value in values: + output.write(CMakeStringEscape(value)) + output.write(sep) + output.write('")\n') + + +def SetTargetProperty(output, target_name, property_name, values, sep=""): + """Given a target, sets the given property.""" + output.write("set_target_properties(") + output.write(target_name) + output.write(" PROPERTIES ") + output.write(property_name) + output.write(' "') + for value in values: + output.write(CMakeStringEscape(value)) + output.write(sep) + output.write('")\n') + + +def SetVariable(output, variable_name, value): + """Sets a CMake variable.""" + output.write("set(") + output.write(variable_name) + output.write(' "') + output.write(CMakeStringEscape(value)) + output.write('")\n') + + +def SetVariableList(output, variable_name, values): + """Sets a CMake variable to a list.""" + if not values: + return SetVariable(output, variable_name, "") + if len(values) == 1: + return SetVariable(output, variable_name, values[0]) + output.write("list(APPEND ") + output.write(variable_name) + output.write('\n "') + output.write('"\n "'.join([CMakeStringEscape(value) for value in values])) + output.write('")\n') + + +def UnsetVariable(output, variable_name): + """Unsets a CMake variable.""" + output.write("unset(") + output.write(variable_name) + output.write(")\n") + + +def WriteVariable(output, variable_name, prepend=None): + if prepend: + output.write(prepend) + output.write("${") + output.write(variable_name) + output.write("}") + + +class CMakeTargetType: + def __init__(self, command, modifier, property_modifier): + self.command = command + self.modifier = modifier + self.property_modifier = property_modifier + + +cmake_target_type_from_gyp_target_type = { + "executable": CMakeTargetType("add_executable", None, "RUNTIME"), + "static_library": CMakeTargetType("add_library", "STATIC", "ARCHIVE"), + "shared_library": CMakeTargetType("add_library", "SHARED", "LIBRARY"), + "loadable_module": CMakeTargetType("add_library", "MODULE", "LIBRARY"), + "none": CMakeTargetType("add_custom_target", "SOURCES", None), +} + + +def StringToCMakeTargetName(a): + """Converts the given string 'a' to a valid CMake target name. + + All invalid characters are replaced by '_'. 
+    Invalid for cmake: ' ', '/', '(', ')', '"'
+    Invalid for make: ':'
+    Invalid for unknown reasons but cause failures: '.'
+    """
+    return a.translate(_maketrans(' /():."', "_______"))
+
+
+def WriteActions(target_name, actions, extra_sources, extra_deps, path_to_gyp, output):
+    """Write CMake for the 'actions' in the target.
+
+    Args:
+      target_name: the name of the CMake target being generated.
+      actions: the Gyp 'actions' dict for this target.
+      extra_sources: [(<cmake_src>, <original_src>)] to append with generated source files.
+      extra_deps: [<cmake_target>] to append with generated targets.
+      path_to_gyp: relative path from CMakeLists.txt being generated to
+                   the Gyp file in which the target being generated is defined.
+    """
+    for action in actions:
+        action_name = StringToCMakeTargetName(action["action_name"])
+        action_target_name = f"{target_name}__{action_name}"
+
+        inputs = action["inputs"]
+        inputs_name = action_target_name + "__input"
+        SetVariableList(
+            output,
+            inputs_name,
+            [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs],
+        )
+
+        outputs = action["outputs"]
+        cmake_outputs = [
+            NormjoinPathForceCMakeSource(path_to_gyp, out) for out in outputs
+        ]
+        outputs_name = action_target_name + "__output"
+        SetVariableList(output, outputs_name, cmake_outputs)
+
+        # Build up a list of outputs.
+        # Collect the output dirs we'll need.
+        dirs = {dir for dir in (os.path.dirname(o) for o in outputs) if dir}
+
+        if int(action.get("process_outputs_as_sources", False)):
+            extra_sources.extend(zip(cmake_outputs, outputs))
+
+        # add_custom_command
+        output.write("add_custom_command(OUTPUT ")
+        WriteVariable(output, outputs_name)
+        output.write("\n")
+
+        if len(dirs) > 0:
+            for directory in dirs:
+                output.write("  COMMAND ${CMAKE_COMMAND} -E make_directory ")
+                output.write(directory)
+                output.write("\n")
+
+        output.write("  COMMAND ")
+        output.write(gyp.common.EncodePOSIXShellList(action["action"]))
+        output.write("\n")
+
+        output.write("  DEPENDS ")
+        WriteVariable(output, inputs_name)
+        output.write("\n")
+
+        output.write("  WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/")
+        output.write(path_to_gyp)
+        output.write("\n")
+
+        output.write("  COMMENT ")
+        if "message" in action:
+            output.write(action["message"])
+        else:
+            output.write(action_target_name)
+        output.write("\n")
+
+        output.write("  VERBATIM\n")
+        output.write(")\n")
+
+        # add_custom_target
+        output.write("add_custom_target(")
+        output.write(action_target_name)
+        output.write("\n  DEPENDS ")
+        WriteVariable(output, outputs_name)
+        output.write("\n  SOURCES ")
+        WriteVariable(output, inputs_name)
+        output.write("\n)\n")
+
+        extra_deps.append(action_target_name)
+
+
+def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
+    if rel_path.startswith(("${RULE_INPUT_PATH}", "${RULE_INPUT_DIRNAME}")):
+        if any([rule_source.startswith(var) for var in FULL_PATH_VARS]):
+            return rel_path
+    return NormjoinPathForceCMakeSource(base_path, rel_path)
+
+
+def WriteRules(target_name, rules, extra_sources, extra_deps, path_to_gyp, output):
+    """Write CMake for the 'rules' in the target.
+
+    Args:
+      target_name: the name of the CMake target being generated.
+      rules: the Gyp 'rules' dict for this target.
+      extra_sources: [(<cmake_src>, <original_src>)] to append with generated source files.
+      extra_deps: [<cmake_target>] to append with generated targets.
+      path_to_gyp: relative path from CMakeLists.txt being generated to
+                   the Gyp file in which the target being generated is defined.
+ """ + for rule in rules: + rule_name = StringToCMakeTargetName(target_name + "__" + rule["rule_name"]) + + inputs = rule.get("inputs", []) + inputs_name = rule_name + "__input" + SetVariableList( + output, + inputs_name, + [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs], + ) + outputs = rule["outputs"] + var_outputs = [] + + for count, rule_source in enumerate(rule.get("rule_sources", [])): + action_name = rule_name + "_" + str(count) + + rule_source_dirname, rule_source_basename = os.path.split(rule_source) + rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename) + + SetVariable(output, "RULE_INPUT_PATH", rule_source) + SetVariable(output, "RULE_INPUT_DIRNAME", rule_source_dirname) + SetVariable(output, "RULE_INPUT_NAME", rule_source_basename) + SetVariable(output, "RULE_INPUT_ROOT", rule_source_root) + SetVariable(output, "RULE_INPUT_EXT", rule_source_ext) + + # Build up a list of outputs. + # Collect the output dirs we'll need. + dirs = {dir for dir in (os.path.dirname(o) for o in outputs) if dir} + + # Create variables for the output, as 'local' variable will be unset. + these_outputs = [] + for output_index, out in enumerate(outputs): + output_name = action_name + "_" + str(output_index) + SetVariable( + output, + output_name, + NormjoinRulePathForceCMakeSource(path_to_gyp, out, rule_source), + ) + if int(rule.get("process_outputs_as_sources", False)): + extra_sources.append(("${" + output_name + "}", out)) + these_outputs.append("${" + output_name + "}") + var_outputs.append("${" + output_name + "}") + + # add_custom_command + output.write("add_custom_command(OUTPUT\n") + for out in these_outputs: + output.write(" ") + output.write(out) + output.write("\n") + + for directory in dirs: + output.write(" COMMAND ${CMAKE_COMMAND} -E make_directory ") + output.write(directory) + output.write("\n") + + output.write(" COMMAND ") + output.write(gyp.common.EncodePOSIXShellList(rule["action"])) + output.write("\n") + + output.write(" DEPENDS ") + WriteVariable(output, inputs_name) + output.write(" ") + output.write(NormjoinPath(path_to_gyp, rule_source)) + output.write("\n") + + # CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives. + # The cwd is the current build directory. + output.write(" WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/") + output.write(path_to_gyp) + output.write("\n") + + output.write(" COMMENT ") + if "message" in rule: + output.write(rule["message"]) + else: + output.write(action_name) + output.write("\n") + + output.write(" VERBATIM\n") + output.write(")\n") + + UnsetVariable(output, "RULE_INPUT_PATH") + UnsetVariable(output, "RULE_INPUT_DIRNAME") + UnsetVariable(output, "RULE_INPUT_NAME") + UnsetVariable(output, "RULE_INPUT_ROOT") + UnsetVariable(output, "RULE_INPUT_EXT") + + # add_custom_target + output.write("add_custom_target(") + output.write(rule_name) + output.write(" DEPENDS\n") + for out in var_outputs: + output.write(" ") + output.write(out) + output.write("\n") + output.write("SOURCES ") + WriteVariable(output, inputs_name) + output.write("\n") + for rule_source in rule.get("rule_sources", []): + output.write(" ") + output.write(NormjoinPath(path_to_gyp, rule_source)) + output.write("\n") + output.write(")\n") + + extra_deps.append(rule_name) + + +def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output): + """Write CMake for the 'copies' in the target. + + Args: + target_name: the name of the CMake target being generated. + actions: the Gyp 'actions' dict for this target. 
+      extra_deps: [<cmake_target>] to append with generated targets.
+      path_to_gyp: relative path from CMakeLists.txt being generated to
+                   the Gyp file in which the target being generated is defined.
+    """
+    copy_name = target_name + "__copies"
+
+    # CMake gets upset with custom targets with OUTPUT which specify no output.
+    have_copies = any(copy["files"] for copy in copies)
+    if not have_copies:
+        output.write("add_custom_target(")
+        output.write(copy_name)
+        output.write(")\n")
+        extra_deps.append(copy_name)
+        return
+
+    class Copy:
+        def __init__(self, ext, command):
+            self.cmake_inputs = []
+            self.cmake_outputs = []
+            self.gyp_inputs = []
+            self.gyp_outputs = []
+            self.ext = ext
+            self.inputs_name = None
+            self.outputs_name = None
+            self.command = command
+
+    file_copy = Copy("", "copy")
+    dir_copy = Copy("_dirs", "copy_directory")
+
+    for copy in copies:
+        files = copy["files"]
+        destination = copy["destination"]
+        for src in files:
+            path = os.path.normpath(src)
+            basename = os.path.split(path)[1]
+            dst = os.path.join(destination, basename)
+
+            copy = file_copy if os.path.basename(src) else dir_copy
+
+            copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
+            copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
+            copy.gyp_inputs.append(src)
+            copy.gyp_outputs.append(dst)
+
+    for copy in (file_copy, dir_copy):
+        if copy.cmake_inputs:
+            copy.inputs_name = copy_name + "__input" + copy.ext
+            SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
+
+            copy.outputs_name = copy_name + "__output" + copy.ext
+            SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
+
+    # add_custom_command
+    output.write("add_custom_command(\n")
+
+    output.write("OUTPUT")
+    for copy in (file_copy, dir_copy):
+        if copy.outputs_name:
+            WriteVariable(output, copy.outputs_name, " ")
+    output.write("\n")
+
+    for copy in (file_copy, dir_copy):
+        for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
+            # 'cmake -E copy src dst' will create the 'dst' directory if needed.
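+            # Each (src, dst) pair becomes one COMMAND line, e.g. (hypothetical
+            # paths): COMMAND ${CMAKE_COMMAND} -E copy res/a.txt gen/a.txt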
+ output.write("COMMAND ${CMAKE_COMMAND} -E %s " % copy.command) + output.write(src) + output.write(" ") + output.write(dst) + output.write("\n") + + output.write("DEPENDS") + for copy in (file_copy, dir_copy): + if copy.inputs_name: + WriteVariable(output, copy.inputs_name, " ") + output.write("\n") + + output.write("WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/") + output.write(path_to_gyp) + output.write("\n") + + output.write("COMMENT Copying for ") + output.write(target_name) + output.write("\n") + + output.write("VERBATIM\n") + output.write(")\n") + + # add_custom_target + output.write("add_custom_target(") + output.write(copy_name) + output.write("\n DEPENDS") + for copy in (file_copy, dir_copy): + if copy.outputs_name: + WriteVariable(output, copy.outputs_name, " ") + output.write("\n SOURCES") + if file_copy.inputs_name: + WriteVariable(output, file_copy.inputs_name, " ") + output.write("\n)\n") + + extra_deps.append(copy_name) + + +def CreateCMakeTargetBaseName(qualified_target): + """This is the name we would like the target to have.""" + _, gyp_target_name, gyp_target_toolset = gyp.common.ParseQualifiedTarget( + qualified_target + ) + cmake_target_base_name = gyp_target_name + if gyp_target_toolset and gyp_target_toolset != "target": + cmake_target_base_name += "_" + gyp_target_toolset + return StringToCMakeTargetName(cmake_target_base_name) + + +def CreateCMakeTargetFullName(qualified_target): + """An unambiguous name for the target.""" + gyp_file, gyp_target_name, gyp_target_toolset = gyp.common.ParseQualifiedTarget( + qualified_target + ) + cmake_target_full_name = gyp_file + ":" + gyp_target_name + if gyp_target_toolset and gyp_target_toolset != "target": + cmake_target_full_name += "_" + gyp_target_toolset + return StringToCMakeTargetName(cmake_target_full_name) + + +class CMakeNamer: + """Converts Gyp target names into CMake target names. + + CMake requires that target names be globally unique. One way to ensure + this is to fully qualify the names of the targets. Unfortunately, this + ends up with all targets looking like "chrome_chrome_gyp_chrome" instead + of just "chrome". If this generator were only interested in building, it + would be possible to fully qualify all target names, then create + unqualified target names which depend on all qualified targets which + should have had that name. This is more or less what the 'make' generator + does with aliases. However, one goal of this generator is to create CMake + files for use with IDEs, and fully qualified names are not as user + friendly. + + Since target name collision is rare, we do the above only when required. + + Toolset variants are always qualified from the base, as this is required for + building. However, it also makes sense for an IDE, as it is possible for + defines to be different. 
+ """ + + def __init__(self, target_list): + self.cmake_target_base_names_conficting = set() + + cmake_target_base_names_seen = set() + for qualified_target in target_list: + cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target) + + if cmake_target_base_name not in cmake_target_base_names_seen: + cmake_target_base_names_seen.add(cmake_target_base_name) + else: + self.cmake_target_base_names_conficting.add(cmake_target_base_name) + + def CreateCMakeTargetName(self, qualified_target): + base_name = CreateCMakeTargetBaseName(qualified_target) + if base_name in self.cmake_target_base_names_conficting: + return CreateCMakeTargetFullName(qualified_target) + return base_name + + +def WriteTarget( + namer, + qualified_target, + target_dicts, + build_dir, + config_to_use, + options, + generator_flags, + all_qualified_targets, + flavor, + output, +): + # The make generator does this always. + # TODO: It would be nice to be able to tell CMake all dependencies. + circular_libs = generator_flags.get("circular", True) + + if not generator_flags.get("standalone", False): + output.write("\n#") + output.write(qualified_target) + output.write("\n") + + gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) + rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir) + rel_gyp_dir = os.path.dirname(rel_gyp_file) + + # Relative path from build dir to top dir. + build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir) + # Relative path from build dir to gyp dir. + build_to_gyp = os.path.join(build_to_top, rel_gyp_dir) + + path_from_cmakelists_to_gyp = build_to_gyp + + spec = target_dicts.get(qualified_target, {}) + config = spec.get("configurations", {}).get(config_to_use, {}) + + xcode_settings = None + if flavor == "mac": + xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) + + target_name = spec.get("target_name", "") + target_type = spec.get("type", "") + target_toolset = spec.get("toolset") + + cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type) + if cmake_target_type is None: + print( + "Target %s has unknown target type %s, skipping." + % (target_name, target_type) + ) + return + + SetVariable(output, "TARGET", target_name) + SetVariable(output, "TOOLSET", target_toolset) + + cmake_target_name = namer.CreateCMakeTargetName(qualified_target) + + extra_sources = [] + extra_deps = [] + + # Actions must come first, since they can generate more OBJs for use below. + if "actions" in spec: + WriteActions( + cmake_target_name, + spec["actions"], + extra_sources, + extra_deps, + path_from_cmakelists_to_gyp, + output, + ) + + # Rules must be early like actions. + if "rules" in spec: + WriteRules( + cmake_target_name, + spec["rules"], + extra_sources, + extra_deps, + path_from_cmakelists_to_gyp, + output, + ) + + # Copies + if "copies" in spec: + WriteCopies( + cmake_target_name, + spec["copies"], + extra_deps, + path_from_cmakelists_to_gyp, + output, + ) + + # Target and sources + srcs = spec.get("sources", []) + + # Gyp separates the sheep from the goats based on file extensions. + # A full separation is done here because of flag handing (see below). 
+ s_sources = [] + c_sources = [] + cxx_sources = [] + linkable_sources = [] + other_sources = [] + for src in srcs: + _, ext = os.path.splitext(src) + src_type = COMPILABLE_EXTENSIONS.get(ext, None) + src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src) + + if src_type == "s": + s_sources.append(src_norm_path) + elif src_type == "cc": + c_sources.append(src_norm_path) + elif src_type == "cxx": + cxx_sources.append(src_norm_path) + elif Linkable(ext): + linkable_sources.append(src_norm_path) + else: + other_sources.append(src_norm_path) + + for extra_source in extra_sources: + src, real_source = extra_source + _, ext = os.path.splitext(real_source) + src_type = COMPILABLE_EXTENSIONS.get(ext, None) + + if src_type == "s": + s_sources.append(src) + elif src_type == "cc": + c_sources.append(src) + elif src_type == "cxx": + cxx_sources.append(src) + elif Linkable(ext): + linkable_sources.append(src) + else: + other_sources.append(src) + + s_sources_name = None + if s_sources: + s_sources_name = cmake_target_name + "__asm_srcs" + SetVariableList(output, s_sources_name, s_sources) + + c_sources_name = None + if c_sources: + c_sources_name = cmake_target_name + "__c_srcs" + SetVariableList(output, c_sources_name, c_sources) + + cxx_sources_name = None + if cxx_sources: + cxx_sources_name = cmake_target_name + "__cxx_srcs" + SetVariableList(output, cxx_sources_name, cxx_sources) + + linkable_sources_name = None + if linkable_sources: + linkable_sources_name = cmake_target_name + "__linkable_srcs" + SetVariableList(output, linkable_sources_name, linkable_sources) + + other_sources_name = None + if other_sources: + other_sources_name = cmake_target_name + "__other_srcs" + SetVariableList(output, other_sources_name, other_sources) + + # CMake gets upset when executable targets provide no sources. + # http://www.cmake.org/pipermail/cmake/2010-July/038461.html + dummy_sources_name = None + has_sources = ( + s_sources_name + or c_sources_name + or cxx_sources_name + or linkable_sources_name + or other_sources_name + ) + if target_type == "executable" and not has_sources: + dummy_sources_name = cmake_target_name + "__dummy_srcs" + SetVariable( + output, dummy_sources_name, "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c" + ) + output.write('if(NOT EXISTS "') + WriteVariable(output, dummy_sources_name) + output.write('")\n') + output.write(' file(WRITE "') + WriteVariable(output, dummy_sources_name) + output.write('" "")\n') + output.write("endif()\n") + + # CMake is opposed to setting linker directories and considers the practice + # of setting linker directories dangerous. Instead, it favors the use of + # find_library and passing absolute paths to target_link_libraries. + # However, CMake does provide the command link_directories, which adds + # link directories to targets defined after it is called. + # As a result, link_directories must come before the target definition. + # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES. 
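+    # Emits e.g. (hypothetical path): "link_directories( ../../third_party/libs\n)".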
+ library_dirs = config.get("library_dirs") + if library_dirs is not None: + output.write("link_directories(") + for library_dir in library_dirs: + output.write(" ") + output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir)) + output.write("\n") + output.write(")\n") + + output.write(cmake_target_type.command) + output.write("(") + output.write(cmake_target_name) + + if cmake_target_type.modifier is not None: + output.write(" ") + output.write(cmake_target_type.modifier) + + if s_sources_name: + WriteVariable(output, s_sources_name, " ") + if c_sources_name: + WriteVariable(output, c_sources_name, " ") + if cxx_sources_name: + WriteVariable(output, cxx_sources_name, " ") + if linkable_sources_name: + WriteVariable(output, linkable_sources_name, " ") + if other_sources_name: + WriteVariable(output, other_sources_name, " ") + if dummy_sources_name: + WriteVariable(output, dummy_sources_name, " ") + + output.write(")\n") + + # Let CMake know if the 'all' target should depend on this target. + exclude_from_all = ( + "TRUE" if qualified_target not in all_qualified_targets else "FALSE" + ) + SetTargetProperty(output, cmake_target_name, "EXCLUDE_FROM_ALL", exclude_from_all) + for extra_target_name in extra_deps: + SetTargetProperty( + output, extra_target_name, "EXCLUDE_FROM_ALL", exclude_from_all + ) + + # Output name and location. + if target_type != "none": + # Link as 'C' if there are no other files + if not c_sources and not cxx_sources: + SetTargetProperty(output, cmake_target_name, "LINKER_LANGUAGE", ["C"]) + + # Mark uncompiled sources as uncompiled. + if other_sources_name: + output.write("set_source_files_properties(") + WriteVariable(output, other_sources_name, "") + output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n') + + # Mark object sources as linkable. 
+        if linkable_sources_name:
+            output.write("set_source_files_properties(")
+            WriteVariable(output, linkable_sources_name, "")
+            output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')
+
+        # Output directory
+        target_output_directory = spec.get("product_dir")
+        if target_output_directory is None:
+            if target_type in ("executable", "loadable_module"):
+                target_output_directory = generator_default_variables["PRODUCT_DIR"]
+            elif target_type == "shared_library":
+                target_output_directory = "${builddir}/lib.${TOOLSET}"
+            elif spec.get("standalone_static_library", False):
+                target_output_directory = generator_default_variables["PRODUCT_DIR"]
+            else:
+                base_path = gyp.common.RelativePath(
+                    os.path.dirname(gyp_file), options.toplevel_dir
+                )
+                target_output_directory = "${obj}.${TOOLSET}"
+                target_output_directory = os.path.join(
+                    target_output_directory, base_path
+                )
+
+        cmake_target_output_directory = NormjoinPathForceCMakeSource(
+            path_from_cmakelists_to_gyp, target_output_directory
+        )
+        SetTargetProperty(
+            output,
+            cmake_target_name,
+            cmake_target_type.property_modifier + "_OUTPUT_DIRECTORY",
+            cmake_target_output_directory,
+        )
+
+        # Output name
+        default_product_prefix = ""
+        default_product_name = target_name
+        default_product_ext = ""
+        if target_type == "static_library":
+            static_library_prefix = generator_default_variables["STATIC_LIB_PREFIX"]
+            default_product_name = RemovePrefix(
+                default_product_name, static_library_prefix
+            )
+            default_product_prefix = static_library_prefix
+            default_product_ext = generator_default_variables["STATIC_LIB_SUFFIX"]
+
+        elif target_type in ("loadable_module", "shared_library"):
+            shared_library_prefix = generator_default_variables["SHARED_LIB_PREFIX"]
+            default_product_name = RemovePrefix(
+                default_product_name, shared_library_prefix
+            )
+            default_product_prefix = shared_library_prefix
+            default_product_ext = generator_default_variables["SHARED_LIB_SUFFIX"]
+
+        elif target_type != "executable":
+            print(
+                "ERROR: What output file should be generated?",
+                "type",
+                target_type,
+                "target",
+                target_name,
+            )
+
+        product_prefix = spec.get("product_prefix", default_product_prefix)
+        product_name = spec.get("product_name", default_product_name)
+        product_ext = spec.get("product_extension")
+        if product_ext:
+            product_ext = "." + product_ext
+        else:
+            product_ext = default_product_ext
+
+        SetTargetProperty(output, cmake_target_name, "PREFIX", product_prefix)
+        SetTargetProperty(
+            output,
+            cmake_target_name,
+            cmake_target_type.property_modifier + "_OUTPUT_NAME",
+            product_name,
+        )
+        SetTargetProperty(output, cmake_target_name, "SUFFIX", product_ext)
+
+        # Make the output of this target referenceable as a source.
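+        # (The GENERATED property, set just below, lets other targets list this
+        # output as a source without CMake requiring it to exist at configure
+        # time.)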
+ cmake_target_output_basename = product_prefix + product_name + product_ext + cmake_target_output = os.path.join( + cmake_target_output_directory, cmake_target_output_basename + ) + SetFileProperty(output, cmake_target_output, "GENERATED", ["TRUE"], "") + + # Includes + includes = config.get("include_dirs") + if includes: + # This (target include directories) is what requires CMake 2.8.8 + includes_name = cmake_target_name + "__include_dirs" + SetVariableList( + output, + includes_name, + [ + NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include) + for include in includes + ], + ) + output.write("set_property(TARGET ") + output.write(cmake_target_name) + output.write(" APPEND PROPERTY INCLUDE_DIRECTORIES ") + WriteVariable(output, includes_name, "") + output.write(")\n") + + # Defines + defines = config.get("defines") + if defines is not None: + SetTargetProperty( + output, cmake_target_name, "COMPILE_DEFINITIONS", defines, ";" + ) + + # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493 + # CMake currently does not have target C and CXX flags. + # So, instead of doing... + + # cflags_c = config.get('cflags_c') + # if cflags_c is not None: + # SetTargetProperty(output, cmake_target_name, + # 'C_COMPILE_FLAGS', cflags_c, ' ') + + # cflags_cc = config.get('cflags_cc') + # if cflags_cc is not None: + # SetTargetProperty(output, cmake_target_name, + # 'CXX_COMPILE_FLAGS', cflags_cc, ' ') + + # Instead we must... + cflags = config.get("cflags", []) + cflags_c = config.get("cflags_c", []) + cflags_cxx = config.get("cflags_cc", []) + if xcode_settings: + cflags = xcode_settings.GetCflags(config_to_use) + cflags_c = xcode_settings.GetCflagsC(config_to_use) + cflags_cxx = xcode_settings.GetCflagsCC(config_to_use) + # cflags_objc = xcode_settings.GetCflagsObjC(config_to_use) + # cflags_objcc = xcode_settings.GetCflagsObjCC(config_to_use) + + if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources): + SetTargetProperty(output, cmake_target_name, "COMPILE_FLAGS", cflags, " ") + + elif c_sources and not (s_sources or cxx_sources): + flags = [] + flags.extend(cflags) + flags.extend(cflags_c) + SetTargetProperty(output, cmake_target_name, "COMPILE_FLAGS", flags, " ") + + elif cxx_sources and not (s_sources or c_sources): + flags = [] + flags.extend(cflags) + flags.extend(cflags_cxx) + SetTargetProperty(output, cmake_target_name, "COMPILE_FLAGS", flags, " ") + + else: + # TODO: This is broken, one cannot generally set properties on files, + # as other targets may require different properties on the same files. 
+        if s_sources and cflags:
+            SetFilesProperty(output, s_sources_name, "COMPILE_FLAGS", cflags, " ")
+
+        if c_sources and (cflags or cflags_c):
+            flags = []
+            flags.extend(cflags)
+            flags.extend(cflags_c)
+            SetFilesProperty(output, c_sources_name, "COMPILE_FLAGS", flags, " ")
+
+        if cxx_sources and (cflags or cflags_cxx):
+            flags = []
+            flags.extend(cflags)
+            flags.extend(cflags_cxx)
+            SetFilesProperty(output, cxx_sources_name, "COMPILE_FLAGS", flags, " ")
+
+    # Linker flags
+    ldflags = config.get("ldflags")
+    if ldflags is not None:
+        SetTargetProperty(output, cmake_target_name, "LINK_FLAGS", ldflags, " ")
+
+    # XCode settings
+    xcode_settings = config.get("xcode_settings", {})
+    for xcode_setting, xcode_value in xcode_settings.items():
+        SetTargetProperty(
+            output,
+            cmake_target_name,
+            "XCODE_ATTRIBUTE_%s" % xcode_setting,
+            xcode_value,
+            "" if isinstance(xcode_value, str) else " ",
+        )
+
+    # Note on Dependencies and Libraries:
+    # CMake wants to handle link order, resolving the link line up front.
+    # Gyp does not retain or enforce specifying enough information to do so.
+    # So do as other gyp generators and use --start-group and --end-group.
+    # Give CMake as little information as possible so that it doesn't mess it up.
+
+    # Dependencies
+    rawDeps = spec.get("dependencies", [])
+
+    static_deps = []
+    shared_deps = []
+    other_deps = []
+    for rawDep in rawDeps:
+        dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
+        dep_spec = target_dicts.get(rawDep, {})
+        dep_target_type = dep_spec.get("type", None)
+
+        if dep_target_type == "static_library":
+            static_deps.append(dep_cmake_name)
+        elif dep_target_type == "shared_library":
+            shared_deps.append(dep_cmake_name)
+        else:
+            other_deps.append(dep_cmake_name)
+
+    # ensure all external dependencies are complete before internal dependencies
+    # extra_deps currently only depend on their own deps, so otherwise run early
+    if static_deps or shared_deps or other_deps:
+        for extra_dep in extra_deps:
+            output.write("add_dependencies(")
+            output.write(extra_dep)
+            output.write("\n")
+            for deps in (static_deps, shared_deps, other_deps):
+                for dep in gyp.common.uniquer(deps):
+                    output.write("  ")
+                    output.write(dep)
+                    output.write("\n")
+            output.write(")\n")
+
+    linkable = target_type in ("executable", "loadable_module", "shared_library")
+    other_deps.extend(extra_deps)
+    if other_deps or (not linkable and (static_deps or shared_deps)):
+        output.write("add_dependencies(")
+        output.write(cmake_target_name)
+        output.write("\n")
+        for dep in gyp.common.uniquer(other_deps):
+            output.write("  ")
+            output.write(dep)
+            output.write("\n")
+        if not linkable:
+            for deps in (static_deps, shared_deps):
+                for lib_dep in gyp.common.uniquer(deps):
+                    output.write("  ")
+                    output.write(lib_dep)
+                    output.write("\n")
+        output.write(")\n")
+
+    # Libraries
+    if linkable:
+        external_libs = [lib for lib in spec.get("libraries", []) if len(lib) > 0]
+        if external_libs or static_deps or shared_deps:
+            output.write("target_link_libraries(")
+            output.write(cmake_target_name)
+            output.write("\n")
+            if static_deps:
+                write_group = circular_libs and len(static_deps) > 1 and flavor != "mac"
+                if write_group:
+                    output.write("-Wl,--start-group\n")
+                for dep in gyp.common.uniquer(static_deps):
+                    output.write("  ")
+                    output.write(dep)
+                    output.write("\n")
+                if write_group:
+                    output.write("-Wl,--end-group\n")
+            if shared_deps:
+                for dep in gyp.common.uniquer(shared_deps):
+                    output.write("  ")
+                    output.write(dep)
+                    output.write("\n")
+            if external_libs:
+                for lib in gyp.common.uniquer(external_libs):
+                    output.write('  "')
+                    output.write(RemovePrefix(lib, "$(SDKROOT)"))
+                    output.write('"\n')
+
+            output.write(")\n")
+
+    UnsetVariable(output, "TOOLSET")
+    UnsetVariable(output, "TARGET")
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data, params, config_to_use):
+    options = params["options"]
+    generator_flags = params["generator_flags"]
+    flavor = gyp.common.GetFlavor(params)
+
+    # generator_dir: relative path from pwd to where make puts build files.
+    # Makes migrating from make to cmake easier, cmake doesn't put anything here.
+    # Each Gyp configuration creates a different CMakeLists.txt file
+    # to avoid incompatibilities between Gyp and CMake configurations.
+    generator_dir = os.path.relpath(options.generator_output or ".")
+
+    # output_dir: relative path from generator_dir to the build directory.
+    output_dir = generator_flags.get("output_dir", "out")
+
+    # build_dir: relative path from source root to our output files.
+    # e.g. "out/Debug"
+    build_dir = os.path.normpath(os.path.join(generator_dir, output_dir, config_to_use))
+
+    toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+
+    output_file = os.path.join(toplevel_build, "CMakeLists.txt")
+    gyp.common.EnsureDirExists(output_file)
+
+    output = open(output_file, "w")
+    output.write("cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n")
+    output.write("cmake_policy(VERSION 2.8.8)\n")
+
+    gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
+    output.write("project(")
+    output.write(project_target)
+    output.write(")\n")
+
+    SetVariable(output, "configuration", config_to_use)
+
+    ar = None
+    cc = None
+    cxx = None
+
+    make_global_settings = data[gyp_file].get("make_global_settings", [])
+    build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
+    for key, value in make_global_settings:
+        if key == "AR":
+            ar = os.path.join(build_to_top, value)
+        if key == "CC":
+            cc = os.path.join(build_to_top, value)
+        if key == "CXX":
+            cxx = os.path.join(build_to_top, value)
+
+    ar = gyp.common.GetEnvironFallback(["AR_target", "AR"], ar)
+    cc = gyp.common.GetEnvironFallback(["CC_target", "CC"], cc)
+    cxx = gyp.common.GetEnvironFallback(["CXX_target", "CXX"], cxx)
+
+    if ar:
+        SetVariable(output, "CMAKE_AR", ar)
+    if cc:
+        SetVariable(output, "CMAKE_C_COMPILER", cc)
+    if cxx:
+        SetVariable(output, "CMAKE_CXX_COMPILER", cxx)
+
+    # The following appears to be as-yet undocumented.
+    # http://public.kitware.com/Bug/view.php?id=8392
+    output.write("enable_language(ASM)\n")
+    # ASM-ATT does not support .S files.
+    # output.write('enable_language(ASM-ATT)\n')
+
+    if cc:
+        SetVariable(output, "CMAKE_ASM_COMPILER", cc)
+
+    SetVariable(output, "builddir", "${CMAKE_CURRENT_BINARY_DIR}")
+    SetVariable(output, "obj", "${builddir}/obj")
+    output.write("\n")
+
+    # TODO: Undocumented/unsupported (the CMake Java generator depends on it).
+    # CMake by default names the object resulting from foo.c to be foo.c.o.
+    # Gyp traditionally names the object resulting from foo.c foo.o.
+    # This should be irrelevant, but some targets extract .o files from .a
+    # and depend on the name of the extracted .o files.
+    output.write("set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n")
+    output.write("set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n")
+    output.write("\n")
+
+    # Force ninja to use rsp files. Otherwise link and ar lines can get too long,
+    # resulting in 'Argument list too long' errors.
+    # However, rsp files don't work correctly on Mac.
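+    # (CMAKE_NINJA_FORCE_RESPONSE_FILE makes ninja pass long link/archive
+    # argument lists via @rsp files instead of the command line.)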
+ if flavor != "mac": + output.write("set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n") + output.write("\n") + + namer = CMakeNamer(target_list) + + # The list of targets upon which the 'all' target should depend. + # CMake has it's own implicit 'all' target, one is not created explicitly. + all_qualified_targets = set() + for build_file in params["build_files"]: + for qualified_target in gyp.common.AllTargets( + target_list, target_dicts, os.path.normpath(build_file) + ): + all_qualified_targets.add(qualified_target) + + for qualified_target in target_list: + if flavor == "mac": + gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) + spec = target_dicts[qualified_target] + gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[gyp_file], spec) + + WriteTarget( + namer, + qualified_target, + target_dicts, + build_dir, + config_to_use, + options, + generator_flags, + all_qualified_targets, + flavor, + output, + ) + + output.close() + + +def PerformBuild(data, configurations, params): + options = params["options"] + generator_flags = params["generator_flags"] + + # generator_dir: relative path from pwd to where make puts build files. + # Makes migrating from make to cmake easier, cmake doesn't put anything here. + generator_dir = os.path.relpath(options.generator_output or ".") + + # output_dir: relative path from generator_dir to the build directory. + output_dir = generator_flags.get("output_dir", "out") + + for config_name in configurations: + # build_dir: relative path from source root to our output files. + # e.g. "out/Debug" + build_dir = os.path.normpath( + os.path.join(generator_dir, output_dir, config_name) + ) + arguments = ["cmake", "-G", "Ninja"] + print(f"Generating [{config_name}]: {arguments}") + subprocess.check_call(arguments, cwd=build_dir) + + arguments = ["ninja", "-C", build_dir] + print(f"Building [{config_name}]: {arguments}") + subprocess.check_call(arguments) + + +def CallGenerateOutputForConfig(arglist): + # Ignore the interrupt signal so that the parent process catches it and + # kills all multiprocessing children. + signal.signal(signal.SIGINT, signal.SIG_IGN) + + target_list, target_dicts, data, params, config_name = arglist + GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) + + +def GenerateOutput(target_list, target_dicts, data, params): + user_config = params.get("generator_flags", {}).get("config", None) + if user_config: + GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) + else: + config_names = target_dicts[target_list[0]]["configurations"] + if params["parallel"]: + try: + pool = multiprocessing.Pool(len(config_names)) + arglists = [] + for config_name in config_names: + arglists.append( + (target_list, target_dicts, data, params, config_name) + ) + pool.map(CallGenerateOutputForConfig, arglists) + except KeyboardInterrupt as e: + pool.terminate() + raise e + else: + for config_name in config_names: + GenerateOutputForConfig( + target_list, target_dicts, data, params, config_name + ) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py new file mode 100644 index 0000000..f330a04 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/compile_commands_json.py @@ -0,0 +1,120 @@ +# Copyright (c) 2016 Ben Noordhuis . All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +import gyp.common +import gyp.xcode_emulation +import json +import os + +generator_additional_non_configuration_keys = [] +generator_additional_path_sections = [] +generator_extra_sources_for_rules = [] +generator_filelist_paths = None +generator_supports_multiple_toolsets = True +generator_wants_sorted_dependencies = False + +# Lifted from make.py. The actual values don't matter much. +generator_default_variables = { + "CONFIGURATION_NAME": "$(BUILDTYPE)", + "EXECUTABLE_PREFIX": "", + "EXECUTABLE_SUFFIX": "", + "INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni", + "PRODUCT_DIR": "$(builddir)", + "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s", + "RULE_INPUT_EXT": "$(suffix $<)", + "RULE_INPUT_NAME": "$(notdir $<)", + "RULE_INPUT_PATH": "$(abspath $<)", + "RULE_INPUT_ROOT": "%(INPUT_ROOT)s", + "SHARED_INTERMEDIATE_DIR": "$(obj)/gen", + "SHARED_LIB_PREFIX": "lib", + "STATIC_LIB_PREFIX": "lib", + "STATIC_LIB_SUFFIX": ".a", +} + + +def IsMac(params): + return "mac" == gyp.common.GetFlavor(params) + + +def CalculateVariables(default_variables, params): + default_variables.setdefault("OS", gyp.common.GetFlavor(params)) + + +def AddCommandsForTarget(cwd, target, params, per_config_commands): + output_dir = params["generator_flags"].get("output_dir", "out") + for configuration_name, configuration in target["configurations"].items(): + if IsMac(params): + xcode_settings = gyp.xcode_emulation.XcodeSettings(target) + cflags = xcode_settings.GetCflags(configuration_name) + cflags_c = xcode_settings.GetCflagsC(configuration_name) + cflags_cc = xcode_settings.GetCflagsCC(configuration_name) + else: + cflags = configuration.get("cflags", []) + cflags_c = configuration.get("cflags_c", []) + cflags_cc = configuration.get("cflags_cc", []) + + cflags_c = cflags + cflags_c + cflags_cc = cflags + cflags_cc + + defines = configuration.get("defines", []) + defines = ["-D" + s for s in defines] + + # TODO(bnoordhuis) Handle generated source files. + extensions = (".c", ".cc", ".cpp", ".cxx") + sources = [s for s in target.get("sources", []) if s.endswith(extensions)] + + def resolve(filename): + return os.path.abspath(os.path.join(cwd, filename)) + + # TODO(bnoordhuis) Handle generated header files. 
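+        # "$(obj)"-prefixed include dirs point into the build tree (generated
+        # headers, per the TODO above), so the filter below leaves them out.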
+ include_dirs = configuration.get("include_dirs", []) + include_dirs = [s for s in include_dirs if not s.startswith("$(obj)")] + includes = ["-I" + resolve(s) for s in include_dirs] + + defines = gyp.common.EncodePOSIXShellList(defines) + includes = gyp.common.EncodePOSIXShellList(includes) + cflags_c = gyp.common.EncodePOSIXShellList(cflags_c) + cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc) + + commands = per_config_commands.setdefault(configuration_name, []) + for source in sources: + file = resolve(source) + isc = source.endswith(".c") + cc = "cc" if isc else "c++" + cflags = cflags_c if isc else cflags_cc + command = " ".join( + ( + cc, + defines, + includes, + cflags, + "-c", + gyp.common.EncodePOSIXShellArgument(file), + ) + ) + commands.append(dict(command=command, directory=output_dir, file=file)) + + +def GenerateOutput(target_list, target_dicts, data, params): + per_config_commands = {} + for qualified_target, target in target_dicts.items(): + build_file, target_name, toolset = gyp.common.ParseQualifiedTarget( + qualified_target + ) + if IsMac(params): + settings = data[build_file] + gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target) + cwd = os.path.dirname(build_file) + AddCommandsForTarget(cwd, target, params, per_config_commands) + + output_dir = params["generator_flags"].get("output_dir", "out") + for configuration_name, commands in per_config_commands.items(): + filename = os.path.join(output_dir, configuration_name, "compile_commands.json") + gyp.common.EnsureDirExists(filename) + fp = open(filename, "w") + json.dump(commands, fp=fp, indent=0, check_circular=False) + + +def PerformBuild(data, configurations, params): + pass diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py new file mode 100644 index 0000000..99d5c1f --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py @@ -0,0 +1,103 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import os +import gyp +import gyp.common +import gyp.msvs_emulation +import json + +generator_supports_multiple_toolsets = True + +generator_wants_static_library_dependencies_adjusted = False + +generator_filelist_paths = {} + +generator_default_variables = {} +for dirname in [ + "INTERMEDIATE_DIR", + "SHARED_INTERMEDIATE_DIR", + "PRODUCT_DIR", + "LIB_DIR", + "SHARED_LIB_DIR", +]: + # Some gyp steps fail if these are empty(!). 
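+    # (Any non-empty placeholder works; the literal "dir" below is arbitrary.)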
+ generator_default_variables[dirname] = "dir" +for unused in [ + "RULE_INPUT_PATH", + "RULE_INPUT_ROOT", + "RULE_INPUT_NAME", + "RULE_INPUT_DIRNAME", + "RULE_INPUT_EXT", + "EXECUTABLE_PREFIX", + "EXECUTABLE_SUFFIX", + "STATIC_LIB_PREFIX", + "STATIC_LIB_SUFFIX", + "SHARED_LIB_PREFIX", + "SHARED_LIB_SUFFIX", + "CONFIGURATION_NAME", +]: + generator_default_variables[unused] = "" + + +def CalculateVariables(default_variables, params): + generator_flags = params.get("generator_flags", {}) + for key, val in generator_flags.items(): + default_variables.setdefault(key, val) + default_variables.setdefault("OS", gyp.common.GetFlavor(params)) + + flavor = gyp.common.GetFlavor(params) + if flavor == "win": + gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) + + +def CalculateGeneratorInputInfo(params): + """Calculate the generator specific info that gets fed to input (called by + gyp).""" + generator_flags = params.get("generator_flags", {}) + if generator_flags.get("adjust_static_libraries", False): + global generator_wants_static_library_dependencies_adjusted + generator_wants_static_library_dependencies_adjusted = True + + toplevel = params["options"].toplevel_dir + generator_dir = os.path.relpath(params["options"].generator_output or ".") + # output_dir: relative path from generator_dir to the build directory. + output_dir = generator_flags.get("output_dir", "out") + qualified_out_dir = os.path.normpath( + os.path.join(toplevel, generator_dir, output_dir, "gypfiles") + ) + global generator_filelist_paths + generator_filelist_paths = { + "toplevel": toplevel, + "qualified_out_dir": qualified_out_dir, + } + + +def GenerateOutput(target_list, target_dicts, data, params): + # Map of target -> list of targets it depends on. + edges = {} + + # Queue of targets to visit. + targets_to_visit = target_list[:] + + while len(targets_to_visit) > 0: + target = targets_to_visit.pop() + if target in edges: + continue + edges[target] = [] + + for dep in target_dicts[target].get("dependencies", []): + edges[target].append(dep) + targets_to_visit.append(dep) + + try: + filepath = params["generator_flags"]["output_dir"] + except KeyError: + filepath = "." + filename = os.path.join(filepath, "dump.json") + f = open(filename, "w") + json.dump(edges, f) + f.close() + print("Wrote json to %s." % filename) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py new file mode 100644 index 0000000..1ff0dc8 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py @@ -0,0 +1,464 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""GYP backend that generates Eclipse CDT settings files. + +This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML +files that can be imported into an Eclipse CDT project. The XML file contains a +list of include paths and symbols (i.e. defines). + +Because a full .cproject definition is not created by this generator, it's not +possible to properly define the include dirs and symbols for each file +individually. Instead, one set of includes/symbols is generated for the entire +project. This works fairly well (and is a vast improvement in general), but may +still result in a few indexer issues here and there. + +This generator has no automated tests, so expect it to be broken. 
+""" + +from xml.sax.saxutils import escape +import os.path +import subprocess +import gyp +import gyp.common +import gyp.msvs_emulation +import shlex +import xml.etree.cElementTree as ET + +generator_wants_static_library_dependencies_adjusted = False + +generator_default_variables = {} + +for dirname in ["INTERMEDIATE_DIR", "PRODUCT_DIR", "LIB_DIR", "SHARED_LIB_DIR"]: + # Some gyp steps fail if these are empty(!), so we convert them to variables + generator_default_variables[dirname] = "$" + dirname + +for unused in [ + "RULE_INPUT_PATH", + "RULE_INPUT_ROOT", + "RULE_INPUT_NAME", + "RULE_INPUT_DIRNAME", + "RULE_INPUT_EXT", + "EXECUTABLE_PREFIX", + "EXECUTABLE_SUFFIX", + "STATIC_LIB_PREFIX", + "STATIC_LIB_SUFFIX", + "SHARED_LIB_PREFIX", + "SHARED_LIB_SUFFIX", + "CONFIGURATION_NAME", +]: + generator_default_variables[unused] = "" + +# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as +# part of the path when dealing with generated headers. This value will be +# replaced dynamically for each configuration. +generator_default_variables["SHARED_INTERMEDIATE_DIR"] = "$SHARED_INTERMEDIATE_DIR" + + +def CalculateVariables(default_variables, params): + generator_flags = params.get("generator_flags", {}) + for key, val in generator_flags.items(): + default_variables.setdefault(key, val) + flavor = gyp.common.GetFlavor(params) + default_variables.setdefault("OS", flavor) + if flavor == "win": + gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) + + +def CalculateGeneratorInputInfo(params): + """Calculate the generator specific info that gets fed to input (called by + gyp).""" + generator_flags = params.get("generator_flags", {}) + if generator_flags.get("adjust_static_libraries", False): + global generator_wants_static_library_dependencies_adjusted + generator_wants_static_library_dependencies_adjusted = True + + +def GetAllIncludeDirectories( + target_list, + target_dicts, + shared_intermediate_dirs, + config_name, + params, + compiler_path, +): + """Calculate the set of include directories to be used. + + Returns: + A list including all the include_dir's specified for every target followed + by any include directories that were added as cflag compiler options. + """ + + gyp_includes_set = set() + compiler_includes_list = [] + + # Find compiler's default include dirs. + if compiler_path: + command = shlex.split(compiler_path) + command.extend(["-E", "-xc++", "-v", "-"]) + proc = subprocess.Popen( + args=command, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + output = proc.communicate()[1].decode("utf-8") + # Extract the list of include dirs from the output, which has this format: + # ... + # #include "..." search starts here: + # #include <...> search starts here: + # /usr/include/c++/4.6 + # /usr/local/include + # End of search list. + # ... + in_include_list = False + for line in output.splitlines(): + if line.startswith("#include"): + in_include_list = True + continue + if line.startswith("End of search list."): + break + if in_include_list: + include_dir = line.strip() + if include_dir not in compiler_includes_list: + compiler_includes_list.append(include_dir) + + flavor = gyp.common.GetFlavor(params) + if flavor == "win": + generator_flags = params.get("generator_flags", {}) + for target_name in target_list: + target = target_dicts[target_name] + if config_name in target["configurations"]: + config = target["configurations"][config_name] + + # Look for any include dirs that were explicitly added via cflags. 
+            # This may be done in gyp files to force certain includes to come at the end.
+            # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
+            # remove this.
+            if flavor == "win":
+                msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
+                cflags = msvs_settings.GetCflags(config_name)
+            else:
+                cflags = config["cflags"]
+            for cflag in cflags:
+                if cflag.startswith("-I"):
+                    include_dir = cflag[2:]
+                    if include_dir not in compiler_includes_list:
+                        compiler_includes_list.append(include_dir)
+
+            # Find standard gyp include dirs.
+            if "include_dirs" in config:
+                include_dirs = config["include_dirs"]
+                for shared_intermediate_dir in shared_intermediate_dirs:
+                    for include_dir in include_dirs:
+                        include_dir = include_dir.replace(
+                            "$SHARED_INTERMEDIATE_DIR", shared_intermediate_dir
+                        )
+                        if not os.path.isabs(include_dir):
+                            base_dir = os.path.dirname(target_name)
+
+                            include_dir = base_dir + "/" + include_dir
+                            include_dir = os.path.abspath(include_dir)
+
+                        gyp_includes_set.add(include_dir)
+
+    # Generate a list that has all the include dirs.
+    all_includes_list = list(gyp_includes_set)
+    all_includes_list.sort()
+    for compiler_include in compiler_includes_list:
+        if compiler_include not in gyp_includes_set:
+            all_includes_list.append(compiler_include)
+
+    # All done.
+    return all_includes_list
+
+
+def GetCompilerPath(target_list, data, options):
+    """Determine a command that can be used to invoke the compiler.
+
+    Returns:
+      If this is a gyp project that has explicit make settings, try to determine
+      the compiler from that. Otherwise, see if a compiler was specified via the
+      CC_target environment variable.
+    """
+    # First, see if the compiler is configured in make's settings.
+    build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
+    make_global_settings_dict = data[build_file].get("make_global_settings", {})
+    for key, value in make_global_settings_dict:
+        if key in ["CC", "CXX"]:
+            return os.path.join(options.toplevel_dir, value)
+
+    # Check to see if the compiler was specified as an environment variable.
+    for key in ["CC_target", "CC", "CXX"]:
+        compiler = os.environ.get(key)
+        if compiler:
+            return compiler
+
+    return "gcc"
+
+
+def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path):
+    """Calculate the defines for a project.
+
+    Returns:
+      A dict that includes explicit defines declared in gyp files along with all
+      of the default defines that the compiler uses.
+    """
+
+    # Get defines declared in the gyp files.
+    all_defines = {}
+    flavor = gyp.common.GetFlavor(params)
+    if flavor == "win":
+        generator_flags = params.get("generator_flags", {})
+    for target_name in target_list:
+        target = target_dicts[target_name]
+
+        if flavor == "win":
+            msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
+            extra_defines = msvs_settings.GetComputedDefines(config_name)
+        else:
+            extra_defines = []
+        if config_name in target["configurations"]:
+            config = target["configurations"][config_name]
+            target_defines = config["defines"]
+        else:
+            target_defines = []
+        for define in target_defines + extra_defines:
+            split_define = define.split("=", 1)
+            if len(split_define) == 1:
+                split_define.append("1")
+            if split_define[0].strip() in all_defines:
+                # Already defined
+                continue
+            all_defines[split_define[0].strip()] = split_define[1].strip()
+    # Get default compiler defines (if possible).
+    if flavor == "win":
+        return all_defines  # Default defines already processed in the loop above.
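+    # Asking the compiler for "-E -dM" on empty input makes it dump every
+    # predefined macro as "#define NAME VALUE" lines (e.g. "#define __GNUC__ 10",
+    # version number illustrative), which the loop below parses into all_defines.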
+    if compiler_path:
+        command = shlex.split(compiler_path)
+        command.extend(["-E", "-dM", "-"])
+        cpp_proc = subprocess.Popen(
+            args=command, cwd=".", stdin=subprocess.PIPE, stdout=subprocess.PIPE
+        )
+        cpp_output = cpp_proc.communicate()[0].decode("utf-8")
+        cpp_lines = cpp_output.split("\n")
+        for cpp_line in cpp_lines:
+            if not cpp_line.strip():
+                continue
+            cpp_line_parts = cpp_line.split(" ", 2)
+            key = cpp_line_parts[1]
+            if len(cpp_line_parts) >= 3:
+                val = cpp_line_parts[2]
+            else:
+                val = "1"
+            all_defines[key] = val
+
+    return all_defines
+
+
+def WriteIncludePaths(out, eclipse_langs, include_dirs):
+    """Write the includes section of a CDT settings export file."""
+
+    out.write(
+        '  <section name="org.eclipse.cdt.internal.ui.wizards.'
+        'settingswizards.IncludePaths">\n'
+    )
+    out.write('    <language name="holder for library settings"></language>\n')
+    for lang in eclipse_langs:
+        out.write('    <language name="%s">\n' % lang)
+        for include_dir in include_dirs:
+            out.write(
+                '      <includepath workspace_path="false">%s</includepath>\n'
+                % include_dir
+            )
+        out.write("    </language>\n")
+    out.write("  </section>\n")
+
+
+def WriteMacros(out, eclipse_langs, defines):
+    """Write the macros section of a CDT settings export file."""
+
+    out.write(
+        '  <section name="org.eclipse.cdt.internal.ui.wizards.'
+        'settingswizards.Macros">\n'
+    )
+    out.write('    <language name="holder for library settings"></language>\n')
+    for lang in eclipse_langs:
+        out.write('    <language name="%s">\n' % lang)
+        for key in sorted(defines):
+            out.write(
+                "      <macro><name>%s</name><value>%s</value></macro>\n"
+                % (escape(key), escape(defines[key]))
+            )
+        out.write("    </language>\n")
+    out.write("  </section>\n")
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name):
+    options = params["options"]
+    generator_flags = params.get("generator_flags", {})
+
+    # build_dir: relative path from source root to our output files.
+    # e.g. "out/Debug"
+    build_dir = os.path.join(generator_flags.get("output_dir", "out"), config_name)
+
+    toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+    # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
+    # SHARED_INTERMEDIATE_DIR. Include both possible locations.
+    shared_intermediate_dirs = [
+        os.path.join(toplevel_build, "obj", "gen"),
+        os.path.join(toplevel_build, "gen"),
+    ]
+
+    GenerateCdtSettingsFile(
+        target_list,
+        target_dicts,
+        data,
+        params,
+        config_name,
+        os.path.join(toplevel_build, "eclipse-cdt-settings.xml"),
+        options,
+        shared_intermediate_dirs,
+    )
+    GenerateClasspathFile(
+        target_list,
+        target_dicts,
+        options.toplevel_dir,
+        toplevel_build,
+        os.path.join(toplevel_build, "eclipse-classpath.xml"),
+    )
+
+
+def GenerateCdtSettingsFile(
+    target_list,
+    target_dicts,
+    data,
+    params,
+    config_name,
+    out_name,
+    options,
+    shared_intermediate_dirs,
+):
+    gyp.common.EnsureDirExists(out_name)
+    with open(out_name, "w") as out:
+        out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
+        out.write("<cdtprojectproperties>\n")
+
+        eclipse_langs = [
+            "C++ Source File",
+            "C Source File",
+            "Assembly Source File",
+            "GNU C++",
+            "GNU C",
+            "Assembly",
+        ]
+        compiler_path = GetCompilerPath(target_list, data, options)
+        include_dirs = GetAllIncludeDirectories(
+            target_list,
+            target_dicts,
+            shared_intermediate_dirs,
+            config_name,
+            params,
+            compiler_path,
+        )
+        WriteIncludePaths(out, eclipse_langs, include_dirs)
+        defines = GetAllDefines(
+            target_list, target_dicts, data, config_name, params, compiler_path
+        )
+        WriteMacros(out, eclipse_langs, defines)
+
+        out.write("</cdtprojectproperties>\n")
+
+
+def GenerateClasspathFile(
+    target_list, target_dicts, toplevel_dir, toplevel_build, out_name
+):
+    """Generates a classpath file suitable for symbol navigation and code
+    completion of Java code (such as in Android projects) by finding all
+    .java and .jar files used as action inputs."""
+    gyp.common.EnsureDirExists(out_name)
+    result = ET.Element("classpath")
+
+    def AddElements(kind, paths):
+        # First, we need to normalize the paths so they are all relative to the
+        # toplevel dir.
+        rel_paths = set()
+        for path in paths:
+            if os.path.isabs(path):
+                rel_paths.add(os.path.relpath(path, toplevel_dir))
+            else:
+                rel_paths.add(path)
+
+        for path in sorted(rel_paths):
+            entry_element = ET.SubElement(result, "classpathentry")
+            entry_element.set("kind", kind)
+            entry_element.set("path", path)
+
+    AddElements("lib", GetJavaJars(target_list, target_dicts, toplevel_dir))
+    AddElements("src", GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
+    # Include the standard JRE container and a dummy out folder
+    AddElements("con", ["org.eclipse.jdt.launching.JRE_CONTAINER"])
+    # Include a dummy out folder so that Eclipse doesn't use the default /bin
+    # folder in the root of the project.
+ AddElements("output", [os.path.join(toplevel_build, ".eclipse-java-build")]) + + ET.ElementTree(result).write(out_name) + + +def GetJavaJars(target_list, target_dicts, toplevel_dir): + """Generates a sequence of all .jars used as inputs.""" + for target_name in target_list: + target = target_dicts[target_name] + for action in target.get("actions", []): + for input_ in action["inputs"]: + if os.path.splitext(input_)[1] == ".jar" and not input_.startswith("$"): + if os.path.isabs(input_): + yield input_ + else: + yield os.path.join(os.path.dirname(target_name), input_) + + +def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir): + """Generates a sequence of all likely java package root directories.""" + for target_name in target_list: + target = target_dicts[target_name] + for action in target.get("actions", []): + for input_ in action["inputs"]: + if os.path.splitext(input_)[1] == ".java" and not input_.startswith( + "$" + ): + dir_ = os.path.dirname( + os.path.join(os.path.dirname(target_name), input_) + ) + # If there is a parent 'src' or 'java' folder, navigate up to it - + # these are canonical package root names in Chromium. This will + # break if 'src' or 'java' exists in the package structure. This + # could be further improved by inspecting the java file for the + # package name if this proves to be too fragile in practice. + parent_search = dir_ + while os.path.basename(parent_search) not in ["src", "java"]: + parent_search, _ = os.path.split(parent_search) + if not parent_search or parent_search == toplevel_dir: + # Didn't find a known root, just return the original path + yield dir_ + break + else: + yield parent_search + + +def GenerateOutput(target_list, target_dicts, data, params): + """Generate an XML settings file that can be imported into a CDT project.""" + + if params["options"].generator_output: + raise NotImplementedError("--generator_output not implemented for eclipse") + + user_config = params.get("generator_flags", {}).get("config", None) + if user_config: + GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) + else: + config_names = target_dicts[target_list[0]]["configurations"] + for config_name in config_names: + GenerateOutputForConfig( + target_list, target_dicts, data, params, config_name + ) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py new file mode 100644 index 0000000..4171704 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py @@ -0,0 +1,89 @@ +# Copyright (c) 2011 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""gypd output module + +This module produces gyp input as its output. Output files are given the +.gypd extension to avoid overwriting the .gyp files that they are generated +from. Internal references to .gyp files (such as those found in +"dependencies" sections) are not adjusted to point to .gypd files instead; +unlike other paths, which are relative to the .gyp or .gypd file, such paths +are relative to the directory from which gyp was run to create the .gypd file. + +This generator module is intended to be a sample and a debugging aid, hence +the "d" for "debug" in .gypd. It is useful to inspect the results of the +various merges, expansions, and conditional evaluations performed by gyp +and to see a representation of what would be fed to a generator module. 
+
+It's not advisable to rename .gypd files produced by this module to .gyp,
+because they will have all merges, expansions, and evaluations already
+performed and the relevant constructs not present in the output; paths to
+dependencies may be wrong; and various sections that do not belong in .gyp
+files such as "included_files" and "*_excluded" will be present.
+Output will also be stripped of comments. This is not intended to be a
+general-purpose gyp pretty-printer; for that, you probably just want to
+run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip
+comments but won't do all of the other things done to this module's output.
+
+The specific formatting of the output generated by this module is subject
+to change.
+"""
+
+
+import gyp.common
+import pprint
+
+
+# These variables should just be spit back out as variable references.
+_generator_identity_variables = [
+    "CONFIGURATION_NAME",
+    "EXECUTABLE_PREFIX",
+    "EXECUTABLE_SUFFIX",
+    "INTERMEDIATE_DIR",
+    "LIB_DIR",
+    "PRODUCT_DIR",
+    "RULE_INPUT_ROOT",
+    "RULE_INPUT_DIRNAME",
+    "RULE_INPUT_EXT",
+    "RULE_INPUT_NAME",
+    "RULE_INPUT_PATH",
+    "SHARED_INTERMEDIATE_DIR",
+    "SHARED_LIB_DIR",
+    "SHARED_LIB_PREFIX",
+    "SHARED_LIB_SUFFIX",
+    "STATIC_LIB_PREFIX",
+    "STATIC_LIB_SUFFIX",
+]
+
+# gypd doesn't define a default value for OS like many other generator
+# modules. Specify "-D OS=whatever" on the command line to provide a value.
+generator_default_variables = {}
+
+# gypd supports multiple toolsets
+generator_supports_multiple_toolsets = True
+
+# TODO(mark): This always uses <, which isn't right. The input module should
+# notify the generator to tell it which phase it is operating in, and this
+# module should use < for the early phase and then switch to > for the late
+# phase. Bonus points for carrying @ back into the output too.
+for v in _generator_identity_variables:
+    generator_default_variables[v] = "<(%s)" % v
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+    output_files = {}
+    for qualified_target in target_list:
+        [input_file, target] = gyp.common.ParseQualifiedTarget(qualified_target)[0:2]
+
+        if input_file[-4:] != ".gyp":
+            continue
+        input_file_stem = input_file[:-4]
+        output_file = input_file_stem + params["options"].suffix + ".gypd"
+
+        output_files[output_file] = output_files.get(output_file, input_file)
+
+    for output_file, input_file in output_files.items():
+        output = open(output_file, "w")
+        pprint.pprint(data[input_file], output)
+        output.close()
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
new file mode 100644
index 0000000..82a07dd
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""gypsh output module
+
+gypsh is a GYP shell. It's not really a generator per se. All it does is
+fire up an interactive Python session with a few local variables set to the
+variables passed to the generator. Like gypd, it's intended as a debugging
+aid, to facilitate the exploration of .gyp structures after being processed
+by the input module.
+
+The expected usage is "gyp -f gypsh -D OS=desired_os".
+"""
+
+
+import code
+import sys
+
+
+# All of this stuff about generator variables was lovingly ripped from gypd.py.
+# That module has a much better description of what's going on and why.
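+# In short, each name below is mapped to "<(NAME)", i.e. a gyp variable
+# reference that survives processing verbatim instead of expanding to a path.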
+_generator_identity_variables = [ + "EXECUTABLE_PREFIX", + "EXECUTABLE_SUFFIX", + "INTERMEDIATE_DIR", + "PRODUCT_DIR", + "RULE_INPUT_ROOT", + "RULE_INPUT_DIRNAME", + "RULE_INPUT_EXT", + "RULE_INPUT_NAME", + "RULE_INPUT_PATH", + "SHARED_INTERMEDIATE_DIR", +] + +generator_default_variables = {} + +for v in _generator_identity_variables: + generator_default_variables[v] = "<(%s)" % v + + +def GenerateOutput(target_list, target_dicts, data, params): + locals = { + "target_list": target_list, + "target_dicts": target_dicts, + "data": data, + } + + # Use a banner that looks like the stock Python one and like what + # code.interact uses by default, but tack on something to indicate what + # locals are available, and identify gypsh. + banner = "Python {} on {}\nlocals.keys() = {}\ngypsh".format( + sys.version, + sys.platform, + repr(sorted(locals.keys())), + ) + + code.interact(banner, local=locals) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py new file mode 100644 index 0000000..c595f20 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py @@ -0,0 +1,2518 @@ +# Copyright (c) 2013 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# Notes: +# +# This is all roughly based on the Makefile system used by the Linux +# kernel, but is a non-recursive make -- we put the entire dependency +# graph in front of make and let it figure it out. +# +# The code below generates a separate .mk file for each target, but +# all are sourced by the top-level Makefile. This means that all +# variables in .mk-files clobber one another. Be careful to use := +# where appropriate for immediate evaluation, and similarly to watch +# that you're not relying on a variable value to last between different +# .mk files. +# +# TODOs: +# +# Global settings and utility functions are currently stuffed in the +# toplevel Makefile. It may make sense to generate some .mk files on +# the side to keep the files readable. + + +import os +import re +import subprocess +import gyp +import gyp.common +import gyp.xcode_emulation +from gyp.common import GetEnvironFallback + +import hashlib + +generator_default_variables = { + "EXECUTABLE_PREFIX": "", + "EXECUTABLE_SUFFIX": "", + "STATIC_LIB_PREFIX": "lib", + "SHARED_LIB_PREFIX": "lib", + "STATIC_LIB_SUFFIX": ".a", + "INTERMEDIATE_DIR": "$(obj).$(TOOLSET)/$(TARGET)/geni", + "SHARED_INTERMEDIATE_DIR": "$(obj)/gen", + "PRODUCT_DIR": "$(builddir)", + "RULE_INPUT_ROOT": "%(INPUT_ROOT)s", # This gets expanded by Python. + "RULE_INPUT_DIRNAME": "%(INPUT_DIRNAME)s", # This gets expanded by Python. + "RULE_INPUT_PATH": "$(abspath $<)", + "RULE_INPUT_EXT": "$(suffix $<)", + "RULE_INPUT_NAME": "$(notdir $<)", + "CONFIGURATION_NAME": "$(BUILDTYPE)", +} + +# Make supports multiple toolsets +generator_supports_multiple_toolsets = True + +# Request sorted dependencies in the order from dependents to dependencies. +generator_wants_sorted_dependencies = False + +# Placates pylint. 
+generator_additional_non_configuration_keys = [] +generator_additional_path_sections = [] +generator_extra_sources_for_rules = [] +generator_filelist_paths = None + + +def CalculateVariables(default_variables, params): + """Calculate additional variables for use in the build (called by gyp).""" + flavor = gyp.common.GetFlavor(params) + if flavor == "mac": + default_variables.setdefault("OS", "mac") + default_variables.setdefault("SHARED_LIB_SUFFIX", ".dylib") + default_variables.setdefault( + "SHARED_LIB_DIR", generator_default_variables["PRODUCT_DIR"] + ) + default_variables.setdefault( + "LIB_DIR", generator_default_variables["PRODUCT_DIR"] + ) + + # Copy additional generator configuration data from Xcode, which is shared + # by the Mac Make generator. + import gyp.generator.xcode as xcode_generator + + global generator_additional_non_configuration_keys + generator_additional_non_configuration_keys = getattr( + xcode_generator, "generator_additional_non_configuration_keys", [] + ) + global generator_additional_path_sections + generator_additional_path_sections = getattr( + xcode_generator, "generator_additional_path_sections", [] + ) + global generator_extra_sources_for_rules + generator_extra_sources_for_rules = getattr( + xcode_generator, "generator_extra_sources_for_rules", [] + ) + COMPILABLE_EXTENSIONS.update({".m": "objc", ".mm": "objcxx"}) + else: + operating_system = flavor + if flavor == "android": + operating_system = "linux" # Keep this legacy behavior for now. + default_variables.setdefault("OS", operating_system) + if flavor == "aix": + default_variables.setdefault("SHARED_LIB_SUFFIX", ".a") + else: + default_variables.setdefault("SHARED_LIB_SUFFIX", ".so") + default_variables.setdefault("SHARED_LIB_DIR", "$(builddir)/lib.$(TOOLSET)") + default_variables.setdefault("LIB_DIR", "$(obj).$(TOOLSET)") + + +def CalculateGeneratorInputInfo(params): + """Calculate the generator specific info that gets fed to input (called by + gyp).""" + generator_flags = params.get("generator_flags", {}) + android_ndk_version = generator_flags.get("android_ndk_version", None) + # Android NDK requires a strict link order. + if android_ndk_version: + global generator_wants_sorted_dependencies + generator_wants_sorted_dependencies = True + + output_dir = params["options"].generator_output or params["options"].toplevel_dir + builddir_name = generator_flags.get("output_dir", "out") + qualified_out_dir = os.path.normpath( + os.path.join(output_dir, builddir_name, "gypfiles") + ) + + global generator_filelist_paths + generator_filelist_paths = { + "toplevel": params["options"].toplevel_dir, + "qualified_out_dir": qualified_out_dir, + } + + +# The .d checking code below uses these functions: +# wildcard, sort, foreach, shell, wordlist +# wildcard can handle spaces, the rest can't. +# Since I could find no way to make foreach work with spaces in filenames +# correctly, the .d files have spaces replaced with another character. The .d +# file for +# Chromium\ Framework.framework/foo +# is for example +# out/Release/.deps/out/Release/Chromium?Framework.framework/foo +# This is the replacement character. +SPACE_REPLACEMENT = "?" 
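+
+# Illustrative sketch (not part of gyp): the encoding is a plain character
+# swap, applied when depfile paths are written and reversed when read back:
+#
+#     path = "out/Release/Chromium Framework.framework/foo"
+#     encoded = path.replace(" ", SPACE_REPLACEMENT)
+#     assert encoded == "out/Release/Chromium?Framework.framework/foo"
+#     assert encoded.replace(SPACE_REPLACEMENT, " ") == path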
+ + +LINK_COMMANDS_LINUX = """\ +quiet_cmd_alink = AR($(TOOLSET)) $@ +cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) + +quiet_cmd_alink_thin = AR($(TOOLSET)) $@ +cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) + +# Due to circular dependencies between libraries :(, we wrap the +# special "figure out circular dependencies" flags around the entire +# input list during linking. +quiet_cmd_link = LINK($(TOOLSET)) $@ +cmd_link = $(LINK.$(TOOLSET)) -o $@ $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,--start-group $(LD_INPUTS) $(LIBS) -Wl,--end-group + +# We support two kinds of shared objects (.so): +# 1) shared_library, which is just bundling together many dependent libraries +# into a link line. +# 2) loadable_module, which is generating a module intended for dlopen(). +# +# They differ only slightly: +# In the former case, we want to package all dependent code into the .so. +# In the latter case, we want to package just the API exposed by the +# outermost module. +# This means shared_library uses --whole-archive, while loadable_module doesn't. +# (Note that --whole-archive is incompatible with the --start-group used in +# normal linking.) + +# Other shared-object link notes: +# - Set SONAME to the library filename so our binaries don't reference +# the local, absolute paths used on the link command-line. +quiet_cmd_solink = SOLINK($(TOOLSET)) $@ +cmd_solink = $(LINK.$(TOOLSET)) -o $@ -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) + +quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module = $(LINK.$(TOOLSET)) -o $@ -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) +""" # noqa: E501 + +LINK_COMMANDS_MAC = """\ +quiet_cmd_alink = LIBTOOL-STATIC $@ +cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) + +quiet_cmd_link = LINK($(TOOLSET)) $@ +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink = SOLINK($(TOOLSET)) $@ +cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) +""" # noqa: E501 + +LINK_COMMANDS_ANDROID = """\ +quiet_cmd_alink = AR($(TOOLSET)) $@ +cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) + +quiet_cmd_alink_thin = AR($(TOOLSET)) $@ +cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) + +# Due to circular dependencies between libraries :(, we wrap the +# special "figure out circular dependencies" flags around the entire +# input list during linking. +quiet_cmd_link = LINK($(TOOLSET)) $@ +quiet_cmd_link_host = LINK($(TOOLSET)) $@ +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) +cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) + +# Other shared-object link notes: +# - Set SONAME to the library filename so our binaries don't reference +# the local, absolute paths used on the link command-line. 
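+#   ($(@F) below is make's "file part" of the target path, so the soname
+#   records just the library basename, e.g. libfoo.so, not out/Release/libfoo.so.)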
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@ +cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) + +quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) +quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) +""" # noqa: E501 + + +LINK_COMMANDS_AIX = """\ +quiet_cmd_alink = AR($(TOOLSET)) $@ +cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^) + +quiet_cmd_alink_thin = AR($(TOOLSET)) $@ +cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^) + +quiet_cmd_link = LINK($(TOOLSET)) $@ +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink = SOLINK($(TOOLSET)) $@ +cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) +""" # noqa: E501 + + +LINK_COMMANDS_OS390 = """\ +quiet_cmd_alink = AR($(TOOLSET)) $@ +cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) + +quiet_cmd_alink_thin = AR($(TOOLSET)) $@ +cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) + +quiet_cmd_link = LINK($(TOOLSET)) $@ +cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) + +quiet_cmd_solink = SOLINK($(TOOLSET)) $@ +cmd_solink = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) -Wl,DLL + +quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ +cmd_solink_module = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -Wl,DLL +""" # noqa: E501 + + +# Header of toplevel Makefile. +# This should go into the build tree, but it's easier to keep it here for now. +SHARED_HEADER = ( + """\ +# We borrow heavily from the kernel build setup, though we are simpler since +# we don't have Kconfig tweaking settings on us. + +# The implicit make rules have it looking for RCS files, among other things. +# We instead explicitly write all the rules we care about. +# It's even quicker (saves ~200ms) to pass -r on the command line. +MAKEFLAGS=-r + +# The source directory tree. +srcdir := %(srcdir)s +abs_srcdir := $(abspath $(srcdir)) + +# The name of the builddir. +builddir_name ?= %(builddir)s + +# The V=1 flag on command line makes us verbosely print command lines. +ifdef V + quiet= +else + quiet=quiet_ +endif + +# Specify BUILDTYPE=Release on the command line for a release build. +BUILDTYPE ?= %(default_configuration)s + +# Directory all our build output goes into. +# Note that this must be two directories beneath src/ for unit tests to pass, +# as they reach into the src/ directory for data with relative paths. +builddir ?= $(builddir_name)/$(BUILDTYPE) +abs_builddir := $(abspath $(builddir)) +depsdir := $(builddir)/.deps + +# Object output directory. 
+obj := $(builddir)/obj +abs_obj := $(abspath $(obj)) + +# We build up a list of every single one of the targets so we can slurp in the +# generated dependency rule Makefiles in one pass. +all_deps := + +%(make_global_settings)s + +CC.target ?= %(CC.target)s +CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS) +CXX.target ?= %(CXX.target)s +CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS) +LINK.target ?= %(LINK.target)s +LDFLAGS.target ?= $(LDFLAGS) +AR.target ?= $(AR) + +# C++ apps need to be linked with g++. +LINK ?= $(CXX.target) + +# TODO(evan): move all cross-compilation logic to gyp-time so we don't need +# to replicate this environment fallback in make as well. +CC.host ?= %(CC.host)s +CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host) +CXX.host ?= %(CXX.host)s +CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host) +LINK.host ?= %(LINK.host)s +LDFLAGS.host ?= $(LDFLAGS_host) +AR.host ?= %(AR.host)s + +# Define a dir function that can handle spaces. +# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions +# "leading spaces cannot appear in the text of the first argument as written. +# These characters can be put into the argument value by variable substitution." +empty := +space := $(empty) $(empty) + +# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces +replace_spaces = $(subst $(space),""" + + SPACE_REPLACEMENT + + """,$1) +unreplace_spaces = $(subst """ + + SPACE_REPLACEMENT + + """,$(space),$1) +dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) + +# Flags to make gcc output dependency info. Note that you need to be +# careful here to use the flags that ccache and distcc can understand. +# We write to a dep file on the side first and then rename at the end +# so we can't end up with a broken dep file. +depfile = $(depsdir)/$(call replace_spaces,$@).d +DEPFLAGS = %(makedep_args)s -MF $(depfile).raw + +# We have to fixup the deps output in a few ways. +# (1) the file output should mention the proper .o file. +# ccache or distcc lose the path to the target, so we convert a rule of +# the form: +# foobar.o: DEP1 DEP2 +# into +# path/to/foobar.o: DEP1 DEP2 +# (2) we want missing files not to cause us to fail to build. +# We want to rewrite +# foobar.o: DEP1 DEP2 \\ +# DEP3 +# to +# DEP1: +# DEP2: +# DEP3: +# so if the files are missing, they're just considered phony rules. +# We have to do some pretty insane escaping to get those backslashes +# and dollar signs past make, the shell, and sed at the same time. +# Doesn't work with spaces, but that's fine: .d files have spaces in +# their names replaced with other characters.""" + r""" +define fixup_dep +# The depfile may not exist if the input file didn't have any #includes. +touch $(depfile).raw +# Fixup path as in (1). +sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) +# Add extra rules as in (2). +# We remove slashes and replace spaces with new lines; +# remove blank lines; +# delete the first line and append a colon to the remaining lines. +sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ + grep -v '^$$' |\ + sed -e 1d -e 's|$$|:|' \ + >> $(depfile) +rm $(depfile).raw +endef +""" + """ +# Command definitions: +# - cmd_foo is the actual command to run; +# - quiet_cmd_foo is the brief-output summary of the command. 
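+#   (With the default quiet build only the quiet_cmd_* summary is echoed;
+#   pass V=1 on the make command line to echo the full cmd_* line instead.)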
+
+quiet_cmd_cc = CC($(TOOLSET)) $@
+cmd_cc = $(CC.$(TOOLSET)) -o $@ $< $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c
+
+quiet_cmd_cxx = CXX($(TOOLSET)) $@
+cmd_cxx = $(CXX.$(TOOLSET)) -o $@ $< $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c
+%(extra_commands)s
+quiet_cmd_touch = TOUCH $@
+cmd_touch = touch $@
+
+quiet_cmd_copy = COPY $@
+# send stderr to /dev/null to ignore messages when linking directories.
+cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@")
+
+%(link_commands)s
+"""  # noqa: E501
+    r"""
+# Define an escape_quotes function to escape single quotes.
+# This allows us to handle quotes properly as long as we always
+# use single quotes and escape_quotes.
+escape_quotes = $(subst ','\'',$(1))
+# This comment is here just to include a ' to unconfuse syntax highlighting.
+# Define an escape_vars function to escape '$' variable syntax.
+# This allows us to read/write command lines with shell variables (e.g.
+# $LD_LIBRARY_PATH), without triggering make substitution.
+escape_vars = $(subst $$,$$$$,$(1))
+# Helper that expands to a shell command to echo a string exactly as it is in
+# make. This uses printf instead of echo because printf's behaviour with respect
+# to escape sequences is more portable than echo's across different shells
+# (e.g., dash, bash).
+exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
+"""
+    """
+# Helper to compare the command we're about to run against the command
+# we logged the last time we ran the command. Produces an empty
+# string (false) when the commands match.
+# Tricky point: Make has no string-equality test function.
+# The kernel uses the following, but it seems like it would have false
+# positives, where one string reordered its arguments.
+#   arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
+#                       $(filter-out $(cmd_$@), $(cmd_$(1))))
+# We instead substitute each for the empty string into the other, and
+# say they're equal if both substitutions produce the empty string.
+# .d files contain """
+    + SPACE_REPLACEMENT
+    + """ instead of spaces, take that into account.
+command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
+                       $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
+
+# Helper that is non-empty when a prerequisite changes.
+# Normally make does this implicitly, but we force rules to always run
+# so we can check their command lines.
+#   $? -- new prerequisites
+#   $| -- order-only dependencies
+prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
+
+# Helper that executes all postbuilds until one fails.
+define do_postbuilds
+  @E=0;\\
+  for p in $(POSTBUILDS); do\\
+    eval $$p;\\
+    E=$$?;\\
+    if [ $$E -ne 0 ]; then\\
+      break;\\
+    fi;\\
+  done;\\
+  if [ $$E -ne 0 ]; then\\
+    rm -rf "$@";\\
+    exit $$E;\\
+  fi
+endef
+
+# do_cmd: run a command via the above cmd_foo names, if necessary.
+# Should always run for a given target to handle command-line changes.
+# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
+# Third argument, if non-zero, makes it do POSTBUILDS processing.
+# Note: We intentionally do NOT call dirx for depfile, since it contains """
+    + SPACE_REPLACEMENT
+    + """ for
+# spaces already and dirx strips the """
+    + SPACE_REPLACEMENT
+    + """ characters.
+define do_cmd +$(if $(or $(command_changed),$(prereq_changed)), + @$(call exact_echo, $($(quiet)cmd_$(1))) + @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" + $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))), + @$(cmd_$(1)) + @echo " $(quiet_cmd_$(1)): Finished", + @$(cmd_$(1)) + ) + @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) + @$(if $(2),$(fixup_dep)) + $(if $(and $(3), $(POSTBUILDS)), + $(call do_postbuilds) + ) +) +endef + +# Declare the "%(default_target)s" target first so it is the default, +# even though we don't have the deps yet. +.PHONY: %(default_target)s +%(default_target)s: + +# make looks for ways to re-generate included makefiles, but in our case, we +# don't have a direct way. Explicitly telling make that it has nothing to do +# for them makes it go faster. +%%.d: ; + +# Use FORCE_DO_CMD to force a target to run. Should be coupled with +# do_cmd. +.PHONY: FORCE_DO_CMD +FORCE_DO_CMD: + +""" # noqa: E501 +) + +SHARED_HEADER_MAC_COMMANDS = """ +quiet_cmd_objc = CXX($(TOOLSET)) $@ +cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< + +quiet_cmd_objcxx = CXX($(TOOLSET)) $@ +cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< + +# Commands for precompiled header files. +quiet_cmd_pch_c = CXX($(TOOLSET)) $@ +cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ +cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< +quiet_cmd_pch_m = CXX($(TOOLSET)) $@ +cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< +quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ +cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< + +# gyp-mac-tool is written next to the root Makefile by gyp. +# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd +# already. +quiet_cmd_mac_tool = MACTOOL $(4) $< +cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" + +quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ +cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) + +quiet_cmd_infoplist = INFOPLIST $@ +cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" +""" # noqa: E501 + + +def WriteRootHeaderSuffixRules(writer): + extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower) + + writer.write("# Suffix rules, putting all outputs into $(obj).\n") + for ext in extensions: + writer.write("$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n" % ext) + writer.write("\t@$(call do_cmd,%s,1)\n" % COMPILABLE_EXTENSIONS[ext]) + + writer.write("\n# Try building from generated source, too.\n") + for ext in extensions: + writer.write( + "$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n" % ext + ) + writer.write("\t@$(call do_cmd,%s,1)\n" % COMPILABLE_EXTENSIONS[ext]) + writer.write("\n") + for ext in extensions: + writer.write("$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n" % ext) + writer.write("\t@$(call do_cmd,%s,1)\n" % COMPILABLE_EXTENSIONS[ext]) + writer.write("\n") + + +SHARED_HEADER_SUFFIX_RULES_COMMENT1 = """\ +# Suffix rules, putting all outputs into $(obj). +""" + + +SHARED_HEADER_SUFFIX_RULES_COMMENT2 = """\ +# Try building from generated source, too. +""" + + +SHARED_FOOTER = """\ +# "all" is a concatenation of the "all" targets from all the included +# sub-makefiles. This is just here to clarify. +all: + +# Add in dependency-tracking rules. 
+# $(all_deps) is the list of every single
+# target in our tree. Only consider the ones with .d (dependency) info:
+d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
+ifneq ($(d_files),)
+  include $(d_files)
+endif
+"""
+
+header = """\
+# This file is generated by gyp; do not edit.
+
+"""
+
+# Maps every compilable file extension to the do_cmd that compiles it.
+COMPILABLE_EXTENSIONS = {
+    ".c": "cc",
+    ".cc": "cxx",
+    ".cpp": "cxx",
+    ".cxx": "cxx",
+    ".s": "cc",
+    ".S": "cc",
+}
+
+
+def Compilable(filename):
+    """Return true if the file is compilable (should be in OBJS)."""
+    return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
+
+
+def Linkable(filename):
+    """Return true if the file is linkable (should be on the link line)."""
+    return filename.endswith(".o")
+
+
+def Target(filename):
+    """Translate a compilable filename to its .o target."""
+    return os.path.splitext(filename)[0] + ".o"
+
+
+def EscapeShellArgument(s):
+    """Quotes an argument so that it will be interpreted literally by a POSIX
+    shell. Taken from
+    http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
+    """
+    return "'" + s.replace("'", "'\\''") + "'"
+
+
+def EscapeMakeVariableExpansion(s):
+    """Make has its own variable expansion syntax using $. We must escape it for
+    the string to be interpreted literally."""
+    return s.replace("$", "$$")
+
+
+def EscapeCppDefine(s):
+    """Escapes a CPP define so that it will reach the compiler unaltered."""
+    s = EscapeShellArgument(s)
+    s = EscapeMakeVariableExpansion(s)
+    # '#' characters must be escaped even embedded in a string, else Make will
+    # treat it as the start of a comment.
+    return s.replace("#", r"\#")
+
+
+def QuoteIfNecessary(string):
+    """TODO: Should this ideally be replaced with one or more of the above
+    functions?"""
+    if '"' in string:
+        string = '"' + string.replace('"', '\\"') + '"'
+    return string
+
+
+def StringToMakefileVariable(string):
+    """Convert a string to a value that is acceptable as a make variable name."""
+    return re.sub("[^a-zA-Z0-9_]", "_", string)
+
+
+srcdir_prefix = ""
+
+
+def Sourceify(path):
+    """Convert a path to its source directory form."""
+    if "$(" in path:
+        return path
+    if os.path.isabs(path):
+        return path
+    return srcdir_prefix + path
+
+
+def QuoteSpaces(s, quote=r"\ "):
+    return s.replace(" ", quote)
+
+
+def SourceifyAndQuoteSpaces(path):
+    """Convert a path to its source directory form and quote spaces."""
+    return QuoteSpaces(Sourceify(path))
+
+
+# Map from qualified target to path to output.
+target_outputs = {}
+# Map from qualified target to any linkable output. A subset
+# of target_outputs. E.g. when mybinary depends on liba, we want to
+# include liba in the linker line; when otherbinary depends on
+# mybinary, we just want to build mybinary first.
+target_link_deps = {}
+
+
+class MakefileWriter:
+    """MakefileWriter packages up the writing of one target-specific foobar.mk.
+
+    Its only real entry point is Write(), and it is mostly used for namespacing.
+    """
+
+    def __init__(self, generator_flags, flavor):
+        self.generator_flags = generator_flags
+        self.flavor = flavor
+
+        self.suffix_rules_srcdir = {}
+        self.suffix_rules_objdir1 = {}
+        self.suffix_rules_objdir2 = {}
+
+        # Generate suffix rules for all compilable extensions.
+        for ext in COMPILABLE_EXTENSIONS.keys():
+            # Suffix rules for source folder.
+ self.suffix_rules_srcdir.update( + { + ext: ( + """\ +$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD +\t@$(call do_cmd,%s,1) +""" + % (ext, COMPILABLE_EXTENSIONS[ext]) + ) + } + ) + + # Suffix rules for generated source files. + self.suffix_rules_objdir1.update( + { + ext: ( + """\ +$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD +\t@$(call do_cmd,%s,1) +""" + % (ext, COMPILABLE_EXTENSIONS[ext]) + ) + } + ) + self.suffix_rules_objdir2.update( + { + ext: ( + """\ +$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD +\t@$(call do_cmd,%s,1) +""" + % (ext, COMPILABLE_EXTENSIONS[ext]) + ) + } + ) + + def Write( + self, qualified_target, base_path, output_filename, spec, configs, part_of_all + ): + """The main entry point: writes a .mk file for a single target. + + Arguments: + qualified_target: target we're generating + base_path: path relative to source root we're building in, used to resolve + target-relative paths + output_filename: output .mk file name to write + spec, configs: gyp info + part_of_all: flag indicating this target is part of 'all' + """ + gyp.common.EnsureDirExists(output_filename) + + self.fp = open(output_filename, "w") + + self.fp.write(header) + + self.qualified_target = qualified_target + self.path = base_path + self.target = spec["target_name"] + self.type = spec["type"] + self.toolset = spec["toolset"] + + self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) + if self.flavor == "mac": + self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) + else: + self.xcode_settings = None + + deps, link_deps = self.ComputeDeps(spec) + + # Some of the generation below can add extra output, sources, or + # link dependencies. All of the out params of the functions that + # follow use names like extra_foo. + extra_outputs = [] + extra_sources = [] + extra_link_deps = [] + extra_mac_bundle_resources = [] + mac_bundle_deps = [] + + if self.is_mac_bundle: + self.output = self.ComputeMacBundleOutput(spec) + self.output_binary = self.ComputeMacBundleBinaryOutput(spec) + else: + self.output = self.output_binary = self.ComputeOutput(spec) + + self.is_standalone_static_library = bool( + spec.get("standalone_static_library", 0) + ) + self._INSTALLABLE_TARGETS = ("executable", "loadable_module", "shared_library") + if self.is_standalone_static_library or self.type in self._INSTALLABLE_TARGETS: + self.alias = os.path.basename(self.output) + install_path = self._InstallableTargetInstallPath() + else: + self.alias = self.output + install_path = self.output + + self.WriteLn("TOOLSET := " + self.toolset) + self.WriteLn("TARGET := " + self.target) + + # Actions must come first, since they can generate more OBJs for use below. + if "actions" in spec: + self.WriteActions( + spec["actions"], + extra_sources, + extra_outputs, + extra_mac_bundle_resources, + part_of_all, + ) + + # Rules must be early like actions. + if "rules" in spec: + self.WriteRules( + spec["rules"], + extra_sources, + extra_outputs, + extra_mac_bundle_resources, + part_of_all, + ) + + if "copies" in spec: + self.WriteCopies(spec["copies"], extra_outputs, part_of_all) + + # Bundle resources. + if self.is_mac_bundle: + all_mac_bundle_resources = ( + spec.get("mac_bundle_resources", []) + extra_mac_bundle_resources + ) + self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps) + self.WriteMacInfoPlist(mac_bundle_deps) + + # Sources. 
+ all_sources = spec.get("sources", []) + extra_sources + if all_sources: + self.WriteSources( + configs, + deps, + all_sources, + extra_outputs, + extra_link_deps, + part_of_all, + gyp.xcode_emulation.MacPrefixHeader( + self.xcode_settings, + lambda p: Sourceify(self.Absolutify(p)), + self.Pchify, + ), + ) + sources = [x for x in all_sources if Compilable(x)] + if sources: + self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1) + extensions = {os.path.splitext(s)[1] for s in sources} + for ext in extensions: + if ext in self.suffix_rules_srcdir: + self.WriteLn(self.suffix_rules_srcdir[ext]) + self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2) + for ext in extensions: + if ext in self.suffix_rules_objdir1: + self.WriteLn(self.suffix_rules_objdir1[ext]) + for ext in extensions: + if ext in self.suffix_rules_objdir2: + self.WriteLn(self.suffix_rules_objdir2[ext]) + self.WriteLn("# End of this set of suffix rules") + + # Add dependency from bundle to bundle binary. + if self.is_mac_bundle: + mac_bundle_deps.append(self.output_binary) + + self.WriteTarget( + spec, + configs, + deps, + extra_link_deps + link_deps, + mac_bundle_deps, + extra_outputs, + part_of_all, + ) + + # Update global list of target outputs, used in dependency tracking. + target_outputs[qualified_target] = install_path + + # Update global list of link dependencies. + if self.type in ("static_library", "shared_library"): + target_link_deps[qualified_target] = self.output_binary + + # Currently any versions have the same effect, but in future the behavior + # could be different. + if self.generator_flags.get("android_ndk_version", None): + self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps) + + self.fp.close() + + def WriteSubMake(self, output_filename, makefile_path, targets, build_dir): + """Write a "sub-project" Makefile. + + This is a small, wrapper Makefile that calls the top-level Makefile to build + the targets from a single gyp file (i.e. a sub-project). + + Arguments: + output_filename: sub-project Makefile name to write + makefile_path: path to the top-level Makefile + targets: list of "all" targets for this sub-project + build_dir: build output directory, relative to the sub-project + """ + gyp.common.EnsureDirExists(output_filename) + self.fp = open(output_filename, "w") + self.fp.write(header) + # For consistency with other builders, put sub-project build output in the + # sub-project dir (see test/subdirectory/gyptest-subdir-all.py). + self.WriteLn( + "export builddir_name ?= %s" + % os.path.join(os.path.dirname(output_filename), build_dir) + ) + self.WriteLn(".PHONY: all") + self.WriteLn("all:") + if makefile_path: + makefile_path = " -C " + makefile_path + self.WriteLn("\t$(MAKE){} {}".format(makefile_path, " ".join(targets))) + self.fp.close() + + def WriteActions( + self, + actions, + extra_sources, + extra_outputs, + extra_mac_bundle_resources, + part_of_all, + ): + """Write Makefile code for any 'actions' from the gyp input. 
+ + extra_sources: a list that will be filled in with newly generated source + files, if any + extra_outputs: a list that will be filled in with any outputs of these + actions (used to make other pieces dependent on these + actions) + part_of_all: flag indicating this target is part of 'all' + """ + env = self.GetSortedXcodeEnv() + for action in actions: + name = StringToMakefileVariable( + "{}_{}".format(self.qualified_target, action["action_name"]) + ) + self.WriteLn('### Rules for action "%s":' % action["action_name"]) + inputs = action["inputs"] + outputs = action["outputs"] + + # Build up a list of outputs. + # Collect the output dirs we'll need. + dirs = set() + for out in outputs: + dir = os.path.split(out)[0] + if dir: + dirs.add(dir) + if int(action.get("process_outputs_as_sources", False)): + extra_sources += outputs + if int(action.get("process_outputs_as_mac_bundle_resources", False)): + extra_mac_bundle_resources += outputs + + # Write the actual command. + action_commands = action["action"] + if self.flavor == "mac": + action_commands = [ + gyp.xcode_emulation.ExpandEnvVars(command, env) + for command in action_commands + ] + command = gyp.common.EncodePOSIXShellList(action_commands) + if "message" in action: + self.WriteLn( + "quiet_cmd_{} = ACTION {} $@".format(name, action["message"]) + ) + else: + self.WriteLn(f"quiet_cmd_{name} = ACTION {name} $@") + if len(dirs) > 0: + command = "mkdir -p %s" % " ".join(dirs) + "; " + command + + cd_action = "cd %s; " % Sourceify(self.path or ".") + + # command and cd_action get written to a toplevel variable called + # cmd_foo. Toplevel variables can't handle things that change per + # makefile like $(TARGET), so hardcode the target. + command = command.replace("$(TARGET)", self.target) + cd_action = cd_action.replace("$(TARGET)", self.target) + + # Set LD_LIBRARY_PATH in case the action runs an executable from this + # build which links to shared libs from this build. + # actions run on the host, so they should in theory only use host + # libraries, but until everything is made cross-compile safe, also use + # target libraries. + # TODO(piman): when everything is cross-compile safe, remove lib.target + self.WriteLn( + "cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:" + "$(builddir)/lib.target:$$LD_LIBRARY_PATH; " + "export LD_LIBRARY_PATH; " + "%s%s" % (name, cd_action, command) + ) + self.WriteLn() + outputs = [self.Absolutify(o) for o in outputs] + # The makefile rules are all relative to the top dir, but the gyp actions + # are defined relative to their containing dir. This replaces the obj + # variable for the action rule with an absolute version so that the output + # goes in the right place. + # Only write the 'obj' and 'builddir' rules for the "primary" output (:1); + # it's superfluous for the "extra outputs", and this avoids accidentally + # writing duplicate dummy rules for those outputs. + # Same for environment. + self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0])) + self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0])) + self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv()) + + for input in inputs: + assert " " not in input, ( + "Spaces in action input filenames not supported (%s)" % input + ) + for output in outputs: + assert " " not in output, ( + "Spaces in action output filenames not supported (%s)" % output + ) + + # See the comment in WriteCopies about expanding env vars. 
+ outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] + inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] + + self.WriteDoCmd( + outputs, + [Sourceify(self.Absolutify(i)) for i in inputs], + part_of_all=part_of_all, + command=name, + ) + + # Stuff the outputs in a variable so we can refer to them later. + outputs_variable = "action_%s_outputs" % name + self.WriteLn("{} := {}".format(outputs_variable, " ".join(outputs))) + extra_outputs.append("$(%s)" % outputs_variable) + self.WriteLn() + + self.WriteLn() + + def WriteRules( + self, + rules, + extra_sources, + extra_outputs, + extra_mac_bundle_resources, + part_of_all, + ): + """Write Makefile code for any 'rules' from the gyp input. + + extra_sources: a list that will be filled in with newly generated source + files, if any + extra_outputs: a list that will be filled in with any outputs of these + rules (used to make other pieces dependent on these rules) + part_of_all: flag indicating this target is part of 'all' + """ + env = self.GetSortedXcodeEnv() + for rule in rules: + name = StringToMakefileVariable( + "{}_{}".format(self.qualified_target, rule["rule_name"]) + ) + count = 0 + self.WriteLn("### Generated for rule %s:" % name) + + all_outputs = [] + + for rule_source in rule.get("rule_sources", []): + dirs = set() + (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) + (rule_source_root, rule_source_ext) = os.path.splitext( + rule_source_basename + ) + + outputs = [ + self.ExpandInputRoot(out, rule_source_root, rule_source_dirname) + for out in rule["outputs"] + ] + + for out in outputs: + dir = os.path.dirname(out) + if dir: + dirs.add(dir) + if int(rule.get("process_outputs_as_sources", False)): + extra_sources += outputs + if int(rule.get("process_outputs_as_mac_bundle_resources", False)): + extra_mac_bundle_resources += outputs + inputs = [ + Sourceify(self.Absolutify(i)) + for i in [rule_source] + rule.get("inputs", []) + ] + actions = ["$(call do_cmd,%s_%d)" % (name, count)] + + if name == "resources_grit": + # HACK: This is ugly. Grit intentionally doesn't touch the + # timestamp of its output file when the file doesn't change, + # which is fine in hash-based dependency systems like scons + # and forge, but not kosher in the make world. After some + # discussion, hacking around it here seems like the least + # amount of pain. + actions += ["@touch --no-create $@"] + + # See the comment in WriteCopies about expanding env vars. + outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] + inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] + + outputs = [self.Absolutify(o) for o in outputs] + all_outputs += outputs + # Only write the 'obj' and 'builddir' rules for the "primary" output + # (:1); it's superfluous for the "extra outputs", and this avoids + # accidentally writing duplicate dummy rules for those outputs. + self.WriteLn("%s: obj := $(abs_obj)" % outputs[0]) + self.WriteLn("%s: builddir := $(abs_builddir)" % outputs[0]) + self.WriteMakeRule( + outputs, inputs, actions, command="%s_%d" % (name, count) + ) + # Spaces in rule filenames are not supported, but rule variables have + # spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)'). + # The spaces within the variables are valid, so remove the variables + # before checking. 
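+                # (Illustrative: an output such as "out/$(abspath $<).h" is
+                # reduced to "out/.h" here, so the space embedded in the
+                # variable does not trip the assertion below.)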
+ variables_with_spaces = re.compile(r"\$\([^ ]* \$<\)") + for output in outputs: + output = re.sub(variables_with_spaces, "", output) + assert " " not in output, ( + "Spaces in rule filenames not yet supported (%s)" % output + ) + self.WriteLn("all_deps += %s" % " ".join(outputs)) + + action = [ + self.ExpandInputRoot(ac, rule_source_root, rule_source_dirname) + for ac in rule["action"] + ] + mkdirs = "" + if len(dirs) > 0: + mkdirs = "mkdir -p %s; " % " ".join(dirs) + cd_action = "cd %s; " % Sourceify(self.path or ".") + + # action, cd_action, and mkdirs get written to a toplevel variable + # called cmd_foo. Toplevel variables can't handle things that change + # per makefile like $(TARGET), so hardcode the target. + if self.flavor == "mac": + action = [ + gyp.xcode_emulation.ExpandEnvVars(command, env) + for command in action + ] + action = gyp.common.EncodePOSIXShellList(action) + action = action.replace("$(TARGET)", self.target) + cd_action = cd_action.replace("$(TARGET)", self.target) + mkdirs = mkdirs.replace("$(TARGET)", self.target) + + # Set LD_LIBRARY_PATH in case the rule runs an executable from this + # build which links to shared libs from this build. + # rules run on the host, so they should in theory only use host + # libraries, but until everything is made cross-compile safe, also use + # target libraries. + # TODO(piman): when everything is cross-compile safe, remove lib.target + self.WriteLn( + "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH=" + "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; " + "export LD_LIBRARY_PATH; " + "%(cd_action)s%(mkdirs)s%(action)s" + % { + "action": action, + "cd_action": cd_action, + "count": count, + "mkdirs": mkdirs, + "name": name, + } + ) + self.WriteLn( + "quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@" + % {"count": count, "name": name} + ) + self.WriteLn() + count += 1 + + outputs_variable = "rule_%s_outputs" % name + self.WriteList(all_outputs, outputs_variable) + extra_outputs.append("$(%s)" % outputs_variable) + + self.WriteLn("### Finished generating for rule: %s" % name) + self.WriteLn() + self.WriteLn("### Finished generating for all rules") + self.WriteLn("") + + def WriteCopies(self, copies, extra_outputs, part_of_all): + """Write Makefile code for any 'copies' from the gyp input. + + extra_outputs: a list that will be filled in with any outputs of this action + (used to make other pieces dependent on this action) + part_of_all: flag indicating this target is part of 'all' + """ + self.WriteLn("### Generated for copy rule.") + + variable = StringToMakefileVariable(self.qualified_target + "_copies") + outputs = [] + for copy in copies: + for path in copy["files"]: + # Absolutify() may call normpath, and will strip trailing slashes. + path = Sourceify(self.Absolutify(path)) + filename = os.path.split(path)[1] + output = Sourceify( + self.Absolutify(os.path.join(copy["destination"], filename)) + ) + + # If the output path has variables in it, which happens in practice for + # 'copies', writing the environment as target-local doesn't work, + # because the variables are already needed for the target name. + # Copying the environment variables into global make variables doesn't + # work either, because then the .d files will potentially contain spaces + # after variable expansion, and .d file handling cannot handle spaces. + # As a workaround, manually expand variables at gyp time. Since 'copies' + # can't run scripts, there's no need to write the env then. + # WriteDoCmd() will escape spaces for .d files. 
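+                # (Illustrative, assuming an xcode-style env: with
+                # {"PRODUCT_NAME": "App"}, a destination of "dir/${PRODUCT_NAME}"
+                # becomes "dir/App" before the rule is written, so make never
+                # sees the variable.)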
+                env = self.GetSortedXcodeEnv()
+                output = gyp.xcode_emulation.ExpandEnvVars(output, env)
+                path = gyp.xcode_emulation.ExpandEnvVars(path, env)
+                self.WriteDoCmd([output], [path], "copy", part_of_all)
+                outputs.append(output)
+        self.WriteLn(
+            "{} = {}".format(variable, " ".join(QuoteSpaces(o) for o in outputs))
+        )
+        extra_outputs.append("$(%s)" % variable)
+        self.WriteLn()
+
+    def WriteMacBundleResources(self, resources, bundle_deps):
+        """Writes Makefile code for 'mac_bundle_resources'."""
+        self.WriteLn("### Generated for mac_bundle_resources")
+
+        for output, res in gyp.xcode_emulation.GetMacBundleResources(
+            generator_default_variables["PRODUCT_DIR"],
+            self.xcode_settings,
+            [Sourceify(self.Absolutify(r)) for r in resources],
+        ):
+            _, ext = os.path.splitext(output)
+            if ext != ".xcassets":
+                # Make does not support '.xcassets' emulation.
+                self.WriteDoCmd(
+                    [output], [res], "mac_tool,,,copy-bundle-resource", part_of_all=True
+                )
+                bundle_deps.append(output)
+
+    def WriteMacInfoPlist(self, bundle_deps):
+        """Write Makefile code for bundle Info.plist files."""
+        info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
+            generator_default_variables["PRODUCT_DIR"],
+            self.xcode_settings,
+            lambda p: Sourceify(self.Absolutify(p)),
+        )
+        if not info_plist:
+            return
+        if defines:
+            # Create an intermediate file to store preprocessed results.
+            intermediate_plist = "$(obj).$(TOOLSET)/$(TARGET)/" + os.path.basename(
+                info_plist
+            )
+            self.WriteList(
+                defines,
+                intermediate_plist + ": INFOPLIST_DEFINES",
+                "-D",
+                quoter=EscapeCppDefine,
+            )
+            self.WriteMakeRule(
+                [intermediate_plist],
+                [info_plist],
+                [
+                    "$(call do_cmd,infoplist)",
+                    # "Convert" the plist so that any weird whitespace changes from the
+                    # preprocessor do not affect the XML parser in mac_tool.
+                    "@plutil -convert xml1 $@ $@",
+                ],
+            )
+            info_plist = intermediate_plist
+        # plists can contain envvars; substitute them into the file.
+        self.WriteSortedXcodeEnv(
+            out, self.GetSortedXcodeEnv(additional_settings=extra_env)
+        )
+        self.WriteDoCmd(
+            [out], [info_plist], "mac_tool,,,copy-info-plist", part_of_all=True
+        )
+        bundle_deps.append(out)
+
+    def WriteSources(
+        self,
+        configs,
+        deps,
+        sources,
+        extra_outputs,
+        extra_link_deps,
+        part_of_all,
+        precompiled_header,
+    ):
+        """Write Makefile code for any 'sources' from the gyp input.
+        These are source files necessary to build the current target.
+
+        configs, deps, sources: input from gyp.
+        extra_outputs: a list of extra outputs this action should be dependent on;
+                       used to serialize action/rules before compilation
+        extra_link_deps: a list that will be filled in with any outputs of
+                         compilation (to be used in link lines)
+        part_of_all: flag indicating this target is part of 'all'
+        """
+
+        # Write configuration-specific variables for CFLAGS, etc.
+ for configname in sorted(configs.keys()): + config = configs[configname] + self.WriteList( + config.get("defines"), + "DEFS_%s" % configname, + prefix="-D", + quoter=EscapeCppDefine, + ) + + if self.flavor == "mac": + cflags = self.xcode_settings.GetCflags( + configname, arch=config.get("xcode_configuration_platform") + ) + cflags_c = self.xcode_settings.GetCflagsC(configname) + cflags_cc = self.xcode_settings.GetCflagsCC(configname) + cflags_objc = self.xcode_settings.GetCflagsObjC(configname) + cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname) + else: + cflags = config.get("cflags") + cflags_c = config.get("cflags_c") + cflags_cc = config.get("cflags_cc") + + self.WriteLn("# Flags passed to all source files.") + self.WriteList(cflags, "CFLAGS_%s" % configname) + self.WriteLn("# Flags passed to only C files.") + self.WriteList(cflags_c, "CFLAGS_C_%s" % configname) + self.WriteLn("# Flags passed to only C++ files.") + self.WriteList(cflags_cc, "CFLAGS_CC_%s" % configname) + if self.flavor == "mac": + self.WriteLn("# Flags passed to only ObjC files.") + self.WriteList(cflags_objc, "CFLAGS_OBJC_%s" % configname) + self.WriteLn("# Flags passed to only ObjC++ files.") + self.WriteList(cflags_objcc, "CFLAGS_OBJCC_%s" % configname) + includes = config.get("include_dirs") + if includes: + includes = [Sourceify(self.Absolutify(i)) for i in includes] + self.WriteList(includes, "INCS_%s" % configname, prefix="-I") + + compilable = list(filter(Compilable, sources)) + objs = [self.Objectify(self.Absolutify(Target(c))) for c in compilable] + self.WriteList(objs, "OBJS") + + for obj in objs: + assert " " not in obj, "Spaces in object filenames not supported (%s)" % obj + self.WriteLn( + "# Add to the list of files we specially track " "dependencies for." + ) + self.WriteLn("all_deps += $(OBJS)") + self.WriteLn() + + # Make sure our dependencies are built first. + if deps: + self.WriteMakeRule( + ["$(OBJS)"], + deps, + comment="Make sure our dependencies are built " "before any of us.", + order_only=True, + ) + + # Make sure the actions and rules run first. + # If they generate any extra headers etc., the per-.o file dep tracking + # will catch the proper rebuilds, so order only is still ok here. + if extra_outputs: + self.WriteMakeRule( + ["$(OBJS)"], + extra_outputs, + comment="Make sure our actions/rules run " "before any of us.", + order_only=True, + ) + + pchdeps = precompiled_header.GetObjDependencies(compilable, objs) + if pchdeps: + self.WriteLn("# Dependencies from obj files to their precompiled headers") + for source, obj, gch in pchdeps: + self.WriteLn(f"{obj}: {gch}") + self.WriteLn("# End precompiled header dependencies") + + if objs: + extra_link_deps.append("$(OBJS)") + self.WriteLn( + """\ +# CFLAGS et al overrides must be target-local. 
+# See "Target-specific Variable Values" in the GNU Make manual.""" + ) + self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)") + self.WriteLn( + "$(OBJS): GYP_CFLAGS := " + "$(DEFS_$(BUILDTYPE)) " + "$(INCS_$(BUILDTYPE)) " + "%s " % precompiled_header.GetInclude("c") + "$(CFLAGS_$(BUILDTYPE)) " + "$(CFLAGS_C_$(BUILDTYPE))" + ) + self.WriteLn( + "$(OBJS): GYP_CXXFLAGS := " + "$(DEFS_$(BUILDTYPE)) " + "$(INCS_$(BUILDTYPE)) " + "%s " % precompiled_header.GetInclude("cc") + "$(CFLAGS_$(BUILDTYPE)) " + "$(CFLAGS_CC_$(BUILDTYPE))" + ) + if self.flavor == "mac": + self.WriteLn( + "$(OBJS): GYP_OBJCFLAGS := " + "$(DEFS_$(BUILDTYPE)) " + "$(INCS_$(BUILDTYPE)) " + "%s " % precompiled_header.GetInclude("m") + + "$(CFLAGS_$(BUILDTYPE)) " + "$(CFLAGS_C_$(BUILDTYPE)) " + "$(CFLAGS_OBJC_$(BUILDTYPE))" + ) + self.WriteLn( + "$(OBJS): GYP_OBJCXXFLAGS := " + "$(DEFS_$(BUILDTYPE)) " + "$(INCS_$(BUILDTYPE)) " + "%s " % precompiled_header.GetInclude("mm") + + "$(CFLAGS_$(BUILDTYPE)) " + "$(CFLAGS_CC_$(BUILDTYPE)) " + "$(CFLAGS_OBJCC_$(BUILDTYPE))" + ) + + self.WritePchTargets(precompiled_header.GetPchBuildCommands()) + + # If there are any object files in our input file list, link them into our + # output. + extra_link_deps += [source for source in sources if Linkable(source)] + + self.WriteLn() + + def WritePchTargets(self, pch_commands): + """Writes make rules to compile prefix headers.""" + if not pch_commands: + return + + for gch, lang_flag, lang, input in pch_commands: + extra_flags = { + "c": "$(CFLAGS_C_$(BUILDTYPE))", + "cc": "$(CFLAGS_CC_$(BUILDTYPE))", + "m": "$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))", + "mm": "$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))", + }[lang] + var_name = { + "c": "GYP_PCH_CFLAGS", + "cc": "GYP_PCH_CXXFLAGS", + "m": "GYP_PCH_OBJCFLAGS", + "mm": "GYP_PCH_OBJCXXFLAGS", + }[lang] + self.WriteLn( + f"{gch}: {var_name} := {lang_flag} " + "$(DEFS_$(BUILDTYPE)) " + "$(INCS_$(BUILDTYPE)) " + "$(CFLAGS_$(BUILDTYPE)) " + extra_flags + ) + + self.WriteLn(f"{gch}: {input} FORCE_DO_CMD") + self.WriteLn("\t@$(call do_cmd,pch_%s,1)" % lang) + self.WriteLn("") + assert " " not in gch, "Spaces in gch filenames not supported (%s)" % gch + self.WriteLn("all_deps += %s" % gch) + self.WriteLn("") + + def ComputeOutputBasename(self, spec): + """Return the 'output basename' of a gyp spec. + + E.g., the loadable module 'foobar' in directory 'baz' will produce + 'libfoobar.so' + """ + assert not self.is_mac_bundle + + if self.flavor == "mac" and self.type in ( + "static_library", + "executable", + "shared_library", + "loadable_module", + ): + return self.xcode_settings.GetExecutablePath() + + target = spec["target_name"] + target_prefix = "" + target_ext = "" + if self.type == "static_library": + if target[:3] == "lib": + target = target[3:] + target_prefix = "lib" + target_ext = ".a" + elif self.type in ("loadable_module", "shared_library"): + if target[:3] == "lib": + target = target[3:] + target_prefix = "lib" + if self.flavor == "aix": + target_ext = ".a" + else: + target_ext = ".so" + elif self.type == "none": + target = "%s.stamp" % target + elif self.type != "executable": + print( + "ERROR: What output file should be generated?", + "type", + self.type, + "target", + target, + ) + + target_prefix = spec.get("product_prefix", target_prefix) + target = spec.get("product_name", target) + product_ext = spec.get("product_extension") + if product_ext: + target_ext = "." 
+ product_ext + + return target_prefix + target + target_ext + + def _InstallImmediately(self): + return ( + self.toolset == "target" + and self.flavor == "mac" + and self.type + in ("static_library", "executable", "shared_library", "loadable_module") + ) + + def ComputeOutput(self, spec): + """Return the 'output' (full output path) of a gyp spec. + + E.g., the loadable module 'foobar' in directory 'baz' will produce + '$(obj)/baz/libfoobar.so' + """ + assert not self.is_mac_bundle + + path = os.path.join("$(obj)." + self.toolset, self.path) + if self.type == "executable" or self._InstallImmediately(): + path = "$(builddir)" + path = spec.get("product_dir", path) + return os.path.join(path, self.ComputeOutputBasename(spec)) + + def ComputeMacBundleOutput(self, spec): + """Return the 'output' (full output path) to a bundle output directory.""" + assert self.is_mac_bundle + path = generator_default_variables["PRODUCT_DIR"] + return os.path.join(path, self.xcode_settings.GetWrapperName()) + + def ComputeMacBundleBinaryOutput(self, spec): + """Return the 'output' (full output path) to the binary in a bundle.""" + path = generator_default_variables["PRODUCT_DIR"] + return os.path.join(path, self.xcode_settings.GetExecutablePath()) + + def ComputeDeps(self, spec): + """Compute the dependencies of a gyp spec. + + Returns a tuple (deps, link_deps), where each is a list of + filenames that will need to be put in front of make for either + building (deps) or linking (link_deps). + """ + deps = [] + link_deps = [] + if "dependencies" in spec: + deps.extend( + [ + target_outputs[dep] + for dep in spec["dependencies"] + if target_outputs[dep] + ] + ) + for dep in spec["dependencies"]: + if dep in target_link_deps: + link_deps.append(target_link_deps[dep]) + deps.extend(link_deps) + # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)? + # This hack makes it work: + # link_deps.extend(spec.get('libraries', [])) + return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) + + def WriteDependencyOnExtraOutputs(self, target, extra_outputs): + self.WriteMakeRule( + [self.output_binary], + extra_outputs, + comment="Build our special outputs first.", + order_only=True, + ) + + def WriteTarget( + self, spec, configs, deps, link_deps, bundle_deps, extra_outputs, part_of_all + ): + """Write Makefile code to produce the final target of the gyp spec. + + spec, configs: input from gyp. + deps, link_deps: dependency lists; see ComputeDeps() + extra_outputs: any extra outputs that our target should depend on + part_of_all: flag indicating this target is part of 'all' + """ + + self.WriteLn("### Rules for final target.") + + if extra_outputs: + self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs) + self.WriteMakeRule( + extra_outputs, + deps, + comment=("Preserve order dependency of " "special output on deps."), + order_only=True, + ) + + target_postbuilds = {} + if self.type != "none": + for configname in sorted(configs.keys()): + config = configs[configname] + if self.flavor == "mac": + ldflags = self.xcode_settings.GetLdflags( + configname, + generator_default_variables["PRODUCT_DIR"], + lambda p: Sourceify(self.Absolutify(p)), + arch=config.get("xcode_configuration_platform"), + ) + + # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on. 
+ gyp_to_build = gyp.common.InvertRelativePath(self.path) + target_postbuild = self.xcode_settings.AddImplicitPostbuilds( + configname, + QuoteSpaces( + os.path.normpath(os.path.join(gyp_to_build, self.output)) + ), + QuoteSpaces( + os.path.normpath( + os.path.join(gyp_to_build, self.output_binary) + ) + ), + ) + if target_postbuild: + target_postbuilds[configname] = target_postbuild + else: + ldflags = config.get("ldflags", []) + # Compute an rpath for this output if needed. + if any(dep.endswith(".so") or ".so." in dep for dep in deps): + # We want to get the literal string "$ORIGIN" + # into the link command, so we need lots of escaping. + ldflags.append(r"-Wl,-rpath=\$$ORIGIN/") + ldflags.append(r"-Wl,-rpath-link=\$(builddir)/") + library_dirs = config.get("library_dirs", []) + ldflags += [("-L%s" % library_dir) for library_dir in library_dirs] + self.WriteList(ldflags, "LDFLAGS_%s" % configname) + if self.flavor == "mac": + self.WriteList( + self.xcode_settings.GetLibtoolflags(configname), + "LIBTOOLFLAGS_%s" % configname, + ) + libraries = spec.get("libraries") + if libraries: + # Remove duplicate entries + libraries = gyp.common.uniquer(libraries) + if self.flavor == "mac": + libraries = self.xcode_settings.AdjustLibraries(libraries) + self.WriteList(libraries, "LIBS") + self.WriteLn( + "%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))" + % QuoteSpaces(self.output_binary) + ) + self.WriteLn("%s: LIBS := $(LIBS)" % QuoteSpaces(self.output_binary)) + + if self.flavor == "mac": + self.WriteLn( + "%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))" + % QuoteSpaces(self.output_binary) + ) + + # Postbuild actions. Like actions, but implicitly depend on the target's + # output. + postbuilds = [] + if self.flavor == "mac": + if target_postbuilds: + postbuilds.append("$(TARGET_POSTBUILDS_$(BUILDTYPE))") + postbuilds.extend(gyp.xcode_emulation.GetSpecPostbuildCommands(spec)) + + if postbuilds: + # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE), + # so we must output its definition first, since we declare variables + # using ":=". + self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv()) + + for configname in target_postbuilds: + self.WriteLn( + "%s: TARGET_POSTBUILDS_%s := %s" + % ( + QuoteSpaces(self.output), + configname, + gyp.common.EncodePOSIXShellList(target_postbuilds[configname]), + ) + ) + + # Postbuilds expect to be run in the gyp file's directory, so insert an + # implicit postbuild to cd to there. + postbuilds.insert(0, gyp.common.EncodePOSIXShellList(["cd", self.path])) + for i, postbuild in enumerate(postbuilds): + if not postbuild.startswith("$"): + postbuilds[i] = EscapeShellArgument(postbuild) + self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(self.output)) + self.WriteLn( + "%s: POSTBUILDS := %s" + % (QuoteSpaces(self.output), " ".join(postbuilds)) + ) + + # A bundle directory depends on its dependencies such as bundle resources + # and bundle binary. When all dependencies have been built, the bundle + # needs to be packaged. + if self.is_mac_bundle: + # If the framework doesn't contain a binary, then nothing depends + # on the actions -- make the framework depend on them directly too. + self.WriteDependencyOnExtraOutputs(self.output, extra_outputs) + + # Bundle dependencies. Note that the code below adds actions to this + # target, so if you move these two lines, move the lines below as well. 
+            self.WriteList([QuoteSpaces(dep) for dep in bundle_deps], "BUNDLE_DEPS")
+            self.WriteLn("%s: $(BUNDLE_DEPS)" % QuoteSpaces(self.output))
+
+            # After the framework is built, package it. Needs to happen before
+            # postbuilds, since postbuilds depend on this.
+            if self.type in ("shared_library", "loadable_module"):
+                self.WriteLn(
+                    "\t@$(call do_cmd,mac_package_framework,,,%s)"
+                    % self.xcode_settings.GetFrameworkVersion()
+                )
+
+            # Bundle postbuilds can depend on the whole bundle, so run them after
+            # the bundle is packaged, not merely after the bundle binary is done.
+            if postbuilds:
+                self.WriteLn("\t@$(call do_postbuilds)")
+                postbuilds = []  # Don't write postbuilds for target's output.
+
+            # Needed by test/mac/gyptest-rebuild.py.
+            self.WriteLn("\t@true # No-op, used by tests")
+
+            # Since this target depends on binary and resources which are in
+            # nested subfolders, the framework directory will usually be older
+            # than its dependencies. To prevent this rule from executing on
+            # every build (expensive, especially with postbuilds), explicitly
+            # update the time on the framework directory.
+            self.WriteLn("\t@touch -c %s" % QuoteSpaces(self.output))
+
+        if postbuilds:
+            assert not self.is_mac_bundle, (
+                "Postbuilds for bundles should be done "
+                "on the bundle, not the binary (target '%s')" % self.target
+            )
+            assert "product_dir" not in spec, (
+                "Postbuilds do not work with custom product_dir"
+            )
+
+        if self.type == "executable":
+            self.WriteLn(
+                "%s: LD_INPUTS := %s"
+                % (
+                    QuoteSpaces(self.output_binary),
+                    " ".join(QuoteSpaces(dep) for dep in link_deps),
+                )
+            )
+            if self.toolset == "host" and self.flavor == "android":
+                self.WriteDoCmd(
+                    [self.output_binary],
+                    link_deps,
+                    "link_host",
+                    part_of_all,
+                    postbuilds=postbuilds,
+                )
+            else:
+                self.WriteDoCmd(
+                    [self.output_binary],
+                    link_deps,
+                    "link",
+                    part_of_all,
+                    postbuilds=postbuilds,
+                )
+
+        elif self.type == "static_library":
+            for link_dep in link_deps:
+                assert " " not in link_dep, (
+                    "Spaces in alink input filenames not supported (%s)" % link_dep
+                )
+            if (
+                self.flavor not in ("mac", "openbsd", "netbsd", "win")
+                and not self.is_standalone_static_library
+            ):
+                self.WriteDoCmd(
+                    [self.output_binary],
+                    link_deps,
+                    "alink_thin",
+                    part_of_all,
+                    postbuilds=postbuilds,
+                )
+            else:
+                self.WriteDoCmd(
+                    [self.output_binary],
+                    link_deps,
+                    "alink",
+                    part_of_all,
+                    postbuilds=postbuilds,
+                )
+        elif self.type == "shared_library":
+            self.WriteLn(
+                "%s: LD_INPUTS := %s"
+                % (
+                    QuoteSpaces(self.output_binary),
+                    " ".join(QuoteSpaces(dep) for dep in link_deps),
+                )
+            )
+            self.WriteDoCmd(
+                [self.output_binary],
+                link_deps,
+                "solink",
+                part_of_all,
+                postbuilds=postbuilds,
+            )
+        elif self.type == "loadable_module":
+            for link_dep in link_deps:
+                assert " " not in link_dep, (
+                    "Spaces in module input filenames not supported (%s)" % link_dep
+                )
+            if self.toolset == "host" and self.flavor == "android":
+                self.WriteDoCmd(
+                    [self.output_binary],
+                    link_deps,
+                    "solink_module_host",
+                    part_of_all,
+                    postbuilds=postbuilds,
+                )
+            else:
+                self.WriteDoCmd(
+                    [self.output_binary],
+                    link_deps,
+                    "solink_module",
+                    part_of_all,
+                    postbuilds=postbuilds,
+                )
+        elif self.type == "none":
+            # Write a stamp line.
+            self.WriteDoCmd(
+                [self.output_binary], deps, "touch", part_of_all, postbuilds=postbuilds
+            )
+        else:
+            print("WARNING: no output for", self.type, self.target)
+
+        # Add an alias for each target (if there are any outputs).
+        # Installable target aliases are created below.
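+        # Editor's note (illustrative, not part of upstream gyp): for a
+        # hypothetical target "foo" whose output is $(obj).target/baz/libfoo.a,
+        # the alias rules written below come out roughly as:
+        #   # Add target alias
+        #   .PHONY: foo
+        #   foo: $(obj).target/baz/libfoo.a
+        #   # Add target alias to "all" target.
+        #   .PHONY: all
+        #   all: foo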
+ if (self.output and self.output != self.target) and ( + self.type not in self._INSTALLABLE_TARGETS + ): + self.WriteMakeRule( + [self.target], [self.output], comment="Add target alias", phony=True + ) + if part_of_all: + self.WriteMakeRule( + ["all"], + [self.target], + comment='Add target alias to "all" target.', + phony=True, + ) + + # Add special-case rules for our installable targets. + # 1) They need to install to the build dir or "product" dir. + # 2) They get shortcuts for building (e.g. "make chrome"). + # 3) They are part of "make all". + if self.type in self._INSTALLABLE_TARGETS or self.is_standalone_static_library: + if self.type == "shared_library": + file_desc = "shared library" + elif self.type == "static_library": + file_desc = "static library" + else: + file_desc = "executable" + install_path = self._InstallableTargetInstallPath() + installable_deps = [self.output] + if ( + self.flavor == "mac" + and "product_dir" not in spec + and self.toolset == "target" + ): + # On mac, products are created in install_path immediately. + assert install_path == self.output, "{} != {}".format( + install_path, + self.output, + ) + + # Point the target alias to the final binary output. + self.WriteMakeRule( + [self.target], [install_path], comment="Add target alias", phony=True + ) + if install_path != self.output: + assert not self.is_mac_bundle # See comment a few lines above. + self.WriteDoCmd( + [install_path], + [self.output], + "copy", + comment="Copy this to the %s output path." % file_desc, + part_of_all=part_of_all, + ) + installable_deps.append(install_path) + if self.output != self.alias and self.alias != self.target: + self.WriteMakeRule( + [self.alias], + installable_deps, + comment="Short alias for building this %s." % file_desc, + phony=True, + ) + if part_of_all: + self.WriteMakeRule( + ["all"], + [install_path], + comment='Add %s to "all" target.' % file_desc, + phony=True, + ) + + def WriteList(self, value_list, variable=None, prefix="", quoter=QuoteIfNecessary): + """Write a variable definition that is a list of values. + + E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out + foo = blaha blahb + but in a pretty-printed style. + """ + values = "" + if value_list: + value_list = [quoter(prefix + value) for value in value_list] + values = " \\\n\t" + " \\\n\t".join(value_list) + self.fp.write(f"{variable} :={values}\n\n") + + def WriteDoCmd( + self, outputs, inputs, command, part_of_all, comment=None, postbuilds=False + ): + """Write a Makefile rule that uses do_cmd. + + This makes the outputs dependent on the command line that was run, + as well as support the V= make command line flag. + """ + suffix = "" + if postbuilds: + assert "," not in command + suffix = ",,1" # Tell do_cmd to honor $POSTBUILDS + self.WriteMakeRule( + outputs, + inputs, + actions=[f"$(call do_cmd,{command}{suffix})"], + comment=comment, + command=command, + force=True, + ) + # Add our outputs to the list of targets we read depfiles from. + # all_deps is only used for deps file reading, and for deps files we replace + # spaces with ? because escaping doesn't work with make's $(sort) and + # other functions. + outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs] + self.WriteLn("all_deps += %s" % " ".join(outputs)) + + def WriteMakeRule( + self, + outputs, + inputs, + actions=None, + comment=None, + order_only=False, + force=False, + phony=False, + command=None, + ): + """Write a Makefile rule, with some extra tricks. 
+
+        outputs: a list of outputs for the rule (note: this is not directly
+          supported by make; see comments below)
+        inputs: a list of inputs for the rule
+        actions: a list of shell commands to run for the rule
+        comment: a comment to put in the Makefile above the rule (also useful
+          for making this Python script's code self-documenting)
+        order_only: if true, makes the dependency order-only
+        force: if true, include FORCE_DO_CMD as an order-only dep
+        phony: if true, the rule does not actually generate the named output;
+          the output is just a name under which to run the rule
+        command: (optional) command name to generate unambiguous labels
+        """
+        outputs = [QuoteSpaces(o) for o in outputs]
+        inputs = [QuoteSpaces(i) for i in inputs]
+
+        if comment:
+            self.WriteLn("# " + comment)
+        if phony:
+            self.WriteLn(".PHONY: " + " ".join(outputs))
+        if actions:
+            self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
+        force_append = " FORCE_DO_CMD" if force else ""
+
+        if order_only:
+            # Order only rule: Just write a simple rule.
+            # TODO(evanm): just make order_only a list of deps instead of this hack.
+            self.WriteLn(
+                "{}: | {}{}".format(" ".join(outputs), " ".join(inputs), force_append)
+            )
+        elif len(outputs) == 1:
+            # Regular rule, one output: Just write a simple rule.
+            self.WriteLn("{}: {}{}".format(outputs[0], " ".join(inputs), force_append))
+        else:
+            # Regular rule, more than one output: Multiple outputs are tricky in
+            # make. We will write three rules:
+            # - All outputs depend on an intermediate file.
+            # - Make .INTERMEDIATE depend on the intermediate.
+            # - The intermediate file depends on the inputs and executes the
+            #   actual command.
+            # The intermediate recipe will 'touch' the intermediate file, and
+            # the multi-output rule will have a do-nothing recipe.
+
+            # Hash the target name to avoid generating overlong filenames.
+            cmddigest = hashlib.sha1(
+                (command or self.target).encode("utf-8")
+            ).hexdigest()
+            intermediate = "%s.intermediate" % cmddigest
+            self.WriteLn("{}: {}".format(" ".join(outputs), intermediate))
+            self.WriteLn("\t%s" % "@:")
+            self.WriteLn("{}: {}".format(".INTERMEDIATE", intermediate))
+            self.WriteLn(
+                "{}: {}{}".format(intermediate, " ".join(inputs), force_append)
+            )
+            actions.insert(0, "$(call do_cmd,touch)")
+
+        if actions:
+            for action in actions:
+                self.WriteLn("\t%s" % action)
+            self.WriteLn()
+
+    def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
+        """Write a set of LOCAL_XXX definitions for Android NDK.
+
+        These variable definitions will be used by the Android NDK but do nothing
+        for non-Android applications.
+
+        Arguments:
+          module_name: Android NDK module name, which must be unique among all
+            module names.
+          all_sources: A list of source files (will be filtered by Compilable).
+          link_deps: A list of link dependencies, which must be sorted in
+            the order from dependencies to dependents.
+        """
+        if self.type not in ("executable", "shared_library", "static_library"):
+            return
+
+        self.WriteLn("# Variable definitions for Android applications")
+        self.WriteLn("include $(CLEAR_VARS)")
+        self.WriteLn("LOCAL_MODULE := " + module_name)
+        self.WriteLn(
+            "LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) "
+            "$(DEFS_$(BUILDTYPE)) "
+            # LOCAL_CFLAGS is applied to both C and C++. There is
+            # no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C
+            # sources.
+            "$(CFLAGS_C_$(BUILDTYPE)) "
+            # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while
+            # LOCAL_C_INCLUDES does not expect it. So put it in
+            # LOCAL_CFLAGS.
+ "$(INCS_$(BUILDTYPE))" + ) + # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred. + self.WriteLn("LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))") + self.WriteLn("LOCAL_C_INCLUDES :=") + self.WriteLn("LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)") + + # Detect the C++ extension. + cpp_ext = {".cc": 0, ".cpp": 0, ".cxx": 0} + default_cpp_ext = ".cpp" + for filename in all_sources: + ext = os.path.splitext(filename)[1] + if ext in cpp_ext: + cpp_ext[ext] += 1 + if cpp_ext[ext] > cpp_ext[default_cpp_ext]: + default_cpp_ext = ext + self.WriteLn("LOCAL_CPP_EXTENSION := " + default_cpp_ext) + + self.WriteList( + list(map(self.Absolutify, filter(Compilable, all_sources))), + "LOCAL_SRC_FILES", + ) + + # Filter out those which do not match prefix and suffix and produce + # the resulting list without prefix and suffix. + def DepsToModules(deps, prefix, suffix): + modules = [] + for filepath in deps: + filename = os.path.basename(filepath) + if filename.startswith(prefix) and filename.endswith(suffix): + modules.append(filename[len(prefix) : -len(suffix)]) + return modules + + # Retrieve the default value of 'SHARED_LIB_SUFFIX' + params = {"flavor": "linux"} + default_variables = {} + CalculateVariables(default_variables, params) + + self.WriteList( + DepsToModules( + link_deps, + generator_default_variables["SHARED_LIB_PREFIX"], + default_variables["SHARED_LIB_SUFFIX"], + ), + "LOCAL_SHARED_LIBRARIES", + ) + self.WriteList( + DepsToModules( + link_deps, + generator_default_variables["STATIC_LIB_PREFIX"], + generator_default_variables["STATIC_LIB_SUFFIX"], + ), + "LOCAL_STATIC_LIBRARIES", + ) + + if self.type == "executable": + self.WriteLn("include $(BUILD_EXECUTABLE)") + elif self.type == "shared_library": + self.WriteLn("include $(BUILD_SHARED_LIBRARY)") + elif self.type == "static_library": + self.WriteLn("include $(BUILD_STATIC_LIBRARY)") + self.WriteLn() + + def WriteLn(self, text=""): + self.fp.write(text + "\n") + + def GetSortedXcodeEnv(self, additional_settings=None): + return gyp.xcode_emulation.GetSortedXcodeEnv( + self.xcode_settings, + "$(abs_builddir)", + os.path.join("$(abs_srcdir)", self.path), + "$(BUILDTYPE)", + additional_settings, + ) + + def GetSortedXcodePostbuildEnv(self): + # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. + # TODO(thakis): It would be nice to have some general mechanism instead. + strip_save_file = self.xcode_settings.GetPerTargetSetting( + "CHROMIUM_STRIP_SAVE_FILE", "" + ) + # Even if strip_save_file is empty, explicitly write it. Else a postbuild + # might pick up an export from an earlier target. + return self.GetSortedXcodeEnv( + additional_settings={"CHROMIUM_STRIP_SAVE_FILE": strip_save_file} + ) + + def WriteSortedXcodeEnv(self, target, env): + for k, v in env: + # For + # foo := a\ b + # the escaped space does the right thing. For + # export foo := a\ b + # it does not -- the backslash is written to the env as literal character. + # So don't escape spaces in |env[k]|. 
+ self.WriteLn(f"{QuoteSpaces(target)}: export {k} := {v}") + + def Objectify(self, path): + """Convert a path to its output directory form.""" + if "$(" in path: + path = path.replace("$(obj)/", "$(obj).%s/$(TARGET)/" % self.toolset) + if "$(obj)" not in path: + path = f"$(obj).{self.toolset}/$(TARGET)/{path}" + return path + + def Pchify(self, path, lang): + """Convert a prefix header path to its output directory form.""" + path = self.Absolutify(path) + if "$(" in path: + path = path.replace( + "$(obj)/", f"$(obj).{self.toolset}/$(TARGET)/pch-{lang}" + ) + return path + return f"$(obj).{self.toolset}/$(TARGET)/pch-{lang}/{path}" + + def Absolutify(self, path): + """Convert a subdirectory-relative path into a base-relative path. + Skips over paths that contain variables.""" + if "$(" in path: + # Don't call normpath in this case, as it might collapse the + # path too aggressively if it features '..'. However it's still + # important to strip trailing slashes. + return path.rstrip("/") + return os.path.normpath(os.path.join(self.path, path)) + + def ExpandInputRoot(self, template, expansion, dirname): + if "%(INPUT_ROOT)s" not in template and "%(INPUT_DIRNAME)s" not in template: + return template + path = template % { + "INPUT_ROOT": expansion, + "INPUT_DIRNAME": dirname, + } + return path + + def _InstallableTargetInstallPath(self): + """Returns the location of the final output for an installable target.""" + # Functionality removed for all platforms to match Xcode and hoist + # shared libraries into PRODUCT_DIR for users: + # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files + # rely on this. Emulate this behavior for mac. + # if self.type == "shared_library" and ( + # self.flavor != "mac" or self.toolset != "target" + # ): + # # Install all shared libs into a common directory (per toolset) for + # # convenient access with LD_LIBRARY_PATH. 
+ # return "$(builddir)/lib.%s/%s" % (self.toolset, self.alias) + return "$(builddir)/" + self.alias + + +def WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files): + """Write the target to regenerate the Makefile.""" + options = params["options"] + build_files_args = [ + gyp.common.RelativePath(filename, options.toplevel_dir) + for filename in params["build_files_arg"] + ] + + gyp_binary = gyp.common.FixIfRelativePath( + params["gyp_binary"], options.toplevel_dir + ) + if not gyp_binary.startswith(os.sep): + gyp_binary = os.path.join(".", gyp_binary) + + root_makefile.write( + "quiet_cmd_regen_makefile = ACTION Regenerating $@\n" + "cmd_regen_makefile = cd $(srcdir); %(cmd)s\n" + "%(makefile_name)s: %(deps)s\n" + "\t$(call do_cmd,regen_makefile)\n\n" + % { + "makefile_name": makefile_name, + "deps": " ".join(SourceifyAndQuoteSpaces(bf) for bf in build_files), + "cmd": gyp.common.EncodePOSIXShellList( + [gyp_binary, "-fmake"] + gyp.RegenerateFlags(options) + build_files_args + ), + } + ) + + +def PerformBuild(data, configurations, params): + options = params["options"] + for config in configurations: + arguments = ["make"] + if options.toplevel_dir and options.toplevel_dir != ".": + arguments += "-C", options.toplevel_dir + arguments.append("BUILDTYPE=" + config) + print(f"Building [{config}]: {arguments}") + subprocess.check_call(arguments) + + +def GenerateOutput(target_list, target_dicts, data, params): + options = params["options"] + flavor = gyp.common.GetFlavor(params) + generator_flags = params.get("generator_flags", {}) + builddir_name = generator_flags.get("output_dir", "out") + android_ndk_version = generator_flags.get("android_ndk_version", None) + default_target = generator_flags.get("default_target", "all") + + def CalculateMakefilePath(build_file, base_name): + """Determine where to write a Makefile for a given gyp file.""" + # Paths in gyp files are relative to the .gyp file, but we want + # paths relative to the source root for the master makefile. Grab + # the path of the .gyp file as the base to relativize against. + # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". + base_path = gyp.common.RelativePath(os.path.dirname(build_file), options.depth) + # We write the file in the base_path directory. + output_file = os.path.join(options.depth, base_path, base_name) + if options.generator_output: + output_file = os.path.join( + options.depth, options.generator_output, base_path, base_name + ) + base_path = gyp.common.RelativePath( + os.path.dirname(build_file), options.toplevel_dir + ) + return base_path, output_file + + # TODO: search for the first non-'Default' target. This can go + # away when we add verification that all targets have the + # necessary configurations. + default_configuration = None + toolsets = {target_dicts[target]["toolset"] for target in target_list} + for target in target_list: + spec = target_dicts[target] + if spec["default_configuration"] != "Default": + default_configuration = spec["default_configuration"] + break + if not default_configuration: + default_configuration = "Default" + + srcdir = "." 
+ makefile_name = "Makefile" + options.suffix + makefile_path = os.path.join(options.toplevel_dir, makefile_name) + if options.generator_output: + global srcdir_prefix + makefile_path = os.path.join( + options.toplevel_dir, options.generator_output, makefile_name + ) + srcdir = gyp.common.RelativePath(srcdir, options.generator_output) + srcdir_prefix = "$(srcdir)/" + + flock_command = "flock" + copy_archive_arguments = "-af" + makedep_arguments = "-MMD" + header_params = { + "default_target": default_target, + "builddir": builddir_name, + "default_configuration": default_configuration, + "flock": flock_command, + "flock_index": 1, + "link_commands": LINK_COMMANDS_LINUX, + "extra_commands": "", + "srcdir": srcdir, + "copy_archive_args": copy_archive_arguments, + "makedep_args": makedep_arguments, + "CC.target": GetEnvironFallback(("CC_target", "CC"), "$(CC)"), + "AR.target": GetEnvironFallback(("AR_target", "AR"), "$(AR)"), + "CXX.target": GetEnvironFallback(("CXX_target", "CXX"), "$(CXX)"), + "LINK.target": GetEnvironFallback(("LINK_target", "LINK"), "$(LINK)"), + "CC.host": GetEnvironFallback(("CC_host", "CC"), "gcc"), + "AR.host": GetEnvironFallback(("AR_host", "AR"), "ar"), + "CXX.host": GetEnvironFallback(("CXX_host", "CXX"), "g++"), + "LINK.host": GetEnvironFallback(("LINK_host", "LINK"), "$(CXX.host)"), + } + if flavor == "mac": + flock_command = "./gyp-mac-tool flock" + header_params.update( + { + "flock": flock_command, + "flock_index": 2, + "link_commands": LINK_COMMANDS_MAC, + "extra_commands": SHARED_HEADER_MAC_COMMANDS, + } + ) + elif flavor == "android": + header_params.update({"link_commands": LINK_COMMANDS_ANDROID}) + elif flavor == "zos": + copy_archive_arguments = "-fPR" + makedep_arguments = "-qmakedep=gcc" + header_params.update( + { + "copy_archive_args": copy_archive_arguments, + "makedep_args": makedep_arguments, + "link_commands": LINK_COMMANDS_OS390, + "CC.target": GetEnvironFallback(("CC_target", "CC"), "njsc"), + "CXX.target": GetEnvironFallback(("CXX_target", "CXX"), "njsc++"), + "CC.host": GetEnvironFallback(("CC_host", "CC"), "njsc"), + "CXX.host": GetEnvironFallback(("CXX_host", "CXX"), "njsc++"), + } + ) + elif flavor == "solaris": + copy_archive_arguments = "-pPRf@" + header_params.update( + { + "copy_archive_args": copy_archive_arguments, + "flock": "./gyp-flock-tool flock", + "flock_index": 2, + } + ) + elif flavor == "freebsd": + # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific. 
+ header_params.update({"flock": "lockf"}) + elif flavor == "openbsd": + copy_archive_arguments = "-pPRf" + header_params.update({"copy_archive_args": copy_archive_arguments}) + elif flavor == "aix": + copy_archive_arguments = "-pPRf" + header_params.update( + { + "copy_archive_args": copy_archive_arguments, + "link_commands": LINK_COMMANDS_AIX, + "flock": "./gyp-flock-tool flock", + "flock_index": 2, + } + ) + + build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) + make_global_settings_array = data[build_file].get("make_global_settings", []) + wrappers = {} + for key, value in make_global_settings_array: + if key.endswith("_wrapper"): + wrappers[key[: -len("_wrapper")]] = "$(abspath %s)" % value + make_global_settings = "" + for key, value in make_global_settings_array: + if re.match(".*_wrapper", key): + continue + if value[0] != "$": + value = "$(abspath %s)" % value + wrapper = wrappers.get(key) + if wrapper: + value = f"{wrapper} {value}" + del wrappers[key] + if key in ("CC", "CC.host", "CXX", "CXX.host"): + make_global_settings += ( + "ifneq (,$(filter $(origin %s), undefined default))\n" % key + ) + # Let gyp-time envvars win over global settings. + env_key = key.replace(".", "_") # CC.host -> CC_host + if env_key in os.environ: + value = os.environ[env_key] + make_global_settings += f" {key} = {value}\n" + make_global_settings += "endif\n" + else: + make_global_settings += f"{key} ?= {value}\n" + # TODO(ukai): define cmd when only wrapper is specified in + # make_global_settings. + + header_params["make_global_settings"] = make_global_settings + + gyp.common.EnsureDirExists(makefile_path) + root_makefile = open(makefile_path, "w") + root_makefile.write(SHARED_HEADER % header_params) + # Currently any versions have the same effect, but in future the behavior + # could be different. + if android_ndk_version: + root_makefile.write( + "# Define LOCAL_PATH for build of Android applications.\n" + "LOCAL_PATH := $(call my-dir)\n" + "\n" + ) + for toolset in toolsets: + root_makefile.write("TOOLSET := %s\n" % toolset) + WriteRootHeaderSuffixRules(root_makefile) + + # Put build-time support tools next to the root Makefile. + dest_path = os.path.dirname(makefile_path) + gyp.common.CopyTool(flavor, dest_path) + + # Find the list of targets that derive from the gyp file(s) being built. + needed_targets = set() + for build_file in params["build_files"]: + for target in gyp.common.AllTargets(target_list, target_dicts, build_file): + needed_targets.add(target) + + build_files = set() + include_list = set() + for qualified_target in target_list: + build_file, target, toolset = gyp.common.ParseQualifiedTarget(qualified_target) + + this_make_global_settings = data[build_file].get("make_global_settings", []) + assert make_global_settings_array == this_make_global_settings, ( + "make_global_settings needs to be the same for all targets " + f"{this_make_global_settings} vs. {make_global_settings}" + ) + + build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir)) + included_files = data[build_file]["included_files"] + for included_file in included_files: + # The included_files entries are relative to the dir of the build file + # that included them, so we have to undo that and then make them relative + # to the root dir. 
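+            # Editor's note (illustrative, not part of upstream gyp): for a
+            # hypothetical build file "foo/bar/baz.gyp" that includes
+            # "../common.gypi", UnrelativePath() yields "foo/common.gypi",
+            # which RelativePath() then re-expresses relative to toplevel_dir.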
+ relative_include_file = gyp.common.RelativePath( + gyp.common.UnrelativePath(included_file, build_file), + options.toplevel_dir, + ) + abs_include_file = os.path.abspath(relative_include_file) + # If the include file is from the ~/.gyp dir, we should use absolute path + # so that relocating the src dir doesn't break the path. + if params["home_dot_gyp"] and abs_include_file.startswith( + params["home_dot_gyp"] + ): + build_files.add(abs_include_file) + else: + build_files.add(relative_include_file) + + base_path, output_file = CalculateMakefilePath( + build_file, target + "." + toolset + options.suffix + ".mk" + ) + + spec = target_dicts[qualified_target] + configs = spec["configurations"] + + if flavor == "mac": + gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) + + writer = MakefileWriter(generator_flags, flavor) + writer.Write( + qualified_target, + base_path, + output_file, + spec, + configs, + part_of_all=qualified_target in needed_targets, + ) + + # Our root_makefile lives at the source root. Compute the relative path + # from there to the output_file for including. + mkfile_rel_path = gyp.common.RelativePath( + output_file, os.path.dirname(makefile_path) + ) + include_list.add(mkfile_rel_path) + + # Write out per-gyp (sub-project) Makefiles. + depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd()) + for build_file in build_files: + # The paths in build_files were relativized above, so undo that before + # testing against the non-relativized items in target_list and before + # calculating the Makefile path. + build_file = os.path.join(depth_rel_path, build_file) + gyp_targets = [ + target_dicts[qualified_target]["target_name"] + for qualified_target in target_list + if qualified_target.startswith(build_file) + and qualified_target in needed_targets + ] + # Only generate Makefiles for gyp files with targets. + if not gyp_targets: + continue + base_path, output_file = CalculateMakefilePath( + build_file, os.path.splitext(os.path.basename(build_file))[0] + ".Makefile" + ) + makefile_rel_path = gyp.common.RelativePath( + os.path.dirname(makefile_path), os.path.dirname(output_file) + ) + writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets, builddir_name) + + # Write out the sorted list of includes. + root_makefile.write("\n") + for include_file in sorted(include_list): + # We wrap each .mk include in an if statement so users can tell make to + # not load a file by setting NO_LOAD. The below make code says, only + # load the .mk file if the .mk filename doesn't start with a token in + # NO_LOAD. + root_makefile.write( + "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n" + " $(findstring $(join ^,$(prefix)),\\\n" + " $(join ^," + include_file + ")))),)\n" + ) + root_makefile.write(" include " + include_file + "\n") + root_makefile.write("endif\n") + root_makefile.write("\n") + + if not generator_flags.get("standalone") and generator_flags.get( + "auto_regeneration", True + ): + WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files) + + root_makefile.write(SHARED_FOOTER) + + root_makefile.close() diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py new file mode 100644 index 0000000..8308fa8 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py @@ -0,0 +1,3978 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ + +import ntpath +import os +import posixpath +import re +import subprocess +import sys + +from collections import OrderedDict + +import gyp.common +import gyp.easy_xml as easy_xml +import gyp.generator.ninja as ninja_generator +import gyp.MSVSNew as MSVSNew +import gyp.MSVSProject as MSVSProject +import gyp.MSVSSettings as MSVSSettings +import gyp.MSVSToolFile as MSVSToolFile +import gyp.MSVSUserFile as MSVSUserFile +import gyp.MSVSUtil as MSVSUtil +import gyp.MSVSVersion as MSVSVersion +from gyp.common import GypError +from gyp.common import OrderedSet + + +# Regular expression for validating Visual Studio GUIDs. If the GUID +# contains lowercase hex letters, MSVS will be fine. However, +# IncrediBuild BuildConsole will parse the solution file, but then +# silently skip building the target causing hard to track down errors. +# Note that this only happens with the BuildConsole, and does not occur +# if IncrediBuild is executed from inside Visual Studio. This regex +# validates that the string looks like a GUID with all uppercase hex +# letters. +VALID_MSVS_GUID_CHARS = re.compile(r"^[A-F0-9\-]+$") + +generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested() + +generator_default_variables = { + "DRIVER_PREFIX": "", + "DRIVER_SUFFIX": ".sys", + "EXECUTABLE_PREFIX": "", + "EXECUTABLE_SUFFIX": ".exe", + "STATIC_LIB_PREFIX": "", + "SHARED_LIB_PREFIX": "", + "STATIC_LIB_SUFFIX": ".lib", + "SHARED_LIB_SUFFIX": ".dll", + "INTERMEDIATE_DIR": "$(IntDir)", + "SHARED_INTERMEDIATE_DIR": "$(OutDir)/obj/global_intermediate", + "OS": "win", + "PRODUCT_DIR": "$(OutDir)", + "LIB_DIR": "$(OutDir)lib", + "RULE_INPUT_ROOT": "$(InputName)", + "RULE_INPUT_DIRNAME": "$(InputDir)", + "RULE_INPUT_EXT": "$(InputExt)", + "RULE_INPUT_NAME": "$(InputFileName)", + "RULE_INPUT_PATH": "$(InputPath)", + "CONFIGURATION_NAME": "$(ConfigurationName)", +} + + +# The msvs specific sections that hold paths +generator_additional_path_sections = [ + "msvs_cygwin_dirs", + "msvs_props", +] + + +generator_additional_non_configuration_keys = [ + "msvs_cygwin_dirs", + "msvs_cygwin_shell", + "msvs_large_pdb", + "msvs_shard", + "msvs_external_builder", + "msvs_external_builder_out_dir", + "msvs_external_builder_build_cmd", + "msvs_external_builder_clean_cmd", + "msvs_external_builder_clcompile_cmd", + "msvs_enable_winrt", + "msvs_requires_importlibrary", + "msvs_enable_winphone", + "msvs_application_type_revision", + "msvs_target_platform_version", + "msvs_target_platform_minversion", +] + +generator_filelist_paths = None + +# List of precompiled header related keys. +precomp_keys = [ + "msvs_precompiled_header", + "msvs_precompiled_source", +] + + +cached_username = None + + +cached_domain = None + + +# TODO(gspencer): Switch the os.environ calls to be +# win32api.GetDomainName() and win32api.GetUserName() once the +# python version in depot_tools has been updated to work on Vista +# 64-bit. 
+def _GetDomainAndUserName():
+    if sys.platform not in ("win32", "cygwin"):
+        return ("DOMAIN", "USERNAME")
+    global cached_username
+    global cached_domain
+    if not cached_domain or not cached_username:
+        domain = os.environ.get("USERDOMAIN")
+        username = os.environ.get("USERNAME")
+        if not domain or not username:
+            call = subprocess.Popen(
+                ["net", "config", "Workstation"], stdout=subprocess.PIPE
+            )
+            config = call.communicate()[0].decode("utf-8")
+            username_re = re.compile(r"^User name\s+(\S+)", re.MULTILINE)
+            username_match = username_re.search(config)
+            if username_match:
+                username = username_match.group(1)
+            domain_re = re.compile(r"^Logon domain\s+(\S+)", re.MULTILINE)
+            domain_match = domain_re.search(config)
+            if domain_match:
+                domain = domain_match.group(1)
+        cached_domain = domain
+        cached_username = username
+    return (cached_domain, cached_username)
+
+
+fixpath_prefix = None
+
+
+def _NormalizedSource(source):
+    """Normalize the path.
+
+    But not if that gets rid of a variable, as this may expand to something
+    larger than one directory.
+
+    Arguments:
+      source: The path to be normalized.
+
+    Returns:
+      The normalized path.
+    """
+    normalized = os.path.normpath(source)
+    if source.count("$") == normalized.count("$"):
+        source = normalized
+    return source
+
+
+def _FixPath(path, separator="\\"):
+    """Convert paths to a form that will make sense in a vcproj file.
+
+    Arguments:
+      path: The path to convert, may contain / etc.
+      separator: The path separator to normalize to, '\\' or '/'.
+    Returns:
+      The path with all slashes converted to the requested separator.
+    """
+    if (
+        fixpath_prefix
+        and path
+        and not os.path.isabs(path)
+        and not path[0] == "$"
+        and not _IsWindowsAbsPath(path)
+    ):
+        path = os.path.join(fixpath_prefix, path)
+    if separator == "\\":
+        path = path.replace("/", "\\")
+    path = _NormalizedSource(path)
+    if separator == "/":
+        path = path.replace("\\", "/")
+    if path and path[-1] == separator:
+        path = path[:-1]
+    return path
+
+
+def _IsWindowsAbsPath(path):
+    """
+    On Cygwin systems Python needs a little help determining if a path
+    is an absolute Windows path or not, so that it does not treat those
+    as relative, which results in bad paths like:
+    '..\\C:\\\\some_source_code_file.cc'
+    """
+    return path.startswith("c:") or path.startswith("C:")
+
+
+def _FixPaths(paths, separator="\\"):
+    """Fix each of the paths of the list."""
+    return [_FixPath(i, separator) for i in paths]
+
+
+def _ConvertSourcesToFilterHierarchy(
+    sources, prefix=None, excluded=None, list_excluded=True, msvs_version=None
+):
+    """Converts a list of split source file paths into a vcproj folder hierarchy.
+
+    Arguments:
+      sources: A list of source file paths split.
+      prefix: A list of source file path layers meant to apply to each of sources.
+      excluded: A set of excluded files.
+      msvs_version: A MSVSVersion object.
+
+    Returns:
+      A hierarchy of filenames and MSVSProject.Filter objects that matches the
+      layout of the source tree.
+      For example:
+      _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
+                                       prefix=['joe'])
+      -->
+      [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
+       MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
+    """
+    if not prefix:
+        prefix = []
+    result = []
+    excluded_result = []
+    folders = OrderedDict()
+    # Gather files into the final result, excluded, or folders.
+ for s in sources: + if len(s) == 1: + filename = _NormalizedSource("\\".join(prefix + s)) + if filename in excluded: + excluded_result.append(filename) + else: + result.append(filename) + elif msvs_version and not msvs_version.UsesVcxproj(): + # For MSVS 2008 and earlier, we need to process all files before walking + # the sub folders. + if not folders.get(s[0]): + folders[s[0]] = [] + folders[s[0]].append(s[1:]) + else: + contents = _ConvertSourcesToFilterHierarchy( + [s[1:]], + prefix + [s[0]], + excluded=excluded, + list_excluded=list_excluded, + msvs_version=msvs_version, + ) + contents = MSVSProject.Filter(s[0], contents=contents) + result.append(contents) + # Add a folder for excluded files. + if excluded_result and list_excluded: + excluded_folder = MSVSProject.Filter( + "_excluded_files", contents=excluded_result + ) + result.append(excluded_folder) + + if msvs_version and msvs_version.UsesVcxproj(): + return result + + # Populate all the folders. + for f in folders: + contents = _ConvertSourcesToFilterHierarchy( + folders[f], + prefix=prefix + [f], + excluded=excluded, + list_excluded=list_excluded, + msvs_version=msvs_version, + ) + contents = MSVSProject.Filter(f, contents=contents) + result.append(contents) + return result + + +def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False): + if not value: + return + _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset) + + +def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False): + # TODO(bradnelson): ugly hack, fix this more generally!!! + if "Directories" in setting or "Dependencies" in setting: + if type(value) == str: + value = value.replace("/", "\\") + else: + value = [i.replace("/", "\\") for i in value] + if not tools.get(tool_name): + tools[tool_name] = dict() + tool = tools[tool_name] + if "CompileAsWinRT" == setting: + return + if tool.get(setting): + if only_if_unset: + return + if type(tool[setting]) == list and type(value) == list: + tool[setting] += value + else: + raise TypeError( + 'Appending "%s" to a non-list setting "%s" for tool "%s" is ' + "not allowed, previous value: %s" + % (value, setting, tool_name, str(tool[setting])) + ) + else: + tool[setting] = value + + +def _ConfigTargetVersion(config_data): + return config_data.get("msvs_target_version", "Windows7") + + +def _ConfigPlatform(config_data): + return config_data.get("msvs_configuration_platform", "Win32") + + +def _ConfigBaseName(config_name, platform_name): + if config_name.endswith("_" + platform_name): + return config_name[0 : -len(platform_name) - 1] + else: + return config_name + + +def _ConfigFullName(config_name, config_data): + platform_name = _ConfigPlatform(config_data) + return f"{_ConfigBaseName(config_name, platform_name)}|{platform_name}" + + +def _ConfigWindowsTargetPlatformVersion(config_data, version): + target_ver = config_data.get("msvs_windows_target_platform_version") + if target_ver and re.match(r"^\d+", target_ver): + return target_ver + config_ver = config_data.get("msvs_windows_sdk_version") + vers = [config_ver] if config_ver else version.compatible_sdks + for ver in vers: + for key in [ + r"HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s", + r"HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s", + ]: + sdk_dir = MSVSVersion._RegistryGetValue(key % ver, "InstallationFolder") + if not sdk_dir: + continue + version = MSVSVersion._RegistryGetValue(key % ver, "ProductVersion") or "" + # Find a matching entry in sdk_dir\include. 
+ expected_sdk_dir = r"%s\include" % sdk_dir + names = sorted( + ( + x + for x in ( + os.listdir(expected_sdk_dir) + if os.path.isdir(expected_sdk_dir) + else [] + ) + if x.startswith(version) + ), + reverse=True, + ) + if names: + return names[0] + else: + print( + "Warning: No include files found for detected " + "Windows SDK version %s" % (version), + file=sys.stdout, + ) + + +def _BuildCommandLineForRuleRaw( + spec, cmd, cygwin_shell, has_input_path, quote_cmd, do_setup_env +): + + if [x for x in cmd if "$(InputDir)" in x]: + input_dir_preamble = ( + "set INPUTDIR=$(InputDir)\n" + "if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n" + "set INPUTDIR=%INPUTDIR:~0,-1%\n" + ) + else: + input_dir_preamble = "" + + if cygwin_shell: + # Find path to cygwin. + cygwin_dir = _FixPath(spec.get("msvs_cygwin_dirs", ["."])[0]) + # Prepare command. + direct_cmd = cmd + direct_cmd = [ + i.replace("$(IntDir)", '`cygpath -m "${INTDIR}"`') for i in direct_cmd + ] + direct_cmd = [ + i.replace("$(OutDir)", '`cygpath -m "${OUTDIR}"`') for i in direct_cmd + ] + direct_cmd = [ + i.replace("$(InputDir)", '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd + ] + if has_input_path: + direct_cmd = [ + i.replace("$(InputPath)", '`cygpath -m "${INPUTPATH}"`') + for i in direct_cmd + ] + direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd] + # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd) + direct_cmd = " ".join(direct_cmd) + # TODO(quote): regularize quoting path names throughout the module + cmd = "" + if do_setup_env: + cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && ' + cmd += "set CYGWIN=nontsec&& " + if direct_cmd.find("NUMBER_OF_PROCESSORS") >= 0: + cmd += "set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& " + if direct_cmd.find("INTDIR") >= 0: + cmd += "set INTDIR=$(IntDir)&& " + if direct_cmd.find("OUTDIR") >= 0: + cmd += "set OUTDIR=$(OutDir)&& " + if has_input_path and direct_cmd.find("INPUTPATH") >= 0: + cmd += "set INPUTPATH=$(InputPath) && " + cmd += 'bash -c "%(cmd)s"' + cmd = cmd % {"cygwin_dir": cygwin_dir, "cmd": direct_cmd} + return input_dir_preamble + cmd + else: + # Convert cat --> type to mimic unix. + if cmd[0] == "cat": + command = ["type"] + else: + command = [cmd[0].replace("/", "\\")] + # Add call before command to ensure that commands can be tied together one + # after the other without aborting in Incredibuild, since IB makes a bat + # file out of the raw command string, and some commands (like python) are + # actually batch files themselves. + command.insert(0, "call") + # Fix the paths + # TODO(quote): This is a really ugly heuristic, and will miss path fixing + # for arguments like "--arg=path" or "/opt:path". + # If the argument starts with a slash or dash, it's probably a command line + # switch + # Return the path with forward slashes because the command using it might + # not support backslashes. + arguments = [i if (i[:1] in "/-") else _FixPath(i, "/") for i in cmd[1:]] + arguments = [i.replace("$(InputDir)", "%INPUTDIR%") for i in arguments] + arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments] + if quote_cmd: + # Support a mode for using cmd directly. + # Convert any paths to native form (first element is used directly). + # TODO(quote): regularize quoting path names throughout the module + arguments = ['"%s"' % i for i in arguments] + # Collapse into a single command. 
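+        # Editor's note (illustrative, not part of upstream gyp): a
+        # hypothetical rule action ["python", "tool.py", "-o", "$(OutDir)"]
+        # with quote_cmd set collapses here into roughly:
+        #   call python "tool.py" "-o" "$(OutDir)"
+        # ("call" was prepended above so that batch-file commands can be
+        # chained without aborting under IncrediBuild).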
+ return input_dir_preamble + " ".join(command + arguments) + + +def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env): + # Currently this weird argument munging is used to duplicate the way a + # python script would need to be run as part of the chrome tree. + # Eventually we should add some sort of rule_default option to set this + # per project. For now the behavior chrome needs is the default. + mcs = rule.get("msvs_cygwin_shell") + if mcs is None: + mcs = int(spec.get("msvs_cygwin_shell", 1)) + elif isinstance(mcs, str): + mcs = int(mcs) + quote_cmd = int(rule.get("msvs_quote_cmd", 1)) + return _BuildCommandLineForRuleRaw( + spec, rule["action"], mcs, has_input_path, quote_cmd, do_setup_env=do_setup_env + ) + + +def _AddActionStep(actions_dict, inputs, outputs, description, command): + """Merge action into an existing list of actions. + + Care must be taken so that actions which have overlapping inputs either don't + get assigned to the same input, or get collapsed into one. + + Arguments: + actions_dict: dictionary keyed on input name, which maps to a list of + dicts describing the actions attached to that input file. + inputs: list of inputs + outputs: list of outputs + description: description of the action + command: command line to execute + """ + # Require there to be at least one input (call sites will ensure this). + assert inputs + + action = { + "inputs": inputs, + "outputs": outputs, + "description": description, + "command": command, + } + + # Pick where to stick this action. + # While less than optimal in terms of build time, attach them to the first + # input for now. + chosen_input = inputs[0] + + # Add it there. + if chosen_input not in actions_dict: + actions_dict[chosen_input] = [] + actions_dict[chosen_input].append(action) + + +def _AddCustomBuildToolForMSVS( + p, spec, primary_input, inputs, outputs, description, cmd +): + """Add a custom build tool to execute something. + + Arguments: + p: the target project + spec: the target project dict + primary_input: input file to attach the build tool to + inputs: list of inputs + outputs: list of outputs + description: description of the action + cmd: command line to execute + """ + inputs = _FixPaths(inputs) + outputs = _FixPaths(outputs) + tool = MSVSProject.Tool( + "VCCustomBuildTool", + { + "Description": description, + "AdditionalDependencies": ";".join(inputs), + "Outputs": ";".join(outputs), + "CommandLine": cmd, + }, + ) + # Add to the properties of primary input for each config. + for config_name, c_data in spec["configurations"].items(): + p.AddFileConfig( + _FixPath(primary_input), _ConfigFullName(config_name, c_data), tools=[tool] + ) + + +def _AddAccumulatedActionsToMSVS(p, spec, actions_dict): + """Add actions accumulated into an actions_dict, merging as needed. + + Arguments: + p: the target project + spec: the target project dict + actions_dict: dictionary keyed on input name, which maps to a list of + dicts describing the actions attached to that input file. + """ + for primary_input in actions_dict: + inputs = OrderedSet() + outputs = OrderedSet() + descriptions = [] + commands = [] + for action in actions_dict[primary_input]: + inputs.update(OrderedSet(action["inputs"])) + outputs.update(OrderedSet(action["outputs"])) + descriptions.append(action["description"]) + commands.append(action["command"]) + # Add the custom build step for one input file. 
+ description = ", and also ".join(descriptions) + command = "\r\n".join(commands) + _AddCustomBuildToolForMSVS( + p, + spec, + primary_input=primary_input, + inputs=inputs, + outputs=outputs, + description=description, + cmd=command, + ) + + +def _RuleExpandPath(path, input_file): + """Given the input file to which a rule applied, string substitute a path. + + Arguments: + path: a path to string expand + input_file: the file to which the rule applied. + Returns: + The string substituted path. + """ + path = path.replace( + "$(InputName)", os.path.splitext(os.path.split(input_file)[1])[0] + ) + path = path.replace("$(InputDir)", os.path.dirname(input_file)) + path = path.replace( + "$(InputExt)", os.path.splitext(os.path.split(input_file)[1])[1] + ) + path = path.replace("$(InputFileName)", os.path.split(input_file)[1]) + path = path.replace("$(InputPath)", input_file) + return path + + +def _FindRuleTriggerFiles(rule, sources): + """Find the list of files which a particular rule applies to. + + Arguments: + rule: the rule in question + sources: the set of all known source files for this project + Returns: + The list of sources that trigger a particular rule. + """ + return rule.get("rule_sources", []) + + +def _RuleInputsAndOutputs(rule, trigger_file): + """Find the inputs and outputs generated by a rule. + + Arguments: + rule: the rule in question. + trigger_file: the main trigger for this rule. + Returns: + The pair of (inputs, outputs) involved in this rule. + """ + raw_inputs = _FixPaths(rule.get("inputs", [])) + raw_outputs = _FixPaths(rule.get("outputs", [])) + inputs = OrderedSet() + outputs = OrderedSet() + inputs.add(trigger_file) + for i in raw_inputs: + inputs.add(_RuleExpandPath(i, trigger_file)) + for o in raw_outputs: + outputs.add(_RuleExpandPath(o, trigger_file)) + return (inputs, outputs) + + +def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options): + """Generate a native rules file. + + Arguments: + p: the target project + rules: the set of rules to include + output_dir: the directory in which the project/gyp resides + spec: the project dict + options: global generator options + """ + rules_filename = "{}{}.rules".format(spec["target_name"], options.suffix) + rules_file = MSVSToolFile.Writer( + os.path.join(output_dir, rules_filename), spec["target_name"] + ) + # Add each rule. + for r in rules: + rule_name = r["rule_name"] + rule_ext = r["extension"] + inputs = _FixPaths(r.get("inputs", [])) + outputs = _FixPaths(r.get("outputs", [])) + # Skip a rule with no action and no inputs. + if "action" not in r and not r.get("rule_sources", []): + continue + cmd = _BuildCommandLineForRule(spec, r, has_input_path=True, do_setup_env=True) + rules_file.AddCustomBuildRule( + name=rule_name, + description=r.get("message", rule_name), + extensions=[rule_ext], + additional_dependencies=inputs, + outputs=outputs, + cmd=cmd, + ) + # Write out rules file. + rules_file.WriteIfChanged() + + # Add rules file to project. + p.AddToolFile(rules_filename) + + +def _Cygwinify(path): + path = path.replace("$(OutDir)", "$(OutDirCygwin)") + path = path.replace("$(IntDir)", "$(IntDirCygwin)") + return path + + +def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to_add): + """Generate an external makefile to do a set of rules. 
+ + Arguments: + rules: the list of rules to include + output_dir: path containing project and gyp files + spec: project specification data + sources: set of sources known + options: global generator options + actions_to_add: The list of actions we will add to. + """ + filename = "{}_rules{}.mk".format(spec["target_name"], options.suffix) + mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename)) + # Find cygwin style versions of some paths. + mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n') + mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n') + # Gather stuff needed to emit all: target. + all_inputs = OrderedSet() + all_outputs = OrderedSet() + all_output_dirs = OrderedSet() + first_outputs = [] + for rule in rules: + trigger_files = _FindRuleTriggerFiles(rule, sources) + for tf in trigger_files: + inputs, outputs = _RuleInputsAndOutputs(rule, tf) + all_inputs.update(OrderedSet(inputs)) + all_outputs.update(OrderedSet(outputs)) + # Only use one target from each rule as the dependency for + # 'all' so we don't try to build each rule multiple times. + first_outputs.append(list(outputs)[0]) + # Get the unique output directories for this rule. + output_dirs = [os.path.split(i)[0] for i in outputs] + for od in output_dirs: + all_output_dirs.add(od) + first_outputs_cyg = [_Cygwinify(i) for i in first_outputs] + # Write out all: target, including mkdir for each output directory. + mk_file.write("all: %s\n" % " ".join(first_outputs_cyg)) + for od in all_output_dirs: + if od: + mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od) + mk_file.write("\n") + # Define how each output is generated. + for rule in rules: + trigger_files = _FindRuleTriggerFiles(rule, sources) + for tf in trigger_files: + # Get all the inputs and outputs for this rule for this trigger file. + inputs, outputs = _RuleInputsAndOutputs(rule, tf) + inputs = [_Cygwinify(i) for i in inputs] + outputs = [_Cygwinify(i) for i in outputs] + # Prepare the command line for this rule. + cmd = [_RuleExpandPath(c, tf) for c in rule["action"]] + cmd = ['"%s"' % i for i in cmd] + cmd = " ".join(cmd) + # Add it to the makefile. + mk_file.write("{}: {}\n".format(" ".join(outputs), " ".join(inputs))) + mk_file.write("\t%s\n\n" % cmd) + # Close up the file. + mk_file.close() + + # Add makefile to list of sources. + sources.add(filename) + # Add a build action to call makefile. + cmd = [ + "make", + "OutDir=$(OutDir)", + "IntDir=$(IntDir)", + "-j", + "${NUMBER_OF_PROCESSORS_PLUS_1}", + "-f", + filename, + ] + cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True) + # Insert makefile as 0'th input, so it gets the action attached there, + # as this is easier to understand from in the IDE. + all_inputs = list(all_inputs) + all_inputs.insert(0, filename) + _AddActionStep( + actions_to_add, + inputs=_FixPaths(all_inputs), + outputs=_FixPaths(all_outputs), + description="Running external rules for %s" % spec["target_name"], + command=cmd, + ) + + +def _EscapeEnvironmentVariableExpansion(s): + """Escapes % characters. + + Escapes any % characters so that Windows-style environment variable + expansions will leave them alone. + See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile + to understand why we have to do this. + + Args: + s: The string to be escaped. + + Returns: + The escaped string. 
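+
+    Example (illustrative):
+        _EscapeEnvironmentVariableExpansion("100%") => "100%%"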
+ """ # noqa: E731,E123,E501 + s = s.replace("%", "%%") + return s + + +quote_replacer_regex = re.compile(r'(\\*)"') + + +def _EscapeCommandLineArgumentForMSVS(s): + """Escapes a Windows command-line argument. + + So that the Win32 CommandLineToArgv function will turn the escaped result back + into the original string. + See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx + ("Parsing C++ Command-Line Arguments") to understand why we have to do + this. + + Args: + s: the string to be escaped. + Returns: + the escaped string. + """ + + def _Replace(match): + # For a literal quote, CommandLineToArgv requires an odd number of + # backslashes preceding it, and it produces half as many literal backslashes + # (rounded down). So we need to produce 2n+1 backslashes. + return 2 * match.group(1) + '\\"' + + # Escape all quotes so that they are interpreted literally. + s = quote_replacer_regex.sub(_Replace, s) + # Now add unescaped quotes so that any whitespace is interpreted literally. + s = '"' + s + '"' + return s + + +delimiters_replacer_regex = re.compile(r"(\\*)([,;]+)") + + +def _EscapeVCProjCommandLineArgListItem(s): + """Escapes command line arguments for MSVS. + + The VCProj format stores string lists in a single string using commas and + semi-colons as separators, which must be quoted if they are to be + interpreted literally. However, command-line arguments may already have + quotes, and the VCProj parser is ignorant of the backslash escaping + convention used by CommandLineToArgv, so the command-line quotes and the + VCProj quotes may not be the same quotes. So to store a general + command-line argument in a VCProj list, we need to parse the existing + quoting according to VCProj's convention and quote any delimiters that are + not already quoted by that convention. The quotes that we add will also be + seen by CommandLineToArgv, so if backslashes precede them then we also have + to escape those backslashes according to the CommandLineToArgv + convention. + + Args: + s: the string to be escaped. + Returns: + the escaped string. + """ + + def _Replace(match): + # For a non-literal quote, CommandLineToArgv requires an even number of + # backslashes preceding it, and it produces half as many literal + # backslashes. So we need to produce 2n backslashes. + return 2 * match.group(1) + '"' + match.group(2) + '"' + + segments = s.split('"') + # The unquoted segments are at the even-numbered indices. + for i in range(0, len(segments), 2): + segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i]) + # Concatenate back into a single string + s = '"'.join(segments) + if len(segments) % 2 == 0: + # String ends while still quoted according to VCProj's convention. This + # means the delimiter and the next list item that follow this one in the + # .vcproj file will be misinterpreted as part of this item. There is nothing + # we can do about this. Adding an extra quote would correct the problem in + # the VCProj but cause the same problem on the final command-line. Moving + # the item to the end of the list does works, but that's only possible if + # there's only one such item. Let's just warn the user. 
+        print(
+            "Warning: MSVS may misinterpret the odd number of "
+            "quotes in " + s,
+            file=sys.stderr,
+        )
+    return s
+
+
+def _EscapeCppDefineForMSVS(s):
+    """Escapes a CPP define so that it will reach the compiler unaltered."""
+    s = _EscapeEnvironmentVariableExpansion(s)
+    s = _EscapeCommandLineArgumentForMSVS(s)
+    s = _EscapeVCProjCommandLineArgListItem(s)
+    # cl.exe replaces literal # characters with = in preprocessor definitions for
+    # some reason. Octal-encode to work around that.
+    s = s.replace("#", "\\%03o" % ord("#"))
+    return s
+
+
+quote_replacer_regex2 = re.compile(r'(\\+)"')
+
+
+def _EscapeCommandLineArgumentForMSBuild(s):
+    """Escapes a Windows command-line argument for use by MSBuild."""
+
+    def _Replace(match):
+        # Use integer (floor) division here: under Python 3, "/" yields a
+        # float, and multiplying a string by a float raises TypeError.
+        return (len(match.group(1)) // 2 * 4) * "\\" + '\\"'
+
+    # Escape all quotes so that they are interpreted literally.
+    s = quote_replacer_regex2.sub(_Replace, s)
+    return s
+
+
+def _EscapeMSBuildSpecialCharacters(s):
+    escape_dictionary = {
+        "%": "%25",
+        "$": "%24",
+        "@": "%40",
+        "'": "%27",
+        ";": "%3B",
+        "?": "%3F",
+        "*": "%2A",
+    }
+    result = "".join([escape_dictionary.get(c, c) for c in s])
+    return result
+
+
+def _EscapeCppDefineForMSBuild(s):
+    """Escapes a CPP define so that it will reach the compiler unaltered."""
+    s = _EscapeEnvironmentVariableExpansion(s)
+    s = _EscapeCommandLineArgumentForMSBuild(s)
+    s = _EscapeMSBuildSpecialCharacters(s)
+    # cl.exe replaces literal # characters with = in preprocessor definitions for
+    # some reason. Octal-encode to work around that.
+    s = s.replace("#", "\\%03o" % ord("#"))
+    return s
+
+
+def _GenerateRulesForMSVS(
+    p, output_dir, options, spec, sources, excluded_sources, actions_to_add
+):
+    """Generate all the rules for a particular project.
+
+    Arguments:
+        p: the project
+        output_dir: directory to emit rules to
+        options: global options passed to the generator
+        spec: the specification for this project
+        sources: the set of all known source files in this project
+        excluded_sources: the set of sources excluded from normal processing
+        actions_to_add: deferred list of actions to add in
+    """
+    rules = spec.get("rules", [])
+    rules_native = [r for r in rules if not int(r.get("msvs_external_rule", 0))]
+    rules_external = [r for r in rules if int(r.get("msvs_external_rule", 0))]
+
+    # Handle rules that use a native rules file.
+    if rules_native:
+        _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
+
+    # Handle external rules (non-native rules).
+    if rules_external:
+        _GenerateExternalRules(
+            rules_external, output_dir, spec, sources, options, actions_to_add
+        )
+    _AdjustSourcesForRules(rules, sources, excluded_sources, False)
+
+
+def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
+    # Add outputs generated by each rule (if applicable).
+    for rule in rules:
+        # Add in the outputs from this rule.
+        trigger_files = _FindRuleTriggerFiles(rule, sources)
+        for trigger_file in trigger_files:
+            # Remove trigger_file from excluded_sources to let the rule be triggered
+            # (e.g. rule trigger ax_enums.idl is added to excluded_sources
+            # because it's also in an action's inputs in the same project)
+            excluded_sources.discard(_FixPath(trigger_file))
+            # Done if not processing outputs as sources.
+ if int(rule.get("process_outputs_as_sources", False)): + inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file) + inputs = OrderedSet(_FixPaths(inputs)) + outputs = OrderedSet(_FixPaths(outputs)) + inputs.remove(_FixPath(trigger_file)) + sources.update(inputs) + if not is_msbuild: + excluded_sources.update(inputs) + sources.update(outputs) + + +def _FilterActionsFromExcluded(excluded_sources, actions_to_add): + """Take inputs with actions attached out of the list of exclusions. + + Arguments: + excluded_sources: list of source files not to be built. + actions_to_add: dict of actions keyed on source file they're attached to. + Returns: + excluded_sources with files that have actions attached removed. + """ + must_keep = OrderedSet(_FixPaths(actions_to_add.keys())) + return [s for s in excluded_sources if s not in must_keep] + + +def _GetDefaultConfiguration(spec): + return spec["configurations"][spec["default_configuration"]] + + +def _GetGuidOfProject(proj_path, spec): + """Get the guid for the project. + + Arguments: + proj_path: Path of the vcproj or vcxproj file to generate. + spec: The target dictionary containing the properties of the target. + Returns: + the guid. + Raises: + ValueError: if the specified GUID is invalid. + """ + # Pluck out the default configuration. + default_config = _GetDefaultConfiguration(spec) + # Decide the guid of the project. + guid = default_config.get("msvs_guid") + if guid: + if VALID_MSVS_GUID_CHARS.match(guid) is None: + raise ValueError( + 'Invalid MSVS guid: "%s". Must match regex: "%s".' + % (guid, VALID_MSVS_GUID_CHARS.pattern) + ) + guid = "{%s}" % guid + guid = guid or MSVSNew.MakeGuid(proj_path) + return guid + + +def _GetMsbuildToolsetOfProject(proj_path, spec, version): + """Get the platform toolset for the project. + + Arguments: + proj_path: Path of the vcproj or vcxproj file to generate. + spec: The target dictionary containing the properties of the target. + version: The MSVSVersion object. + Returns: + the platform toolset string or None. + """ + # Pluck out the default configuration. + default_config = _GetDefaultConfiguration(spec) + toolset = default_config.get("msbuild_toolset") + if not toolset and version.DefaultToolset(): + toolset = version.DefaultToolset() + if spec["type"] == "windows_driver": + toolset = "WindowsKernelModeDriver10.0" + return toolset + + +def _GenerateProject(project, options, version, generator_flags, spec): + """Generates a vcproj file. + + Arguments: + project: the MSVSProject object. + options: global generator options. + version: the MSVSVersion object. + generator_flags: dict of generator-specific flags. + Returns: + A list of source files that cannot be found on disk. + """ + default_config = _GetDefaultConfiguration(project.spec) + + # Skip emitting anything if told to with msvs_existing_vcproj option. + if default_config.get("msvs_existing_vcproj"): + return [] + + if version.UsesVcxproj(): + return _GenerateMSBuildProject(project, options, version, generator_flags, spec) + else: + return _GenerateMSVSProject(project, options, version, generator_flags) + + +def _GenerateMSVSProject(project, options, version, generator_flags): + """Generates a .vcproj file. It may create .rules and .user files too. + + Arguments: + project: The project object we will generate the file for. + options: Global options passed to the generator. + version: The VisualStudioVersion object. + generator_flags: dict of generator-specific flags. 
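+    Returns:
+        A list of source files that cannot be found on disk.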
+ """ + spec = project.spec + gyp.common.EnsureDirExists(project.path) + + platforms = _GetUniquePlatforms(spec) + p = MSVSProject.Writer( + project.path, version, spec["target_name"], project.guid, platforms + ) + + # Get directory project file is in. + project_dir = os.path.split(project.path)[0] + gyp_path = _NormalizedSource(project.build_file) + relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) + + config_type = _GetMSVSConfigurationType(spec, project.build_file) + for config_name, config in spec["configurations"].items(): + _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config) + + # Prepare list of sources and excluded sources. + gyp_file = os.path.split(project.build_file)[1] + sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, gyp_file) + + # Add rules. + actions_to_add = {} + _GenerateRulesForMSVS( + p, project_dir, options, spec, sources, excluded_sources, actions_to_add + ) + list_excluded = generator_flags.get("msvs_list_excluded_files", True) + sources, excluded_sources, excluded_idl = _AdjustSourcesAndConvertToFilterHierarchy( + spec, options, project_dir, sources, excluded_sources, list_excluded, version + ) + + # Add in files. + missing_sources = _VerifySourcesExist(sources, project_dir) + p.AddFiles(sources) + + _AddToolFilesToMSVS(p, spec) + _HandlePreCompiledHeaders(p, sources, spec) + _AddActions(actions_to_add, spec, relative_path_of_gyp_file) + _AddCopies(actions_to_add, spec) + _WriteMSVSUserFile(project.path, version, spec) + + # NOTE: this stanza must appear after all actions have been decided. + # Don't excluded sources with actions attached, or they won't run. + excluded_sources = _FilterActionsFromExcluded(excluded_sources, actions_to_add) + _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded) + _AddAccumulatedActionsToMSVS(p, spec, actions_to_add) + + # Write it out. + p.WriteIfChanged() + + return missing_sources + + +def _GetUniquePlatforms(spec): + """Returns the list of unique platforms for this spec, e.g ['win32', ...]. + + Arguments: + spec: The target dictionary containing the properties of the target. + Returns: + The MSVSUserFile object created. + """ + # Gather list of unique platforms. + platforms = OrderedSet() + for configuration in spec["configurations"]: + platforms.add(_ConfigPlatform(spec["configurations"][configuration])) + platforms = list(platforms) + return platforms + + +def _CreateMSVSUserFile(proj_path, version, spec): + """Generates a .user file for the user running this Gyp program. + + Arguments: + proj_path: The path of the project file being created. The .user file + shares the same path (with an appropriate suffix). + version: The VisualStudioVersion object. + spec: The target dictionary containing the properties of the target. + Returns: + The MSVSUserFile object created. + """ + (domain, username) = _GetDomainAndUserName() + vcuser_filename = ".".join([proj_path, domain, username, "user"]) + user_file = MSVSUserFile.Writer(vcuser_filename, version, spec["target_name"]) + return user_file + + +def _GetMSVSConfigurationType(spec, build_file): + """Returns the configuration type for this project. + + It's a number defined by Microsoft. May raise an exception. + + Args: + spec: The target dictionary containing the properties of the target. + build_file: The path of the gyp file. + Returns: + An integer, the configuration type. 
+ """ + try: + config_type = { + "executable": "1", # .exe + "shared_library": "2", # .dll + "loadable_module": "2", # .dll + "static_library": "4", # .lib + "windows_driver": "5", # .sys + "none": "10", # Utility type + }[spec["type"]] + except KeyError: + if spec.get("type"): + raise GypError( + "Target type %s is not a valid target type for " + "target %s in %s." % (spec["type"], spec["target_name"], build_file) + ) + else: + raise GypError( + "Missing type field for target %s in %s." + % (spec["target_name"], build_file) + ) + return config_type + + +def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config): + """Adds a configuration to the MSVS project. + + Many settings in a vcproj file are specific to a configuration. This + function the main part of the vcproj file that's configuration specific. + + Arguments: + p: The target project being generated. + spec: The target dictionary containing the properties of the target. + config_type: The configuration type, a number as defined by Microsoft. + config_name: The name of the configuration. + config: The dictionary that defines the special processing to be done + for this configuration. + """ + # Get the information for this configuration + include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs(config) + libraries = _GetLibraries(spec) + library_dirs = _GetLibraryDirs(config) + out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False) + defines = _GetDefines(config) + defines = [_EscapeCppDefineForMSVS(d) for d in defines] + disabled_warnings = _GetDisabledWarnings(config) + prebuild = config.get("msvs_prebuild") + postbuild = config.get("msvs_postbuild") + def_file = _GetModuleDefinition(spec) + precompiled_header = config.get("msvs_precompiled_header") + + # Prepare the list of tools as a dictionary. + tools = dict() + # Add in user specified msvs_settings. + msvs_settings = config.get("msvs_settings", {}) + MSVSSettings.ValidateMSVSSettings(msvs_settings) + + # Prevent default library inheritance from the environment. + _ToolAppend(tools, "VCLinkerTool", "AdditionalDependencies", ["$(NOINHERIT)"]) + + for tool in msvs_settings: + settings = config["msvs_settings"][tool] + for setting in settings: + _ToolAppend(tools, tool, setting, settings[setting]) + # Add the information to the appropriate tool + _ToolAppend(tools, "VCCLCompilerTool", "AdditionalIncludeDirectories", include_dirs) + _ToolAppend(tools, "VCMIDLTool", "AdditionalIncludeDirectories", midl_include_dirs) + _ToolAppend( + tools, + "VCResourceCompilerTool", + "AdditionalIncludeDirectories", + resource_include_dirs, + ) + # Add in libraries. + _ToolAppend(tools, "VCLinkerTool", "AdditionalDependencies", libraries) + _ToolAppend(tools, "VCLinkerTool", "AdditionalLibraryDirectories", library_dirs) + if out_file: + _ToolAppend(tools, vc_tool, "OutputFile", out_file, only_if_unset=True) + # Add defines. + _ToolAppend(tools, "VCCLCompilerTool", "PreprocessorDefinitions", defines) + _ToolAppend(tools, "VCResourceCompilerTool", "PreprocessorDefinitions", defines) + # Change program database directory to prevent collisions. + _ToolAppend( + tools, + "VCCLCompilerTool", + "ProgramDataBaseFileName", + "$(IntDir)$(ProjectName)\\vc80.pdb", + only_if_unset=True, + ) + # Add disabled warnings. + _ToolAppend(tools, "VCCLCompilerTool", "DisableSpecificWarnings", disabled_warnings) + # Add Pre-build. + _ToolAppend(tools, "VCPreBuildEventTool", "CommandLine", prebuild) + # Add Post-build. 
+ _ToolAppend(tools, "VCPostBuildEventTool", "CommandLine", postbuild) + # Turn on precompiled headers if appropriate. + if precompiled_header: + precompiled_header = os.path.split(precompiled_header)[1] + _ToolAppend(tools, "VCCLCompilerTool", "UsePrecompiledHeader", "2") + _ToolAppend( + tools, "VCCLCompilerTool", "PrecompiledHeaderThrough", precompiled_header + ) + _ToolAppend(tools, "VCCLCompilerTool", "ForcedIncludeFiles", precompiled_header) + # Loadable modules don't generate import libraries; + # tell dependent projects to not expect one. + if spec["type"] == "loadable_module": + _ToolAppend(tools, "VCLinkerTool", "IgnoreImportLibrary", "true") + # Set the module definition file if any. + if def_file: + _ToolAppend(tools, "VCLinkerTool", "ModuleDefinitionFile", def_file) + + _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name) + + +def _GetIncludeDirs(config): + """Returns the list of directories to be used for #include directives. + + Arguments: + config: The dictionary that defines the special processing to be done + for this configuration. + Returns: + The list of directory paths. + """ + # TODO(bradnelson): include_dirs should really be flexible enough not to + # require this sort of thing. + include_dirs = config.get("include_dirs", []) + config.get( + "msvs_system_include_dirs", [] + ) + midl_include_dirs = config.get("midl_include_dirs", []) + config.get( + "msvs_system_include_dirs", [] + ) + resource_include_dirs = config.get("resource_include_dirs", include_dirs) + include_dirs = _FixPaths(include_dirs) + midl_include_dirs = _FixPaths(midl_include_dirs) + resource_include_dirs = _FixPaths(resource_include_dirs) + return include_dirs, midl_include_dirs, resource_include_dirs + + +def _GetLibraryDirs(config): + """Returns the list of directories to be used for library search paths. + + Arguments: + config: The dictionary that defines the special processing to be done + for this configuration. + Returns: + The list of directory paths. + """ + + library_dirs = config.get("library_dirs", []) + library_dirs = _FixPaths(library_dirs) + return library_dirs + + +def _GetLibraries(spec): + """Returns the list of libraries for this configuration. + + Arguments: + spec: The target dictionary containing the properties of the target. + Returns: + The list of directory paths. + """ + libraries = spec.get("libraries", []) + # Strip out -l, as it is not used on windows (but is needed so we can pass + # in libraries that are assumed to be in the default library path). + # Also remove duplicate entries, leaving only the last duplicate, while + # preserving order. + found = OrderedSet() + unique_libraries_list = [] + for entry in reversed(libraries): + library = re.sub(r"^\-l", "", entry) + if not os.path.splitext(library)[1]: + library += ".lib" + if library not in found: + found.add(library) + unique_libraries_list.append(library) + unique_libraries_list.reverse() + return unique_libraries_list + + +def _GetOutputFilePathAndTool(spec, msbuild): + """Returns the path and tool to use for this target. + + Figures out the path of the file this spec will create and the name of + the VC tool that will create it. + + Arguments: + spec: The target dictionary containing the properties of the target. + Returns: + A triple of (file path, name of the vc tool, name of the msbuild tool) + """ + # Select a name for the output file. 
+ out_file = "" + vc_tool = "" + msbuild_tool = "" + output_file_map = { + "executable": ("VCLinkerTool", "Link", "$(OutDir)", ".exe"), + "shared_library": ("VCLinkerTool", "Link", "$(OutDir)", ".dll"), + "loadable_module": ("VCLinkerTool", "Link", "$(OutDir)", ".dll"), + "windows_driver": ("VCLinkerTool", "Link", "$(OutDir)", ".sys"), + "static_library": ("VCLibrarianTool", "Lib", "$(OutDir)lib\\", ".lib"), + } + output_file_props = output_file_map.get(spec["type"]) + if output_file_props and int(spec.get("msvs_auto_output_file", 1)): + vc_tool, msbuild_tool, out_dir, suffix = output_file_props + if spec.get("standalone_static_library", 0): + out_dir = "$(OutDir)" + out_dir = spec.get("product_dir", out_dir) + product_extension = spec.get("product_extension") + if product_extension: + suffix = "." + product_extension + elif msbuild: + suffix = "$(TargetExt)" + prefix = spec.get("product_prefix", "") + product_name = spec.get("product_name", "$(ProjectName)") + out_file = ntpath.join(out_dir, prefix + product_name + suffix) + return out_file, vc_tool, msbuild_tool + + +def _GetOutputTargetExt(spec): + """Returns the extension for this target, including the dot + + If product_extension is specified, set target_extension to this to avoid + MSB8012, returns None otherwise. Ignores any target_extension settings in + the input files. + + Arguments: + spec: The target dictionary containing the properties of the target. + Returns: + A string with the extension, or None + """ + target_extension = spec.get("product_extension") + if target_extension: + return "." + target_extension + return None + + +def _GetDefines(config): + """Returns the list of preprocessor definitions for this configuration. + + Arguments: + config: The dictionary that defines the special processing to be done + for this configuration. + Returns: + The list of preprocessor definitions. + """ + defines = [] + for d in config.get("defines", []): + if type(d) == list: + fd = "=".join([str(dpart) for dpart in d]) + else: + fd = str(d) + defines.append(fd) + return defines + + +def _GetDisabledWarnings(config): + return [str(i) for i in config.get("msvs_disabled_warnings", [])] + + +def _GetModuleDefinition(spec): + def_file = "" + if spec["type"] in [ + "shared_library", + "loadable_module", + "executable", + "windows_driver", + ]: + def_files = [s for s in spec.get("sources", []) if s.endswith(".def")] + if len(def_files) == 1: + def_file = _FixPath(def_files[0]) + elif def_files: + raise ValueError( + "Multiple module definition files in one target, target %s lists " + "multiple .def files: %s" % (spec["target_name"], " ".join(def_files)) + ) + return def_file + + +def _ConvertToolsToExpectedForm(tools): + """Convert tools to a form expected by Visual Studio. + + Arguments: + tools: A dictionary of settings; the tool name is the key. + Returns: + A list of Tool objects. + """ + tool_list = [] + for tool, settings in tools.items(): + # Collapse settings with lists. + settings_fixed = {} + for setting, value in settings.items(): + if type(value) == list: + if ( + tool == "VCLinkerTool" and setting == "AdditionalDependencies" + ) or setting == "AdditionalOptions": + settings_fixed[setting] = " ".join(value) + else: + settings_fixed[setting] = ";".join(value) + else: + settings_fixed[setting] = value + # Add in this tool. 
+ tool_list.append(MSVSProject.Tool(tool, settings_fixed)) + return tool_list + + +def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name): + """Add to the project file the configuration specified by config. + + Arguments: + p: The target project being generated. + spec: the target project dict. + tools: A dictionary of settings; the tool name is the key. + config: The dictionary that defines the special processing to be done + for this configuration. + config_type: The configuration type, a number as defined by Microsoft. + config_name: The name of the configuration. + """ + attributes = _GetMSVSAttributes(spec, config, config_type) + # Add in this configuration. + tool_list = _ConvertToolsToExpectedForm(tools) + p.AddConfig(_ConfigFullName(config_name, config), attrs=attributes, tools=tool_list) + + +def _GetMSVSAttributes(spec, config, config_type): + # Prepare configuration attributes. + prepared_attrs = {} + source_attrs = config.get("msvs_configuration_attributes", {}) + for a in source_attrs: + prepared_attrs[a] = source_attrs[a] + # Add props files. + vsprops_dirs = config.get("msvs_props", []) + vsprops_dirs = _FixPaths(vsprops_dirs) + if vsprops_dirs: + prepared_attrs["InheritedPropertySheets"] = ";".join(vsprops_dirs) + # Set configuration type. + prepared_attrs["ConfigurationType"] = config_type + output_dir = prepared_attrs.get( + "OutputDirectory", "$(SolutionDir)$(ConfigurationName)" + ) + prepared_attrs["OutputDirectory"] = _FixPath(output_dir) + "\\" + if "IntermediateDirectory" not in prepared_attrs: + intermediate = "$(ConfigurationName)\\obj\\$(ProjectName)" + prepared_attrs["IntermediateDirectory"] = _FixPath(intermediate) + "\\" + else: + intermediate = _FixPath(prepared_attrs["IntermediateDirectory"]) + "\\" + intermediate = MSVSSettings.FixVCMacroSlashes(intermediate) + prepared_attrs["IntermediateDirectory"] = intermediate + return prepared_attrs + + +def _AddNormalizedSources(sources_set, sources_array): + sources_set.update(_NormalizedSource(s) for s in sources_array) + + +def _PrepareListOfSources(spec, generator_flags, gyp_file): + """Prepare list of sources and excluded sources. + + Besides the sources specified directly in the spec, adds the gyp file so + that a change to it will cause a re-compile. Also adds appropriate sources + for actions and copies. Assumes later stage will un-exclude files which + have custom build steps attached. + + Arguments: + spec: The target dictionary containing the properties of the target. + gyp_file: The name of the gyp file. + Returns: + A pair of (list of sources, list of excluded sources). + The sources will be relative to the gyp file. + """ + sources = OrderedSet() + _AddNormalizedSources(sources, spec.get("sources", [])) + excluded_sources = OrderedSet() + # Add in the gyp file. + if not generator_flags.get("standalone"): + sources.add(gyp_file) + + # Add in 'action' inputs and outputs. + for a in spec.get("actions", []): + inputs = a["inputs"] + inputs = [_NormalizedSource(i) for i in inputs] + # Add all inputs to sources and excluded sources. + inputs = OrderedSet(inputs) + sources.update(inputs) + if not spec.get("msvs_external_builder"): + excluded_sources.update(inputs) + if int(a.get("process_outputs_as_sources", False)): + _AddNormalizedSources(sources, a.get("outputs", [])) + # Add in 'copies' inputs and outputs. 
+ for cpy in spec.get("copies", []): + _AddNormalizedSources(sources, cpy.get("files", [])) + return (sources, excluded_sources) + + +def _AdjustSourcesAndConvertToFilterHierarchy( + spec, options, gyp_dir, sources, excluded_sources, list_excluded, version +): + """Adjusts the list of sources and excluded sources. + + Also converts the sets to lists. + + Arguments: + spec: The target dictionary containing the properties of the target. + options: Global generator options. + gyp_dir: The path to the gyp file being processed. + sources: A set of sources to be included for this project. + excluded_sources: A set of sources to be excluded for this project. + version: A MSVSVersion object. + Returns: + A trio of (list of sources, list of excluded sources, + path of excluded IDL file) + """ + # Exclude excluded sources coming into the generator. + excluded_sources.update(OrderedSet(spec.get("sources_excluded", []))) + # Add excluded sources into sources for good measure. + sources.update(excluded_sources) + # Convert to proper windows form. + # NOTE: sources goes from being a set to a list here. + # NOTE: excluded_sources goes from being a set to a list here. + sources = _FixPaths(sources) + # Convert to proper windows form. + excluded_sources = _FixPaths(excluded_sources) + + excluded_idl = _IdlFilesHandledNonNatively(spec, sources) + + precompiled_related = _GetPrecompileRelatedFiles(spec) + # Find the excluded ones, minus the precompiled header related ones. + fully_excluded = [i for i in excluded_sources if i not in precompiled_related] + + # Convert to folders and the right slashes. + sources = [i.split("\\") for i in sources] + sources = _ConvertSourcesToFilterHierarchy( + sources, + excluded=fully_excluded, + list_excluded=list_excluded, + msvs_version=version, + ) + + # Prune filters with a single child to flatten ugly directory structures + # such as ../../src/modules/module1 etc. + if version.UsesVcxproj(): + while ( + all([isinstance(s, MSVSProject.Filter) for s in sources]) + and len({s.name for s in sources}) == 1 + ): + assert all([len(s.contents) == 1 for s in sources]) + sources = [s.contents[0] for s in sources] + else: + while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter): + sources = sources[0].contents + + return sources, excluded_sources, excluded_idl + + +def _IdlFilesHandledNonNatively(spec, sources): + # If any non-native rules use 'idl' as an extension exclude idl files. + # Gather a list here to use later. + using_idl = False + for rule in spec.get("rules", []): + if rule["extension"] == "idl" and int(rule.get("msvs_external_rule", 0)): + using_idl = True + break + if using_idl: + excluded_idl = [i for i in sources if i.endswith(".idl")] + else: + excluded_idl = [] + return excluded_idl + + +def _GetPrecompileRelatedFiles(spec): + # Gather a list of precompiled header related sources. + precompiled_related = [] + for _, config in spec["configurations"].items(): + for k in precomp_keys: + f = config.get(k) + if f: + precompiled_related.append(_FixPath(f)) + return precompiled_related + + +def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, list_excluded): + exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) + for file_name, excluded_configs in exclusions.items(): + if not list_excluded and len(excluded_configs) == len(spec["configurations"]): + # If we're not listing excluded files, then they won't appear in the + # project, so don't try to configure them to be excluded. 
+            pass
+        else:
+            for config_name, config in excluded_configs:
+                p.AddFileConfig(
+                    file_name,
+                    _ConfigFullName(config_name, config),
+                    {"ExcludedFromBuild": "true"},
+                )
+
+
+def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
+    exclusions = {}
+    # Exclude excluded sources from being built.
+    for f in excluded_sources:
+        excluded_configs = []
+        for config_name, config in spec["configurations"].items():
+            precomped = [_FixPath(config.get(i, "")) for i in precomp_keys]
+            # Don't do this for ones that are precompiled header related.
+            if f not in precomped:
+                excluded_configs.append((config_name, config))
+        exclusions[f] = excluded_configs
+    # If any non-native rules use 'idl' as an extension exclude idl files.
+    # Exclude them now.
+    for f in excluded_idl:
+        excluded_configs = []
+        for config_name, config in spec["configurations"].items():
+            excluded_configs.append((config_name, config))
+        exclusions[f] = excluded_configs
+    return exclusions
+
+
+def _AddToolFilesToMSVS(p, spec):
+    # Add in tool files (rules).
+    tool_files = OrderedSet()
+    for _, config in spec["configurations"].items():
+        for f in config.get("msvs_tool_files", []):
+            tool_files.add(f)
+    for f in tool_files:
+        p.AddToolFile(f)
+
+
+def _HandlePreCompiledHeaders(p, sources, spec):
+    # Pre-compiled header source stubs need a different compiler flag
+    # (generate precompiled header) and any source file not of the same
+    # kind (i.e. C vs. C++) as the precompiled header source stub needs
+    # to have use of precompiled headers disabled.
+    extensions_excluded_from_precompile = []
+    for config_name, config in spec["configurations"].items():
+        source = config.get("msvs_precompiled_source")
+        if source:
+            source = _FixPath(source)
+            # UsePrecompiledHeader=1 if using precompiled headers.
+            tool = MSVSProject.Tool("VCCLCompilerTool", {"UsePrecompiledHeader": "1"})
+            p.AddFileConfig(
+                source, _ConfigFullName(config_name, config), {}, tools=[tool]
+            )
+            basename, extension = os.path.splitext(source)
+            if extension == ".c":
+                extensions_excluded_from_precompile = [".cc", ".cpp", ".cxx"]
+            else:
+                extensions_excluded_from_precompile = [".c"]
+
+    def DisableForSourceTree(source_tree):
+        for source in source_tree:
+            if isinstance(source, MSVSProject.Filter):
+                DisableForSourceTree(source.contents)
+            else:
+                basename, extension = os.path.splitext(source)
+                if extension in extensions_excluded_from_precompile:
+                    for config_name, config in spec["configurations"].items():
+                        tool = MSVSProject.Tool(
+                            "VCCLCompilerTool",
+                            {
+                                "UsePrecompiledHeader": "0",
+                                "ForcedIncludeFiles": "$(NOINHERIT)",
+                            },
+                        )
+                        p.AddFileConfig(
+                            _FixPath(source),
+                            _ConfigFullName(config_name, config),
+                            {},
+                            tools=[tool],
+                        )
+
+    # Do nothing if there was no precompiled source.
+    if extensions_excluded_from_precompile:
+        DisableForSourceTree(sources)
+
+
+def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
+    # Add actions.
+    actions = spec.get("actions", [])
+    # Don't setup_env every time. When all the actions are run together in one
+    # batch file in VS, the PATH will grow too long.
+    # Membership in this set means that the cygwin environment has been set up,
+    # and does not need to be set up again.
+    have_setup_env = set()
+    for a in actions:
+        # Attach actions to the gyp file if nothing else is there.
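+        # (Only the first action attached to a given input gets the setup_env
+        # preamble; later ones reuse that environment so the generated batch
+        # file's PATH stays short.)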
+ inputs = a.get("inputs") or [relative_path_of_gyp_file] + attached_to = inputs[0] + need_setup_env = attached_to not in have_setup_env + cmd = _BuildCommandLineForRule( + spec, a, has_input_path=False, do_setup_env=need_setup_env + ) + have_setup_env.add(attached_to) + # Add the action. + _AddActionStep( + actions_to_add, + inputs=inputs, + outputs=a.get("outputs", []), + description=a.get("message", a["action_name"]), + command=cmd, + ) + + +def _WriteMSVSUserFile(project_path, version, spec): + # Add run_as and test targets. + if "run_as" in spec: + run_as = spec["run_as"] + action = run_as.get("action", []) + environment = run_as.get("environment", []) + working_directory = run_as.get("working_directory", ".") + elif int(spec.get("test", 0)): + action = ["$(TargetPath)", "--gtest_print_time"] + environment = [] + working_directory = "." + else: + return # Nothing to add + # Write out the user file. + user_file = _CreateMSVSUserFile(project_path, version, spec) + for config_name, c_data in spec["configurations"].items(): + user_file.AddDebugSettings( + _ConfigFullName(config_name, c_data), action, environment, working_directory + ) + user_file.WriteIfChanged() + + +def _AddCopies(actions_to_add, spec): + copies = _GetCopies(spec) + for inputs, outputs, cmd, description in copies: + _AddActionStep( + actions_to_add, + inputs=inputs, + outputs=outputs, + description=description, + command=cmd, + ) + + +def _GetCopies(spec): + copies = [] + # Add copies. + for cpy in spec.get("copies", []): + for src in cpy.get("files", []): + dst = os.path.join(cpy["destination"], os.path.basename(src)) + # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and + # outputs, so do the same for our generated command line. + if src.endswith("/"): + src_bare = src[:-1] + base_dir = posixpath.split(src_bare)[0] + outer_dir = posixpath.split(src_bare)[1] + fixed_dst = _FixPath(dst) + full_dst = f'"{fixed_dst}\\{outer_dir}\\"' + cmd = 'mkdir {} 2>nul & cd "{}" && xcopy /e /f /y "{}" {}'.format( + full_dst, + _FixPath(base_dir), + outer_dir, + full_dst, + ) + copies.append( + ( + [src], + ["dummy_copies", dst], + cmd, + f"Copying {src} to {fixed_dst}", + ) + ) + else: + fix_dst = _FixPath(cpy["destination"]) + cmd = 'mkdir "{}" 2>nul & set ERRORLEVEL=0 & copy /Y "{}" "{}"'.format( + fix_dst, + _FixPath(src), + _FixPath(dst), + ) + copies.append(([src], [dst], cmd, f"Copying {src} to {fix_dst}")) + return copies + + +def _GetPathDict(root, path): + # |path| will eventually be empty (in the recursive calls) if it was initially + # relative; otherwise it will eventually end up as '\', 'D:\', etc. + if not path or path.endswith(os.sep): + return root + parent, folder = os.path.split(path) + parent_dict = _GetPathDict(root, parent) + if folder not in parent_dict: + parent_dict[folder] = dict() + return parent_dict[folder] + + +def _DictsToFolders(base_path, bucket, flat): + # Convert to folders recursively. 
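+    # Example (illustrative): {"src": {"a.vcproj": projA}} with flat=False
+    # becomes [MSVSFolder("src", name="(src)", entries=[projA])].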
+    children = []
+    for folder, contents in bucket.items():
+        if type(contents) == dict:
+            folder_children = _DictsToFolders(
+                os.path.join(base_path, folder), contents, flat
+            )
+            if flat:
+                children += folder_children
+            else:
+                folder_children = MSVSNew.MSVSFolder(
+                    os.path.join(base_path, folder),
+                    name="(" + folder + ")",
+                    entries=folder_children,
+                )
+                children.append(folder_children)
+        else:
+            children.append(contents)
+    return children
+
+
+def _CollapseSingles(parent, node):
+    # Recursively explore the tree of dicts looking for projects which are
+    # the sole item in a folder which has the same name as the project. Bring
+    # such projects up one level.
+    if type(node) == dict and len(node) == 1 and next(iter(node)) == parent + ".vcproj":
+        return node[next(iter(node))]
+    if type(node) != dict:
+        return node
+    for child in node:
+        node[child] = _CollapseSingles(child, node[child])
+    return node
+
+
+def _GatherSolutionFolders(sln_projects, project_objects, flat):
+    root = {}
+    # Convert into a tree of dicts on path.
+    for p in sln_projects:
+        gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
+        if p.endswith("#host"):
+            target += "_host"
+        gyp_dir = os.path.dirname(gyp_file)
+        path_dict = _GetPathDict(root, gyp_dir)
+        path_dict[target + ".vcproj"] = project_objects[p]
+    # Walk down from the top until we hit a folder that has more than one entry.
+    # In practice, this strips the top-level "src/" dir from the hierarchy in
+    # the solution.
+    while len(root) == 1 and type(root[next(iter(root))]) == dict:
+        root = root[next(iter(root))]
+    # Collapse singles.
+    root = _CollapseSingles("", root)
+    # Merge buckets until everything is a root entry.
+    return _DictsToFolders("", root, flat)
+
+
+def _GetPathOfProject(qualified_target, spec, options, msvs_version):
+    default_config = _GetDefaultConfiguration(spec)
+    proj_filename = default_config.get("msvs_existing_vcproj")
+    if not proj_filename:
+        proj_filename = spec["target_name"]
+        if spec["toolset"] == "host":
+            proj_filename += "_host"
+        proj_filename = proj_filename + options.suffix + msvs_version.ProjectExtension()
+
+    build_file = gyp.common.BuildFile(qualified_target)
+    proj_path = os.path.join(os.path.dirname(build_file), proj_filename)
+    fix_prefix = None
+    if options.generator_output:
+        project_dir_path = os.path.dirname(os.path.abspath(proj_path))
+        proj_path = os.path.join(options.generator_output, proj_path)
+        fix_prefix = gyp.common.RelativePath(
+            project_dir_path, os.path.dirname(proj_path)
+        )
+    return proj_path, fix_prefix
+
+
+def _GetPlatformOverridesOfProject(spec):
+    # Prepare a dict indicating which project configurations are used for which
+    # solution configurations for this target.
+    config_platform_overrides = {}
+    for config_name, c in spec["configurations"].items():
+        config_fullname = _ConfigFullName(config_name, c)
+        platform = c.get("msvs_target_platform", _ConfigPlatform(c))
+        fixed_config_fullname = "{}|{}".format(
+            _ConfigBaseName(config_name, _ConfigPlatform(c)),
+            platform,
+        )
+        if spec["toolset"] == "host" and generator_supports_multiple_toolsets:
+            fixed_config_fullname = f"{config_name}|x64"
+        config_platform_overrides[config_fullname] = fixed_config_fullname
+    return config_platform_overrides
+
+
+def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
+    """Create a MSVSProject object for the targets found in target list.
+
+    Arguments:
+        target_list: the list of targets to generate project objects for.
+        target_dicts: the dictionary of specifications.
+ options: global generator options. + msvs_version: the MSVSVersion object. + Returns: + A set of created projects, keyed by target. + """ + global fixpath_prefix + # Generate each project. + projects = {} + for qualified_target in target_list: + spec = target_dicts[qualified_target] + proj_path, fixpath_prefix = _GetPathOfProject( + qualified_target, spec, options, msvs_version + ) + guid = _GetGuidOfProject(proj_path, spec) + overrides = _GetPlatformOverridesOfProject(spec) + build_file = gyp.common.BuildFile(qualified_target) + # Create object for this project. + target_name = spec["target_name"] + if spec["toolset"] == "host": + target_name += "_host" + obj = MSVSNew.MSVSProject( + proj_path, + name=target_name, + guid=guid, + spec=spec, + build_file=build_file, + config_platform_overrides=overrides, + fixpath_prefix=fixpath_prefix, + ) + # Set project toolset if any (MS build only) + if msvs_version.UsesVcxproj(): + obj.set_msbuild_toolset( + _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version) + ) + projects[qualified_target] = obj + # Set all the dependencies, but not if we are using an external builder like + # ninja + for project in projects.values(): + if not project.spec.get("msvs_external_builder"): + deps = project.spec.get("dependencies", []) + deps = [projects[d] for d in deps] + project.set_dependencies(deps) + return projects + + +def _InitNinjaFlavor(params, target_list, target_dicts): + """Initialize targets for the ninja flavor. + + This sets up the necessary variables in the targets to generate msvs projects + that use ninja as an external builder. The variables in the spec are only set + if they have not been set. This allows individual specs to override the + default values initialized here. + Arguments: + params: Params provided to the generator. + target_list: List of target pairs: 'base/base.gyp:base'. + target_dicts: Dict of target properties keyed on target pair. + """ + for qualified_target in target_list: + spec = target_dicts[qualified_target] + if spec.get("msvs_external_builder"): + # The spec explicitly defined an external builder, so don't change it. + continue + + path_to_ninja = spec.get("msvs_path_to_ninja", "ninja.exe") + + spec["msvs_external_builder"] = "ninja" + if not spec.get("msvs_external_builder_out_dir"): + gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target) + gyp_dir = os.path.dirname(gyp_file) + configuration = "$(Configuration)" + if params.get("target_arch") == "x64": + configuration += "_x64" + if params.get("target_arch") == "arm64": + configuration += "_arm64" + spec["msvs_external_builder_out_dir"] = os.path.join( + gyp.common.RelativePath(params["options"].toplevel_dir, gyp_dir), + ninja_generator.ComputeOutputDir(params), + configuration, + ) + if not spec.get("msvs_external_builder_build_cmd"): + spec["msvs_external_builder_build_cmd"] = [ + path_to_ninja, + "-C", + "$(OutDir)", + "$(ProjectName)", + ] + if not spec.get("msvs_external_builder_clean_cmd"): + spec["msvs_external_builder_clean_cmd"] = [ + path_to_ninja, + "-C", + "$(OutDir)", + "-tclean", + "$(ProjectName)", + ] + + +def CalculateVariables(default_variables, params): + """Generated variables that require params to be known.""" + + generator_flags = params.get("generator_flags", {}) + + # Select project file format version (if unset, default to auto detecting). + msvs_version = MSVSVersion.SelectVisualStudioVersion( + generator_flags.get("msvs_version", "auto") + ) + # Stash msvs_version for later (so we don't have to probe the system twice). 
+ params["msvs_version"] = msvs_version + + # Set a variable so conditions can be based on msvs_version. + default_variables["MSVS_VERSION"] = msvs_version.ShortName() + + # To determine processor word size on Windows, in addition to checking + # PROCESSOR_ARCHITECTURE (which reflects the word size of the current + # process), it is also necessary to check PROCESSOR_ARCITEW6432 (which + # contains the actual word size of the system when running thru WOW64). + if ( + os.environ.get("PROCESSOR_ARCHITECTURE", "").find("64") >= 0 + or os.environ.get("PROCESSOR_ARCHITEW6432", "").find("64") >= 0 + ): + default_variables["MSVS_OS_BITS"] = 64 + else: + default_variables["MSVS_OS_BITS"] = 32 + + if gyp.common.GetFlavor(params) == "ninja": + default_variables["SHARED_INTERMEDIATE_DIR"] = "$(OutDir)gen" + + +def PerformBuild(data, configurations, params): + options = params["options"] + msvs_version = params["msvs_version"] + devenv = os.path.join(msvs_version.path, "Common7", "IDE", "devenv.com") + + for build_file, build_file_dict in data.items(): + (build_file_root, build_file_ext) = os.path.splitext(build_file) + if build_file_ext != ".gyp": + continue + sln_path = build_file_root + options.suffix + ".sln" + if options.generator_output: + sln_path = os.path.join(options.generator_output, sln_path) + + for config in configurations: + arguments = [devenv, sln_path, "/Build", config] + print(f"Building [{config}]: {arguments}") + subprocess.check_call(arguments) + + +def CalculateGeneratorInputInfo(params): + if params.get("flavor") == "ninja": + toplevel = params["options"].toplevel_dir + qualified_out_dir = os.path.normpath( + os.path.join( + toplevel, + ninja_generator.ComputeOutputDir(params), + "gypfiles-msvs-ninja", + ) + ) + + global generator_filelist_paths + generator_filelist_paths = { + "toplevel": toplevel, + "qualified_out_dir": qualified_out_dir, + } + + +def GenerateOutput(target_list, target_dicts, data, params): + """Generate .sln and .vcproj files. + + This is the entry point for this generator. + Arguments: + target_list: List of target pairs: 'base/base.gyp:base'. + target_dicts: Dict of target properties keyed on target pair. + data: Dictionary containing per .gyp data. + """ + global fixpath_prefix + + options = params["options"] + + # Get the project file format version back out of where we stashed it in + # GeneratorCalculatedVariables. + msvs_version = params["msvs_version"] + + generator_flags = params.get("generator_flags", {}) + + # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT. + (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts) + + # Optionally use the large PDB workaround for targets marked with + # 'msvs_large_pdb': 1. + (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims( + target_list, target_dicts, generator_default_variables + ) + + # Optionally configure each spec to use ninja as the external builder. + if params.get("flavor") == "ninja": + _InitNinjaFlavor(params, target_list, target_dicts) + + # Prepare the set of configurations. 
+ configs = set() + for qualified_target in target_list: + spec = target_dicts[qualified_target] + for config_name, config in spec["configurations"].items(): + config_name = _ConfigFullName(config_name, config) + configs.add(config_name) + if config_name == "Release|arm64": + configs.add("Release|x64") + configs = list(configs) + + # Figure out all the projects that will be generated and their guids + project_objects = _CreateProjectObjects( + target_list, target_dicts, options, msvs_version + ) + + # Generate each project. + missing_sources = [] + for project in project_objects.values(): + fixpath_prefix = project.fixpath_prefix + missing_sources.extend( + _GenerateProject(project, options, msvs_version, generator_flags, spec) + ) + fixpath_prefix = None + + for build_file in data: + # Validate build_file extension + target_only_configs = configs + if generator_supports_multiple_toolsets: + target_only_configs = [i for i in configs if i.endswith("arm64")] + if not build_file.endswith(".gyp"): + continue + sln_path = os.path.splitext(build_file)[0] + options.suffix + ".sln" + if options.generator_output: + sln_path = os.path.join(options.generator_output, sln_path) + # Get projects in the solution, and their dependents. + sln_projects = gyp.common.BuildFileTargets(target_list, build_file) + sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects) + # Create folder hierarchy. + root_entries = _GatherSolutionFolders( + sln_projects, project_objects, flat=msvs_version.FlatSolution() + ) + # Create solution. + sln = MSVSNew.MSVSSolution( + sln_path, + entries=root_entries, + variants=target_only_configs, + websiteProperties=False, + version=msvs_version, + ) + sln.Write() + + if missing_sources: + error_message = "Missing input files:\n" + "\n".join(set(missing_sources)) + if generator_flags.get("msvs_error_on_missing_sources", False): + raise GypError(error_message) + else: + print("Warning: " + error_message, file=sys.stdout) + + +def _GenerateMSBuildFiltersFile( + filters_path, + source_files, + rule_dependencies, + extension_to_rule_name, + platforms, + toolset, +): + """Generate the filters file. + + This file is used by Visual Studio to organize the presentation of source + files into folders. + + Arguments: + filters_path: The path of the file to be created. + source_files: The hierarchical structure of all the sources. + extension_to_rule_name: A dictionary mapping file extensions to rules. + """ + filter_group = [] + source_group = [] + _AppendFiltersForMSBuild( + "", + source_files, + rule_dependencies, + extension_to_rule_name, + platforms, + toolset, + filter_group, + source_group, + ) + if filter_group: + content = [ + "Project", + { + "ToolsVersion": "4.0", + "xmlns": "http://schemas.microsoft.com/developer/msbuild/2003", + }, + ["ItemGroup"] + filter_group, + ["ItemGroup"] + source_group, + ] + easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True) + elif os.path.exists(filters_path): + # We don't need this filter anymore. Delete the old filter file. + os.unlink(filters_path) + + +def _AppendFiltersForMSBuild( + parent_filter_name, + sources, + rule_dependencies, + extension_to_rule_name, + platforms, + toolset, + filter_group, + source_group, +): + """Creates the list of filters and sources to be added in the filter file. + + Args: + parent_filter_name: The name of the filter under which the sources are + found. + sources: The hierarchy of filters and sources to process. 
+ extension_to_rule_name: A dictionary mapping file extensions to rules. + filter_group: The list to which filter entries will be appended. + source_group: The list to which source entries will be appended. + """ + for source in sources: + if isinstance(source, MSVSProject.Filter): + # We have a sub-filter. Create the name of that sub-filter. + if not parent_filter_name: + filter_name = source.name + else: + filter_name = f"{parent_filter_name}\\{source.name}" + # Add the filter to the group. + filter_group.append( + [ + "Filter", + {"Include": filter_name}, + ["UniqueIdentifier", MSVSNew.MakeGuid(source.name)], + ] + ) + # Recurse and add its dependents. + _AppendFiltersForMSBuild( + filter_name, + source.contents, + rule_dependencies, + extension_to_rule_name, + platforms, + toolset, + filter_group, + source_group, + ) + else: + # It's a source. Create a source entry. + _, element = _MapFileToMsBuildSourceType( + source, rule_dependencies, extension_to_rule_name, platforms, toolset + ) + source_entry = [element, {"Include": source}] + # Specify the filter it is part of, if any. + if parent_filter_name: + source_entry.append(["Filter", parent_filter_name]) + source_group.append(source_entry) + + +def _MapFileToMsBuildSourceType( + source, rule_dependencies, extension_to_rule_name, platforms, toolset +): + """Returns the group and element type of the source file. + + Arguments: + source: The source file name. + extension_to_rule_name: A dictionary mapping file extensions to rules. + + Returns: + A pair of (group this file should be part of, the label of element) + """ + _, ext = os.path.splitext(source) + ext = ext.lower() + if ext in extension_to_rule_name: + group = "rule" + element = extension_to_rule_name[ext] + elif ext in [".cc", ".cpp", ".c", ".cxx", ".mm"]: + group = "compile" + element = "ClCompile" + elif ext in [".h", ".hxx"]: + group = "include" + element = "ClInclude" + elif ext == ".rc": + group = "resource" + element = "ResourceCompile" + elif ext in [".s", ".asm"]: + group = "masm" + element = "MASM" + if "arm64" in platforms and toolset == "target": + element = "MARMASM" + elif ext == ".idl": + group = "midl" + element = "Midl" + elif source in rule_dependencies: + group = "rule_dependency" + element = "CustomBuild" + else: + group = "none" + element = "None" + return (group, element) + + +def _GenerateRulesForMSBuild( + output_dir, + options, + spec, + sources, + excluded_sources, + props_files_of_rules, + targets_files_of_rules, + actions_to_add, + rule_dependencies, + extension_to_rule_name, +): + # MSBuild rules are implemented using three files: an XML file, a .targets + # file and a .props file. + # For more details see: + # https://devblogs.microsoft.com/cppblog/quick-help-on-vs2010-custom-build-rule/ + rules = spec.get("rules", []) + rules_native = [r for r in rules if not int(r.get("msvs_external_rule", 0))] + rules_external = [r for r in rules if int(r.get("msvs_external_rule", 0))] + + msbuild_rules = [] + for rule in rules_native: + # Skip a rule with no action and no inputs. 
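+        # For example (hypothetical gyp input, for illustration only), a rule
+        # such as {"rule_name": "idl", "extension": "idl"} that carries
+        # neither an "action" nor any "rule_sources" produces no MSBuild rule
+        # and is dropped here: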
+ if "action" not in rule and not rule.get("rule_sources", []): + continue + msbuild_rule = MSBuildRule(rule, spec) + msbuild_rules.append(msbuild_rule) + rule_dependencies.update(msbuild_rule.additional_dependencies.split(";")) + extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name + if msbuild_rules: + base = spec["target_name"] + options.suffix + props_name = base + ".props" + targets_name = base + ".targets" + xml_name = base + ".xml" + + props_files_of_rules.add(props_name) + targets_files_of_rules.add(targets_name) + + props_path = os.path.join(output_dir, props_name) + targets_path = os.path.join(output_dir, targets_name) + xml_path = os.path.join(output_dir, xml_name) + + _GenerateMSBuildRulePropsFile(props_path, msbuild_rules) + _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules) + _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules) + + if rules_external: + _GenerateExternalRules( + rules_external, output_dir, spec, sources, options, actions_to_add + ) + _AdjustSourcesForRules(rules, sources, excluded_sources, True) + + +class MSBuildRule: + """Used to store information used to generate an MSBuild rule. + + Attributes: + rule_name: The rule name, sanitized to use in XML. + target_name: The name of the target. + after_targets: The name of the AfterTargets element. + before_targets: The name of the BeforeTargets element. + depends_on: The name of the DependsOn element. + compute_output: The name of the ComputeOutput element. + dirs_to_make: The name of the DirsToMake element. + inputs: The name of the _inputs element. + tlog: The name of the _tlog element. + extension: The extension this rule applies to. + description: The message displayed when this rule is invoked. + additional_dependencies: A string listing additional dependencies. + outputs: The outputs of this rule. + command: The command used to run the rule. + """ + + def __init__(self, rule, spec): + self.display_name = rule["rule_name"] + # Assure that the rule name is only characters and numbers + self.rule_name = re.sub(r"\W", "_", self.display_name) + # Create the various element names, following the example set by the + # Visual Studio 2008 to 2010 conversion. I don't know if VS2010 + # is sensitive to the exact names. + self.target_name = "_" + self.rule_name + self.after_targets = self.rule_name + "AfterTargets" + self.before_targets = self.rule_name + "BeforeTargets" + self.depends_on = self.rule_name + "DependsOn" + self.compute_output = "Compute%sOutput" % self.rule_name + self.dirs_to_make = self.rule_name + "DirsToMake" + self.inputs = self.rule_name + "_inputs" + self.tlog = self.rule_name + "_tlog" + self.extension = rule["extension"] + if not self.extension.startswith("."): + self.extension = "." 
+ self.extension + + self.description = MSVSSettings.ConvertVCMacrosToMSBuild( + rule.get("message", self.rule_name) + ) + old_additional_dependencies = _FixPaths(rule.get("inputs", [])) + self.additional_dependencies = ";".join( + [ + MSVSSettings.ConvertVCMacrosToMSBuild(i) + for i in old_additional_dependencies + ] + ) + old_outputs = _FixPaths(rule.get("outputs", [])) + self.outputs = ";".join( + [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in old_outputs] + ) + old_command = _BuildCommandLineForRule( + spec, rule, has_input_path=True, do_setup_env=True + ) + self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command) + + +def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules): + """Generate the .props file.""" + content = [ + "Project", + {"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003"}, + ] + for rule in msbuild_rules: + content.extend( + [ + [ + "PropertyGroup", + { + "Condition": "'$(%s)' == '' and '$(%s)' == '' and " + "'$(ConfigurationType)' != 'Makefile'" + % (rule.before_targets, rule.after_targets) + }, + [rule.before_targets, "Midl"], + [rule.after_targets, "CustomBuild"], + ], + [ + "PropertyGroup", + [ + rule.depends_on, + {"Condition": "'$(ConfigurationType)' != 'Makefile'"}, + "_SelectedFiles;$(%s)" % rule.depends_on, + ], + ], + [ + "ItemDefinitionGroup", + [ + rule.rule_name, + ["CommandLineTemplate", rule.command], + ["Outputs", rule.outputs], + ["ExecutionDescription", rule.description], + ["AdditionalDependencies", rule.additional_dependencies], + ], + ], + ] + ) + easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True) + + +def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules): + """Generate the .targets file.""" + content = [ + "Project", + {"xmlns": "http://schemas.microsoft.com/developer/msbuild/2003"}, + ] + item_group = [ + "ItemGroup", + [ + "PropertyPageSchema", + {"Include": "$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml"}, + ], + ] + for rule in msbuild_rules: + item_group.append( + [ + "AvailableItemName", + {"Include": rule.rule_name}, + ["Targets", rule.target_name], + ] + ) + content.append(item_group) + + for rule in msbuild_rules: + content.append( + [ + "UsingTask", + { + "TaskName": rule.rule_name, + "TaskFactory": "XamlTaskFactory", + "AssemblyName": "Microsoft.Build.Tasks.v4.0", + }, + ["Task", "$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml"], + ] + ) + for rule in msbuild_rules: + rule_name = rule.rule_name + target_outputs = "%%(%s.Outputs)" % rule_name + target_inputs = ( + "%%(%s.Identity);%%(%s.AdditionalDependencies);" "$(MSBuildProjectFile)" + ) % (rule_name, rule_name) + rule_inputs = "%%(%s.Identity)" % rule_name + extension_condition = ( + "'%(Extension)'=='.obj' or " + "'%(Extension)'=='.res' or " + "'%(Extension)'=='.rsc' or " + "'%(Extension)'=='.lib'" + ) + remove_section = [ + "ItemGroup", + {"Condition": "'@(SelectedFiles)' != ''"}, + [ + rule_name, + { + "Remove": "@(%s)" % rule_name, + "Condition": "'%(Identity)' != '@(SelectedFiles)'", + }, + ], + ] + inputs_section = [ + "ItemGroup", + [rule.inputs, {"Include": "%%(%s.AdditionalDependencies)" % rule_name}], + ] + logging_section = [ + "ItemGroup", + [ + rule.tlog, + { + "Include": "%%(%s.Outputs)" % rule_name, + "Condition": ( + "'%%(%s.Outputs)' != '' and " + "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name) + ), + }, + ["Source", "@(%s, '|')" % rule_name], + ["Inputs", "@(%s -> '%%(Fullpath)', ';')" % rule.inputs], + ], + ] + message_section = [ + "Message", + {"Importance": "High", 
"Text": "%%(%s.ExecutionDescription)" % rule_name}, + ] + write_tlog_section = [ + "WriteLinesToFile", + { + "Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " + "'true'" % (rule.tlog, rule.tlog), + "File": "$(IntDir)$(ProjectName).write.1.tlog", + "Lines": "^%%(%s.Source);@(%s->'%%(Fullpath)')" + % (rule.tlog, rule.tlog), + }, + ] + read_tlog_section = [ + "WriteLinesToFile", + { + "Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " + "'true'" % (rule.tlog, rule.tlog), + "File": "$(IntDir)$(ProjectName).read.1.tlog", + "Lines": f"^%({rule.tlog}.Source);%({rule.tlog}.Inputs)", + }, + ] + command_and_input_section = [ + rule_name, + { + "Condition": "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " + "'true'" % (rule_name, rule_name), + "EchoOff": "true", + "StandardOutputImportance": "High", + "StandardErrorImportance": "High", + "CommandLineTemplate": "%%(%s.CommandLineTemplate)" % rule_name, + "AdditionalOptions": "%%(%s.AdditionalOptions)" % rule_name, + "Inputs": rule_inputs, + }, + ] + content.extend( + [ + [ + "Target", + { + "Name": rule.target_name, + "BeforeTargets": "$(%s)" % rule.before_targets, + "AfterTargets": "$(%s)" % rule.after_targets, + "Condition": "'@(%s)' != ''" % rule_name, + "DependsOnTargets": "$(%s);%s" + % (rule.depends_on, rule.compute_output), + "Outputs": target_outputs, + "Inputs": target_inputs, + }, + remove_section, + inputs_section, + logging_section, + message_section, + write_tlog_section, + read_tlog_section, + command_and_input_section, + ], + [ + "PropertyGroup", + [ + "ComputeLinkInputsTargets", + "$(ComputeLinkInputsTargets);", + "%s;" % rule.compute_output, + ], + [ + "ComputeLibInputsTargets", + "$(ComputeLibInputsTargets);", + "%s;" % rule.compute_output, + ], + ], + [ + "Target", + { + "Name": rule.compute_output, + "Condition": "'@(%s)' != ''" % rule_name, + }, + [ + "ItemGroup", + [ + rule.dirs_to_make, + { + "Condition": "'@(%s)' != '' and " + "'%%(%s.ExcludedFromBuild)' != 'true'" + % (rule_name, rule_name), + "Include": "%%(%s.Outputs)" % rule_name, + }, + ], + [ + "Link", + { + "Include": "%%(%s.Identity)" % rule.dirs_to_make, + "Condition": extension_condition, + }, + ], + [ + "Lib", + { + "Include": "%%(%s.Identity)" % rule.dirs_to_make, + "Condition": extension_condition, + }, + ], + [ + "ImpLib", + { + "Include": "%%(%s.Identity)" % rule.dirs_to_make, + "Condition": extension_condition, + }, + ], + ], + [ + "MakeDir", + { + "Directories": ( + "@(%s->'%%(RootDir)%%(Directory)')" % rule.dirs_to_make + ) + }, + ], + ], + ] + ) + easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True) + + +def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules): + # Generate the .xml file + content = [ + "ProjectSchemaDefinitions", + { + "xmlns": ( + "clr-namespace:Microsoft.Build.Framework.XamlTypes;" + "assembly=Microsoft.Build.Framework" + ), + "xmlns:x": "http://schemas.microsoft.com/winfx/2006/xaml", + "xmlns:sys": "clr-namespace:System;assembly=mscorlib", + "xmlns:transformCallback": "Microsoft.Cpp.Dev10.ConvertPropertyCallback", + }, + ] + for rule in msbuild_rules: + content.extend( + [ + [ + "Rule", + { + "Name": rule.rule_name, + "PageTemplate": "tool", + "DisplayName": rule.display_name, + "Order": "200", + }, + [ + "Rule.DataSource", + [ + "DataSource", + {"Persistence": "ProjectFile", "ItemType": rule.rule_name}, + ], + ], + [ + "Rule.Categories", + [ + "Category", + {"Name": "General"}, + ["Category.DisplayName", ["sys:String", "General"]], + ], + [ + "Category", + {"Name": "Command Line", 
"Subtype": "CommandLine"}, + ["Category.DisplayName", ["sys:String", "Command Line"]], + ], + ], + [ + "StringListProperty", + { + "Name": "Inputs", + "Category": "Command Line", + "IsRequired": "true", + "Switch": " ", + }, + [ + "StringListProperty.DataSource", + [ + "DataSource", + { + "Persistence": "ProjectFile", + "ItemType": rule.rule_name, + "SourceType": "Item", + }, + ], + ], + ], + [ + "StringProperty", + { + "Name": "CommandLineTemplate", + "DisplayName": "Command Line", + "Visible": "False", + "IncludeInCommandLine": "False", + }, + ], + [ + "DynamicEnumProperty", + { + "Name": rule.before_targets, + "Category": "General", + "EnumProvider": "Targets", + "IncludeInCommandLine": "False", + }, + [ + "DynamicEnumProperty.DisplayName", + ["sys:String", "Execute Before"], + ], + [ + "DynamicEnumProperty.Description", + [ + "sys:String", + "Specifies the targets for the build customization" + " to run before.", + ], + ], + [ + "DynamicEnumProperty.ProviderSettings", + [ + "NameValuePair", + { + "Name": "Exclude", + "Value": "^%s|^Compute" % rule.before_targets, + }, + ], + ], + [ + "DynamicEnumProperty.DataSource", + [ + "DataSource", + { + "Persistence": "ProjectFile", + "HasConfigurationCondition": "true", + }, + ], + ], + ], + [ + "DynamicEnumProperty", + { + "Name": rule.after_targets, + "Category": "General", + "EnumProvider": "Targets", + "IncludeInCommandLine": "False", + }, + [ + "DynamicEnumProperty.DisplayName", + ["sys:String", "Execute After"], + ], + [ + "DynamicEnumProperty.Description", + [ + "sys:String", + ( + "Specifies the targets for the build customization" + " to run after." + ), + ], + ], + [ + "DynamicEnumProperty.ProviderSettings", + [ + "NameValuePair", + { + "Name": "Exclude", + "Value": "^%s|^Compute" % rule.after_targets, + }, + ], + ], + [ + "DynamicEnumProperty.DataSource", + [ + "DataSource", + { + "Persistence": "ProjectFile", + "ItemType": "", + "HasConfigurationCondition": "true", + }, + ], + ], + ], + [ + "StringListProperty", + { + "Name": "Outputs", + "DisplayName": "Outputs", + "Visible": "False", + "IncludeInCommandLine": "False", + }, + ], + [ + "StringProperty", + { + "Name": "ExecutionDescription", + "DisplayName": "Execution Description", + "Visible": "False", + "IncludeInCommandLine": "False", + }, + ], + [ + "StringListProperty", + { + "Name": "AdditionalDependencies", + "DisplayName": "Additional Dependencies", + "IncludeInCommandLine": "False", + "Visible": "false", + }, + ], + [ + "StringProperty", + { + "Subtype": "AdditionalOptions", + "Name": "AdditionalOptions", + "Category": "Command Line", + }, + [ + "StringProperty.DisplayName", + ["sys:String", "Additional Options"], + ], + [ + "StringProperty.Description", + ["sys:String", "Additional Options"], + ], + ], + ], + [ + "ItemType", + {"Name": rule.rule_name, "DisplayName": rule.display_name}, + ], + [ + "FileExtension", + {"Name": "*" + rule.extension, "ContentType": rule.rule_name}, + ], + [ + "ContentType", + { + "Name": rule.rule_name, + "DisplayName": "", + "ItemType": rule.rule_name, + }, + ], + ] + ) + easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True) + + +def _GetConfigurationAndPlatform(name, settings, spec): + configuration = name.rsplit("_", 1)[0] + platform = settings.get("msvs_configuration_platform", "Win32") + if spec["toolset"] == "host" and platform == "arm64": + platform = "x64" # Host-only tools are always built for x64 + return (configuration, platform) + + +def _GetConfigurationCondition(name, settings, spec): + return 
r"'$(Configuration)|$(Platform)'=='%s|%s'" % _GetConfigurationAndPlatform( + name, settings, spec + ) + + +def _GetMSBuildProjectConfigurations(configurations, spec): + group = ["ItemGroup", {"Label": "ProjectConfigurations"}] + for (name, settings) in sorted(configurations.items()): + configuration, platform = _GetConfigurationAndPlatform(name, settings, spec) + designation = f"{configuration}|{platform}" + group.append( + [ + "ProjectConfiguration", + {"Include": designation}, + ["Configuration", configuration], + ["Platform", platform], + ] + ) + return [group] + + +def _GetMSBuildGlobalProperties(spec, version, guid, gyp_file_name): + namespace = os.path.splitext(gyp_file_name)[0] + properties = [ + [ + "PropertyGroup", + {"Label": "Globals"}, + ["ProjectGuid", guid], + ["Keyword", "Win32Proj"], + ["RootNamespace", namespace], + ["IgnoreWarnCompileDuplicatedFilename", "true"], + ] + ] + + if ( + os.environ.get("PROCESSOR_ARCHITECTURE") == "AMD64" + or os.environ.get("PROCESSOR_ARCHITEW6432") == "AMD64" + ): + properties[0].append(["PreferredToolArchitecture", "x64"]) + + if spec.get("msvs_target_platform_version"): + target_platform_version = spec.get("msvs_target_platform_version") + properties[0].append(["WindowsTargetPlatformVersion", target_platform_version]) + if spec.get("msvs_target_platform_minversion"): + target_platform_minversion = spec.get("msvs_target_platform_minversion") + properties[0].append( + ["WindowsTargetPlatformMinVersion", target_platform_minversion] + ) + else: + properties[0].append( + ["WindowsTargetPlatformMinVersion", target_platform_version] + ) + + if spec.get("msvs_enable_winrt"): + properties[0].append(["DefaultLanguage", "en-US"]) + properties[0].append(["AppContainerApplication", "true"]) + if spec.get("msvs_application_type_revision"): + app_type_revision = spec.get("msvs_application_type_revision") + properties[0].append(["ApplicationTypeRevision", app_type_revision]) + else: + properties[0].append(["ApplicationTypeRevision", "8.1"]) + if spec.get("msvs_enable_winphone"): + properties[0].append(["ApplicationType", "Windows Phone"]) + else: + properties[0].append(["ApplicationType", "Windows Store"]) + + platform_name = None + msvs_windows_sdk_version = None + for configuration in spec["configurations"].values(): + platform_name = platform_name or _ConfigPlatform(configuration) + msvs_windows_sdk_version = ( + msvs_windows_sdk_version + or _ConfigWindowsTargetPlatformVersion(configuration, version) + ) + if platform_name and msvs_windows_sdk_version: + break + if msvs_windows_sdk_version: + properties[0].append( + ["WindowsTargetPlatformVersion", str(msvs_windows_sdk_version)] + ) + elif version.compatible_sdks: + raise GypError( + "%s requires any SDK of %s version, but none were found" + % (version.description, version.compatible_sdks) + ) + + if platform_name == "ARM": + properties[0].append(["WindowsSDKDesktopARMSupport", "true"]) + + return properties + + +def _GetMSBuildConfigurationDetails(spec, build_file): + properties = {} + for name, settings in spec["configurations"].items(): + msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file) + condition = _GetConfigurationCondition(name, settings, spec) + character_set = msbuild_attributes.get("CharacterSet") + config_type = msbuild_attributes.get("ConfigurationType") + _AddConditionalProperty(properties, condition, "ConfigurationType", config_type) + if config_type == "Driver": + _AddConditionalProperty(properties, condition, "DriverType", "WDM") + _AddConditionalProperty( + 
properties, condition, "TargetVersion", _ConfigTargetVersion(settings) + ) + if character_set: + if "msvs_enable_winrt" not in spec: + _AddConditionalProperty( + properties, condition, "CharacterSet", character_set + ) + return _GetMSBuildPropertyGroup(spec, "Configuration", properties) + + +def _GetMSBuildLocalProperties(msbuild_toolset): + # Currently the only local property we support is PlatformToolset + properties = {} + if msbuild_toolset: + properties = [ + [ + "PropertyGroup", + {"Label": "Locals"}, + ["PlatformToolset", msbuild_toolset], + ] + ] + return properties + + +def _GetMSBuildPropertySheets(configurations, spec): + user_props = r"$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" + additional_props = {} + props_specified = False + for name, settings in sorted(configurations.items()): + configuration = _GetConfigurationCondition(name, settings, spec) + if "msbuild_props" in settings: + additional_props[configuration] = _FixPaths(settings["msbuild_props"]) + props_specified = True + else: + additional_props[configuration] = "" + + if not props_specified: + return [ + [ + "ImportGroup", + {"Label": "PropertySheets"}, + [ + "Import", + { + "Project": user_props, + "Condition": "exists('%s')" % user_props, + "Label": "LocalAppDataPlatform", + }, + ], + ] + ] + else: + sheets = [] + for condition, props in additional_props.items(): + import_group = [ + "ImportGroup", + {"Label": "PropertySheets", "Condition": condition}, + [ + "Import", + { + "Project": user_props, + "Condition": "exists('%s')" % user_props, + "Label": "LocalAppDataPlatform", + }, + ], + ] + for props_file in props: + import_group.append(["Import", {"Project": props_file}]) + sheets.append(import_group) + return sheets + + +def _ConvertMSVSBuildAttributes(spec, config, build_file): + config_type = _GetMSVSConfigurationType(spec, build_file) + msvs_attributes = _GetMSVSAttributes(spec, config, config_type) + msbuild_attributes = {} + for a in msvs_attributes: + if a in ["IntermediateDirectory", "OutputDirectory"]: + directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a]) + if not directory.endswith("\\"): + directory += "\\" + msbuild_attributes[a] = directory + elif a == "CharacterSet": + msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a]) + elif a == "ConfigurationType": + msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a]) + else: + print("Warning: Do not know how to convert MSVS attribute " + a) + return msbuild_attributes + + +def _ConvertMSVSCharacterSet(char_set): + if char_set.isdigit(): + char_set = {"0": "MultiByte", "1": "Unicode", "2": "MultiByte"}[char_set] + return char_set + + +def _ConvertMSVSConfigurationType(config_type): + if config_type.isdigit(): + config_type = { + "1": "Application", + "2": "DynamicLibrary", + "4": "StaticLibrary", + "5": "Driver", + "10": "Utility", + }[config_type] + return config_type + + +def _GetMSBuildAttributes(spec, config, build_file): + if "msbuild_configuration_attributes" not in config: + msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file) + + else: + config_type = _GetMSVSConfigurationType(spec, build_file) + config_type = _ConvertMSVSConfigurationType(config_type) + msbuild_attributes = config.get("msbuild_configuration_attributes", {}) + msbuild_attributes.setdefault("ConfigurationType", config_type) + output_dir = msbuild_attributes.get( + "OutputDirectory", "$(SolutionDir)$(Configuration)" + ) + msbuild_attributes["OutputDirectory"] = _FixPath(output_dir) + "\\" + if 
"IntermediateDirectory" not in msbuild_attributes: + intermediate = _FixPath("$(Configuration)") + "\\" + msbuild_attributes["IntermediateDirectory"] = intermediate + if "CharacterSet" in msbuild_attributes: + msbuild_attributes["CharacterSet"] = _ConvertMSVSCharacterSet( + msbuild_attributes["CharacterSet"] + ) + if "TargetName" not in msbuild_attributes: + prefix = spec.get("product_prefix", "") + product_name = spec.get("product_name", "$(ProjectName)") + target_name = prefix + product_name + msbuild_attributes["TargetName"] = target_name + if "TargetExt" not in msbuild_attributes and "product_extension" in spec: + ext = spec.get("product_extension") + msbuild_attributes["TargetExt"] = "." + ext + + if spec.get("msvs_external_builder"): + external_out_dir = spec.get("msvs_external_builder_out_dir", ".") + msbuild_attributes["OutputDirectory"] = _FixPath(external_out_dir) + "\\" + + # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile' + # (depending on the tool used) to avoid MSB8012 warning. + msbuild_tool_map = { + "executable": "Link", + "shared_library": "Link", + "loadable_module": "Link", + "windows_driver": "Link", + "static_library": "Lib", + } + msbuild_tool = msbuild_tool_map.get(spec["type"]) + if msbuild_tool: + msbuild_settings = config["finalized_msbuild_settings"] + out_file = msbuild_settings[msbuild_tool].get("OutputFile") + if out_file: + msbuild_attributes["TargetPath"] = _FixPath(out_file) + target_ext = msbuild_settings[msbuild_tool].get("TargetExt") + if target_ext: + msbuild_attributes["TargetExt"] = target_ext + + return msbuild_attributes + + +def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): + # TODO(jeanluc) We could optimize out the following and do it only if + # there are actions. + # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'. + new_paths = [] + cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."])[0] + if cygwin_dirs: + cyg_path = "$(MSBuildProjectDirectory)\\%s\\bin\\" % _FixPath(cygwin_dirs) + new_paths.append(cyg_path) + # TODO(jeanluc) Change the convention to have both a cygwin_dir and a + # python_dir. 
+        python_path = cyg_path.replace("cygwin\\bin", "python_26")
+        new_paths.append(python_path)
+    if new_paths:
+        new_paths = "$(ExecutablePath);" + ";".join(new_paths)
+
+    properties = {}
+    for (name, configuration) in sorted(configurations.items()):
+        condition = _GetConfigurationCondition(name, configuration, spec)
+        attributes = _GetMSBuildAttributes(spec, configuration, build_file)
+        msbuild_settings = configuration["finalized_msbuild_settings"]
+        _AddConditionalProperty(
+            properties, condition, "IntDir", attributes["IntermediateDirectory"]
+        )
+        _AddConditionalProperty(
+            properties, condition, "OutDir", attributes["OutputDirectory"]
+        )
+        _AddConditionalProperty(
+            properties, condition, "TargetName", attributes["TargetName"]
+        )
+        if "TargetExt" in attributes:
+            _AddConditionalProperty(
+                properties, condition, "TargetExt", attributes["TargetExt"]
+            )
+
+        if attributes.get("TargetPath"):
+            _AddConditionalProperty(
+                properties, condition, "TargetPath", attributes["TargetPath"]
+            )
+        if attributes.get("TargetExt"):
+            _AddConditionalProperty(
+                properties, condition, "TargetExt", attributes["TargetExt"]
+            )
+
+        if new_paths:
+            _AddConditionalProperty(properties, condition, "ExecutablePath", new_paths)
+        tool_settings = msbuild_settings.get("", {})
+        for name, value in sorted(tool_settings.items()):
+            formatted_value = _GetValueFormattedForMSBuild("", name, value)
+            _AddConditionalProperty(properties, condition, name, formatted_value)
+    return _GetMSBuildPropertyGroup(spec, None, properties)
+
+
+def _AddConditionalProperty(properties, condition, name, value):
+    """Adds a property / conditional value pair to a dictionary.
+
+    Arguments:
+        properties: The dictionary to be modified. The key is the name of the
+            property. The value is itself a dictionary; its key is the value and
+            the value a list of conditions for which this value is true.
+        condition: The condition under which the named property has the value.
+        name: The name of the property.
+        value: The value of the property.
+    """
+    if name not in properties:
+        properties[name] = {}
+    values = properties[name]
+    if value not in values:
+        values[value] = []
+    conditions = values[value]
+    conditions.append(condition)
+
+
+# Regex for msvs variable references (i.e. $(FOO)).
+MSVS_VARIABLE_REFERENCE = re.compile(r"\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)")
+
+
+def _GetMSBuildPropertyGroup(spec, label, properties):
+    """Returns a PropertyGroup definition for the specified properties.
+
+    Arguments:
+        spec: The target project dict.
+        label: An optional label for the PropertyGroup.
+        properties: The dictionary to be converted. The key is the name of the
+            property. The value is itself a dictionary; its key is the value and
+            the value a list of conditions for which this value is true.
+    """
+    group = ["PropertyGroup"]
+    if label:
+        group.append({"Label": label})
+    num_configurations = len(spec["configurations"])
+
+    def GetEdges(node):
+        # Use a definition of edges such that user_of_variable -> used_variable.
+        # This happens to be easier in this case, since a variable's
+        # definition contains all variables it references in a single string.
+        edges = set()
+        for value in sorted(properties[node].keys()):
+            # Add to edges all $(...) references to variables.
+            #
+            # Variable references that refer to names not in properties are
+            # excluded. These can exist for instance to refer to built-in
+            # definitions like $(SolutionDir).
+            #
+            # Self references are ignored. Self reference is used in a few
+            # places to append to the default value, e.g.
+            # PATH=$(PATH);other_path
+            edges.update(
+                {
+                    v
+                    for v in MSVS_VARIABLE_REFERENCE.findall(value)
+                    if v in properties and v != node
+                }
+            )
+        return edges
+
+    properties_ordered = gyp.common.TopologicallySorted(properties.keys(), GetEdges)
+    # Walk properties in the reverse of a topological sort on
+    # user_of_variable -> used_variable as this ensures variables are
+    # defined before they are used.
+    # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
+    for name in reversed(properties_ordered):
+        values = properties[name]
+        for value, conditions in sorted(values.items()):
+            if len(conditions) == num_configurations:
+                # If the value is the same for all configurations,
+                # just add one unconditional entry.
+                group.append([name, value])
+            else:
+                for condition in conditions:
+                    group.append([name, {"Condition": condition}, value])
+    return [group]
+
+
+def _GetMSBuildToolSettingsSections(spec, configurations):
+    groups = []
+    for (name, configuration) in sorted(configurations.items()):
+        msbuild_settings = configuration["finalized_msbuild_settings"]
+        group = [
+            "ItemDefinitionGroup",
+            {"Condition": _GetConfigurationCondition(name, configuration, spec)},
+        ]
+        for tool_name, tool_settings in sorted(msbuild_settings.items()):
+            # Skip the tool named '' which is a holder of global settings handled
+            # by _GetMSBuildConfigurationGlobalProperties.
+            if tool_name:
+                if tool_settings:
+                    tool = [tool_name]
+                    for name, value in sorted(tool_settings.items()):
+                        formatted_value = _GetValueFormattedForMSBuild(
+                            tool_name, name, value
+                        )
+                        tool.append([name, formatted_value])
+                    group.append(tool)
+        groups.append(group)
+    return groups
+
+
+def _FinalizeMSBuildSettings(spec, configuration):
+    if "msbuild_settings" in configuration:
+        converted = False
+        msbuild_settings = configuration["msbuild_settings"]
+        MSVSSettings.ValidateMSBuildSettings(msbuild_settings)
+    else:
+        converted = True
+        msvs_settings = configuration.get("msvs_settings", {})
+        msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
+    include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs(
+        configuration
+    )
+    libraries = _GetLibraries(spec)
+    library_dirs = _GetLibraryDirs(configuration)
+    out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
+    target_ext = _GetOutputTargetExt(spec)
+    defines = _GetDefines(configuration)
+    if converted:
+        # Visual Studio 2010 has TR1
+        defines = [d for d in defines if d != "_HAS_TR1=0"]
+        # Warn of ignored settings
+        ignored_settings = ["msvs_tool_files"]
+        for ignored_setting in ignored_settings:
+            value = configuration.get(ignored_setting)
+            if value:
+                print(
+                    "Warning: The automatic conversion to MSBuild does not handle "
+                    "%s. Ignoring setting of %s" % (ignored_setting, str(value))
+                )
+
+    defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
+    disabled_warnings = _GetDisabledWarnings(configuration)
+    prebuild = configuration.get("msvs_prebuild")
+    postbuild = configuration.get("msvs_postbuild")
+    def_file = _GetModuleDefinition(spec)
+    precompiled_header = configuration.get("msvs_precompiled_header")
+
+    # Add the information to the appropriate tool
+    # TODO(jeanluc) We could optimize and generate these settings only if
+    # the corresponding files are found, e.g. don't generate ResourceCompile
+    # if you don't have any resources.
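+    # A sketch of the helper semantics assumed below (hedged; _ToolAppend is
+    # defined earlier in this file): it appends the given value(s) to
+    # settings[tool][name], creating the nested entries on demand, roughly:
+    #
+    #   _ToolAppend(s, "ClCompile", "AdditionalIncludeDirectories", ["inc"])
+    #   # s == {"ClCompile": {"AdditionalIncludeDirectories": ["inc"]}}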
+    _ToolAppend(
+        msbuild_settings, "ClCompile", "AdditionalIncludeDirectories", include_dirs
+    )
+    _ToolAppend(
+        msbuild_settings, "Midl", "AdditionalIncludeDirectories", midl_include_dirs
+    )
+    _ToolAppend(
+        msbuild_settings,
+        "ResourceCompile",
+        "AdditionalIncludeDirectories",
+        resource_include_dirs,
+    )
+    # Add in libraries, note that even for empty libraries, we want this
+    # set, to prevent inheriting default libraries from the environment.
+    _ToolSetOrAppend(msbuild_settings, "Link", "AdditionalDependencies", libraries)
+    _ToolAppend(msbuild_settings, "Link", "AdditionalLibraryDirectories", library_dirs)
+    if out_file:
+        _ToolAppend(
+            msbuild_settings, msbuild_tool, "OutputFile", out_file, only_if_unset=True
+        )
+    if target_ext:
+        _ToolAppend(
+            msbuild_settings, msbuild_tool, "TargetExt", target_ext, only_if_unset=True
+        )
+    # Add defines.
+    _ToolAppend(msbuild_settings, "ClCompile", "PreprocessorDefinitions", defines)
+    _ToolAppend(msbuild_settings, "ResourceCompile", "PreprocessorDefinitions", defines)
+    # Add disabled warnings.
+    _ToolAppend(
+        msbuild_settings, "ClCompile", "DisableSpecificWarnings", disabled_warnings
+    )
+    # Turn on precompiled headers if appropriate.
+    if precompiled_header:
+        precompiled_header = os.path.split(precompiled_header)[1]
+        _ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "Use")
+        _ToolAppend(
+            msbuild_settings, "ClCompile", "PrecompiledHeaderFile", precompiled_header
+        )
+        _ToolAppend(
+            msbuild_settings, "ClCompile", "ForcedIncludeFiles", [precompiled_header]
+        )
+    else:
+        _ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "NotUsing")
+    # Turn off WinRT compilation
+    _ToolAppend(msbuild_settings, "ClCompile", "CompileAsWinRT", "false")
+    # Turn on import libraries if appropriate
+    if spec.get("msvs_requires_importlibrary"):
+        _ToolAppend(msbuild_settings, "", "IgnoreImportLibrary", "false")
+    # Loadable modules don't generate import libraries;
+    # tell dependent projects to not expect one.
+    if spec["type"] == "loadable_module":
+        _ToolAppend(msbuild_settings, "", "IgnoreImportLibrary", "true")
+    # Set the module definition file if any.
+    if def_file:
+        _ToolAppend(msbuild_settings, "Link", "ModuleDefinitionFile", def_file)
+    configuration["finalized_msbuild_settings"] = msbuild_settings
+    if prebuild:
+        _ToolAppend(msbuild_settings, "PreBuildEvent", "Command", prebuild)
+    if postbuild:
+        _ToolAppend(msbuild_settings, "PostBuildEvent", "Command", postbuild)
+
+
+def _GetValueFormattedForMSBuild(tool_name, name, value):
+    if type(value) == list:
+        # For some settings, VS2010 does not automatically extend the settings
+        # TODO(jeanluc) Is this what we want?
+        if name in [
+            "AdditionalIncludeDirectories",
+            "AdditionalLibraryDirectories",
+            "AdditionalOptions",
+            "DelayLoadDLLs",
+            "DisableSpecificWarnings",
+            "PreprocessorDefinitions",
+        ]:
+            value.append("%%(%s)" % name)
+        # For most tools, entries in a list should be separated with ';' but some
+        # settings use a space. Check for those first.
+        exceptions = {
+            "ClCompile": ["AdditionalOptions"],
+            "Link": ["AdditionalOptions"],
+            "Lib": ["AdditionalOptions"],
+        }
+        if tool_name in exceptions and name in exceptions[tool_name]:
+            char = " "
+        else:
+            char = ";"
+        formatted_value = char.join(
+            [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value]
+        )
+    else:
+        formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value)
+    return formatted_value
+
+
+def _VerifySourcesExist(sources, root_dir):
+    """Verifies that all source files exist on disk.
+
+    Checks that all regular source files, i.e. not created at run time,
+    exist on disk. Missing files cause needless recompilation but no otherwise
+    visible errors.
+
+    Arguments:
+        sources: A recursive list of Filter/file names.
+        root_dir: The root directory for the relative path names.
+    Returns:
+        A list of source files that cannot be found on disk.
+    """
+    missing_sources = []
+    for source in sources:
+        if isinstance(source, MSVSProject.Filter):
+            missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
+        else:
+            if "$" not in source:
+                full_path = os.path.join(root_dir, source)
+                if not os.path.exists(full_path):
+                    missing_sources.append(full_path)
+    return missing_sources
+
+
+def _GetMSBuildSources(
+    spec,
+    sources,
+    exclusions,
+    rule_dependencies,
+    extension_to_rule_name,
+    actions_spec,
+    sources_handled_by_action,
+    list_excluded,
+):
+    groups = [
+        "none",
+        "masm",
+        "midl",
+        "include",
+        "compile",
+        "resource",
+        "rule",
+        "rule_dependency",
+    ]
+    grouped_sources = {}
+    for g in groups:
+        grouped_sources[g] = []
+
+    _AddSources2(
+        spec,
+        sources,
+        exclusions,
+        grouped_sources,
+        rule_dependencies,
+        extension_to_rule_name,
+        sources_handled_by_action,
+        list_excluded,
+    )
+    sources = []
+    for g in groups:
+        if grouped_sources[g]:
+            sources.append(["ItemGroup"] + grouped_sources[g])
+    if actions_spec:
+        sources.append(["ItemGroup"] + actions_spec)
+    return sources
+
+
+def _AddSources2(
+    spec,
+    sources,
+    exclusions,
+    grouped_sources,
+    rule_dependencies,
+    extension_to_rule_name,
+    sources_handled_by_action,
+    list_excluded,
+):
+    extensions_excluded_from_precompile = []
+    for source in sources:
+        if isinstance(source, MSVSProject.Filter):
+            _AddSources2(
+                spec,
+                source.contents,
+                exclusions,
+                grouped_sources,
+                rule_dependencies,
+                extension_to_rule_name,
+                sources_handled_by_action,
+                list_excluded,
+            )
+        else:
+            if source not in sources_handled_by_action:
+                detail = []
+                excluded_configurations = exclusions.get(source, [])
+                if len(excluded_configurations) == len(spec["configurations"]):
+                    detail.append(["ExcludedFromBuild", "true"])
+                else:
+                    for config_name, configuration in sorted(excluded_configurations):
+                        condition = _GetConfigurationCondition(
+                            config_name, configuration, spec
+                        )
+                        detail.append(
+                            ["ExcludedFromBuild", {"Condition": condition}, "true"]
+                        )
+                # Add precompile if needed
+                for config_name, configuration in spec["configurations"].items():
+                    precompiled_source = configuration.get(
+                        "msvs_precompiled_source", ""
+                    )
+                    if precompiled_source != "":
+                        precompiled_source = _FixPath(precompiled_source)
+                        if not extensions_excluded_from_precompile:
+                            # If the precompiled header is generated by a C source,
+                            # we must not try to use it for C++ sources,
+                            # and vice versa.
+                            basename, extension = os.path.splitext(precompiled_source)
+                            if extension == ".c":
+                                extensions_excluded_from_precompile = [
+                                    ".cc",
+                                    ".cpp",
+                                    ".cxx",
+                                ]
+                            else:
+                                extensions_excluded_from_precompile = [".c"]
+
+                        if precompiled_source == source:
+                            condition = _GetConfigurationCondition(
+                                config_name, configuration, spec
+                            )
+                            detail.append(
+                                ["PrecompiledHeader", {"Condition": condition}, "Create"]
+                            )
+                        else:
+                            # Turn off precompiled header usage for source files of a
+                            # different type than the file that generated the
+                            # precompiled header.
+                            for extension in extensions_excluded_from_precompile:
+                                if source.endswith(extension):
+                                    detail.append(["PrecompiledHeader", ""])
+                                    detail.append(["ForcedIncludeFiles", ""])
+
+                group, element = _MapFileToMsBuildSourceType(
+                    source,
+                    rule_dependencies,
+                    extension_to_rule_name,
+                    _GetUniquePlatforms(spec),
+                    spec["toolset"],
+                )
+                if group == "compile" and not os.path.isabs(source):
+                    # Add an <ObjectFileName> value to support duplicate source
+                    # file basenames, except for absolute paths to avoid paths
+                    # with more than 260 characters.
+                    file_name = os.path.splitext(source)[0] + ".obj"
+                    if file_name.startswith("..\\"):
+                        file_name = re.sub(r"^(\.\.\\)+", "", file_name)
+                    elif file_name.startswith("$("):
+                        file_name = re.sub(r"^\$\([^)]+\)\\", "", file_name)
+                    detail.append(["ObjectFileName", "$(IntDir)\\" + file_name])
+                grouped_sources[group].append([element, {"Include": source}] + detail)
+
+
+def _GetMSBuildProjectReferences(project):
+    references = []
+    if project.dependencies:
+        group = ["ItemGroup"]
+        added_dependency_set = set()
+        for dependency in project.dependencies:
+            dependency_spec = dependency.spec
+            should_skip_dep = False
+            if project.spec["toolset"] == "target":
+                if dependency_spec["toolset"] == "host":
+                    if dependency_spec["type"] == "static_library":
+                        should_skip_dep = True
+            if dependency.name.startswith("run_"):
+                should_skip_dep = False
+            if should_skip_dep:
+                continue
+
+            canonical_name = dependency.name.replace("_host", "")
+            added_dependency_set.add(canonical_name)
+            guid = dependency.guid
+            project_dir = os.path.split(project.path)[0]
+            relative_path = gyp.common.RelativePath(dependency.path, project_dir)
+            project_ref = [
+                "ProjectReference",
+                {"Include": relative_path},
+                ["Project", guid],
+                ["ReferenceOutputAssembly", "false"],
+            ]
+            for config in dependency.spec.get("configurations", {}).values():
+                if config.get("msvs_use_library_dependency_inputs", 0):
+                    project_ref.append(["UseLibraryDependencyInputs", "true"])
+                    break
+                # If it's disabled in any config, turn it off in the reference.
+                if config.get("msvs_2010_disable_uldi_when_referenced", 0):
+                    project_ref.append(["UseLibraryDependencyInputs", "false"])
+                    break
+            group.append(project_ref)
+        references.append(group)
+    return references
+
+
+def _GenerateMSBuildProject(project, options, version, generator_flags, spec):
+    spec = project.spec
+    configurations = spec["configurations"]
+    toolset = spec["toolset"]
+    project_dir, project_file_name = os.path.split(project.path)
+    gyp.common.EnsureDirExists(project.path)
+    # Prepare list of sources and excluded sources.
+
+    gyp_file = os.path.split(project.build_file)[1]
+    sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, gyp_file)
+    # Add rules.
+    actions_to_add = {}
+    props_files_of_rules = set()
+    targets_files_of_rules = set()
+    rule_dependencies = set()
+    extension_to_rule_name = {}
+    list_excluded = generator_flags.get("msvs_list_excluded_files", True)
+    platforms = _GetUniquePlatforms(spec)
+
+    # Don't generate rules if we are using an external builder like ninja.
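+    # For example (hypothetical gyp snippet, for illustration only), a target
+    # driven by an external builder carries settings along the lines of
+    #
+    #   "msvs_external_builder": "ninja",
+    #   "msvs_external_builder_build_cmd": ["ninja.exe", "-C", "$(OutDir)"],
+    #
+    # in which case the generated project only wraps the external command.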
+ if not spec.get("msvs_external_builder"): + _GenerateRulesForMSBuild( + project_dir, + options, + spec, + sources, + excluded_sources, + props_files_of_rules, + targets_files_of_rules, + actions_to_add, + rule_dependencies, + extension_to_rule_name, + ) + else: + rules = spec.get("rules", []) + _AdjustSourcesForRules(rules, sources, excluded_sources, True) + + sources, excluded_sources, excluded_idl = _AdjustSourcesAndConvertToFilterHierarchy( + spec, options, project_dir, sources, excluded_sources, list_excluded, version + ) + + # Don't add actions if we are using an external builder like ninja. + if not spec.get("msvs_external_builder"): + _AddActions(actions_to_add, spec, project.build_file) + _AddCopies(actions_to_add, spec) + + # NOTE: this stanza must appear after all actions have been decided. + # Don't excluded sources with actions attached, or they won't run. + excluded_sources = _FilterActionsFromExcluded(excluded_sources, actions_to_add) + + exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) + actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild( + spec, actions_to_add + ) + + _GenerateMSBuildFiltersFile( + project.path + ".filters", + sources, + rule_dependencies, + extension_to_rule_name, + platforms, + toolset, + ) + missing_sources = _VerifySourcesExist(sources, project_dir) + + for configuration in configurations.values(): + _FinalizeMSBuildSettings(spec, configuration) + + # Add attributes to root element + + import_default_section = [ + ["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.Default.props"}] + ] + import_cpp_props_section = [ + ["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.props"}] + ] + import_cpp_targets_section = [ + ["Import", {"Project": r"$(VCTargetsPath)\Microsoft.Cpp.targets"}] + ] + import_masm_props_section = [ + ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\masm.props"}] + ] + import_masm_targets_section = [ + ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\masm.targets"}] + ] + import_marmasm_props_section = [ + ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\marmasm.props"}] + ] + import_marmasm_targets_section = [ + ["Import", {"Project": r"$(VCTargetsPath)\BuildCustomizations\marmasm.targets"}] + ] + macro_section = [["PropertyGroup", {"Label": "UserMacros"}]] + + content = [ + "Project", + { + "xmlns": "http://schemas.microsoft.com/developer/msbuild/2003", + "ToolsVersion": version.ProjectVersion(), + "DefaultTargets": "Build", + }, + ] + + content += _GetMSBuildProjectConfigurations(configurations, spec) + content += _GetMSBuildGlobalProperties( + spec, version, project.guid, project_file_name + ) + content += import_default_section + content += _GetMSBuildConfigurationDetails(spec, project.build_file) + if spec.get("msvs_enable_winphone"): + content += _GetMSBuildLocalProperties("v120_wp81") + else: + content += _GetMSBuildLocalProperties(project.msbuild_toolset) + content += import_cpp_props_section + content += import_masm_props_section + if "arm64" in platforms and toolset == "target": + content += import_marmasm_props_section + content += _GetMSBuildExtensions(props_files_of_rules) + content += _GetMSBuildPropertySheets(configurations, spec) + content += macro_section + content += _GetMSBuildConfigurationGlobalProperties( + spec, configurations, project.build_file + ) + content += _GetMSBuildToolSettingsSections(spec, configurations) + content += _GetMSBuildSources( + spec, + sources, + exclusions, + rule_dependencies, + 
extension_to_rule_name, + actions_spec, + sources_handled_by_action, + list_excluded, + ) + content += _GetMSBuildProjectReferences(project) + content += import_cpp_targets_section + content += import_masm_targets_section + if "arm64" in platforms and toolset == "target": + content += import_marmasm_targets_section + content += _GetMSBuildExtensionTargets(targets_files_of_rules) + + if spec.get("msvs_external_builder"): + content += _GetMSBuildExternalBuilderTargets(spec) + + # TODO(jeanluc) File a bug to get rid of runas. We had in MSVS: + # has_run_as = _WriteMSVSUserFile(project.path, version, spec) + + easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True) + + return missing_sources + + +def _GetMSBuildExternalBuilderTargets(spec): + """Return a list of MSBuild targets for external builders. + + The "Build" and "Clean" targets are always generated. If the spec contains + 'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also + be generated, to support building selected C/C++ files. + + Arguments: + spec: The gyp target spec. + Returns: + List of MSBuild 'Target' specs. + """ + build_cmd = _BuildCommandLineForRuleRaw( + spec, spec["msvs_external_builder_build_cmd"], False, False, False, False + ) + build_target = ["Target", {"Name": "Build"}] + build_target.append(["Exec", {"Command": build_cmd}]) + + clean_cmd = _BuildCommandLineForRuleRaw( + spec, spec["msvs_external_builder_clean_cmd"], False, False, False, False + ) + clean_target = ["Target", {"Name": "Clean"}] + clean_target.append(["Exec", {"Command": clean_cmd}]) + + targets = [build_target, clean_target] + + if spec.get("msvs_external_builder_clcompile_cmd"): + clcompile_cmd = _BuildCommandLineForRuleRaw( + spec, + spec["msvs_external_builder_clcompile_cmd"], + False, + False, + False, + False, + ) + clcompile_target = ["Target", {"Name": "ClCompile"}] + clcompile_target.append(["Exec", {"Command": clcompile_cmd}]) + targets.append(clcompile_target) + + return targets + + +def _GetMSBuildExtensions(props_files_of_rules): + extensions = ["ImportGroup", {"Label": "ExtensionSettings"}] + for props_file in props_files_of_rules: + extensions.append(["Import", {"Project": props_file}]) + return [extensions] + + +def _GetMSBuildExtensionTargets(targets_files_of_rules): + targets_node = ["ImportGroup", {"Label": "ExtensionTargets"}] + for targets_file in sorted(targets_files_of_rules): + targets_node.append(["Import", {"Project": targets_file}]) + return [targets_node] + + +def _GenerateActionsForMSBuild(spec, actions_to_add): + """Add actions accumulated into an actions_to_add, merging as needed. + + Arguments: + spec: the target project dict + actions_to_add: dictionary keyed on input name, which maps to a list of + dicts describing the actions attached to that input file. + + Returns: + A pair of (action specification, the sources handled by this action). 
+ """ + sources_handled_by_action = OrderedSet() + actions_spec = [] + for primary_input, actions in actions_to_add.items(): + if generator_supports_multiple_toolsets: + primary_input = primary_input.replace(".exe", "_host.exe") + inputs = OrderedSet() + outputs = OrderedSet() + descriptions = [] + commands = [] + for action in actions: + + def fixup_host_exe(i): + if "$(OutDir)" in i: + i = i.replace(".exe", "_host.exe") + return i + + if generator_supports_multiple_toolsets: + action["inputs"] = [fixup_host_exe(i) for i in action["inputs"]] + inputs.update(OrderedSet(action["inputs"])) + outputs.update(OrderedSet(action["outputs"])) + descriptions.append(action["description"]) + cmd = action["command"] + if generator_supports_multiple_toolsets: + cmd = cmd.replace(".exe", "_host.exe") + # For most actions, add 'call' so that actions that invoke batch files + # return and continue executing. msbuild_use_call provides a way to + # disable this but I have not seen any adverse effect from doing that + # for everything. + if action.get("msbuild_use_call", True): + cmd = "call " + cmd + commands.append(cmd) + # Add the custom build action for one input file. + description = ", and also ".join(descriptions) + + # We can't join the commands simply with && because the command line will + # get too long. See also _AddActions: cygwin's setup_env mustn't be called + # for every invocation or the command that sets the PATH will grow too + # long. + command = "\r\n".join( + [c + "\r\nif %errorlevel% neq 0 exit /b %errorlevel%" for c in commands] + ) + _AddMSBuildAction( + spec, + primary_input, + inputs, + outputs, + command, + description, + sources_handled_by_action, + actions_spec, + ) + return actions_spec, sources_handled_by_action + + +def _AddMSBuildAction( + spec, + primary_input, + inputs, + outputs, + cmd, + description, + sources_handled_by_action, + actions_spec, +): + command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd) + primary_input = _FixPath(primary_input) + inputs_array = _FixPaths(inputs) + outputs_array = _FixPaths(outputs) + additional_inputs = ";".join([i for i in inputs_array if i != primary_input]) + outputs = ";".join(outputs_array) + sources_handled_by_action.add(primary_input) + action_spec = ["CustomBuild", {"Include": primary_input}] + action_spec.extend( + # TODO(jeanluc) 'Document' for all or just if as_sources? + [ + ["FileType", "Document"], + ["Command", command], + ["Message", description], + ["Outputs", outputs], + ] + ) + if additional_inputs: + action_spec.append(["AdditionalInputs", additional_inputs]) + actions_spec.append(action_spec) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py new file mode 100644 index 0000000..e80b57f --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" Unit tests for the msvs.py file. 
""" + +import gyp.generator.msvs as msvs +import unittest + +from io import StringIO + + +class TestSequenceFunctions(unittest.TestCase): + def setUp(self): + self.stderr = StringIO() + + def test_GetLibraries(self): + self.assertEqual(msvs._GetLibraries({}), []) + self.assertEqual(msvs._GetLibraries({"libraries": []}), []) + self.assertEqual( + msvs._GetLibraries({"other": "foo", "libraries": ["a.lib"]}), ["a.lib"] + ) + self.assertEqual(msvs._GetLibraries({"libraries": ["-la"]}), ["a.lib"]) + self.assertEqual( + msvs._GetLibraries( + { + "libraries": [ + "a.lib", + "b.lib", + "c.lib", + "-lb.lib", + "-lb.lib", + "d.lib", + "a.lib", + ] + } + ), + ["c.lib", "b.lib", "d.lib", "a.lib"], + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py new file mode 100644 index 0000000..d173bf2 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py @@ -0,0 +1,2936 @@ +# Copyright (c) 2013 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import collections +import copy +import hashlib +import json +import multiprocessing +import os.path +import re +import signal +import subprocess +import sys +import gyp +import gyp.common +import gyp.msvs_emulation +import gyp.MSVSUtil as MSVSUtil +import gyp.xcode_emulation + +from io import StringIO + +from gyp.common import GetEnvironFallback +import gyp.ninja_syntax as ninja_syntax + +generator_default_variables = { + "EXECUTABLE_PREFIX": "", + "EXECUTABLE_SUFFIX": "", + "STATIC_LIB_PREFIX": "lib", + "STATIC_LIB_SUFFIX": ".a", + "SHARED_LIB_PREFIX": "lib", + # Gyp expects the following variables to be expandable by the build + # system to the appropriate locations. Ninja prefers paths to be + # known at gyp time. To resolve this, introduce special + # variables starting with $! and $| (which begin with a $ so gyp knows it + # should be treated specially, but is otherwise an invalid + # ninja/shell variable) that are passed to gyp here but expanded + # before writing out into the target .ninja files; see + # ExpandSpecial. + # $! is used for variables that represent a path and that can only appear at + # the start of a string, while $| is used for variables that can appear + # anywhere in a string. + "INTERMEDIATE_DIR": "$!INTERMEDIATE_DIR", + "SHARED_INTERMEDIATE_DIR": "$!PRODUCT_DIR/gen", + "PRODUCT_DIR": "$!PRODUCT_DIR", + "CONFIGURATION_NAME": "$|CONFIGURATION_NAME", + # Special variables that may be used by gyp 'rule' targets. + # We generate definitions for these variables on the fly when processing a + # rule. + "RULE_INPUT_ROOT": "${root}", + "RULE_INPUT_DIRNAME": "${dirname}", + "RULE_INPUT_PATH": "${source}", + "RULE_INPUT_EXT": "${ext}", + "RULE_INPUT_NAME": "${name}", +} + +# Placates pylint. +generator_additional_non_configuration_keys = [] +generator_additional_path_sections = [] +generator_extra_sources_for_rules = [] +generator_filelist_paths = None + +generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested() + + +def StripPrefix(arg, prefix): + if arg.startswith(prefix): + return arg[len(prefix) :] + return arg + + +def QuoteShellArgument(arg, flavor): + """Quote a string such that it will be interpreted as a single argument + by the shell.""" + # Rather than attempting to enumerate the bad shell characters, just + # allow common OK ones and quote anything else. 
+ if re.match(r"^[a-zA-Z0-9_=.\\/-]+$", arg): + return arg # No quoting necessary. + if flavor == "win": + return gyp.msvs_emulation.QuoteForRspFile(arg) + return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'" + + +def Define(d, flavor): + """Takes a preprocessor define and returns a -D parameter that's ninja- and + shell-escaped.""" + if flavor == "win": + # cl.exe replaces literal # characters with = in preprocessor definitions for + # some reason. Octal-encode to work around that. + d = d.replace("#", "\\%03o" % ord("#")) + return QuoteShellArgument(ninja_syntax.escape("-D" + d), flavor) + + +def AddArch(output, arch): + """Adds an arch string to an output path.""" + output, extension = os.path.splitext(output) + return f"{output}.{arch}{extension}" + + +class Target: + """Target represents the paths used within a single gyp target. + + Conceptually, building a single target A is a series of steps: + + 1) actions/rules/copies generates source/resources/etc. + 2) compiles generates .o files + 3) link generates a binary (library/executable) + 4) bundle merges the above in a mac bundle + + (Any of these steps can be optional.) + + From a build ordering perspective, a dependent target B could just + depend on the last output of this series of steps. + + But some dependent commands sometimes need to reach inside the box. + For example, when linking B it needs to get the path to the static + library generated by A. + + This object stores those paths. To keep things simple, member + variables only store concrete paths to single files, while methods + compute derived values like "the last output of the target". + """ + + def __init__(self, type): + # Gyp type ("static_library", etc.) of this target. + self.type = type + # File representing whether any input dependencies necessary for + # dependent actions have completed. + self.preaction_stamp = None + # File representing whether any input dependencies necessary for + # dependent compiles have completed. + self.precompile_stamp = None + # File representing the completion of actions/rules/copies, if any. + self.actions_stamp = None + # Path to the output of the link step, if any. + self.binary = None + # Path to the file representing the completion of building the bundle, + # if any. + self.bundle = None + # On Windows, incremental linking requires linking against all the .objs + # that compose a .lib (rather than the .lib itself). That list is stored + # here. In this case, we also need to save the compile_deps for the target, + # so that the target that directly depends on the .objs can also depend + # on those. + self.component_objs = None + self.compile_deps = None + # Windows only. The import .lib is the output of a build step, but + # because dependents only link against the lib (not both the lib and the + # dll) we keep track of the import library here. + self.import_lib = None + # Track if this target contains any C++ files, to decide if gcc or g++ + # should be used for linking. + self.uses_cpp = False + + def Linkable(self): + """Return true if this is a target that can be linked against.""" + return self.type in ("static_library", "shared_library") + + def UsesToc(self, flavor): + """Return true if the target should produce a restat rule based on a TOC + file.""" + # For bundles, the .TOC should be produced for the binary, not for + # FinalOutput(). But the naive approach would put the TOC file into the + # bundle, so don't do this for bundles for now. 
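+        # Illustrative sketch: the .TOC is a table-of-contents file recording
+        # the library's exported interface; combined with ninja's restat,
+        # dependents relink only when libfoo.so.TOC changes, not on every
+        # rebuild of libfoo.so.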
+ if flavor == "win" or self.bundle: + return False + return self.type in ("shared_library", "loadable_module") + + def PreActionInput(self, flavor): + """Return the path, if any, that should be used as a dependency of + any dependent action step.""" + if self.UsesToc(flavor): + return self.FinalOutput() + ".TOC" + return self.FinalOutput() or self.preaction_stamp + + def PreCompileInput(self): + """Return the path, if any, that should be used as a dependency of + any dependent compile step.""" + return self.actions_stamp or self.precompile_stamp + + def FinalOutput(self): + """Return the last output of the target, which depends on all prior + steps.""" + return self.bundle or self.binary or self.actions_stamp + + +# A small discourse on paths as used within the Ninja build: +# All files we produce (both at gyp and at build time) appear in the +# build directory (e.g. out/Debug). +# +# Paths within a given .gyp file are always relative to the directory +# containing the .gyp file. Call these "gyp paths". This includes +# sources as well as the starting directory a given gyp rule/action +# expects to be run from. We call the path from the source root to +# the gyp file the "base directory" within the per-.gyp-file +# NinjaWriter code. +# +# All paths as written into the .ninja files are relative to the build +# directory. Call these paths "ninja paths". +# +# We translate between these two notions of paths with two helper +# functions: +# +# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file) +# into the equivalent ninja path. +# +# - GypPathToUniqueOutput translates a gyp path into a ninja path to write +# an output file; the result can be namespaced such that it is unique +# to the input file name as well as the output target name. + + +class NinjaWriter: + def __init__( + self, + hash_for_rules, + target_outputs, + base_dir, + build_dir, + output_file, + toplevel_build, + output_file_name, + flavor, + toplevel_dir=None, + ): + """ + base_dir: path from source root to directory containing this gyp file, + by gyp semantics, all input paths are relative to this + build_dir: path from source root to build output + toplevel_dir: path to the toplevel directory + """ + + self.hash_for_rules = hash_for_rules + self.target_outputs = target_outputs + self.base_dir = base_dir + self.build_dir = build_dir + self.ninja = ninja_syntax.Writer(output_file) + self.toplevel_build = toplevel_build + self.output_file_name = output_file_name + + self.flavor = flavor + self.abs_build_dir = None + if toplevel_dir is not None: + self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, build_dir)) + self.obj_ext = ".obj" if flavor == "win" else ".o" + if flavor == "win": + # See docstring of msvs_emulation.GenerateEnvironmentFiles(). + self.win_env = {} + for arch in ("x86", "x64"): + self.win_env[arch] = "environment." + arch + + # Relative path from build output dir to base dir. + build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir) + self.build_to_base = os.path.join(build_to_top, base_dir) + # Relative path from base dir to build dir. + base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) + self.base_to_build = os.path.join(base_to_top, build_dir) + + def ExpandSpecial(self, path, product_dir=None): + """Expand specials like $!PRODUCT_DIR in |path|. + + If |product_dir| is None, assumes the cwd is already the product + dir. Otherwise, |product_dir| is the relative path to the product + dir. 
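+
+ Illustrative (editorial) example: ExpandSpecial("$!PRODUCT_DIR/gen/x.h",
+ product_dir="../out/Debug") returns "../out/Debug/gen/x.h".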
+ """ + + PRODUCT_DIR = "$!PRODUCT_DIR" + if PRODUCT_DIR in path: + if product_dir: + path = path.replace(PRODUCT_DIR, product_dir) + else: + path = path.replace(PRODUCT_DIR + "/", "") + path = path.replace(PRODUCT_DIR + "\\", "") + path = path.replace(PRODUCT_DIR, ".") + + INTERMEDIATE_DIR = "$!INTERMEDIATE_DIR" + if INTERMEDIATE_DIR in path: + int_dir = self.GypPathToUniqueOutput("gen") + # GypPathToUniqueOutput generates a path relative to the product dir, + # so insert product_dir in front if it is provided. + path = path.replace( + INTERMEDIATE_DIR, os.path.join(product_dir or "", int_dir) + ) + + CONFIGURATION_NAME = "$|CONFIGURATION_NAME" + path = path.replace(CONFIGURATION_NAME, self.config_name) + + return path + + def ExpandRuleVariables(self, path, root, dirname, source, ext, name): + if self.flavor == "win": + path = self.msvs_settings.ConvertVSMacros(path, config=self.config_name) + path = path.replace(generator_default_variables["RULE_INPUT_ROOT"], root) + path = path.replace(generator_default_variables["RULE_INPUT_DIRNAME"], dirname) + path = path.replace(generator_default_variables["RULE_INPUT_PATH"], source) + path = path.replace(generator_default_variables["RULE_INPUT_EXT"], ext) + path = path.replace(generator_default_variables["RULE_INPUT_NAME"], name) + return path + + def GypPathToNinja(self, path, env=None): + """Translate a gyp path to a ninja path, optionally expanding environment + variable references in |path| with |env|. + + See the above discourse on path conversions.""" + if env: + if self.flavor == "mac": + path = gyp.xcode_emulation.ExpandEnvVars(path, env) + elif self.flavor == "win": + path = gyp.msvs_emulation.ExpandMacros(path, env) + if path.startswith("$!"): + expanded = self.ExpandSpecial(path) + if self.flavor == "win": + expanded = os.path.normpath(expanded) + return expanded + if "$|" in path: + path = self.ExpandSpecial(path) + assert "$" not in path, path + return os.path.normpath(os.path.join(self.build_to_base, path)) + + def GypPathToUniqueOutput(self, path, qualified=True): + """Translate a gyp path to a ninja path for writing output. + + If qualified is True, qualify the resulting filename with the name + of the target. This is necessary when e.g. compiling the same + path twice for two separate output targets. + + See the above discourse on path conversions.""" + + path = self.ExpandSpecial(path) + assert not path.startswith("$"), path + + # Translate the path following this scheme: + # Input: foo/bar.gyp, target targ, references baz/out.o + # Output: obj/foo/baz/targ.out.o (if qualified) + # obj/foo/baz/out.o (otherwise) + # (and obj.host instead of obj for cross-compiles) + # + # Why this scheme and not some other one? + # 1) for a given input, you can compute all derived outputs by matching + # its path, even if the input is brought via a gyp file with '..'. + # 2) simple files like libraries and stamps have a simple filename. + + obj = "obj" + if self.toolset != "target": + obj += "." + self.toolset + + path_dir, path_basename = os.path.split(path) + assert not os.path.isabs(path_dir), ( + "'%s' can not be absolute path (see crbug.com/462153)." % path_dir + ) + + if qualified: + path_basename = self.name + "." + path_basename + return os.path.normpath( + os.path.join(obj, self.base_dir, path_dir, path_basename) + ) + + def WriteCollapsedDependencies(self, name, targets, order_only=None): + """Given a list of targets, return a path for a single file + representing the result of building all the targets or None. 
+ + Uses a stamp file if necessary.""" + + assert targets == [item for item in targets if item], targets + if len(targets) == 0: + assert not order_only + return None + if len(targets) > 1 or order_only: + stamp = self.GypPathToUniqueOutput(name + ".stamp") + targets = self.ninja.build(stamp, "stamp", targets, order_only=order_only) + self.ninja.newline() + return targets[0] + + def _SubninjaNameForArch(self, arch): + output_file_base = os.path.splitext(self.output_file_name)[0] + return f"{output_file_base}.{arch}.ninja" + + def WriteSpec(self, spec, config_name, generator_flags): + """The main entry point for NinjaWriter: write the build rules for a spec. + + Returns a Target object, which represents the output paths for this spec. + Returns None if there are no outputs (e.g. a settings-only 'none' type + target).""" + + self.config_name = config_name + self.name = spec["target_name"] + self.toolset = spec["toolset"] + config = spec["configurations"][config_name] + self.target = Target(spec["type"]) + self.is_standalone_static_library = bool( + spec.get("standalone_static_library", 0) + ) + + self.target_rpath = generator_flags.get("target_rpath", r"\$$ORIGIN/lib/") + + self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) + self.xcode_settings = self.msvs_settings = None + if self.flavor == "mac": + self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) + mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None) + if mac_toolchain_dir: + self.xcode_settings.mac_toolchain_dir = mac_toolchain_dir + + if self.flavor == "win": + self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, generator_flags) + arch = self.msvs_settings.GetArch(config_name) + self.ninja.variable("arch", self.win_env[arch]) + self.ninja.variable("cc", "$cl_" + arch) + self.ninja.variable("cxx", "$cl_" + arch) + self.ninja.variable("cc_host", "$cl_" + arch) + self.ninja.variable("cxx_host", "$cl_" + arch) + self.ninja.variable("asm", "$ml_" + arch) + + if self.flavor == "mac": + self.archs = self.xcode_settings.GetActiveArchs(config_name) + if len(self.archs) > 1: + self.arch_subninjas = { + arch: ninja_syntax.Writer( + OpenOutput( + os.path.join( + self.toplevel_build, self._SubninjaNameForArch(arch) + ), + "w", + ) + ) + for arch in self.archs + } + + # Compute predepends for all rules. + # actions_depends is the dependencies this target depends on before running + # any of its action/rule/copy steps. + # compile_depends is the dependencies this target depends on before running + # any of its compile steps. + actions_depends = [] + compile_depends = [] + # TODO(evan): it is rather confusing which things are lists and which + # are strings. Fix these. + if "dependencies" in spec: + for dep in spec["dependencies"]: + if dep in self.target_outputs: + target = self.target_outputs[dep] + actions_depends.append(target.PreActionInput(self.flavor)) + compile_depends.append(target.PreCompileInput()) + if target.uses_cpp: + self.target.uses_cpp = True + actions_depends = [item for item in actions_depends if item] + compile_depends = [item for item in compile_depends if item] + actions_depends = self.WriteCollapsedDependencies( + "actions_depends", actions_depends + ) + compile_depends = self.WriteCollapsedDependencies( + "compile_depends", compile_depends + ) + self.target.preaction_stamp = actions_depends + self.target.precompile_stamp = compile_depends + + # Write out actions, rules, and copies. 
These must happen before we + # compile any sources, so compute a list of predependencies for sources + # while we do it. + extra_sources = [] + mac_bundle_depends = [] + self.target.actions_stamp = self.WriteActionsRulesCopies( + spec, extra_sources, actions_depends, mac_bundle_depends + ) + + # If we have actions/rules/copies, we depend directly on those, but + # otherwise we depend on dependent target's actions/rules/copies etc. + # We never need to explicitly depend on previous target's link steps, + # because no compile ever depends on them. + compile_depends_stamp = self.target.actions_stamp or compile_depends + + # Write out the compilation steps, if any. + link_deps = [] + try: + sources = extra_sources + spec.get("sources", []) + except TypeError: + print("extra_sources: ", str(extra_sources)) + print('spec.get("sources"): ', str(spec.get("sources"))) + raise + if sources: + if self.flavor == "mac" and len(self.archs) > 1: + # Write subninja file containing compile and link commands scoped to + # a single arch if a fat binary is being built. + for arch in self.archs: + self.ninja.subninja(self._SubninjaNameForArch(arch)) + + pch = None + if self.flavor == "win": + gyp.msvs_emulation.VerifyMissingSources( + sources, self.abs_build_dir, generator_flags, self.GypPathToNinja + ) + pch = gyp.msvs_emulation.PrecompiledHeader( + self.msvs_settings, + config_name, + self.GypPathToNinja, + self.GypPathToUniqueOutput, + self.obj_ext, + ) + else: + pch = gyp.xcode_emulation.MacPrefixHeader( + self.xcode_settings, + self.GypPathToNinja, + lambda path, lang: self.GypPathToUniqueOutput(path + "-" + lang), + ) + link_deps = self.WriteSources( + self.ninja, + config_name, + config, + sources, + compile_depends_stamp, + pch, + spec, + ) + # Some actions/rules output 'sources' that are already object files. + obj_outputs = [f for f in sources if f.endswith(self.obj_ext)] + if obj_outputs: + if self.flavor != "mac" or len(self.archs) == 1: + link_deps += [self.GypPathToNinja(o) for o in obj_outputs] + else: + print( + "Warning: Actions/rules writing object files don't work with " + "multiarch targets, dropping. (target %s)" % spec["target_name"] + ) + elif self.flavor == "mac" and len(self.archs) > 1: + link_deps = collections.defaultdict(list) + + compile_deps = self.target.actions_stamp or actions_depends + if self.flavor == "win" and self.target.type == "static_library": + self.target.component_objs = link_deps + self.target.compile_deps = compile_deps + + # Write out a link step, if needed. + output = None + is_empty_bundle = not link_deps and not mac_bundle_depends + if link_deps or self.target.actions_stamp or actions_depends: + output = self.WriteTarget( + spec, config_name, config, link_deps, compile_deps + ) + if self.is_mac_bundle: + mac_bundle_depends.append(output) + + # Bundle all of the above together, if needed. + if self.is_mac_bundle: + output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle) + + if not output: + return None + + assert self.target.FinalOutput(), output + return self.target + + def _WinIdlRule(self, source, prebuild, outputs): + """Handle the implicit VS .idl rule for one source file. 
Fills |outputs| + with files that are generated.""" + outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData( + source, self.config_name + ) + outdir = self.GypPathToNinja(outdir) + + def fix_path(path, rel=None): + path = os.path.join(outdir, path) + dirname, basename = os.path.split(source) + root, ext = os.path.splitext(basename) + path = self.ExpandRuleVariables(path, root, dirname, source, ext, basename) + if rel: + path = os.path.relpath(path, rel) + return path + + vars = [(name, fix_path(value, outdir)) for name, value in vars] + output = [fix_path(p) for p in output] + vars.append(("outdir", outdir)) + vars.append(("idlflags", flags)) + input = self.GypPathToNinja(source) + self.ninja.build(output, "idl", input, variables=vars, order_only=prebuild) + outputs.extend(output) + + def WriteWinIdlFiles(self, spec, prebuild): + """Writes rules to match MSVS's implicit idl handling.""" + assert self.flavor == "win" + if self.msvs_settings.HasExplicitIdlRulesOrActions(spec): + return [] + outputs = [] + for source in filter(lambda x: x.endswith(".idl"), spec["sources"]): + self._WinIdlRule(source, prebuild, outputs) + return outputs + + def WriteActionsRulesCopies( + self, spec, extra_sources, prebuild, mac_bundle_depends + ): + """Write out the Actions, Rules, and Copies steps. Return a path + representing the outputs of these steps.""" + outputs = [] + if self.is_mac_bundle: + mac_bundle_resources = spec.get("mac_bundle_resources", [])[:] + else: + mac_bundle_resources = [] + extra_mac_bundle_resources = [] + + if "actions" in spec: + outputs += self.WriteActions( + spec["actions"], extra_sources, prebuild, extra_mac_bundle_resources + ) + if "rules" in spec: + outputs += self.WriteRules( + spec["rules"], + extra_sources, + prebuild, + mac_bundle_resources, + extra_mac_bundle_resources, + ) + if "copies" in spec: + outputs += self.WriteCopies(spec["copies"], prebuild, mac_bundle_depends) + + if "sources" in spec and self.flavor == "win": + outputs += self.WriteWinIdlFiles(spec, prebuild) + + if self.xcode_settings and self.xcode_settings.IsIosFramework(): + self.WriteiOSFrameworkHeaders(spec, outputs, prebuild) + + stamp = self.WriteCollapsedDependencies("actions_rules_copies", outputs) + + if self.is_mac_bundle: + xcassets = self.WriteMacBundleResources( + extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends + ) + partial_info_plist = self.WriteMacXCassets(xcassets, mac_bundle_depends) + self.WriteMacInfoPlist(partial_info_plist, mac_bundle_depends) + + return stamp + + def GenerateDescription(self, verb, message, fallback): + """Generate and return a description of a build step. + + |verb| is the short summary, e.g. ACTION or RULE. + |message| is a hand-written description, or None if not available. + |fallback| is the gyp-level name of the step, usable as a fallback. + """ + if self.toolset != "target": + verb += "(%s)" % self.toolset + if message: + return f"{verb} {self.ExpandSpecial(message)}" + else: + return f"{verb} {self.name}: {fallback}" + + def WriteActions( + self, actions, extra_sources, prebuild, extra_mac_bundle_resources + ): + # Actions cd into the base directory. + env = self.GetToolchainEnv() + all_outputs = [] + for action in actions: + # First write out a rule for the action. 
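+ # Editorial note (added): the rule name below combines the gyp action
+ # name with self.hash_for_rules, a disambiguating hash supplied by the
+ # caller, because ninja rule names live in one global namespace (see
+ # the TODO in WriteNewNinjaRule).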
+ name = "{}_{}".format(action["action_name"], self.hash_for_rules) + description = self.GenerateDescription( + "ACTION", action.get("message", None), name + ) + win_shell_flags = ( + self.msvs_settings.GetRuleShellFlags(action) + if self.flavor == "win" + else None + ) + args = action["action"] + depfile = action.get("depfile", None) + if depfile: + depfile = self.ExpandSpecial(depfile, self.base_to_build) + pool = "console" if int(action.get("ninja_use_console", 0)) else None + rule_name, _ = self.WriteNewNinjaRule( + name, args, description, win_shell_flags, env, pool, depfile=depfile + ) + + inputs = [self.GypPathToNinja(i, env) for i in action["inputs"]] + if int(action.get("process_outputs_as_sources", False)): + extra_sources += action["outputs"] + if int(action.get("process_outputs_as_mac_bundle_resources", False)): + extra_mac_bundle_resources += action["outputs"] + outputs = [self.GypPathToNinja(o, env) for o in action["outputs"]] + + # Then write out an edge using the rule. + self.ninja.build(outputs, rule_name, inputs, order_only=prebuild) + all_outputs += outputs + + self.ninja.newline() + + return all_outputs + + def WriteRules( + self, + rules, + extra_sources, + prebuild, + mac_bundle_resources, + extra_mac_bundle_resources, + ): + env = self.GetToolchainEnv() + all_outputs = [] + for rule in rules: + # Skip a rule with no action and no inputs. + if "action" not in rule and not rule.get("rule_sources", []): + continue + + # First write out a rule for the rule action. + name = "{}_{}".format(rule["rule_name"], self.hash_for_rules) + + args = rule["action"] + description = self.GenerateDescription( + "RULE", + rule.get("message", None), + ("%s " + generator_default_variables["RULE_INPUT_PATH"]) % name, + ) + win_shell_flags = ( + self.msvs_settings.GetRuleShellFlags(rule) + if self.flavor == "win" + else None + ) + pool = "console" if int(rule.get("ninja_use_console", 0)) else None + rule_name, args = self.WriteNewNinjaRule( + name, args, description, win_shell_flags, env, pool + ) + + # TODO: if the command references the outputs directly, we should + # simplify it to just use $out. + + # Rules can potentially make use of some special variables which + # must vary per source file. + # Compute the list of variables we'll need to provide. + special_locals = ("source", "root", "dirname", "ext", "name") + needed_variables = {"source"} + for argument in args: + for var in special_locals: + if "${%s}" % var in argument: + needed_variables.add(var) + needed_variables = sorted(needed_variables) + + def cygwin_munge(path): + # pylint: disable=cell-var-from-loop + if win_shell_flags and win_shell_flags.cygwin: + return path.replace("\\", "/") + return path + + inputs = [self.GypPathToNinja(i, env) for i in rule.get("inputs", [])] + + # If there are n source files matching the rule, and m additional rule + # inputs, then adding 'inputs' to each build edge written below will + # write m * n inputs. Collapsing reduces this to m + n. + sources = rule.get("rule_sources", []) + num_inputs = len(inputs) + if prebuild: + num_inputs += 1 + if num_inputs > 2 and len(sources) > 2: + inputs = [ + self.WriteCollapsedDependencies( + rule["rule_name"], inputs, order_only=prebuild + ) + ] + prebuild = [] + + # For each source file, write an edge that generates all the outputs. + for source in sources: + source = os.path.normpath(source) + dirname, basename = os.path.split(source) + root, ext = os.path.splitext(basename) + + # Gather the list of inputs and outputs, expanding $vars if possible. 
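+ # Editorial example (added): for source "idl/foo.idl", root="foo",
+ # dirname="idl", ext=".idl", so a rule output of "${root}_gen.cc"
+ # expands to "foo_gen.cc" via ExpandRuleVariables below.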
+ outputs = [
+ self.ExpandRuleVariables(o, root, dirname, source, ext, basename)
+ for o in rule["outputs"]
+ ]
+
+ if int(rule.get("process_outputs_as_sources", False)):
+ extra_sources += outputs
+
+ was_mac_bundle_resource = source in mac_bundle_resources
+ if was_mac_bundle_resource or int(
+ rule.get("process_outputs_as_mac_bundle_resources", False)
+ ):
+ extra_mac_bundle_resources += outputs
+ # Note: This is n_resources * n_outputs_in_rule.
+ # Put to-be-removed items in a set and
+ # remove them all in a single pass
+ # if this becomes a performance issue.
+ if was_mac_bundle_resource:
+ mac_bundle_resources.remove(source)
+
+ extra_bindings = []
+ for var in needed_variables:
+ if var == "root":
+ extra_bindings.append(("root", cygwin_munge(root)))
+ elif var == "dirname":
+ # '$dirname' is a parameter to the rule action, which means
+ # it shouldn't be converted to a Ninja path. But we don't
+ # want $!PRODUCT_DIR in there either.
+ dirname_expanded = self.ExpandSpecial(
+ dirname, self.base_to_build
+ )
+ extra_bindings.append(
+ ("dirname", cygwin_munge(dirname_expanded))
+ )
+ elif var == "source":
+ # '$source' is a parameter to the rule action, which means
+ # it shouldn't be converted to a Ninja path. But we don't
+ # want $!PRODUCT_DIR in there either.
+ source_expanded = self.ExpandSpecial(source, self.base_to_build)
+ extra_bindings.append(("source", cygwin_munge(source_expanded)))
+ elif var == "ext":
+ extra_bindings.append(("ext", ext))
+ elif var == "name":
+ extra_bindings.append(("name", cygwin_munge(basename)))
+ else:
+ assert var is None, repr(var)
+
+ outputs = [self.GypPathToNinja(o, env) for o in outputs]
+ if self.flavor == "win":
+ # WriteNewNinjaRule uses unique_name to create an rsp file on win.
+ # The output path is encoded to bytes first, since hashlib.md5
+ # requires bytes rather than str under Python 3.
+ extra_bindings.append(
+ ("unique_name", hashlib.md5(outputs[0].encode("utf-8")).hexdigest())
+ )
+
+ self.ninja.build(
+ outputs,
+ rule_name,
+ self.GypPathToNinja(source),
+ implicit=inputs,
+ order_only=prebuild,
+ variables=extra_bindings,
+ )
+
+ all_outputs.extend(outputs)
+
+ return all_outputs
+
+ def WriteCopies(self, copies, prebuild, mac_bundle_depends):
+ outputs = []
+ if self.xcode_settings:
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetToolchainEnv(additional_settings=extra_env)
+ else:
+ env = self.GetToolchainEnv()
+ for to_copy in copies:
+ for path in to_copy["files"]:
+ # Normalize the path so trailing slashes don't confuse us.
+ path = os.path.normpath(path)
+ basename = os.path.split(path)[1]
+ src = self.GypPathToNinja(path, env)
+ dst = self.GypPathToNinja(
+ os.path.join(to_copy["destination"], basename), env
+ )
+ outputs += self.ninja.build(dst, "copy", src, order_only=prebuild)
+ if self.is_mac_bundle:
+ # gyp has mac_bundle_resources to copy things into a bundle's
+ # Resources folder, but there's no built-in way to copy files
+ # to other places in the bundle.
+ # Hence, some targets use copies for this.
+ # Check if this file is copied into the current bundle,
+ # and if so add it to the bundle depends so
+ # that dependent targets get rebuilt if the copy input changes.
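+ # Editorial note (added): GetBundleContentsFolderPath() is, e.g.,
+ # "Foo.app/Contents" for a mac app bundle, so the prefix check below
+ # asks whether the copy destination lies inside the bundle.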
+ if dst.startswith(
+ self.xcode_settings.GetBundleContentsFolderPath()
+ ):
+ mac_bundle_depends.append(dst)
+
+ return outputs
+
+ def WriteiOSFrameworkHeaders(self, spec, outputs, prebuild):
+ """Prebuild steps to generate hmap files and copy headers to destination."""
+ framework = self.ComputeMacBundleOutput()
+ all_sources = spec["sources"]
+ copy_headers = spec["mac_framework_headers"]
+ output = self.GypPathToUniqueOutput("headers.hmap")
+ self.xcode_settings.header_map_path = output
+ all_headers = map(
+ self.GypPathToNinja, filter(lambda x: x.endswith(".h"), all_sources)
+ )
+ variables = [
+ ("framework", framework),
+ ("copy_headers", map(self.GypPathToNinja, copy_headers)),
+ ]
+ outputs.extend(
+ self.ninja.build(
+ output,
+ "compile_ios_framework_headers",
+ all_headers,
+ variables=variables,
+ order_only=prebuild,
+ )
+ )
+
+ def WriteMacBundleResources(self, resources, bundle_depends):
+ """Writes ninja edges for 'mac_bundle_resources'."""
+ xcassets = []
+
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+ isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
+
+ for output, res in gyp.xcode_emulation.GetMacBundleResources(
+ generator_default_variables["PRODUCT_DIR"],
+ self.xcode_settings,
+ map(self.GypPathToNinja, resources),
+ ):
+ output = self.ExpandSpecial(output)
+ if os.path.splitext(output)[-1] != ".xcassets":
+ self.ninja.build(
+ output,
+ "mac_tool",
+ res,
+ variables=[
+ ("mactool_cmd", "copy-bundle-resource"),
+ ("env", env),
+ ("binary", isBinary),
+ ],
+ )
+ bundle_depends.append(output)
+ else:
+ xcassets.append(res)
+ return xcassets
+
+ def WriteMacXCassets(self, xcassets, bundle_depends):
+ """Writes ninja edges for 'mac_bundle_resources' .xcassets files.
+
+ This adds an invocation of 'actool' via the 'mac_tool.py' helper script.
+ It assumes that the asset catalogs define at least one imageset and
+ thus an Assets.car file will be generated in the application resources
+ directory.
If this is not the case, then the build will probably be done + at each invocation of ninja.""" + if not xcassets: + return + + extra_arguments = {} + settings_to_arg = { + "XCASSETS_APP_ICON": "app-icon", + "XCASSETS_LAUNCH_IMAGE": "launch-image", + } + settings = self.xcode_settings.xcode_settings[self.config_name] + for settings_key, arg_name in settings_to_arg.items(): + value = settings.get(settings_key) + if value: + extra_arguments[arg_name] = value + + partial_info_plist = None + if extra_arguments: + partial_info_plist = self.GypPathToUniqueOutput( + "assetcatalog_generated_info.plist" + ) + extra_arguments["output-partial-info-plist"] = partial_info_plist + + outputs = [] + outputs.append( + os.path.join(self.xcode_settings.GetBundleResourceFolder(), "Assets.car") + ) + if partial_info_plist: + outputs.append(partial_info_plist) + + keys = QuoteShellArgument(json.dumps(extra_arguments), self.flavor) + extra_env = self.xcode_settings.GetPerTargetSettings() + env = self.GetSortedXcodeEnv(additional_settings=extra_env) + env = self.ComputeExportEnvString(env) + + bundle_depends.extend( + self.ninja.build( + outputs, + "compile_xcassets", + xcassets, + variables=[("env", env), ("keys", keys)], + ) + ) + return partial_info_plist + + def WriteMacInfoPlist(self, partial_info_plist, bundle_depends): + """Write build rules for bundle Info.plist files.""" + info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist( + generator_default_variables["PRODUCT_DIR"], + self.xcode_settings, + self.GypPathToNinja, + ) + if not info_plist: + return + out = self.ExpandSpecial(out) + if defines: + # Create an intermediate file to store preprocessed results. + intermediate_plist = self.GypPathToUniqueOutput( + os.path.basename(info_plist) + ) + defines = " ".join([Define(d, self.flavor) for d in defines]) + info_plist = self.ninja.build( + intermediate_plist, + "preprocess_infoplist", + info_plist, + variables=[("defines", defines)], + ) + + env = self.GetSortedXcodeEnv(additional_settings=extra_env) + env = self.ComputeExportEnvString(env) + + if partial_info_plist: + intermediate_plist = self.GypPathToUniqueOutput("merged_info.plist") + info_plist = self.ninja.build( + intermediate_plist, "merge_infoplist", [partial_info_plist, info_plist] + ) + + keys = self.xcode_settings.GetExtraPlistItems(self.config_name) + keys = QuoteShellArgument(json.dumps(keys), self.flavor) + isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name) + self.ninja.build( + out, + "copy_infoplist", + info_plist, + variables=[("env", env), ("keys", keys), ("binary", isBinary)], + ) + bundle_depends.append(out) + + def WriteSources( + self, + ninja_file, + config_name, + config, + sources, + predepends, + precompiled_header, + spec, + ): + """Write build rules to compile all of |sources|.""" + if self.toolset == "host": + self.ninja.variable("ar", "$ar_host") + self.ninja.variable("cc", "$cc_host") + self.ninja.variable("cxx", "$cxx_host") + self.ninja.variable("ld", "$ld_host") + self.ninja.variable("ldxx", "$ldxx_host") + self.ninja.variable("nm", "$nm_host") + self.ninja.variable("readelf", "$readelf_host") + + if self.flavor != "mac" or len(self.archs) == 1: + return self.WriteSourcesForArch( + self.ninja, + config_name, + config, + sources, + predepends, + precompiled_header, + spec, + ) + else: + return { + arch: self.WriteSourcesForArch( + self.arch_subninjas[arch], + config_name, + config, + sources, + predepends, + precompiled_header, + spec, + arch=arch, + ) + for arch in self.archs + } + 
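+ # Editorial note (added): for multi-arch mac builds, WriteSources above
+ # fans out to one WriteSourcesForArch call per arch, each writing into
+ # its own subninja; WriteLink later merges the per-arch results with a
+ # lipo edge.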
+ def WriteSourcesForArch( + self, + ninja_file, + config_name, + config, + sources, + predepends, + precompiled_header, + spec, + arch=None, + ): + """Write build rules to compile all of |sources|.""" + + extra_defines = [] + if self.flavor == "mac": + cflags = self.xcode_settings.GetCflags(config_name, arch=arch) + cflags_c = self.xcode_settings.GetCflagsC(config_name) + cflags_cc = self.xcode_settings.GetCflagsCC(config_name) + cflags_objc = ["$cflags_c"] + self.xcode_settings.GetCflagsObjC(config_name) + cflags_objcc = ["$cflags_cc"] + self.xcode_settings.GetCflagsObjCC( + config_name + ) + elif self.flavor == "win": + asmflags = self.msvs_settings.GetAsmflags(config_name) + cflags = self.msvs_settings.GetCflags(config_name) + cflags_c = self.msvs_settings.GetCflagsC(config_name) + cflags_cc = self.msvs_settings.GetCflagsCC(config_name) + extra_defines = self.msvs_settings.GetComputedDefines(config_name) + # See comment at cc_command for why there's two .pdb files. + pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName( + config_name, self.ExpandSpecial + ) + if not pdbpath_c: + obj = "obj" + if self.toolset != "target": + obj += "." + self.toolset + pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name)) + pdbpath_c = pdbpath + ".c.pdb" + pdbpath_cc = pdbpath + ".cc.pdb" + self.WriteVariableList(ninja_file, "pdbname_c", [pdbpath_c]) + self.WriteVariableList(ninja_file, "pdbname_cc", [pdbpath_cc]) + self.WriteVariableList(ninja_file, "pchprefix", [self.name]) + else: + cflags = config.get("cflags", []) + cflags_c = config.get("cflags_c", []) + cflags_cc = config.get("cflags_cc", []) + + # Respect environment variables related to build, but target-specific + # flags can still override them. + if self.toolset == "target": + cflags_c = ( + os.environ.get("CPPFLAGS", "").split() + + os.environ.get("CFLAGS", "").split() + + cflags_c + ) + cflags_cc = ( + os.environ.get("CPPFLAGS", "").split() + + os.environ.get("CXXFLAGS", "").split() + + cflags_cc + ) + elif self.toolset == "host": + cflags_c = ( + os.environ.get("CPPFLAGS_host", "").split() + + os.environ.get("CFLAGS_host", "").split() + + cflags_c + ) + cflags_cc = ( + os.environ.get("CPPFLAGS_host", "").split() + + os.environ.get("CXXFLAGS_host", "").split() + + cflags_cc + ) + + defines = config.get("defines", []) + extra_defines + self.WriteVariableList( + ninja_file, "defines", [Define(d, self.flavor) for d in defines] + ) + if self.flavor == "win": + self.WriteVariableList( + ninja_file, "asmflags", map(self.ExpandSpecial, asmflags) + ) + self.WriteVariableList( + ninja_file, + "rcflags", + [ + QuoteShellArgument(self.ExpandSpecial(f), self.flavor) + for f in self.msvs_settings.GetRcflags( + config_name, self.GypPathToNinja + ) + ], + ) + + include_dirs = config.get("include_dirs", []) + + env = self.GetToolchainEnv() + if self.flavor == "win": + include_dirs = self.msvs_settings.AdjustIncludeDirs( + include_dirs, config_name + ) + self.WriteVariableList( + ninja_file, + "includes", + [ + QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor) + for i in include_dirs + ], + ) + + if self.flavor == "win": + midl_include_dirs = config.get("midl_include_dirs", []) + midl_include_dirs = self.msvs_settings.AdjustMidlIncludeDirs( + midl_include_dirs, config_name + ) + self.WriteVariableList( + ninja_file, + "midl_includes", + [ + QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor) + for i in midl_include_dirs + ], + ) + + pch_commands = precompiled_header.GetPchBuildCommands(arch) + if 
self.flavor == "mac": + # Most targets use no precompiled headers, so only write these if needed. + for ext, var in [ + ("c", "cflags_pch_c"), + ("cc", "cflags_pch_cc"), + ("m", "cflags_pch_objc"), + ("mm", "cflags_pch_objcc"), + ]: + include = precompiled_header.GetInclude(ext, arch) + if include: + ninja_file.variable(var, include) + + arflags = config.get("arflags", []) + + self.WriteVariableList(ninja_file, "cflags", map(self.ExpandSpecial, cflags)) + self.WriteVariableList( + ninja_file, "cflags_c", map(self.ExpandSpecial, cflags_c) + ) + self.WriteVariableList( + ninja_file, "cflags_cc", map(self.ExpandSpecial, cflags_cc) + ) + if self.flavor == "mac": + self.WriteVariableList( + ninja_file, "cflags_objc", map(self.ExpandSpecial, cflags_objc) + ) + self.WriteVariableList( + ninja_file, "cflags_objcc", map(self.ExpandSpecial, cflags_objcc) + ) + self.WriteVariableList(ninja_file, "arflags", map(self.ExpandSpecial, arflags)) + ninja_file.newline() + outputs = [] + has_rc_source = False + for source in sources: + filename, ext = os.path.splitext(source) + ext = ext[1:] + obj_ext = self.obj_ext + if ext in ("cc", "cpp", "cxx"): + command = "cxx" + self.target.uses_cpp = True + elif ext == "c" or (ext == "S" and self.flavor != "win"): + command = "cc" + elif ext == "s" and self.flavor != "win": # Doesn't generate .o.d files. + command = "cc_s" + elif ( + self.flavor == "win" + and ext in ("asm", "S") + and not self.msvs_settings.HasExplicitAsmRules(spec) + ): + command = "asm" + # Add the _asm suffix as msvs is capable of handling .cc and + # .asm files of the same name without collision. + obj_ext = "_asm.obj" + elif self.flavor == "mac" and ext == "m": + command = "objc" + elif self.flavor == "mac" and ext == "mm": + command = "objcxx" + self.target.uses_cpp = True + elif self.flavor == "win" and ext == "rc": + command = "rc" + obj_ext = ".res" + has_rc_source = True + else: + # Ignore unhandled extensions. + continue + input = self.GypPathToNinja(source) + output = self.GypPathToUniqueOutput(filename + obj_ext) + if arch is not None: + output = AddArch(output, arch) + implicit = precompiled_header.GetObjDependencies([input], [output], arch) + variables = [] + if self.flavor == "win": + variables, output, implicit = precompiled_header.GetFlagsModifications( + input, + output, + implicit, + command, + cflags_c, + cflags_cc, + self.ExpandSpecial, + ) + ninja_file.build( + output, + command, + input, + implicit=[gch for _, _, gch in implicit], + order_only=predepends, + variables=variables, + ) + outputs.append(output) + + if has_rc_source: + resource_include_dirs = config.get("resource_include_dirs", include_dirs) + self.WriteVariableList( + ninja_file, + "resource_includes", + [ + QuoteShellArgument("-I" + self.GypPathToNinja(i, env), self.flavor) + for i in resource_include_dirs + ], + ) + + self.WritePchTargets(ninja_file, pch_commands) + + ninja_file.newline() + return outputs + + def WritePchTargets(self, ninja_file, pch_commands): + """Writes ninja rules to compile prefix headers.""" + if not pch_commands: + return + + for gch, lang_flag, lang, input in pch_commands: + var_name = { + "c": "cflags_pch_c", + "cc": "cflags_pch_cc", + "m": "cflags_pch_objc", + "mm": "cflags_pch_objcc", + }[lang] + + map = { + "c": "cc", + "cc": "cxx", + "m": "objc", + "mm": "objcxx", + } + cmd = map.get(lang) + ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)]) + + def WriteLink(self, spec, config_name, config, link_deps, compile_deps): + """Write out a link step. 
Fills out target.binary. """ + if self.flavor != "mac" or len(self.archs) == 1: + return self.WriteLinkForArch( + self.ninja, spec, config_name, config, link_deps, compile_deps + ) + else: + output = self.ComputeOutput(spec) + inputs = [ + self.WriteLinkForArch( + self.arch_subninjas[arch], + spec, + config_name, + config, + link_deps[arch], + compile_deps, + arch=arch, + ) + for arch in self.archs + ] + extra_bindings = [] + build_output = output + if not self.is_mac_bundle: + self.AppendPostbuildVariable(extra_bindings, spec, output, output) + + # TODO(yyanagisawa): more work needed to fix: + # https://code.google.com/p/gyp/issues/detail?id=411 + if ( + spec["type"] in ("shared_library", "loadable_module") + and not self.is_mac_bundle + ): + extra_bindings.append(("lib", output)) + self.ninja.build( + [output, output + ".TOC"], + "solipo", + inputs, + variables=extra_bindings, + ) + else: + self.ninja.build(build_output, "lipo", inputs, variables=extra_bindings) + return output + + def WriteLinkForArch( + self, ninja_file, spec, config_name, config, link_deps, compile_deps, arch=None + ): + """Write out a link step. Fills out target.binary. """ + command = { + "executable": "link", + "loadable_module": "solink_module", + "shared_library": "solink", + }[spec["type"]] + command_suffix = "" + + implicit_deps = set() + solibs = set() + order_deps = set() + + if compile_deps: + # Normally, the compiles of the target already depend on compile_deps, + # but a shared_library target might have no sources and only link together + # a few static_library deps, so the link step also needs to depend + # on compile_deps to make sure actions in the shared_library target + # get run before the link. + order_deps.add(compile_deps) + + if "dependencies" in spec: + # Two kinds of dependencies: + # - Linkable dependencies (like a .a or a .so): add them to the link line. + # - Non-linkable dependencies (like a rule that generates a file + # and writes a stamp file): add them to implicit_deps + extra_link_deps = set() + for dep in spec["dependencies"]: + target = self.target_outputs.get(dep) + if not target: + continue + linkable = target.Linkable() + if linkable: + new_deps = [] + if ( + self.flavor == "win" + and target.component_objs + and self.msvs_settings.IsUseLibraryDependencyInputs(config_name) + ): + new_deps = target.component_objs + if target.compile_deps: + order_deps.add(target.compile_deps) + elif self.flavor == "win" and target.import_lib: + new_deps = [target.import_lib] + elif target.UsesToc(self.flavor): + solibs.add(target.binary) + implicit_deps.add(target.binary + ".TOC") + else: + new_deps = [target.binary] + for new_dep in new_deps: + if new_dep not in extra_link_deps: + extra_link_deps.add(new_dep) + link_deps.append(new_dep) + + final_output = target.FinalOutput() + if not linkable or final_output != target.binary: + implicit_deps.add(final_output) + + extra_bindings = [] + if self.target.uses_cpp and self.flavor != "win": + extra_bindings.append(("ld", "$ldxx")) + + output = self.ComputeOutput(spec, arch) + if arch is None and not self.is_mac_bundle: + self.AppendPostbuildVariable(extra_bindings, spec, output, output) + + is_executable = spec["type"] == "executable" + # The ldflags config key is not used on mac or win. On those platforms + # linker flags are set via xcode_settings and msvs_settings, respectively. 
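+ # Editorial note (added): LDFLAGS / LDFLAGS_host come from the calling
+ # environment and are prepended below, so flags from the .gyp file can
+ # still override them.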
+ if self.toolset == "target": + env_ldflags = os.environ.get("LDFLAGS", "").split() + elif self.toolset == "host": + env_ldflags = os.environ.get("LDFLAGS_host", "").split() + + if self.flavor == "mac": + ldflags = self.xcode_settings.GetLdflags( + config_name, + self.ExpandSpecial(generator_default_variables["PRODUCT_DIR"]), + self.GypPathToNinja, + arch, + ) + ldflags = env_ldflags + ldflags + elif self.flavor == "win": + manifest_base_name = self.GypPathToUniqueOutput( + self.ComputeOutputFileName(spec) + ) + ( + ldflags, + intermediate_manifest, + manifest_files, + ) = self.msvs_settings.GetLdflags( + config_name, + self.GypPathToNinja, + self.ExpandSpecial, + manifest_base_name, + output, + is_executable, + self.toplevel_build, + ) + ldflags = env_ldflags + ldflags + self.WriteVariableList(ninja_file, "manifests", manifest_files) + implicit_deps = implicit_deps.union(manifest_files) + if intermediate_manifest: + self.WriteVariableList( + ninja_file, "intermediatemanifest", [intermediate_manifest] + ) + command_suffix = _GetWinLinkRuleNameSuffix( + self.msvs_settings.IsEmbedManifest(config_name) + ) + def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja) + if def_file: + implicit_deps.add(def_file) + else: + # Respect environment variables related to build, but target-specific + # flags can still override them. + ldflags = env_ldflags + config.get("ldflags", []) + if is_executable and len(solibs): + rpath = "lib/" + if self.toolset != "target": + rpath += self.toolset + ldflags.append(r"-Wl,-rpath=\$$ORIGIN/%s" % rpath) + else: + ldflags.append("-Wl,-rpath=%s" % self.target_rpath) + ldflags.append("-Wl,-rpath-link=%s" % rpath) + self.WriteVariableList(ninja_file, "ldflags", map(self.ExpandSpecial, ldflags)) + + library_dirs = config.get("library_dirs", []) + if self.flavor == "win": + library_dirs = [ + self.msvs_settings.ConvertVSMacros(library_dir, config_name) + for library_dir in library_dirs + ] + library_dirs = [ + "/LIBPATH:" + + QuoteShellArgument(self.GypPathToNinja(library_dir), self.flavor) + for library_dir in library_dirs + ] + else: + library_dirs = [ + QuoteShellArgument("-L" + self.GypPathToNinja(library_dir), self.flavor) + for library_dir in library_dirs + ] + + libraries = gyp.common.uniquer( + map(self.ExpandSpecial, spec.get("libraries", [])) + ) + if self.flavor == "mac": + libraries = self.xcode_settings.AdjustLibraries(libraries, config_name) + elif self.flavor == "win": + libraries = self.msvs_settings.AdjustLibraries(libraries) + + self.WriteVariableList(ninja_file, "libs", library_dirs + libraries) + + linked_binary = output + + if command in ("solink", "solink_module"): + extra_bindings.append(("soname", os.path.split(output)[1])) + extra_bindings.append(("lib", gyp.common.EncodePOSIXShellArgument(output))) + if self.flavor != "win": + link_file_list = output + if self.is_mac_bundle: + # 'Dependency Framework.framework/Versions/A/Dependency Framework' + # -> 'Dependency Framework.framework.rsp' + link_file_list = self.xcode_settings.GetWrapperName() + if arch: + link_file_list += "." 
+ arch + link_file_list += ".rsp" + # If an rspfile contains spaces, ninja surrounds the filename with + # quotes around it and then passes it to open(), creating a file with + # quotes in its name (and when looking for the rsp file, the name + # makes it through bash which strips the quotes) :-/ + link_file_list = link_file_list.replace(" ", "_") + extra_bindings.append( + ( + "link_file_list", + gyp.common.EncodePOSIXShellArgument(link_file_list), + ) + ) + if self.flavor == "win": + extra_bindings.append(("binary", output)) + if ( + "/NOENTRY" not in ldflags + and not self.msvs_settings.GetNoImportLibrary(config_name) + ): + self.target.import_lib = output + ".lib" + extra_bindings.append( + ("implibflag", "/IMPLIB:%s" % self.target.import_lib) + ) + pdbname = self.msvs_settings.GetPDBName( + config_name, self.ExpandSpecial, output + ".pdb" + ) + output = [output, self.target.import_lib] + if pdbname: + output.append(pdbname) + elif not self.is_mac_bundle: + output = [output, output + ".TOC"] + else: + command = command + "_notoc" + elif self.flavor == "win": + extra_bindings.append(("binary", output)) + pdbname = self.msvs_settings.GetPDBName( + config_name, self.ExpandSpecial, output + ".pdb" + ) + if pdbname: + output = [output, pdbname] + + if len(solibs): + extra_bindings.append( + ("solibs", gyp.common.EncodePOSIXShellList(sorted(solibs))) + ) + + ninja_file.build( + output, + command + command_suffix, + link_deps, + implicit=sorted(implicit_deps), + order_only=list(order_deps), + variables=extra_bindings, + ) + return linked_binary + + def WriteTarget(self, spec, config_name, config, link_deps, compile_deps): + extra_link_deps = any( + self.target_outputs.get(dep).Linkable() + for dep in spec.get("dependencies", []) + if dep in self.target_outputs + ) + if spec["type"] == "none" or (not link_deps and not extra_link_deps): + # TODO(evan): don't call this function for 'none' target types, as + # it doesn't do anything, and we fake out a 'binary' with a stamp file. + self.target.binary = compile_deps + self.target.type = "none" + elif spec["type"] == "static_library": + self.target.binary = self.ComputeOutput(spec) + if ( + self.flavor not in ("mac", "openbsd", "netbsd", "win") + and not self.is_standalone_static_library + ): + self.ninja.build( + self.target.binary, "alink_thin", link_deps, order_only=compile_deps + ) + else: + variables = [] + if self.xcode_settings: + libtool_flags = self.xcode_settings.GetLibtoolflags(config_name) + if libtool_flags: + variables.append(("libtool_flags", libtool_flags)) + if self.msvs_settings: + libflags = self.msvs_settings.GetLibFlags( + config_name, self.GypPathToNinja + ) + variables.append(("libflags", libflags)) + + if self.flavor != "mac" or len(self.archs) == 1: + self.AppendPostbuildVariable( + variables, spec, self.target.binary, self.target.binary + ) + self.ninja.build( + self.target.binary, + "alink", + link_deps, + order_only=compile_deps, + variables=variables, + ) + else: + inputs = [] + for arch in self.archs: + output = self.ComputeOutput(spec, arch) + self.arch_subninjas[arch].build( + output, + "alink", + link_deps[arch], + order_only=compile_deps, + variables=variables, + ) + inputs.append(output) + # TODO: It's not clear if + # libtool_flags should be passed to the alink + # call that combines single-arch .a files into a fat .a file. 
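+ # Editorial note (added): "inputs" here are the per-arch .a files built
+ # just above; this final alink edge merges them into the single fat
+ # archive named by self.target.binary.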
+ self.AppendPostbuildVariable( + variables, spec, self.target.binary, self.target.binary + ) + self.ninja.build( + self.target.binary, + "alink", + inputs, + # FIXME: test proving order_only=compile_deps isn't + # needed. + variables=variables, + ) + else: + self.target.binary = self.WriteLink( + spec, config_name, config, link_deps, compile_deps + ) + return self.target.binary + + def WriteMacBundle(self, spec, mac_bundle_depends, is_empty): + assert self.is_mac_bundle + package_framework = spec["type"] in ("shared_library", "loadable_module") + output = self.ComputeMacBundleOutput() + if is_empty: + output += ".stamp" + variables = [] + self.AppendPostbuildVariable( + variables, + spec, + output, + self.target.binary, + is_command_start=not package_framework, + ) + if package_framework and not is_empty: + if spec["type"] == "shared_library" and self.xcode_settings.isIOS: + self.ninja.build( + output, + "package_ios_framework", + mac_bundle_depends, + variables=variables, + ) + else: + variables.append(("version", self.xcode_settings.GetFrameworkVersion())) + self.ninja.build( + output, "package_framework", mac_bundle_depends, variables=variables + ) + else: + self.ninja.build(output, "stamp", mac_bundle_depends, variables=variables) + self.target.bundle = output + return output + + def GetToolchainEnv(self, additional_settings=None): + """Returns the variables toolchain would set for build steps.""" + env = self.GetSortedXcodeEnv(additional_settings=additional_settings) + if self.flavor == "win": + env = self.GetMsvsToolchainEnv(additional_settings=additional_settings) + return env + + def GetMsvsToolchainEnv(self, additional_settings=None): + """Returns the variables Visual Studio would set for build steps.""" + return self.msvs_settings.GetVSMacroEnv( + "$!PRODUCT_DIR", config=self.config_name + ) + + def GetSortedXcodeEnv(self, additional_settings=None): + """Returns the variables Xcode would set for build steps.""" + assert self.abs_build_dir + abs_build_dir = self.abs_build_dir + return gyp.xcode_emulation.GetSortedXcodeEnv( + self.xcode_settings, + abs_build_dir, + os.path.join(abs_build_dir, self.build_to_base), + self.config_name, + additional_settings, + ) + + def GetSortedXcodePostbuildEnv(self): + """Returns the variables Xcode would set for postbuild steps.""" + postbuild_settings = {} + # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. + # TODO(thakis): It would be nice to have some general mechanism instead. + strip_save_file = self.xcode_settings.GetPerTargetSetting( + "CHROMIUM_STRIP_SAVE_FILE" + ) + if strip_save_file: + postbuild_settings["CHROMIUM_STRIP_SAVE_FILE"] = strip_save_file + return self.GetSortedXcodeEnv(additional_settings=postbuild_settings) + + def AppendPostbuildVariable( + self, variables, spec, output, binary, is_command_start=False + ): + """Adds a 'postbuild' variable if there is a postbuild for |output|.""" + postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start) + if postbuild: + variables.append(("postbuilds", postbuild)) + + def GetPostbuildCommand(self, spec, output, output_binary, is_command_start): + """Returns a shell command that runs all the postbuilds, and removes + |output| if any of them fails. 
If |is_command_start| is False, then the + returned string will start with ' && '.""" + if not self.xcode_settings or spec["type"] == "none" or not output: + return "" + output = QuoteShellArgument(output, self.flavor) + postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True) + if output_binary is not None: + postbuilds = self.xcode_settings.AddImplicitPostbuilds( + self.config_name, + os.path.normpath(os.path.join(self.base_to_build, output)), + QuoteShellArgument( + os.path.normpath(os.path.join(self.base_to_build, output_binary)), + self.flavor, + ), + postbuilds, + quiet=True, + ) + + if not postbuilds: + return "" + # Postbuilds expect to be run in the gyp file's directory, so insert an + # implicit postbuild to cd to there. + postbuilds.insert( + 0, gyp.common.EncodePOSIXShellList(["cd", self.build_to_base]) + ) + env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv()) + # G will be non-null if any postbuild fails. Run all postbuilds in a + # subshell. + commands = ( + env + + " (" + + " && ".join([ninja_syntax.escape(command) for command in postbuilds]) + ) + command_string = ( + commands + + "); G=$$?; " + # Remove the final output if any postbuild failed. + "((exit $$G) || rm -rf %s) " % output + + "&& exit $$G)" + ) + if is_command_start: + return "(" + command_string + " && " + else: + return "$ && (" + command_string + + def ComputeExportEnvString(self, env): + """Given an environment, returns a string looking like + 'export FOO=foo; export BAR="${FOO} bar;' + that exports |env| to the shell.""" + export_str = [] + for k, v in env: + export_str.append( + "export %s=%s;" + % (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))) + ) + return " ".join(export_str) + + def ComputeMacBundleOutput(self): + """Return the 'output' (full output path) to a bundle output directory.""" + assert self.is_mac_bundle + path = generator_default_variables["PRODUCT_DIR"] + return self.ExpandSpecial( + os.path.join(path, self.xcode_settings.GetWrapperName()) + ) + + def ComputeOutputFileName(self, spec, type=None): + """Compute the filename of the final output for the current target.""" + if not type: + type = spec["type"] + + default_variables = copy.copy(generator_default_variables) + CalculateVariables(default_variables, {"flavor": self.flavor}) + + # Compute filename prefix: the product prefix, or a default for + # the product type. + DEFAULT_PREFIX = { + "loadable_module": default_variables["SHARED_LIB_PREFIX"], + "shared_library": default_variables["SHARED_LIB_PREFIX"], + "static_library": default_variables["STATIC_LIB_PREFIX"], + "executable": default_variables["EXECUTABLE_PREFIX"], + } + prefix = spec.get("product_prefix", DEFAULT_PREFIX.get(type, "")) + + # Compute filename extension: the product extension, or a default + # for the product type. + DEFAULT_EXTENSION = { + "loadable_module": default_variables["SHARED_LIB_SUFFIX"], + "shared_library": default_variables["SHARED_LIB_SUFFIX"], + "static_library": default_variables["STATIC_LIB_SUFFIX"], + "executable": default_variables["EXECUTABLE_SUFFIX"], + } + extension = spec.get("product_extension") + if extension: + extension = "." + extension + else: + extension = DEFAULT_EXTENSION.get(type, "") + + if "product_name" in spec: + # If we were given an explicit name, use that. + target = spec["product_name"] + else: + # Otherwise, derive a name from the target name. + target = spec["target_name"] + if prefix == "lib": + # Snip out an extra 'lib' from libs if appropriate. 
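+ # Editorial example (added): a target named "libfoo" is stripped to
+ # "foo" so the computed filename is "libfoo.so", not "liblibfoo.so".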
+ target = StripPrefix(target, "lib") + + if type in ( + "static_library", + "loadable_module", + "shared_library", + "executable", + ): + return f"{prefix}{target}{extension}" + elif type == "none": + return "%s.stamp" % target + else: + raise Exception("Unhandled output type %s" % type) + + def ComputeOutput(self, spec, arch=None): + """Compute the path for the final output of the spec.""" + type = spec["type"] + + if self.flavor == "win": + override = self.msvs_settings.GetOutputName( + self.config_name, self.ExpandSpecial + ) + if override: + return override + + if ( + arch is None + and self.flavor == "mac" + and type + in ("static_library", "executable", "shared_library", "loadable_module") + ): + filename = self.xcode_settings.GetExecutablePath() + else: + filename = self.ComputeOutputFileName(spec, type) + + if arch is None and "product_dir" in spec: + path = os.path.join(spec["product_dir"], filename) + return self.ExpandSpecial(path) + + # Some products go into the output root, libraries go into shared library + # dir, and everything else goes into the normal place. + type_in_output_root = ["executable", "loadable_module"] + if self.flavor == "mac" and self.toolset == "target": + type_in_output_root += ["shared_library", "static_library"] + elif self.flavor == "win" and self.toolset == "target": + type_in_output_root += ["shared_library"] + + if arch is not None: + # Make sure partial executables don't end up in a bundle or the regular + # output directory. + archdir = "arch" + if self.toolset != "target": + archdir = os.path.join("arch", "%s" % self.toolset) + return os.path.join(archdir, AddArch(filename, arch)) + elif type in type_in_output_root or self.is_standalone_static_library: + return filename + elif type == "shared_library": + libdir = "lib" + if self.toolset != "target": + libdir = os.path.join("lib", "%s" % self.toolset) + return os.path.join(libdir, filename) + else: + return self.GypPathToUniqueOutput(filename, qualified=False) + + def WriteVariableList(self, ninja_file, var, values): + assert not isinstance(values, str) + if values is None: + values = [] + ninja_file.variable(var, " ".join(values)) + + def WriteNewNinjaRule( + self, name, args, description, win_shell_flags, env, pool, depfile=None + ): + """Write out a new ninja "rule" statement for a given command. + + Returns the name of the new rule, and a copy of |args| with variables + expanded.""" + + if self.flavor == "win": + args = [ + self.msvs_settings.ConvertVSMacros( + arg, self.base_to_build, config=self.config_name + ) + for arg in args + ] + description = self.msvs_settings.ConvertVSMacros( + description, config=self.config_name + ) + elif self.flavor == "mac": + # |env| is an empty list on non-mac. + args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args] + description = gyp.xcode_emulation.ExpandEnvVars(description, env) + + # TODO: we shouldn't need to qualify names; we do it because + # currently the ninja rule namespace is global, but it really + # should be scoped to the subninja. + rule_name = self.name + if self.toolset == "target": + rule_name += "." + self.toolset + rule_name += "." + name + rule_name = re.sub("[^a-zA-Z0-9_]", "_", rule_name) + + # Remove variable references, but not if they refer to the magic rule + # variables. This is not quite right, as it also protects these for + # actions, not just for rules where they are valid. Good enough. + protect = ["${root}", "${dirname}", "${source}", "${ext}", "${name}"] + protect = "(?!" 
+ "|".join(map(re.escape, protect)) + ")" + description = re.sub(protect + r"\$", "_", description) + + # gyp dictates that commands are run from the base directory. + # cd into the directory before running, and adjust paths in + # the arguments to point to the proper locations. + rspfile = None + rspfile_content = None + args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args] + if self.flavor == "win": + rspfile = rule_name + ".$unique_name.rsp" + # The cygwin case handles this inside the bash sub-shell. + run_in = "" if win_shell_flags.cygwin else " " + self.build_to_base + if win_shell_flags.cygwin: + rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine( + args, self.build_to_base + ) + else: + rspfile_content = gyp.msvs_emulation.EncodeRspFileList( + args, win_shell_flags.quote) + command = ( + "%s gyp-win-tool action-wrapper $arch " % sys.executable + + rspfile + + run_in + ) + else: + env = self.ComputeExportEnvString(env) + command = gyp.common.EncodePOSIXShellList(args) + command = "cd %s; " % self.build_to_base + env + command + + # GYP rules/actions express being no-ops by not touching their outputs. + # Avoid executing downstream dependencies in this case by specifying + # restat=1 to ninja. + self.ninja.rule( + rule_name, + command, + description, + depfile=depfile, + restat=True, + pool=pool, + rspfile=rspfile, + rspfile_content=rspfile_content, + ) + self.ninja.newline() + + return rule_name, args + + +def CalculateVariables(default_variables, params): + """Calculate additional variables for use in the build (called by gyp).""" + global generator_additional_non_configuration_keys + global generator_additional_path_sections + flavor = gyp.common.GetFlavor(params) + if flavor == "mac": + default_variables.setdefault("OS", "mac") + default_variables.setdefault("SHARED_LIB_SUFFIX", ".dylib") + default_variables.setdefault( + "SHARED_LIB_DIR", generator_default_variables["PRODUCT_DIR"] + ) + default_variables.setdefault( + "LIB_DIR", generator_default_variables["PRODUCT_DIR"] + ) + + # Copy additional generator configuration data from Xcode, which is shared + # by the Mac Ninja generator. + import gyp.generator.xcode as xcode_generator + + generator_additional_non_configuration_keys = getattr( + xcode_generator, "generator_additional_non_configuration_keys", [] + ) + generator_additional_path_sections = getattr( + xcode_generator, "generator_additional_path_sections", [] + ) + global generator_extra_sources_for_rules + generator_extra_sources_for_rules = getattr( + xcode_generator, "generator_extra_sources_for_rules", [] + ) + elif flavor == "win": + exts = gyp.MSVSUtil.TARGET_TYPE_EXT + default_variables.setdefault("OS", "win") + default_variables["EXECUTABLE_SUFFIX"] = "." + exts["executable"] + default_variables["STATIC_LIB_PREFIX"] = "" + default_variables["STATIC_LIB_SUFFIX"] = "." + exts["static_library"] + default_variables["SHARED_LIB_PREFIX"] = "" + default_variables["SHARED_LIB_SUFFIX"] = "." + exts["shared_library"] + + # Copy additional generator configuration data from VS, which is shared + # by the Windows Ninja generator. 
+ import gyp.generator.msvs as msvs_generator + + generator_additional_non_configuration_keys = getattr( + msvs_generator, "generator_additional_non_configuration_keys", [] + ) + generator_additional_path_sections = getattr( + msvs_generator, "generator_additional_path_sections", [] + ) + + gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) + else: + operating_system = flavor + if flavor == "android": + operating_system = "linux" # Keep this legacy behavior for now. + default_variables.setdefault("OS", operating_system) + default_variables.setdefault("SHARED_LIB_SUFFIX", ".so") + default_variables.setdefault( + "SHARED_LIB_DIR", os.path.join("$!PRODUCT_DIR", "lib") + ) + default_variables.setdefault("LIB_DIR", os.path.join("$!PRODUCT_DIR", "obj")) + + +def ComputeOutputDir(params): + """Returns the path from the toplevel_dir to the build output directory.""" + # generator_dir: relative path from pwd to where make puts build files. + # Makes migrating from make to ninja easier, ninja doesn't put anything here. + generator_dir = os.path.relpath(params["options"].generator_output or ".") + + # output_dir: relative path from generator_dir to the build directory. + output_dir = params.get("generator_flags", {}).get("output_dir", "out") + + # Relative path from source root to our output files. e.g. "out" + return os.path.normpath(os.path.join(generator_dir, output_dir)) + + +def CalculateGeneratorInputInfo(params): + """Called by __init__ to initialize generator values based on params.""" + # E.g. "out/gypfiles" + toplevel = params["options"].toplevel_dir + qualified_out_dir = os.path.normpath( + os.path.join(toplevel, ComputeOutputDir(params), "gypfiles") + ) + + global generator_filelist_paths + generator_filelist_paths = { + "toplevel": toplevel, + "qualified_out_dir": qualified_out_dir, + } + + +def OpenOutput(path, mode="w"): + """Open |path| for writing, creating directories if necessary.""" + gyp.common.EnsureDirExists(path) + return open(path, mode) + + +def CommandWithWrapper(cmd, wrappers, prog): + wrapper = wrappers.get(cmd, "") + if wrapper: + return wrapper + " " + prog + return prog + + +def GetDefaultConcurrentLinks(): + """Returns a best-guess for a number of concurrent links.""" + pool_size = int(os.environ.get("GYP_LINK_CONCURRENCY", 0)) + if pool_size: + return pool_size + + if sys.platform in ("win32", "cygwin"): + import ctypes + + class MEMORYSTATUSEX(ctypes.Structure): + _fields_ = [ + ("dwLength", ctypes.c_ulong), + ("dwMemoryLoad", ctypes.c_ulong), + ("ullTotalPhys", ctypes.c_ulonglong), + ("ullAvailPhys", ctypes.c_ulonglong), + ("ullTotalPageFile", ctypes.c_ulonglong), + ("ullAvailPageFile", ctypes.c_ulonglong), + ("ullTotalVirtual", ctypes.c_ulonglong), + ("ullAvailVirtual", ctypes.c_ulonglong), + ("sullAvailExtendedVirtual", ctypes.c_ulonglong), + ] + + stat = MEMORYSTATUSEX() + stat.dwLength = ctypes.sizeof(stat) + ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) + + # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM + # on a 64 GB machine. 
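+ # Worked example (figures illustrative, not from the source): with
+ # ullTotalPhys reporting 32 GiB, 32 GiB // 5 GiB = 6, so at most six
+ # links run concurrently unless GYP_LINK_CONCURRENCY_MAX caps it lower.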
+ mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GB + hard_cap = max(1, int(os.environ.get("GYP_LINK_CONCURRENCY_MAX", 2 ** 32))) + return min(mem_limit, hard_cap) + elif sys.platform.startswith("linux"): + if os.path.exists("/proc/meminfo"): + with open("/proc/meminfo") as meminfo: + memtotal_re = re.compile(r"^MemTotal:\s*(\d*)\s*kB") + for line in meminfo: + match = memtotal_re.match(line) + if not match: + continue + # Allow 8Gb per link on Linux because Gold is quite memory hungry + return max(1, int(match.group(1)) // (8 * (2 ** 20))) + return 1 + elif sys.platform == "darwin": + try: + avail_bytes = int(subprocess.check_output(["sysctl", "-n", "hw.memsize"])) + # A static library debug build of Chromium's unit_tests takes ~2.7GB, so + # 4GB per ld process allows for some more bloat. + return max(1, avail_bytes // (4 * (2 ** 30))) # total / 4GB + except subprocess.CalledProcessError: + return 1 + else: + # TODO(scottmg): Implement this for other platforms. + return 1 + + +def _GetWinLinkRuleNameSuffix(embed_manifest): + """Returns the suffix used to select an appropriate linking rule depending on + whether the manifest embedding is enabled.""" + return "_embed" if embed_manifest else "" + + +def _AddWinLinkRules(master_ninja, embed_manifest): + """Adds link rules for Windows platform to |master_ninja|.""" + + def FullLinkCommand(ldcmd, out, binary_type): + resource_name = {"exe": "1", "dll": "2"}[binary_type] + return ( + "%(python)s gyp-win-tool link-with-manifests $arch %(embed)s " + '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' + "$manifests" + % { + "python": sys.executable, + "out": out, + "ldcmd": ldcmd, + "resname": resource_name, + "embed": embed_manifest, + } + ) + + rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest) + use_separate_mspdbsrv = int(os.environ.get("GYP_USE_SEPARATE_MSPDBSRV", "0")) != 0 + dlldesc = "LINK%s(DLL) $binary" % rule_name_suffix.upper() + dllcmd = ( + "%s gyp-win-tool link-wrapper $arch %s " + "$ld /nologo $implibflag /DLL /OUT:$binary " + "@$binary.rsp" % (sys.executable, use_separate_mspdbsrv) + ) + dllcmd = FullLinkCommand(dllcmd, "$binary", "dll") + master_ninja.rule( + "solink" + rule_name_suffix, + description=dlldesc, + command=dllcmd, + rspfile="$binary.rsp", + rspfile_content="$libs $in_newline $ldflags", + restat=True, + pool="link_pool", + ) + master_ninja.rule( + "solink_module" + rule_name_suffix, + description=dlldesc, + command=dllcmd, + rspfile="$binary.rsp", + rspfile_content="$libs $in_newline $ldflags", + restat=True, + pool="link_pool", + ) + # Note that ldflags goes at the end so that it has the option of + # overriding default settings earlier in the command line. + exe_cmd = ( + "%s gyp-win-tool link-wrapper $arch %s " + "$ld /nologo /OUT:$binary @$binary.rsp" + % (sys.executable, use_separate_mspdbsrv) + ) + exe_cmd = FullLinkCommand(exe_cmd, "$binary", "exe") + master_ninja.rule( + "link" + rule_name_suffix, + description="LINK%s $binary" % rule_name_suffix.upper(), + command=exe_cmd, + rspfile="$binary.rsp", + rspfile_content="$in_newline $libs $ldflags", + pool="link_pool", + ) + + +def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name): + options = params["options"] + flavor = gyp.common.GetFlavor(params) + generator_flags = params.get("generator_flags", {}) + + # build_dir: relative path from source root to our output files. + # e.g. 
"out/Debug" + build_dir = os.path.normpath(os.path.join(ComputeOutputDir(params), config_name)) + + toplevel_build = os.path.join(options.toplevel_dir, build_dir) + + master_ninja_file = OpenOutput(os.path.join(toplevel_build, "build.ninja")) + master_ninja = ninja_syntax.Writer(master_ninja_file, width=120) + + # Put build-time support tools in out/{config_name}. + gyp.common.CopyTool(flavor, toplevel_build, generator_flags) + + # Grab make settings for CC/CXX. + # The rules are + # - The priority from low to high is gcc/g++, the 'make_global_settings' in + # gyp, the environment variable. + # - If there is no 'make_global_settings' for CC.host/CXX.host or + # 'CC_host'/'CXX_host' environment variable, cc_host/cxx_host should be set + # to cc/cxx. + if flavor == "win": + ar = "lib.exe" + # cc and cxx must be set to the correct architecture by overriding with one + # of cl_x86 or cl_x64 below. + cc = "UNSET" + cxx = "UNSET" + ld = "link.exe" + ld_host = "$ld" + else: + ar = "ar" + cc = "cc" + cxx = "c++" + ld = "$cc" + ldxx = "$cxx" + ld_host = "$cc_host" + ldxx_host = "$cxx_host" + + ar_host = ar + cc_host = None + cxx_host = None + cc_host_global_setting = None + cxx_host_global_setting = None + clang_cl = None + nm = "nm" + nm_host = "nm" + readelf = "readelf" + readelf_host = "readelf" + + build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) + make_global_settings = data[build_file].get("make_global_settings", []) + build_to_root = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir) + wrappers = {} + for key, value in make_global_settings: + if key == "AR": + ar = os.path.join(build_to_root, value) + if key == "AR.host": + ar_host = os.path.join(build_to_root, value) + if key == "CC": + cc = os.path.join(build_to_root, value) + if cc.endswith("clang-cl"): + clang_cl = cc + if key == "CXX": + cxx = os.path.join(build_to_root, value) + if key == "CC.host": + cc_host = os.path.join(build_to_root, value) + cc_host_global_setting = value + if key == "CXX.host": + cxx_host = os.path.join(build_to_root, value) + cxx_host_global_setting = value + if key == "LD": + ld = os.path.join(build_to_root, value) + if key == "LD.host": + ld_host = os.path.join(build_to_root, value) + if key == "LDXX": + ldxx = os.path.join(build_to_root, value) + if key == "LDXX.host": + ldxx_host = os.path.join(build_to_root, value) + if key == "NM": + nm = os.path.join(build_to_root, value) + if key == "NM.host": + nm_host = os.path.join(build_to_root, value) + if key == "READELF": + readelf = os.path.join(build_to_root, value) + if key == "READELF.host": + readelf_host = os.path.join(build_to_root, value) + if key.endswith("_wrapper"): + wrappers[key[: -len("_wrapper")]] = os.path.join(build_to_root, value) + + # Support wrappers from environment variables too. 
+ for key, value in os.environ.items():
+ if key.lower().endswith("_wrapper"):
+ key_prefix = key[: -len("_wrapper")]
+ key_prefix = re.sub(r"\.HOST$", ".host", key_prefix)
+ wrappers[key_prefix] = os.path.join(build_to_root, value)
+
+ mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
+ if mac_toolchain_dir:
+ wrappers["LINK"] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir
+
+ if flavor == "win":
+ configs = [
+ target_dicts[qualified_target]["configurations"][config_name]
+ for qualified_target in target_list
+ ]
+ shared_system_includes = None
+ if not generator_flags.get("ninja_use_custom_environment_files", 0):
+ shared_system_includes = gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes(
+ configs, generator_flags
+ )
+ cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
+ toplevel_build, generator_flags, shared_system_includes, OpenOutput
+ )
+ for arch, path in sorted(cl_paths.items()):
+ if clang_cl:
+ # If we have selected clang-cl, use that instead.
+ path = clang_cl
+ command = CommandWithWrapper(
+ "CC", wrappers, QuoteShellArgument(path, "win")
+ )
+ if clang_cl:
+ # Use clang-cl to cross-compile for x86 or x86_64.
+ command += " -m32" if arch == "x86" else " -m64"
+ master_ninja.variable("cl_" + arch, command)
+
+ cc = GetEnvironFallback(["CC_target", "CC"], cc)
+ master_ninja.variable("cc", CommandWithWrapper("CC", wrappers, cc))
+ cxx = GetEnvironFallback(["CXX_target", "CXX"], cxx)
+ master_ninja.variable("cxx", CommandWithWrapper("CXX", wrappers, cxx))
+
+ if flavor == "win":
+ master_ninja.variable("ld", ld)
+ master_ninja.variable("idl", "midl.exe")
+ master_ninja.variable("ar", ar)
+ master_ninja.variable("rc", "rc.exe")
+ master_ninja.variable("ml_x86", "ml.exe")
+ master_ninja.variable("ml_x64", "ml64.exe")
+ master_ninja.variable("mt", "mt.exe")
+ else:
+ master_ninja.variable("ld", CommandWithWrapper("LINK", wrappers, ld))
+ master_ninja.variable("ldxx", CommandWithWrapper("LINK", wrappers, ldxx))
+ master_ninja.variable("ar", GetEnvironFallback(["AR_target", "AR"], ar))
+ if flavor != "mac":
+ # Mac does not use readelf/nm for .TOC generation, so avoid polluting
+ # the master ninja with extra unused variables.
+ master_ninja.variable("nm", GetEnvironFallback(["NM_target", "NM"], nm))
+ master_ninja.variable(
+ "readelf", GetEnvironFallback(["READELF_target", "READELF"], readelf)
+ )
+
+ if generator_supports_multiple_toolsets:
+ if not cc_host:
+ cc_host = cc
+ if not cxx_host:
+ cxx_host = cxx
+
+ master_ninja.variable("ar_host", GetEnvironFallback(["AR_host"], ar_host))
+ master_ninja.variable("nm_host", GetEnvironFallback(["NM_host"], nm_host))
+ master_ninja.variable(
+ "readelf_host", GetEnvironFallback(["READELF_host"], readelf_host)
+ )
+ cc_host = GetEnvironFallback(["CC_host"], cc_host)
+ cxx_host = GetEnvironFallback(["CXX_host"], cxx_host)
+
+ # An environment variable may be referenced in 'make_global_settings',
+ # e.g. ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)']; transform them here.
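+ # Illustrative case (not from the source): with make_global_settings
+ # containing ['CC.host', '$(CC)'] and cc resolved to "clang", the
+ # replace() below sets cc_host to "clang".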
+ if "$(CC)" in cc_host and cc_host_global_setting: + cc_host = cc_host_global_setting.replace("$(CC)", cc) + if "$(CXX)" in cxx_host and cxx_host_global_setting: + cxx_host = cxx_host_global_setting.replace("$(CXX)", cxx) + master_ninja.variable( + "cc_host", CommandWithWrapper("CC.host", wrappers, cc_host) + ) + master_ninja.variable( + "cxx_host", CommandWithWrapper("CXX.host", wrappers, cxx_host) + ) + if flavor == "win": + master_ninja.variable("ld_host", ld_host) + else: + master_ninja.variable( + "ld_host", CommandWithWrapper("LINK", wrappers, ld_host) + ) + master_ninja.variable( + "ldxx_host", CommandWithWrapper("LINK", wrappers, ldxx_host) + ) + + master_ninja.newline() + + master_ninja.pool("link_pool", depth=GetDefaultConcurrentLinks()) + master_ninja.newline() + + deps = "msvc" if flavor == "win" else "gcc" + + if flavor != "win": + master_ninja.rule( + "cc", + description="CC $out", + command=( + "$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c " + "$cflags_pch_c -c $in -o $out" + ), + depfile="$out.d", + deps=deps, + ) + master_ninja.rule( + "cc_s", + description="CC $out", + command=( + "$cc $defines $includes $cflags $cflags_c " + "$cflags_pch_c -c $in -o $out" + ), + ) + master_ninja.rule( + "cxx", + description="CXX $out", + command=( + "$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc " + "$cflags_pch_cc -c $in -o $out" + ), + depfile="$out.d", + deps=deps, + ) + else: + # TODO(scottmg) Separate pdb names is a test to see if it works around + # http://crbug.com/142362. It seems there's a race between the creation of + # the .pdb by the precompiled header step for .cc and the compilation of + # .c files. This should be handled by mspdbsrv, but rarely errors out with + # c1xx : fatal error C1033: cannot open program database + # By making the rules target separate pdb files this might be avoided. + cc_command = ( + "ninja -t msvc -e $arch " + "-- " + "$cc /nologo /showIncludes /FC " + "@$out.rsp /c $in /Fo$out /Fd$pdbname_c " + ) + cxx_command = ( + "ninja -t msvc -e $arch " + "-- " + "$cxx /nologo /showIncludes /FC " + "@$out.rsp /c $in /Fo$out /Fd$pdbname_cc " + ) + master_ninja.rule( + "cc", + description="CC $out", + command=cc_command, + rspfile="$out.rsp", + rspfile_content="$defines $includes $cflags $cflags_c", + deps=deps, + ) + master_ninja.rule( + "cxx", + description="CXX $out", + command=cxx_command, + rspfile="$out.rsp", + rspfile_content="$defines $includes $cflags $cflags_cc", + deps=deps, + ) + master_ninja.rule( + "idl", + description="IDL $in", + command=( + "%s gyp-win-tool midl-wrapper $arch $outdir " + "$tlb $h $dlldata $iid $proxy $in " + "$midl_includes $idlflags" % sys.executable + ), + ) + master_ninja.rule( + "rc", + description="RC $in", + # Note: $in must be last otherwise rc.exe complains. 
+ command=( + "%s gyp-win-tool rc-wrapper " + "$arch $rc $defines $resource_includes $rcflags /fo$out $in" + % sys.executable + ), + ) + master_ninja.rule( + "asm", + description="ASM $out", + command=( + "%s gyp-win-tool asm-wrapper " + "$arch $asm $defines $includes $asmflags /c /Fo $out $in" + % sys.executable + ), + ) + + if flavor != "mac" and flavor != "win": + master_ninja.rule( + "alink", + description="AR $out", + command="rm -f $out && $ar rcs $arflags $out $in", + ) + master_ninja.rule( + "alink_thin", + description="AR $out", + command="rm -f $out && $ar rcsT $arflags $out $in", + ) + + # This allows targets that only need to depend on $lib's API to declare an + # order-only dependency on $lib.TOC and avoid relinking such downstream + # dependencies when $lib changes only in non-public ways. + # The resulting string leaves an uninterpolated %{suffix} which + # is used in the final substitution below. + mtime_preserving_solink_base = ( + "if [ ! -e $lib -o ! -e $lib.TOC ]; then " + "%(solink)s && %(extract_toc)s > $lib.TOC; else " + "%(solink)s && %(extract_toc)s > $lib.tmp && " + "if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; " + "fi; fi" + % { + "solink": "$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s", + "extract_toc": ( + "{ $readelf -d $lib | grep SONAME ; " + "$nm -gD -f p $lib | cut -f1-2 -d' '; }" + ), + } + ) + + master_ninja.rule( + "solink", + description="SOLINK $lib", + restat=True, + command=mtime_preserving_solink_base + % {"suffix": "@$link_file_list"}, # noqa: E501 + rspfile="$link_file_list", + rspfile_content=( + "-Wl,--whole-archive $in $solibs -Wl," "--no-whole-archive $libs" + ), + pool="link_pool", + ) + master_ninja.rule( + "solink_module", + description="SOLINK(module) $lib", + restat=True, + command=mtime_preserving_solink_base % {"suffix": "@$link_file_list"}, + rspfile="$link_file_list", + rspfile_content="-Wl,--start-group $in $solibs $libs -Wl,--end-group", + pool="link_pool", + ) + master_ninja.rule( + "link", + description="LINK $out", + command=( + "$ld $ldflags -o $out " + "-Wl,--start-group $in $solibs $libs -Wl,--end-group" + ), + pool="link_pool", + ) + elif flavor == "win": + master_ninja.rule( + "alink", + description="LIB $out", + command=( + "%s gyp-win-tool link-wrapper $arch False " + "$ar /nologo /ignore:4221 /OUT:$out @$out.rsp" % sys.executable + ), + rspfile="$out.rsp", + rspfile_content="$in_newline $libflags", + ) + _AddWinLinkRules(master_ninja, embed_manifest=True) + _AddWinLinkRules(master_ninja, embed_manifest=False) + else: + master_ninja.rule( + "objc", + description="OBJC $out", + command=( + "$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc " + "$cflags_pch_objc -c $in -o $out" + ), + depfile="$out.d", + deps=deps, + ) + master_ninja.rule( + "objcxx", + description="OBJCXX $out", + command=( + "$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc " + "$cflags_pch_objcc -c $in -o $out" + ), + depfile="$out.d", + deps=deps, + ) + master_ninja.rule( + "alink", + description="LIBTOOL-STATIC $out, POSTBUILDS", + command="rm -f $out && " + "./gyp-mac-tool filter-libtool libtool $libtool_flags " + "-static -o $out $in" + "$postbuilds", + ) + master_ninja.rule( + "lipo", + description="LIPO $out, POSTBUILDS", + command="rm -f $out && lipo -create $in -output $out$postbuilds", + ) + master_ninja.rule( + "solipo", + description="SOLIPO $out, POSTBUILDS", + command=( + "rm -f $lib $lib.TOC && lipo -create $in -output $lib$postbuilds &&" + "%(extract_toc)s > $lib.TOC" + % { + 
"extract_toc": "{ otool -l $lib | grep LC_ID_DYLIB -A 5; " + "nm -gP $lib | cut -f1-2 -d' ' | grep -v U$$; true; }" + } + ), + ) + + # Record the public interface of $lib in $lib.TOC. See the corresponding + # comment in the posix section above for details. + solink_base = "$ld %(type)s $ldflags -o $lib %(suffix)s" + mtime_preserving_solink_base = ( + "if [ ! -e $lib -o ! -e $lib.TOC ] || " + # Always force dependent targets to relink if this library + # reexports something. Handling this correctly would require + # recursive TOC dumping but this is rare in practice, so punt. + "otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then " + "%(solink)s && %(extract_toc)s > $lib.TOC; " + "else " + "%(solink)s && %(extract_toc)s > $lib.tmp && " + "if ! cmp -s $lib.tmp $lib.TOC; then " + "mv $lib.tmp $lib.TOC ; " + "fi; " + "fi" + % { + "solink": solink_base, + "extract_toc": "{ otool -l $lib | grep LC_ID_DYLIB -A 5; " + "nm -gP $lib | cut -f1-2 -d' ' | grep -v U$$; true; }", + } + ) + + solink_suffix = "@$link_file_list$postbuilds" + master_ninja.rule( + "solink", + description="SOLINK $lib, POSTBUILDS", + restat=True, + command=mtime_preserving_solink_base + % {"suffix": solink_suffix, "type": "-shared"}, + rspfile="$link_file_list", + rspfile_content="$in $solibs $libs", + pool="link_pool", + ) + master_ninja.rule( + "solink_notoc", + description="SOLINK $lib, POSTBUILDS", + restat=True, + command=solink_base % {"suffix": solink_suffix, "type": "-shared"}, + rspfile="$link_file_list", + rspfile_content="$in $solibs $libs", + pool="link_pool", + ) + + master_ninja.rule( + "solink_module", + description="SOLINK(module) $lib, POSTBUILDS", + restat=True, + command=mtime_preserving_solink_base + % {"suffix": solink_suffix, "type": "-bundle"}, + rspfile="$link_file_list", + rspfile_content="$in $solibs $libs", + pool="link_pool", + ) + master_ninja.rule( + "solink_module_notoc", + description="SOLINK(module) $lib, POSTBUILDS", + restat=True, + command=solink_base % {"suffix": solink_suffix, "type": "-bundle"}, + rspfile="$link_file_list", + rspfile_content="$in $solibs $libs", + pool="link_pool", + ) + + master_ninja.rule( + "link", + description="LINK $out, POSTBUILDS", + command=("$ld $ldflags -o $out " "$in $solibs $libs$postbuilds"), + pool="link_pool", + ) + master_ninja.rule( + "preprocess_infoplist", + description="PREPROCESS INFOPLIST $out", + command=( + "$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && " + "plutil -convert xml1 $out $out" + ), + ) + master_ninja.rule( + "copy_infoplist", + description="COPY INFOPLIST $in", + command="$env ./gyp-mac-tool copy-info-plist $in $out $binary $keys", + ) + master_ninja.rule( + "merge_infoplist", + description="MERGE INFOPLISTS $in", + command="$env ./gyp-mac-tool merge-info-plist $out $in", + ) + master_ninja.rule( + "compile_xcassets", + description="COMPILE XCASSETS $in", + command="$env ./gyp-mac-tool compile-xcassets $keys $in", + ) + master_ninja.rule( + "compile_ios_framework_headers", + description="COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in", + command="$env ./gyp-mac-tool compile-ios-framework-header-map $out " + "$framework $in && $env ./gyp-mac-tool " + "copy-ios-framework-headers $framework $copy_headers", + ) + master_ninja.rule( + "mac_tool", + description="MACTOOL $mactool_cmd $in", + command="$env ./gyp-mac-tool $mactool_cmd $in $out $binary", + ) + master_ninja.rule( + "package_framework", + description="PACKAGE FRAMEWORK $out, POSTBUILDS", + command="./gyp-mac-tool package-framework $out $version$postbuilds " + "&& touch 
$out", + ) + master_ninja.rule( + "package_ios_framework", + description="PACKAGE IOS FRAMEWORK $out, POSTBUILDS", + command="./gyp-mac-tool package-ios-framework $out $postbuilds " + "&& touch $out", + ) + if flavor == "win": + master_ninja.rule( + "stamp", + description="STAMP $out", + command="%s gyp-win-tool stamp $out" % sys.executable, + ) + else: + master_ninja.rule( + "stamp", description="STAMP $out", command="${postbuilds}touch $out" + ) + if flavor == "win": + master_ninja.rule( + "copy", + description="COPY $in $out", + command="%s gyp-win-tool recursive-mirror $in $out" % sys.executable, + ) + elif flavor == "zos": + master_ninja.rule( + "copy", + description="COPY $in $out", + command="rm -rf $out && cp -fRP $in $out", + ) + else: + master_ninja.rule( + "copy", + description="COPY $in $out", + command="ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)", + ) + master_ninja.newline() + + all_targets = set() + for build_file in params["build_files"]: + for target in gyp.common.AllTargets( + target_list, target_dicts, os.path.normpath(build_file) + ): + all_targets.add(target) + all_outputs = set() + + # target_outputs is a map from qualified target name to a Target object. + target_outputs = {} + # target_short_names is a map from target short name to a list of Target + # objects. + target_short_names = {} + + # short name of targets that were skipped because they didn't contain anything + # interesting. + # NOTE: there may be overlap between this an non_empty_target_names. + empty_target_names = set() + + # Set of non-empty short target names. + # NOTE: there may be overlap between this an empty_target_names. + non_empty_target_names = set() + + for qualified_target in target_list: + # qualified_target is like: third_party/icu/icu.gyp:icui18n#target + build_file, name, toolset = gyp.common.ParseQualifiedTarget(qualified_target) + + this_make_global_settings = data[build_file].get("make_global_settings", []) + assert make_global_settings == this_make_global_settings, ( + "make_global_settings needs to be the same for all targets. " + f"{this_make_global_settings} vs. {make_global_settings}" + ) + + spec = target_dicts[qualified_target] + if flavor == "mac": + gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) + + # If build_file is a symlink, we must not follow it because there's a chance + # it could point to a path above toplevel_dir, and we cannot correctly deal + # with that case at the moment. + build_file = gyp.common.RelativePath(build_file, options.toplevel_dir, False) + + qualified_target_for_hash = gyp.common.QualifiedTarget( + build_file, name, toolset + ) + qualified_target_for_hash = qualified_target_for_hash.encode("utf-8") + hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest() + + base_path = os.path.dirname(build_file) + obj = "obj" + if toolset != "target": + obj += "." + toolset + output_file = os.path.join(obj, base_path, name + ".ninja") + + ninja_output = StringIO() + writer = NinjaWriter( + hash_for_rules, + target_outputs, + base_path, + build_dir, + ninja_output, + toplevel_build, + output_file, + flavor, + toplevel_dir=options.toplevel_dir, + ) + + target = writer.WriteSpec(spec, config_name, generator_flags) + + if ninja_output.tell() > 0: + # Only create files for ninja files that actually have contents. 
+ with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
+ ninja_file.write(ninja_output.getvalue())
+ ninja_output.close()
+ master_ninja.subninja(output_file)
+
+ if target:
+ if name != target.FinalOutput() and spec["toolset"] == "target":
+ target_short_names.setdefault(name, []).append(target)
+ target_outputs[qualified_target] = target
+ if qualified_target in all_targets:
+ all_outputs.add(target.FinalOutput())
+ non_empty_target_names.add(name)
+ else:
+ empty_target_names.add(name)
+
+ if target_short_names:
+ # Write a short name to build this target. This benefits both the
+ # "build chrome" case and the gyp tests, which expect to be
+ # able to run actions and build libraries by their short name.
+ master_ninja.newline()
+ master_ninja.comment("Short names for targets.")
+ for short_name in sorted(target_short_names):
+ master_ninja.build(
+ short_name,
+ "phony",
+ [x.FinalOutput() for x in target_short_names[short_name]],
+ )
+
+ # Write phony targets for any empty targets that weren't written yet. As
+ # short names are not necessarily unique, only do this for short names that
+ # haven't already been output for another target.
+ empty_target_names = empty_target_names - non_empty_target_names
+ if empty_target_names:
+ master_ninja.newline()
+ master_ninja.comment("Empty targets (output for completeness).")
+ for name in sorted(empty_target_names):
+ master_ninja.build(name, "phony")
+
+ if all_outputs:
+ master_ninja.newline()
+ master_ninja.build("all", "phony", sorted(all_outputs))
+ master_ninja.default(generator_flags.get("default_target", "all"))
+
+ master_ninja_file.close()
+
+
+def PerformBuild(data, configurations, params):
+ options = params["options"]
+ for config in configurations:
+ builddir = os.path.join(options.toplevel_dir, "out", config)
+ arguments = ["ninja", "-C", builddir]
+ print(f"Building [{config}]: {arguments}")
+ subprocess.check_call(arguments)
+
+
+def CallGenerateOutputForConfig(arglist):
+ # Ignore the interrupt signal so that the parent process catches it and
+ # kills all multiprocessing children.
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+ (target_list, target_dicts, data, params, config_name) = arglist
+ GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+ # Update target_dicts for iOS device builds.
+ target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator( + target_dicts + ) + + user_config = params.get("generator_flags", {}).get("config", None) + if gyp.common.GetFlavor(params) == "win": + target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts) + target_list, target_dicts = MSVSUtil.InsertLargePdbShims( + target_list, target_dicts, generator_default_variables + ) + + if user_config: + GenerateOutputForConfig(target_list, target_dicts, data, params, user_config) + else: + config_names = target_dicts[target_list[0]]["configurations"] + if params["parallel"]: + try: + pool = multiprocessing.Pool(len(config_names)) + arglists = [] + for config_name in config_names: + arglists.append( + (target_list, target_dicts, data, params, config_name) + ) + pool.map(CallGenerateOutputForConfig, arglists) + except KeyboardInterrupt as e: + pool.terminate() + raise e + else: + for config_name in config_names: + GenerateOutputForConfig( + target_list, target_dicts, data, params, config_name + ) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py new file mode 100644 index 0000000..7d18068 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python3 + +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" Unit tests for the ninja.py file. """ + +import sys +import unittest + +import gyp.generator.ninja as ninja + + +class TestPrefixesAndSuffixes(unittest.TestCase): + def test_BinaryNamesWindows(self): + # These cannot run on non-Windows as they require a VS installation to + # correctly handle variable expansion. + if sys.platform.startswith("win"): + writer = ninja.NinjaWriter( + "foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "win" + ) + spec = {"target_name": "wee"} + self.assertTrue( + writer.ComputeOutputFileName(spec, "executable").endswith(".exe") + ) + self.assertTrue( + writer.ComputeOutputFileName(spec, "shared_library").endswith(".dll") + ) + self.assertTrue( + writer.ComputeOutputFileName(spec, "static_library").endswith(".lib") + ) + + def test_BinaryNamesLinux(self): + writer = ninja.NinjaWriter( + "foo", "wee", ".", ".", "build.ninja", ".", "build.ninja", "linux" + ) + spec = {"target_name": "wee"} + self.assertTrue("." not in writer.ComputeOutputFileName(spec, "executable")) + self.assertTrue( + writer.ComputeOutputFileName(spec, "shared_library").startswith("lib") + ) + self.assertTrue( + writer.ComputeOutputFileName(spec, "static_library").startswith("lib") + ) + self.assertTrue( + writer.ComputeOutputFileName(spec, "shared_library").endswith(".so") + ) + self.assertTrue( + writer.ComputeOutputFileName(spec, "static_library").endswith(".a") + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py new file mode 100644 index 0000000..2f4d17e --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py @@ -0,0 +1,1394 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ + +import filecmp +import gyp.common +import gyp.xcodeproj_file +import gyp.xcode_ninja +import errno +import os +import sys +import posixpath +import re +import shutil +import subprocess +import tempfile + + +# Project files generated by this module will use _intermediate_var as a +# custom Xcode setting whose value is a DerivedSources-like directory that's +# project-specific and configuration-specific. The normal choice, +# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive +# as it is likely that multiple targets within a single project file will want +# to access the same set of generated files. The other option, +# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific, +# it is not configuration-specific. INTERMEDIATE_DIR is defined as +# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION). +_intermediate_var = "INTERMEDIATE_DIR" + +# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all +# targets that share the same BUILT_PRODUCTS_DIR. +_shared_intermediate_var = "SHARED_INTERMEDIATE_DIR" + +_library_search_paths_var = "LIBRARY_SEARCH_PATHS" + +generator_default_variables = { + "EXECUTABLE_PREFIX": "", + "EXECUTABLE_SUFFIX": "", + "STATIC_LIB_PREFIX": "lib", + "SHARED_LIB_PREFIX": "lib", + "STATIC_LIB_SUFFIX": ".a", + "SHARED_LIB_SUFFIX": ".dylib", + # INTERMEDIATE_DIR is a place for targets to build up intermediate products. + # It is specific to each build environment. It is only guaranteed to exist + # and be constant within the context of a project, corresponding to a single + # input file. Some build environments may allow their intermediate directory + # to be shared on a wider scale, but this is not guaranteed. + "INTERMEDIATE_DIR": "$(%s)" % _intermediate_var, + "OS": "mac", + "PRODUCT_DIR": "$(BUILT_PRODUCTS_DIR)", + "LIB_DIR": "$(BUILT_PRODUCTS_DIR)", + "RULE_INPUT_ROOT": "$(INPUT_FILE_BASE)", + "RULE_INPUT_EXT": "$(INPUT_FILE_SUFFIX)", + "RULE_INPUT_NAME": "$(INPUT_FILE_NAME)", + "RULE_INPUT_PATH": "$(INPUT_FILE_PATH)", + "RULE_INPUT_DIRNAME": "$(INPUT_FILE_DIRNAME)", + "SHARED_INTERMEDIATE_DIR": "$(%s)" % _shared_intermediate_var, + "CONFIGURATION_NAME": "$(CONFIGURATION)", +} + +# The Xcode-specific sections that hold paths. +generator_additional_path_sections = [ + "mac_bundle_resources", + "mac_framework_headers", + "mac_framework_private_headers", + # 'mac_framework_dirs', input already handles _dirs endings. +] + +# The Xcode-specific keys that exist on targets and aren't moved down to +# configurations. +generator_additional_non_configuration_keys = [ + "ios_app_extension", + "ios_watch_app", + "ios_watchkit_extension", + "mac_bundle", + "mac_bundle_resources", + "mac_framework_headers", + "mac_framework_private_headers", + "mac_xctest_bundle", + "mac_xcuitest_bundle", + "xcode_create_dependents_test_runner", +] + +# We want to let any rules apply to files that are resources also. +generator_extra_sources_for_rules = [ + "mac_bundle_resources", + "mac_framework_headers", + "mac_framework_private_headers", +] + +generator_filelist_paths = None + +# Xcode's standard set of library directories, which don't need to be duplicated +# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay. 
+xcode_standard_library_dirs = frozenset( + ["$(SDKROOT)/usr/lib", "$(SDKROOT)/usr/local/lib"] +) + + +def CreateXCConfigurationList(configuration_names): + xccl = gyp.xcodeproj_file.XCConfigurationList({"buildConfigurations": []}) + if len(configuration_names) == 0: + configuration_names = ["Default"] + for configuration_name in configuration_names: + xcbc = gyp.xcodeproj_file.XCBuildConfiguration({"name": configuration_name}) + xccl.AppendProperty("buildConfigurations", xcbc) + xccl.SetProperty("defaultConfigurationName", configuration_names[0]) + return xccl + + +class XcodeProject: + def __init__(self, gyp_path, path, build_file_dict): + self.gyp_path = gyp_path + self.path = path + self.project = gyp.xcodeproj_file.PBXProject(path=path) + projectDirPath = gyp.common.RelativePath( + os.path.dirname(os.path.abspath(self.gyp_path)), + os.path.dirname(path) or ".", + ) + self.project.SetProperty("projectDirPath", projectDirPath) + self.project_file = gyp.xcodeproj_file.XCProjectFile( + {"rootObject": self.project} + ) + self.build_file_dict = build_file_dict + + # TODO(mark): add destructor that cleans up self.path if created_dir is + # True and things didn't complete successfully. Or do something even + # better with "try"? + self.created_dir = False + try: + os.makedirs(self.path) + self.created_dir = True + except OSError as e: + if e.errno != errno.EEXIST: + raise + + def Finalize1(self, xcode_targets, serialize_all_tests): + # Collect a list of all of the build configuration names used by the + # various targets in the file. It is very heavily advised to keep each + # target in an entire project (even across multiple project files) using + # the same set of configuration names. + configurations = [] + for xct in self.project.GetProperty("targets"): + xccl = xct.GetProperty("buildConfigurationList") + xcbcs = xccl.GetProperty("buildConfigurations") + for xcbc in xcbcs: + name = xcbc.GetProperty("name") + if name not in configurations: + configurations.append(name) + + # Replace the XCConfigurationList attached to the PBXProject object with + # a new one specifying all of the configuration names used by the various + # targets. + try: + xccl = CreateXCConfigurationList(configurations) + self.project.SetProperty("buildConfigurationList", xccl) + except Exception: + sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path) + raise + + # The need for this setting is explained above where _intermediate_var is + # defined. The comments below about wanting to avoid project-wide build + # settings apply here too, but this needs to be set on a project-wide basis + # so that files relative to the _intermediate_var setting can be displayed + # properly in the Xcode UI. + # + # Note that for configuration-relative files such as anything relative to + # _intermediate_var, for the purposes of UI tree view display, Xcode will + # only resolve the configuration name once, when the project file is + # opened. If the active build configuration is changed, the project file + # must be closed and reopened if it is desired for the tree view to update. + # This is filed as Apple radar 6588391. + xccl.SetBuildSetting( + _intermediate_var, "$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)" + ) + xccl.SetBuildSetting( + _shared_intermediate_var, "$(SYMROOT)/DerivedSources/$(CONFIGURATION)" + ) + + # Set user-specified project-wide build settings and config files. This + # is intended to be used very sparingly. Really, almost everything should + # go into target-specific build settings sections. 
The project-wide
+ # settings are only intended to be used in cases where Xcode attempts to
+ # resolve variable references in a project context as opposed to a target
+ # context, such as when resolving sourceTree references while building up
+ # the tree view for UI display.
+ # Any values set globally are applied to all configurations, then any
+ # per-configuration values are applied.
+ for xck, xcv in self.build_file_dict.get("xcode_settings", {}).items():
+ xccl.SetBuildSetting(xck, xcv)
+ if "xcode_config_file" in self.build_file_dict:
+ config_ref = self.project.AddOrGetFileInRootGroup(
+ self.build_file_dict["xcode_config_file"]
+ )
+ xccl.SetBaseConfiguration(config_ref)
+ build_file_configurations = self.build_file_dict.get("configurations", {})
+ if build_file_configurations:
+ for config_name in configurations:
+ build_file_configuration_named = build_file_configurations.get(
+ config_name, {}
+ )
+ if build_file_configuration_named:
+ xcc = xccl.ConfigurationNamed(config_name)
+ for xck, xcv in build_file_configuration_named.get(
+ "xcode_settings", {}
+ ).items():
+ xcc.SetBuildSetting(xck, xcv)
+ if "xcode_config_file" in build_file_configuration_named:
+ config_ref = self.project.AddOrGetFileInRootGroup(
+ build_file_configurations[config_name]["xcode_config_file"]
+ )
+ xcc.SetBaseConfiguration(config_ref)
+
+ # Sort the targets based on how they appeared in the input.
+ # TODO(mark): Like a lot of other things here, this assumes internal
+ # knowledge of PBXProject - in this case, of its "targets" property.
+
+ # ordinary_targets are ordinary targets that are already in the project
+ # file. run_test_targets are the targets that run unittests and should be
+ # used for the Run All Tests target. support_targets are the action/rule
+ # targets used by GYP file targets, just kept for the assert check.
+ ordinary_targets = []
+ run_test_targets = []
+ support_targets = []
+
+ # targets is the full list of targets in the project.
+ targets = []
+
+ # does the project define its own "all" target?
+ has_custom_all = False
+
+ # targets_for_all is the list of ordinary_targets that should be listed
+ # in this project's "All" target. It includes each non_runtest_target
+ # that does not have suppress_wildcard set.
+ targets_for_all = []
+
+ for target in self.build_file_dict["targets"]:
+ target_name = target["target_name"]
+ toolset = target["toolset"]
+ qualified_target = gyp.common.QualifiedTarget(
+ self.gyp_path, target_name, toolset
+ )
+ xcode_target = xcode_targets[qualified_target]
+ # Make sure that the target being added to the sorted list is already in
+ # the unsorted list.
+ assert xcode_target in self.project._properties["targets"]
+ targets.append(xcode_target)
+ ordinary_targets.append(xcode_target)
+ if xcode_target.support_target:
+ support_targets.append(xcode_target.support_target)
+ targets.append(xcode_target.support_target)
+
+ if not int(target.get("suppress_wildcard", False)):
+ targets_for_all.append(xcode_target)
+
+ if target_name.lower() == "all":
+ has_custom_all = True
+
+ # If this target has a 'run_as' attribute, add its target to the
+ # targets, and add it to the test targets.
+ if target.get("run_as"):
+ # Make a target to run something. It should have one
+ # dependency, the parent xcode target.
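+ # A 'run_as' dict typically looks like the following (values are
+ # illustrative; the keys are the ones read below):
+ # {'action': ['./unit_tests'],
+ # 'working_directory': '$(BUILT_PRODUCTS_DIR)',
+ # 'environment': {'GTEST_COLOR': 'yes'}}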
+ xccl = CreateXCConfigurationList(configurations)
+ run_target = gyp.xcodeproj_file.PBXAggregateTarget(
+ {
+ "name": "Run " + target_name,
+ "productName": xcode_target.GetProperty("productName"),
+ "buildConfigurationList": xccl,
+ },
+ parent=self.project,
+ )
+ run_target.AddDependency(xcode_target)
+
+ command = target["run_as"]
+ script = ""
+ if command.get("working_directory"):
+ script = (
+ script
+ + 'cd "%s"\n'
+ % gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ command.get("working_directory")
+ )
+ )
+
+ if command.get("environment"):
+ script = (
+ script
+ + "\n".join(
+ [
+ 'export %s="%s"'
+ % (
+ key,
+ gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ val
+ ),
+ )
+ for (key, val) in command.get("environment").items()
+ ]
+ )
+ + "\n"
+ )
+
+ # Some tests end up using sockets, files on disk, etc. and can get
+ # confused if more than one test runs at a time. The generator
+ # flag 'xcode_serialize_all_test_runs' controls whether all test
+ # runs are forced to be serial. It defaults to True. To get serial
+ # runs, this little bit of Python does the same as the Linux flock
+ # utility to make sure only one runs at a time.
+ command_prefix = ""
+ if serialize_all_tests:
+ command_prefix = """python -c "import fcntl, subprocess, sys
+file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
+fcntl.flock(file.fileno(), fcntl.LOCK_EX)
+sys.exit(subprocess.call(sys.argv[1:]))" """
+
+ # If we were unable to exec for some reason, we want to exit
+ # with an error, and fix up variable references to be shell
+ # syntax instead of xcode syntax.
+ script = (
+ script
+ + "exec "
+ + command_prefix
+ + "%s\nexit 1\n"
+ % gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
+ gyp.common.EncodePOSIXShellList(command.get("action"))
+ )
+ )
+
+ ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase(
+ {"shellScript": script, "showEnvVarsInLog": 0}
+ )
+ run_target.AppendProperty("buildPhases", ssbp)
+
+ # Add the run target to the project file.
+ targets.append(run_target)
+ run_test_targets.append(run_target)
+ xcode_target.test_runner = run_target
+
+ # Make sure that the list of targets being replaced is the same length as
+ # the one replacing it, but allow for the added test runner targets.
+ assert len(self.project._properties["targets"]) == len(ordinary_targets) + len(
+ support_targets
+ )
+
+ self.project._properties["targets"] = targets
+
+ # Get rid of unnecessary levels of depth in groups like the Source group.
+ self.project.RootGroupsTakeOverOnlyChildren(True)
+
+ # Sort the groups nicely. Do this after sorting the targets, because the
+ # Products group is sorted based on the order of the targets.
+ self.project.SortGroups()
+
+ # Create an "All" target if there's more than one target in this project
+ # file and the project didn't define its own "All" target. Put a generated
+ # "All" target first so that people opening up the project for the first
+ # time will build everything by default.
+ if len(targets_for_all) > 1 and not has_custom_all:
+ xccl = CreateXCConfigurationList(configurations)
+ all_target = gyp.xcodeproj_file.PBXAggregateTarget(
+ {"buildConfigurationList": xccl, "name": "All"}, parent=self.project
+ )
+
+ for target in targets_for_all:
+ all_target.AddDependency(target)
+
+ # TODO(mark): This is evil because it relies on internal knowledge of
+ # PBXProject._properties. It's important to get the "All" target first,
+ # though.
+ self.project._properties["targets"].insert(0, all_target)
+
+ # The same, but for run_test_targets.
+ if len(run_test_targets) > 1: + xccl = CreateXCConfigurationList(configurations) + run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget( + {"buildConfigurationList": xccl, "name": "Run All Tests"}, + parent=self.project, + ) + for run_test_target in run_test_targets: + run_all_tests_target.AddDependency(run_test_target) + + # Insert after the "All" target, which must exist if there is more than + # one run_test_target. + self.project._properties["targets"].insert(1, run_all_tests_target) + + def Finalize2(self, xcode_targets, xcode_target_to_target_dict): + # Finalize2 needs to happen in a separate step because the process of + # updating references to other projects depends on the ordering of targets + # within remote project files. Finalize1 is responsible for sorting duty, + # and once all project files are sorted, Finalize2 can come in and update + # these references. + + # To support making a "test runner" target that will run all the tests + # that are direct dependents of any given target, we look for + # xcode_create_dependents_test_runner being set on an Aggregate target, + # and generate a second target that will run the tests runners found under + # the marked target. + for bf_tgt in self.build_file_dict["targets"]: + if int(bf_tgt.get("xcode_create_dependents_test_runner", 0)): + tgt_name = bf_tgt["target_name"] + toolset = bf_tgt["toolset"] + qualified_target = gyp.common.QualifiedTarget( + self.gyp_path, tgt_name, toolset + ) + xcode_target = xcode_targets[qualified_target] + if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget): + # Collect all the run test targets. + all_run_tests = [] + pbxtds = xcode_target.GetProperty("dependencies") + for pbxtd in pbxtds: + pbxcip = pbxtd.GetProperty("targetProxy") + dependency_xct = pbxcip.GetProperty("remoteGlobalIDString") + if hasattr(dependency_xct, "test_runner"): + all_run_tests.append(dependency_xct.test_runner) + + # Directly depend on all the runners as they depend on the target + # that builds them. + if len(all_run_tests) > 0: + run_all_target = gyp.xcodeproj_file.PBXAggregateTarget( + { + "name": "Run %s Tests" % tgt_name, + "productName": tgt_name, + }, + parent=self.project, + ) + for run_test_target in all_run_tests: + run_all_target.AddDependency(run_test_target) + + # Insert the test runner after the related target. + idx = self.project._properties["targets"].index(xcode_target) + self.project._properties["targets"].insert( + idx + 1, run_all_target + ) + + # Update all references to other projects, to make sure that the lists of + # remote products are complete. Otherwise, Xcode will fill them in when + # it opens the project file, which will result in unnecessary diffs. + # TODO(mark): This is evil because it relies on internal knowledge of + # PBXProject._other_pbxprojects. + for other_pbxproject in self.project._other_pbxprojects.keys(): + self.project.AddOrGetProjectReference(other_pbxproject) + + self.project.SortRemoteProductReferences() + + # Give everything an ID. + self.project_file.ComputeIDs() + + # Make sure that no two objects in the project file have the same ID. If + # multiple objects wind up with the same ID, upon loading the file, Xcode + # will only recognize one object (the last one in the file?) and the + # results are unpredictable. + self.project_file.EnsureNoIDCollisions() + + def Write(self): + # Write the project file to a temporary location first. 
Xcode watches for + # changes to the project file and presents a UI sheet offering to reload + # the project when it does change. However, in some cases, especially when + # multiple projects are open or when Xcode is busy, things don't work so + # seamlessly. Sometimes, Xcode is able to detect that a project file has + # changed but can't unload it because something else is referencing it. + # To mitigate this problem, and to avoid even having Xcode present the UI + # sheet when an open project is rewritten for inconsequential changes, the + # project file is written to a temporary file in the xcodeproj directory + # first. The new temporary file is then compared to the existing project + # file, if any. If they differ, the new file replaces the old; otherwise, + # the new project file is simply deleted. Xcode properly detects a file + # being renamed over an open project file as a change and so it remains + # able to present the "project file changed" sheet under this system. + # Writing to a temporary file first also avoids the possible problem of + # Xcode rereading an incomplete project file. + (output_fd, new_pbxproj_path) = tempfile.mkstemp( + suffix=".tmp", prefix="project.pbxproj.gyp.", dir=self.path + ) + + try: + output_file = os.fdopen(output_fd, "w") + + self.project_file.Print(output_file) + output_file.close() + + pbxproj_path = os.path.join(self.path, "project.pbxproj") + + same = False + try: + same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False) + except OSError as e: + if e.errno != errno.ENOENT: + raise + + if same: + # The new file is identical to the old one, just get rid of the new + # one. + os.unlink(new_pbxproj_path) + else: + # The new file is different from the old one, or there is no old one. + # Rename the new file to the permanent name. + # + # tempfile.mkstemp uses an overly restrictive mode, resulting in a + # file that can only be read by the owner, regardless of the umask. + # There's no reason to not respect the umask here, which means that + # an extra hoop is required to fetch it and reset the new file's mode. + # + # No way to get the umask without setting a new one? Set a safe one + # and then set it back to the old value. + umask = os.umask(0o77) + os.umask(umask) + + os.chmod(new_pbxproj_path, 0o666 & ~umask) + os.rename(new_pbxproj_path, pbxproj_path) + + except Exception: + # Don't leave turds behind. In fact, if this code was responsible for + # creating the xcodeproj directory, get rid of that too. + os.unlink(new_pbxproj_path) + if self.created_dir: + shutil.rmtree(self.path, True) + raise + + +def AddSourceToTarget(source, type, pbxp, xct): + # TODO(mark): Perhaps source_extensions and library_extensions can be made a + # little bit fancier. + source_extensions = ["c", "cc", "cpp", "cxx", "m", "mm", "s", "swift"] + + # .o is conceptually more of a "source" than a "library," but Xcode thinks + # of "sources" as things to compile and "libraries" (or "frameworks") as + # things to link with. Adding an object file to an Xcode target's frameworks + # phase works properly. 
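+ # For example (file names illustrative), for a non-"none" target type:
+ # "view.mm" lands in the sources phase, "libssl.a" and "crash_pad.o" in
+ # the frameworks phase, and "README.txt" only in the project's groups.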
+ library_extensions = ["a", "dylib", "framework", "o"]
+
+ basename = posixpath.basename(source)
+ (root, ext) = posixpath.splitext(basename)
+ if ext:
+ ext = ext[1:].lower()
+
+ if ext in source_extensions and type != "none":
+ xct.SourcesPhase().AddFile(source)
+ elif ext in library_extensions and type != "none":
+ xct.FrameworksPhase().AddFile(source)
+ else:
+ # Files that aren't added to a sources or frameworks build phase can still
+ # go into the project file, just not as part of a build phase.
+ pbxp.AddOrGetFileInRootGroup(source)
+
+
+def AddResourceToTarget(resource, pbxp, xct):
+ # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
+ # where it's used.
+ xct.ResourcesPhase().AddFile(resource)
+
+
+def AddHeaderToTarget(header, pbxp, xct, is_public):
+ # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
+ # where it's used.
+ settings = "{ATTRIBUTES = (%s, ); }" % ("Private", "Public")[is_public]
+ xct.HeadersPhase().AddFile(header, settings)
+
+
+_xcode_variable_re = re.compile(r"(\$\((.*?)\))")
+
+
+def ExpandXcodeVariables(string, expansions):
+ """Expands Xcode-style $(VARIABLES) in string per the expansions dict.
+
+ In some rare cases, it is appropriate to expand Xcode variables when a
+ project file is generated. For any substring $(VAR) in string, if VAR is a
+ key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
+ Any $(VAR) substring in string for which VAR is not a key in the expansions
+ dict will remain in the returned string.
+ """
+
+ matches = _xcode_variable_re.findall(string)
+ if not matches:
+ return string
+
+ matches.reverse()
+ for match in matches:
+ (to_replace, variable) = match
+ if variable not in expansions:
+ continue
+
+ replacement = expansions[variable]
+ string = re.sub(re.escape(to_replace), replacement, string)
+
+ return string
+
+
+_xcode_define_re = re.compile(r"([\\\"\' ])")
+
+
+def EscapeXcodeDefine(s):
+ """We must escape the defines that we give to Xcode so that it knows not to
+ split on spaces and to respect backslash and quote literals.
However, we + must not quote the define, or Xcode will incorrectly interpret variables + especially $(inherited).""" + return re.sub(_xcode_define_re, r"\\\1", s) + + +def PerformBuild(data, configurations, params): + options = params["options"] + + for build_file, build_file_dict in data.items(): + (build_file_root, build_file_ext) = os.path.splitext(build_file) + if build_file_ext != ".gyp": + continue + xcodeproj_path = build_file_root + options.suffix + ".xcodeproj" + if options.generator_output: + xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) + + for config in configurations: + arguments = ["xcodebuild", "-project", xcodeproj_path] + arguments += ["-configuration", config] + print(f"Building [{config}]: {arguments}") + subprocess.check_call(arguments) + + +def CalculateGeneratorInputInfo(params): + toplevel = params["options"].toplevel_dir + if params.get("flavor") == "ninja": + generator_dir = os.path.relpath(params["options"].generator_output or ".") + output_dir = params.get("generator_flags", {}).get("output_dir", "out") + output_dir = os.path.normpath(os.path.join(generator_dir, output_dir)) + qualified_out_dir = os.path.normpath( + os.path.join(toplevel, output_dir, "gypfiles-xcode-ninja") + ) + else: + output_dir = os.path.normpath(os.path.join(toplevel, "xcodebuild")) + qualified_out_dir = os.path.normpath( + os.path.join(toplevel, output_dir, "gypfiles") + ) + + global generator_filelist_paths + generator_filelist_paths = { + "toplevel": toplevel, + "qualified_out_dir": qualified_out_dir, + } + + +def GenerateOutput(target_list, target_dicts, data, params): + # Optionally configure each spec to use ninja as the external builder. + ninja_wrapper = params.get("flavor") == "ninja" + if ninja_wrapper: + (target_list, target_dicts, data) = gyp.xcode_ninja.CreateWrapper( + target_list, target_dicts, data, params + ) + + options = params["options"] + generator_flags = params.get("generator_flags", {}) + parallel_builds = generator_flags.get("xcode_parallel_builds", True) + serialize_all_tests = generator_flags.get("xcode_serialize_all_test_runs", True) + upgrade_check_project_version = generator_flags.get( + "xcode_upgrade_check_project_version", None + ) + + # Format upgrade_check_project_version with leading zeros as needed. 
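+ # e.g. a flag value of 830 becomes "0830", matching the zero-padded
+ # version strings Xcode writes for LastUpgradeCheck (example value, not
+ # from the source).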
+ if upgrade_check_project_version: + upgrade_check_project_version = str(upgrade_check_project_version) + while len(upgrade_check_project_version) < 4: + upgrade_check_project_version = "0" + upgrade_check_project_version + + skip_excluded_files = not generator_flags.get("xcode_list_excluded_files", True) + xcode_projects = {} + for build_file, build_file_dict in data.items(): + (build_file_root, build_file_ext) = os.path.splitext(build_file) + if build_file_ext != ".gyp": + continue + xcodeproj_path = build_file_root + options.suffix + ".xcodeproj" + if options.generator_output: + xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) + xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict) + xcode_projects[build_file] = xcp + pbxp = xcp.project + + # Set project-level attributes from multiple options + project_attributes = {} + if parallel_builds: + project_attributes["BuildIndependentTargetsInParallel"] = "YES" + if upgrade_check_project_version: + project_attributes["LastUpgradeCheck"] = upgrade_check_project_version + project_attributes[ + "LastTestingUpgradeCheck" + ] = upgrade_check_project_version + project_attributes["LastSwiftUpdateCheck"] = upgrade_check_project_version + pbxp.SetProperty("attributes", project_attributes) + + # Add gyp/gypi files to project + if not generator_flags.get("standalone"): + main_group = pbxp.GetProperty("mainGroup") + build_group = gyp.xcodeproj_file.PBXGroup({"name": "Build"}) + main_group.AppendChild(build_group) + for included_file in build_file_dict["included_files"]: + build_group.AddOrGetFileByPath(included_file, False) + + xcode_targets = {} + xcode_target_to_target_dict = {} + for qualified_target in target_list: + [build_file, target_name, toolset] = gyp.common.ParseQualifiedTarget( + qualified_target + ) + + spec = target_dicts[qualified_target] + if spec["toolset"] != "target": + raise Exception( + "Multiple toolsets not supported in xcode build (target %s)" + % qualified_target + ) + configuration_names = [spec["default_configuration"]] + for configuration_name in sorted(spec["configurations"].keys()): + if configuration_name not in configuration_names: + configuration_names.append(configuration_name) + xcp = xcode_projects[build_file] + pbxp = xcp.project + + # Set up the configurations for the target according to the list of names + # supplied. + xccl = CreateXCConfigurationList(configuration_names) + + # Create an XCTarget subclass object for the target. The type with + # "+bundle" appended will be used if the target has "mac_bundle" set. + # loadable_modules not in a mac_bundle are mapped to + # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets + # to create a single-file mh_bundle. 
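+ # For instance (illustrative): a 'shared_library' target with
+ # 'mac_bundle': 1 is looked up as "shared_library+bundle" below and
+ # mapped to com.apple.product-type.framework.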
+ _types = { + "executable": "com.apple.product-type.tool", + "loadable_module": "com.googlecode.gyp.xcode.bundle", + "shared_library": "com.apple.product-type.library.dynamic", + "static_library": "com.apple.product-type.library.static", + "mac_kernel_extension": "com.apple.product-type.kernel-extension", + "executable+bundle": "com.apple.product-type.application", + "loadable_module+bundle": "com.apple.product-type.bundle", + "loadable_module+xctest": "com.apple.product-type.bundle.unit-test", + "loadable_module+xcuitest": "com.apple.product-type.bundle.ui-testing", + "shared_library+bundle": "com.apple.product-type.framework", + "executable+extension+bundle": "com.apple.product-type.app-extension", + "executable+watch+extension+bundle": + "com.apple.product-type.watchkit-extension", + "executable+watch+bundle": "com.apple.product-type.application.watchapp", + "mac_kernel_extension+bundle": "com.apple.product-type.kernel-extension", + } + + target_properties = { + "buildConfigurationList": xccl, + "name": target_name, + } + + type = spec["type"] + is_xctest = int(spec.get("mac_xctest_bundle", 0)) + is_xcuitest = int(spec.get("mac_xcuitest_bundle", 0)) + is_bundle = int(spec.get("mac_bundle", 0)) or is_xctest + is_app_extension = int(spec.get("ios_app_extension", 0)) + is_watchkit_extension = int(spec.get("ios_watchkit_extension", 0)) + is_watch_app = int(spec.get("ios_watch_app", 0)) + if type != "none": + type_bundle_key = type + if is_xcuitest: + type_bundle_key += "+xcuitest" + assert type == "loadable_module", ( + "mac_xcuitest_bundle targets must have type loadable_module " + "(target %s)" % target_name + ) + elif is_xctest: + type_bundle_key += "+xctest" + assert type == "loadable_module", ( + "mac_xctest_bundle targets must have type loadable_module " + "(target %s)" % target_name + ) + elif is_app_extension: + assert is_bundle, ( + "ios_app_extension flag requires mac_bundle " + "(target %s)" % target_name + ) + type_bundle_key += "+extension+bundle" + elif is_watchkit_extension: + assert is_bundle, ( + "ios_watchkit_extension flag requires mac_bundle " + "(target %s)" % target_name + ) + type_bundle_key += "+watch+extension+bundle" + elif is_watch_app: + assert is_bundle, ( + "ios_watch_app flag requires mac_bundle " + "(target %s)" % target_name + ) + type_bundle_key += "+watch+bundle" + elif is_bundle: + type_bundle_key += "+bundle" + + xctarget_type = gyp.xcodeproj_file.PBXNativeTarget + try: + target_properties["productType"] = _types[type_bundle_key] + except KeyError as e: + gyp.common.ExceptionAppend( + e, + "-- unknown product type while " "writing target %s" % target_name, + ) + raise + else: + xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget + assert not is_bundle, ( + 'mac_bundle targets cannot have type none (target "%s")' % target_name + ) + assert not is_xcuitest, ( + 'mac_xcuitest_bundle targets cannot have type none (target "%s")' + % target_name + ) + assert not is_xctest, ( + 'mac_xctest_bundle targets cannot have type none (target "%s")' + % target_name + ) + + target_product_name = spec.get("product_name") + if target_product_name is not None: + target_properties["productName"] = target_product_name + + xct = xctarget_type( + target_properties, + parent=pbxp, + force_outdir=spec.get("product_dir"), + force_prefix=spec.get("product_prefix"), + force_extension=spec.get("product_extension"), + ) + pbxp.AppendProperty("targets", xct) + xcode_targets[qualified_target] = xct + xcode_target_to_target_dict[xct] = spec + + spec_actions = spec.get("actions", []) + 
spec_rules = spec.get("rules", []) + + # Xcode has some "issues" with checking dependencies for the "Compile + # sources" step with any source files/headers generated by actions/rules. + # To work around this, if a target is building anything directly (not + # type "none"), then a second target is used to run the GYP actions/rules + # and is made a dependency of this target. This way the work is done + # before the dependency checks for what should be recompiled. + support_xct = None + # The Xcode "issues" don't affect xcode-ninja builds, since the dependency + # logic all happens in ninja. Don't bother creating the extra targets in + # that case. + if type != "none" and (spec_actions or spec_rules) and not ninja_wrapper: + support_xccl = CreateXCConfigurationList(configuration_names) + support_target_suffix = generator_flags.get( + "support_target_suffix", " Support" + ) + support_target_properties = { + "buildConfigurationList": support_xccl, + "name": target_name + support_target_suffix, + } + if target_product_name: + support_target_properties["productName"] = ( + target_product_name + " Support" + ) + support_xct = gyp.xcodeproj_file.PBXAggregateTarget( + support_target_properties, parent=pbxp + ) + pbxp.AppendProperty("targets", support_xct) + xct.AddDependency(support_xct) + # Hang the support target off the main target so it can be tested/found + # by the generator during Finalize. + xct.support_target = support_xct + + prebuild_index = 0 + + # Add custom shell script phases for "actions" sections. + for action in spec_actions: + # There's no need to write anything into the script to ensure that the + # output directories already exist, because Xcode will look at the + # declared outputs and automatically ensure that they exist for us. + + # Do we have a message to print when this action runs? + message = action.get("message") + if message: + message = "echo note: " + gyp.common.EncodePOSIXShellArgument(message) + else: + message = "" + + # Turn the list into a string that can be passed to a shell. + action_string = gyp.common.EncodePOSIXShellList(action["action"]) + + # Convert Xcode-type variable references to sh-compatible environment + # variable references. + message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message) + action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax( + action_string + ) + + script = "" + # Include the optional message + if message_sh: + script += message_sh + "\n" + # Be sure the script runs in exec, and that if exec fails, the script + # exits signalling an error. + script += "exec " + action_string_sh + "\nexit 1\n" + ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase( + { + "inputPaths": action["inputs"], + "name": 'Action "' + action["action_name"] + '"', + "outputPaths": action["outputs"], + "shellScript": script, + "showEnvVarsInLog": 0, + } + ) + + if support_xct: + support_xct.AppendProperty("buildPhases", ssbp) + else: + # TODO(mark): this assumes too much knowledge of the internals of + # xcodeproj_file; some of these smarts should move into xcodeproj_file + # itself. + xct._properties["buildPhases"].insert(prebuild_index, ssbp) + prebuild_index = prebuild_index + 1 + + # TODO(mark): Should verify that at most one of these is specified. 
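+            # "process_outputs_as_sources" feeds an action's outputs back into
+            # the compile phase; "process_outputs_as_mac_bundle_resources"
+            # registers them as bundle resources instead.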
+            if int(action.get("process_outputs_as_sources", False)):
+                for output in action["outputs"]:
+                    AddSourceToTarget(output, type, pbxp, xct)
+
+            if int(action.get("process_outputs_as_mac_bundle_resources", False)):
+                for output in action["outputs"]:
+                    AddResourceToTarget(output, pbxp, xct)
+
+        # tgt_mac_bundle_resources holds the list of bundle resources so
+        # the rule processing can check against it.
+        if is_bundle:
+            tgt_mac_bundle_resources = spec.get("mac_bundle_resources", [])
+        else:
+            tgt_mac_bundle_resources = []
+
+        # Add custom shell script phases driving "make" for "rules" sections.
+        #
+        # Xcode's built-in rule support is almost powerful enough to use directly,
+        # but there are a few significant deficiencies that render them unusable.
+        # There are workarounds for some of its inadequacies, but in aggregate,
+        # the workarounds added complexity to the generator, and some workarounds
+        # actually require input files to be crafted more carefully than I'd like.
+        # Consequently, until Xcode rules are made more capable, "rules" input
+        # sections will be handled in Xcode output by shell script build phases
+        # performed prior to the compilation phase.
+        #
+        # The following problems with Xcode rules were found. The numbers are
+        # Apple radar IDs. I hope that these shortcomings are addressed; I really
+        # liked having the rules handled directly in Xcode during the period that
+        # I was prototyping this.
+        #
+        # 6588600 Xcode compiles custom script rule outputs too soon, compilation
+        #         fails. This occurs when rule outputs from distinct inputs are
+        #         interdependent. The only workaround is to put rules and their
+        #         inputs in a separate target from the one that compiles the rule
+        #         outputs. This requires input file cooperation and it means that
+        #         process_outputs_as_sources is unusable.
+        # 6584932 Need to declare that custom rule outputs should be excluded from
+        #         compilation. A possible workaround is to lie to Xcode about a
+        #         rule's output, giving it a dummy file it doesn't know how to
+        #         compile. The rule action script would need to touch the dummy.
+        # 6584839 I need a way to declare additional inputs to a custom rule.
+        #         A possible workaround is a shell script phase prior to
+        #         compilation that touches a rule's primary input files if any
+        #         would-be additional inputs are newer than the output. Modifying
+        #         the source tree - even just modification times - feels dirty.
+        # 6564240 Xcode "custom script" build rules always dump all environment
+        #         variables. This is a low-priority problem and is not a
+        #         show-stopper.
+        rules_by_ext = {}
+        for rule in spec_rules:
+            rules_by_ext[rule["extension"]] = rule
+
+            # First, some definitions:
+            #
+            # A "rule source" is a file that was listed in a target's "sources"
+            # list and will have a rule applied to it on the basis of matching the
+            # rule's "extension" attribute. Rule sources are direct inputs to
+            # rules.
+            #
+            # Rule definitions may specify additional inputs in their "inputs"
+            # attribute. These additional inputs are used for dependency tracking
+            # purposes.
+            #
+            # A "concrete output" is a rule output with input-dependent variables
+            # resolved. For example, given a rule with:
+            #   'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
+            # if the target's "sources" list contained "one.ext" and "two.ext",
+            # the "concrete output" for rule input "two.ext" would be "two.cc". If
+            # a rule specifies multiple outputs, each input file that the rule is
+            # applied to will have the same number of concrete outputs.
+ # + # If any concrete outputs are outdated or missing relative to their + # corresponding rule_source or to any specified additional input, the + # rule action must be performed to generate the concrete outputs. + + # concrete_outputs_by_rule_source will have an item at the same index + # as the rule['rule_sources'] that it corresponds to. Each item is a + # list of all of the concrete outputs for the rule_source. + concrete_outputs_by_rule_source = [] + + # concrete_outputs_all is a flat list of all concrete outputs that this + # rule is able to produce, given the known set of input files + # (rule_sources) that apply to it. + concrete_outputs_all = [] + + # messages & actions are keyed by the same indices as rule['rule_sources'] + # and concrete_outputs_by_rule_source. They contain the message and + # action to perform after resolving input-dependent variables. The + # message is optional, in which case None is stored for each rule source. + messages = [] + actions = [] + + for rule_source in rule.get("rule_sources", []): + rule_source_dirname, rule_source_basename = posixpath.split(rule_source) + (rule_source_root, rule_source_ext) = posixpath.splitext( + rule_source_basename + ) + + # These are the same variable names that Xcode uses for its own native + # rule support. Because Xcode's rule engine is not being used, they + # need to be expanded as they are written to the makefile. + rule_input_dict = { + "INPUT_FILE_BASE": rule_source_root, + "INPUT_FILE_SUFFIX": rule_source_ext, + "INPUT_FILE_NAME": rule_source_basename, + "INPUT_FILE_PATH": rule_source, + "INPUT_FILE_DIRNAME": rule_source_dirname, + } + + concrete_outputs_for_this_rule_source = [] + for output in rule.get("outputs", []): + # Fortunately, Xcode and make both use $(VAR) format for their + # variables, so the expansion is the only transformation necessary. + # Any remaining $(VAR)-type variables in the string can be given + # directly to make, which will pick up the correct settings from + # what Xcode puts into the environment. + concrete_output = ExpandXcodeVariables(output, rule_input_dict) + concrete_outputs_for_this_rule_source.append(concrete_output) + + # Add all concrete outputs to the project. + pbxp.AddOrGetFileInRootGroup(concrete_output) + + concrete_outputs_by_rule_source.append( + concrete_outputs_for_this_rule_source + ) + concrete_outputs_all.extend(concrete_outputs_for_this_rule_source) + + # TODO(mark): Should verify that at most one of these is specified. + if int(rule.get("process_outputs_as_sources", False)): + for output in concrete_outputs_for_this_rule_source: + AddSourceToTarget(output, type, pbxp, xct) + + # If the file came from the mac_bundle_resources list or if the rule + # is marked to process outputs as bundle resource, do so. + was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources + if was_mac_bundle_resource or int( + rule.get("process_outputs_as_mac_bundle_resources", False) + ): + for output in concrete_outputs_for_this_rule_source: + AddResourceToTarget(output, pbxp, xct) + + # Do we have a message to print when this rule runs? + message = rule.get("message") + if message: + message = gyp.common.EncodePOSIXShellArgument(message) + message = ExpandXcodeVariables(message, rule_input_dict) + messages.append(message) + + # Turn the list into a string that can be passed to a shell. 
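+                # e.g. EncodePOSIXShellList(["touch", "a b.txt"]) produces
+                # something like: touch "a b.txt"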
+ action_string = gyp.common.EncodePOSIXShellList(rule["action"]) + + action = ExpandXcodeVariables(action_string, rule_input_dict) + actions.append(action) + + if len(concrete_outputs_all) > 0: + # TODO(mark): There's a possibility for collision here. Consider + # target "t" rule "A_r" and target "t_A" rule "r". + makefile_name = "%s.make" % re.sub( + "[^a-zA-Z0-9_]", "_", "{}_{}".format(target_name, rule["rule_name"]) + ) + makefile_path = os.path.join( + xcode_projects[build_file].path, makefile_name + ) + # TODO(mark): try/close? Write to a temporary file and swap it only + # if it's got changes? + makefile = open(makefile_path, "w") + + # make will build the first target in the makefile by default. By + # convention, it's called "all". List all (or at least one) + # concrete output for each rule source as a prerequisite of the "all" + # target. + makefile.write("all: \\\n") + for concrete_output_index, concrete_output_by_rule_source in enumerate( + concrete_outputs_by_rule_source + ): + # Only list the first (index [0]) concrete output of each input + # in the "all" target. Otherwise, a parallel make (-j > 1) would + # attempt to process each input multiple times simultaneously. + # Otherwise, "all" could just contain the entire list of + # concrete_outputs_all. + concrete_output = concrete_output_by_rule_source[0] + if ( + concrete_output_index + == len(concrete_outputs_by_rule_source) - 1 + ): + eol = "" + else: + eol = " \\" + makefile.write(f" {concrete_output}{eol}\n") + + for (rule_source, concrete_outputs, message, action) in zip( + rule["rule_sources"], + concrete_outputs_by_rule_source, + messages, + actions, + ): + makefile.write("\n") + + # Add a rule that declares it can build each concrete output of a + # rule source. Collect the names of the directories that are + # required. + concrete_output_dirs = [] + for concrete_output_index, concrete_output in enumerate( + concrete_outputs + ): + if concrete_output_index == 0: + bol = "" + else: + bol = " " + makefile.write(f"{bol}{concrete_output} \\\n") + + concrete_output_dir = posixpath.dirname(concrete_output) + if ( + concrete_output_dir + and concrete_output_dir not in concrete_output_dirs + ): + concrete_output_dirs.append(concrete_output_dir) + + makefile.write(" : \\\n") + + # The prerequisites for this rule are the rule source itself and + # the set of additional rule inputs, if any. + prerequisites = [rule_source] + prerequisites.extend(rule.get("inputs", [])) + for prerequisite_index, prerequisite in enumerate(prerequisites): + if prerequisite_index == len(prerequisites) - 1: + eol = "" + else: + eol = " \\" + makefile.write(f" {prerequisite}{eol}\n") + + # Make sure that output directories exist before executing the rule + # action. + if len(concrete_output_dirs) > 0: + makefile.write( + '\t@mkdir -p "%s"\n' % '" "'.join(concrete_output_dirs) + ) + + # The rule message and action have already had + # the necessary variable substitutions performed. + if message: + # Mark it with note: so Xcode picks it up in build output. + makefile.write("\t@echo note: %s\n" % message) + makefile.write("\t%s\n" % action) + + makefile.close() + + # It might be nice to ensure that needed output directories exist + # here rather than in each target in the Makefile, but that wouldn't + # work if there ever was a concrete output that had an input-dependent + # variable anywhere other than in the leaf position. + + # Don't declare any inputPaths or outputPaths. 
If they're present, + # Xcode will provide a slight optimization by only running the script + # phase if any output is missing or outdated relative to any input. + # Unfortunately, it will also assume that all outputs are touched by + # the script, and if the outputs serve as files in a compilation + # phase, they will be unconditionally rebuilt. Since make might not + # rebuild everything that could be declared here as an output, this + # extra compilation activity is unnecessary. With inputPaths and + # outputPaths not supplied, make will always be called, but it knows + # enough to not do anything when everything is up-to-date. + + # To help speed things up, pass -j COUNT to make so it does some work + # in parallel. Don't use ncpus because Xcode will build ncpus targets + # in parallel and if each target happens to have a rules step, there + # would be ncpus^2 things going. With a machine that has 2 quad-core + # Xeons, a build can quickly run out of processes based on + # scheduling/other tasks, and randomly failing builds are no good. + script = ( + """JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)" +if [ "${JOB_COUNT}" -gt 4 ]; then + JOB_COUNT=4 +fi +exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}" +exit 1 +""" + % makefile_name + ) + ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase( + { + "name": 'Rule "' + rule["rule_name"] + '"', + "shellScript": script, + "showEnvVarsInLog": 0, + } + ) + + if support_xct: + support_xct.AppendProperty("buildPhases", ssbp) + else: + # TODO(mark): this assumes too much knowledge of the internals of + # xcodeproj_file; some of these smarts should move + # into xcodeproj_file itself. + xct._properties["buildPhases"].insert(prebuild_index, ssbp) + prebuild_index = prebuild_index + 1 + + # Extra rule inputs also go into the project file. Concrete outputs were + # already added when they were computed. + groups = ["inputs", "inputs_excluded"] + if skip_excluded_files: + groups = [x for x in groups if not x.endswith("_excluded")] + for group in groups: + for item in rule.get(group, []): + pbxp.AddOrGetFileInRootGroup(item) + + # Add "sources". + for source in spec.get("sources", []): + (source_root, source_extension) = posixpath.splitext(source) + if source_extension[1:] not in rules_by_ext: + # AddSourceToTarget will add the file to a root group if it's not + # already there. + AddSourceToTarget(source, type, pbxp, xct) + else: + pbxp.AddOrGetFileInRootGroup(source) + + # Add "mac_bundle_resources" and "mac_framework_private_headers" if + # it's a bundle of any type. + if is_bundle: + for resource in tgt_mac_bundle_resources: + (resource_root, resource_extension) = posixpath.splitext(resource) + if resource_extension[1:] not in rules_by_ext: + AddResourceToTarget(resource, pbxp, xct) + else: + pbxp.AddOrGetFileInRootGroup(resource) + + for header in spec.get("mac_framework_private_headers", []): + AddHeaderToTarget(header, pbxp, xct, False) + + # Add "mac_framework_headers". These can be valid for both frameworks + # and static libraries. + if is_bundle or type == "static_library": + for header in spec.get("mac_framework_headers", []): + AddHeaderToTarget(header, pbxp, xct, True) + + # Add "copies". + pbxcp_dict = {} + for copy_group in spec.get("copies", []): + dest = copy_group["destination"] + if dest[0] not in ("/", "$"): + # Relative paths are relative to $(SRCROOT). 
+ dest = "$(SRCROOT)/" + dest + + code_sign = int(copy_group.get("xcode_code_sign", 0)) + settings = (None, "{ATTRIBUTES = (CodeSignOnCopy, ); }")[code_sign] + + # Coalesce multiple "copies" sections in the same target with the same + # "destination" property into the same PBXCopyFilesBuildPhase, otherwise + # they'll wind up with ID collisions. + pbxcp = pbxcp_dict.get(dest, None) + if pbxcp is None: + pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase( + {"name": "Copy to " + copy_group["destination"]}, parent=xct + ) + pbxcp.SetDestination(dest) + + # TODO(mark): The usual comment about this knowing too much about + # gyp.xcodeproj_file internals applies. + xct._properties["buildPhases"].insert(prebuild_index, pbxcp) + + pbxcp_dict[dest] = pbxcp + + for file in copy_group["files"]: + pbxcp.AddFile(file, settings) + + # Excluded files can also go into the project file. + if not skip_excluded_files: + for key in [ + "sources", + "mac_bundle_resources", + "mac_framework_headers", + "mac_framework_private_headers", + ]: + excluded_key = key + "_excluded" + for item in spec.get(excluded_key, []): + pbxp.AddOrGetFileInRootGroup(item) + + # So can "inputs" and "outputs" sections of "actions" groups. + groups = ["inputs", "inputs_excluded", "outputs", "outputs_excluded"] + if skip_excluded_files: + groups = [x for x in groups if not x.endswith("_excluded")] + for action in spec.get("actions", []): + for group in groups: + for item in action.get(group, []): + # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not + # sources. + if not item.startswith("$(BUILT_PRODUCTS_DIR)/"): + pbxp.AddOrGetFileInRootGroup(item) + + for postbuild in spec.get("postbuilds", []): + action_string_sh = gyp.common.EncodePOSIXShellList(postbuild["action"]) + script = "exec " + action_string_sh + "\nexit 1\n" + + # Make the postbuild step depend on the output of ld or ar from this + # target. Apparently putting the script step after the link step isn't + # sufficient to ensure proper ordering in all cases. With an input + # declared but no outputs, the script step should run every time, as + # desired. + ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase( + { + "inputPaths": ["$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)"], + "name": 'Postbuild "' + postbuild["postbuild_name"] + '"', + "shellScript": script, + "showEnvVarsInLog": 0, + } + ) + xct.AppendProperty("buildPhases", ssbp) + + # Add dependencies before libraries, because adding a dependency may imply + # adding a library. It's preferable to keep dependencies listed first + # during a link phase so that they can override symbols that would + # otherwise be provided by libraries, which will usually include system + # libraries. On some systems, ld is finicky and even requires the + # libraries to be ordered in such a way that unresolved symbols in + # earlier-listed libraries may only be resolved by later-listed libraries. + # The Mac linker doesn't work that way, but other platforms do, and so + # their linker invocations need to be constructed in this way. There's + # no compelling reason for Xcode's linker invocations to differ. + + if "dependencies" in spec: + for dependency in spec["dependencies"]: + xct.AddDependency(xcode_targets[dependency]) + # The support project also gets the dependencies (in case they are + # needed for the actions/rules to work). 
+ if support_xct: + support_xct.AddDependency(xcode_targets[dependency]) + + if "libraries" in spec: + for library in spec["libraries"]: + xct.FrameworksPhase().AddFile(library) + # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary. + # I wish Xcode handled this automatically. + library_dir = posixpath.dirname(library) + if library_dir not in xcode_standard_library_dirs and ( + not xct.HasBuildSetting(_library_search_paths_var) + or library_dir not in xct.GetBuildSetting(_library_search_paths_var) + ): + xct.AppendBuildSetting(_library_search_paths_var, library_dir) + + for configuration_name in configuration_names: + configuration = spec["configurations"][configuration_name] + xcbc = xct.ConfigurationNamed(configuration_name) + for include_dir in configuration.get("mac_framework_dirs", []): + xcbc.AppendBuildSetting("FRAMEWORK_SEARCH_PATHS", include_dir) + for include_dir in configuration.get("include_dirs", []): + xcbc.AppendBuildSetting("HEADER_SEARCH_PATHS", include_dir) + for library_dir in configuration.get("library_dirs", []): + if library_dir not in xcode_standard_library_dirs and ( + not xcbc.HasBuildSetting(_library_search_paths_var) + or library_dir + not in xcbc.GetBuildSetting(_library_search_paths_var) + ): + xcbc.AppendBuildSetting(_library_search_paths_var, library_dir) + + if "defines" in configuration: + for define in configuration["defines"]: + set_define = EscapeXcodeDefine(define) + xcbc.AppendBuildSetting("GCC_PREPROCESSOR_DEFINITIONS", set_define) + if "xcode_settings" in configuration: + for xck, xcv in configuration["xcode_settings"].items(): + xcbc.SetBuildSetting(xck, xcv) + if "xcode_config_file" in configuration: + config_ref = pbxp.AddOrGetFileInRootGroup( + configuration["xcode_config_file"] + ) + xcbc.SetBaseConfiguration(config_ref) + + build_files = [] + for build_file, build_file_dict in data.items(): + if build_file.endswith(".gyp"): + build_files.append(build_file) + + for build_file in build_files: + xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests) + + for build_file in build_files: + xcode_projects[build_file].Finalize2(xcode_targets, xcode_target_to_target_dict) + + for build_file in build_files: + xcode_projects[build_file].Write() diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py new file mode 100644 index 0000000..49772d1 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 + +# Copyright (c) 2013 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" Unit tests for the xcode.py file. """ + +import gyp.generator.xcode as xcode +import unittest +import sys + + +class TestEscapeXcodeDefine(unittest.TestCase): + if sys.platform == "darwin": + + def test_InheritedRemainsUnescaped(self): + self.assertEqual(xcode.EscapeXcodeDefine("$(inherited)"), "$(inherited)") + + def test_Escaping(self): + self.assertEqual(xcode.EscapeXcodeDefine('a b"c\\'), 'a\\ b\\"c\\\\') + + +if __name__ == "__main__": + unittest.main() diff --git a/node_modules/node-gyp/gyp/pylib/gyp/input.py b/node_modules/node-gyp/gyp/pylib/gyp/input.py new file mode 100644 index 0000000..354958b --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/input.py @@ -0,0 +1,3137 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. 
+# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + + +import ast + +import gyp.common +import gyp.simple_copy +import multiprocessing +import os.path +import re +import shlex +import signal +import subprocess +import sys +import threading +import traceback +from distutils.version import StrictVersion +from gyp.common import GypError +from gyp.common import OrderedSet + +# A list of types that are treated as linkable. +linkable_types = [ + "executable", + "shared_library", + "loadable_module", + "mac_kernel_extension", + "windows_driver", +] + +# A list of sections that contain links to other targets. +dependency_sections = ["dependencies", "export_dependent_settings"] + +# base_path_sections is a list of sections defined by GYP that contain +# pathnames. The generators can provide more keys, the two lists are merged +# into path_sections, but you should call IsPathSection instead of using either +# list directly. +base_path_sections = [ + "destination", + "files", + "include_dirs", + "inputs", + "libraries", + "outputs", + "sources", +] +path_sections = set() + +# These per-process dictionaries are used to cache build file data when loading +# in parallel mode. +per_process_data = {} +per_process_aux_data = {} + + +def IsPathSection(section): + # If section ends in one of the '=+?!' characters, it's applied to a section + # without the trailing characters. '/' is notably absent from this list, + # because there's no way for a regular expression to be treated as a path. + while section and section[-1:] in "=+?!": + section = section[:-1] + + if section in path_sections: + return True + + # Sections matching the regexp '_(dir|file|path)s?$' are also + # considered PathSections. Using manual string matching since that + # is much faster than the regexp and this can be called hundreds of + # thousands of times so micro performance matters. + if "_" in section: + tail = section[-6:] + if tail[-1] == "s": + tail = tail[:-1] + if tail[-5:] in ("_file", "_path"): + return True + return tail[-4:] == "_dir" + + return False + + +# base_non_configuration_keys is a list of key names that belong in the target +# itself and should not be propagated into its configurations. It is merged +# with a list that can come from the generator to +# create non_configuration_keys. +base_non_configuration_keys = [ + # Sections that must exist inside targets and not configurations. + "actions", + "configurations", + "copies", + "default_configuration", + "dependencies", + "dependencies_original", + "libraries", + "postbuilds", + "product_dir", + "product_extension", + "product_name", + "product_prefix", + "rules", + "run_as", + "sources", + "standalone_static_library", + "suppress_wildcard", + "target_name", + "toolset", + "toolsets", + "type", + # Sections that can be found inside targets or configurations, but that + # should not be propagated from targets into their configurations. + "variables", +] +non_configuration_keys = [] + +# Keys that do not belong inside a configuration dictionary. +invalid_configuration_keys = [ + "actions", + "all_dependent_settings", + "configurations", + "dependencies", + "direct_dependent_settings", + "libraries", + "link_settings", + "sources", + "standalone_static_library", + "target_name", + "type", +] + +# Controls whether or not the generator supports multiple toolsets. 
+multiple_toolsets = False + +# Paths for converting filelist paths to output paths: { +# toplevel, +# qualified_output_dir, +# } +generator_filelist_paths = None + + +def GetIncludedBuildFiles(build_file_path, aux_data, included=None): + """Return a list of all build files included into build_file_path. + + The returned list will contain build_file_path as well as all other files + that it included, either directly or indirectly. Note that the list may + contain files that were included into a conditional section that evaluated + to false and was not merged into build_file_path's dict. + + aux_data is a dict containing a key for each build file or included build + file. Those keys provide access to dicts whose "included" keys contain + lists of all other files included by the build file. + + included should be left at its default None value by external callers. It + is used for recursion. + + The returned list will not contain any duplicate entries. Each build file + in the list will be relative to the current directory. + """ + + if included is None: + included = [] + + if build_file_path in included: + return included + + included.append(build_file_path) + + for included_build_file in aux_data[build_file_path].get("included", []): + GetIncludedBuildFiles(included_build_file, aux_data, included) + + return included + + +def CheckedEval(file_contents): + """Return the eval of a gyp file. + The gyp file is restricted to dictionaries and lists only, and + repeated keys are not allowed. + Note that this is slower than eval() is. + """ + + syntax_tree = ast.parse(file_contents) + assert isinstance(syntax_tree, ast.Module) + c1 = syntax_tree.body + assert len(c1) == 1 + c2 = c1[0] + assert isinstance(c2, ast.Expr) + return CheckNode(c2.value, []) + + +def CheckNode(node, keypath): + if isinstance(node, ast.Dict): + dict = {} + for key, value in zip(node.keys, node.values): + assert isinstance(key, ast.Str) + key = key.s + if key in dict: + raise GypError( + "Key '" + + key + + "' repeated at level " + + repr(len(keypath) + 1) + + " with key path '" + + ".".join(keypath) + + "'" + ) + kp = list(keypath) # Make a copy of the list for descending this node. + kp.append(key) + dict[key] = CheckNode(value, kp) + return dict + elif isinstance(node, ast.List): + children = [] + for index, child in enumerate(node.elts): + kp = list(keypath) # Copy list. + kp.append(repr(index)) + children.append(CheckNode(child, kp)) + return children + elif isinstance(node, ast.Str): + return node.s + else: + raise TypeError( + "Unknown AST node at key path '" + ".".join(keypath) + "': " + repr(node) + ) + + +def LoadOneBuildFile(build_file_path, data, aux_data, includes, is_target, check): + if build_file_path in data: + return data[build_file_path] + + if os.path.exists(build_file_path): + build_file_contents = open(build_file_path, encoding='utf-8').read() + else: + raise GypError(f"{build_file_path} not found (cwd: {os.getcwd()})") + + build_file_data = None + try: + if check: + build_file_data = CheckedEval(build_file_contents) + else: + build_file_data = eval(build_file_contents, {"__builtins__": {}}, None) + except SyntaxError as e: + e.filename = build_file_path + raise + except Exception as e: + gyp.common.ExceptionAppend(e, "while reading " + build_file_path) + raise + + if type(build_file_data) is not dict: + raise GypError("%s does not evaluate to a dictionary." % build_file_path) + + data[build_file_path] = build_file_data + aux_data[build_file_path] = {} + + # Scan for includes and merge them in. 
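+    # e.g. a build file with 'includes': ['common.gypi'] gets that file's
+    # dict merged into its own dict here, unless 'skip_includes' is set.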
+ if "skip_includes" not in build_file_data or not build_file_data["skip_includes"]: + try: + if is_target: + LoadBuildFileIncludesIntoDict( + build_file_data, build_file_path, data, aux_data, includes, check + ) + else: + LoadBuildFileIncludesIntoDict( + build_file_data, build_file_path, data, aux_data, None, check + ) + except Exception as e: + gyp.common.ExceptionAppend( + e, "while reading includes of " + build_file_path + ) + raise + + return build_file_data + + +def LoadBuildFileIncludesIntoDict( + subdict, subdict_path, data, aux_data, includes, check +): + includes_list = [] + if includes is not None: + includes_list.extend(includes) + if "includes" in subdict: + for include in subdict["includes"]: + # "include" is specified relative to subdict_path, so compute the real + # path to include by appending the provided "include" to the directory + # in which subdict_path resides. + relative_include = os.path.normpath( + os.path.join(os.path.dirname(subdict_path), include) + ) + includes_list.append(relative_include) + # Unhook the includes list, it's no longer needed. + del subdict["includes"] + + # Merge in the included files. + for include in includes_list: + if "included" not in aux_data[subdict_path]: + aux_data[subdict_path]["included"] = [] + aux_data[subdict_path]["included"].append(include) + + gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include) + + MergeDicts( + subdict, + LoadOneBuildFile(include, data, aux_data, None, False, check), + subdict_path, + include, + ) + + # Recurse into subdictionaries. + for k, v in subdict.items(): + if type(v) is dict: + LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, None, check) + elif type(v) is list: + LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, check) + + +# This recurses into lists so that it can look for dicts. +def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check): + for item in sublist: + if type(item) is dict: + LoadBuildFileIncludesIntoDict( + item, sublist_path, data, aux_data, None, check + ) + elif type(item) is list: + LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check) + + +# Processes toolsets in all the targets. This recurses into condition entries +# since they can contain toolsets as well. +def ProcessToolsetsInDict(data): + if "targets" in data: + target_list = data["targets"] + new_target_list = [] + for target in target_list: + # If this target already has an explicit 'toolset', and no 'toolsets' + # list, don't modify it further. + if "toolset" in target and "toolsets" not in target: + new_target_list.append(target) + continue + if multiple_toolsets: + toolsets = target.get("toolsets", ["target"]) + else: + toolsets = ["target"] + # Make sure this 'toolsets' definition is only processed once. + if "toolsets" in target: + del target["toolsets"] + if len(toolsets) > 0: + # Optimization: only do copies if more than one toolset is specified. + for build in toolsets[1:]: + new_target = gyp.simple_copy.deepcopy(target) + new_target["toolset"] = build + new_target_list.append(new_target) + target["toolset"] = toolsets[0] + new_target_list.append(target) + data["targets"] = new_target_list + if "conditions" in data: + for condition in data["conditions"]: + if type(condition) is list: + for condition_dict in condition[1:]: + if type(condition_dict) is dict: + ProcessToolsetsInDict(condition_dict) + + +# TODO(mark): I don't love this name. 
It just means that it's going to load +# a build file that contains targets and is expected to provide a targets dict +# that contains the targets... +def LoadTargetBuildFile( + build_file_path, + data, + aux_data, + variables, + includes, + depth, + check, + load_dependencies, +): + # If depth is set, predefine the DEPTH variable to be a relative path from + # this build file's directory to the directory identified by depth. + if depth: + # TODO(dglazkov) The backslash/forward-slash replacement at the end is a + # temporary measure. This should really be addressed by keeping all paths + # in POSIX until actual project generation. + d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path)) + if d == "": + variables["DEPTH"] = "." + else: + variables["DEPTH"] = d.replace("\\", "/") + + # The 'target_build_files' key is only set when loading target build files in + # the non-parallel code path, where LoadTargetBuildFile is called + # recursively. In the parallel code path, we don't need to check whether the + # |build_file_path| has already been loaded, because the 'scheduled' set in + # ParallelState guarantees that we never load the same |build_file_path| + # twice. + if "target_build_files" in data: + if build_file_path in data["target_build_files"]: + # Already loaded. + return False + data["target_build_files"].add(build_file_path) + + gyp.DebugOutput( + gyp.DEBUG_INCLUDES, "Loading Target Build File '%s'", build_file_path + ) + + build_file_data = LoadOneBuildFile( + build_file_path, data, aux_data, includes, True, check + ) + + # Store DEPTH for later use in generators. + build_file_data["_DEPTH"] = depth + + # Set up the included_files key indicating which .gyp files contributed to + # this target dict. + if "included_files" in build_file_data: + raise GypError(build_file_path + " must not contain included_files key") + + included = GetIncludedBuildFiles(build_file_path, aux_data) + build_file_data["included_files"] = [] + for included_file in included: + # included_file is relative to the current directory, but it needs to + # be made relative to build_file_path's directory. + included_relative = gyp.common.RelativePath( + included_file, os.path.dirname(build_file_path) + ) + build_file_data["included_files"].append(included_relative) + + # Do a first round of toolsets expansion so that conditions can be defined + # per toolset. + ProcessToolsetsInDict(build_file_data) + + # Apply "pre"/"early" variable expansions and condition evaluations. + ProcessVariablesAndConditionsInDict( + build_file_data, PHASE_EARLY, variables, build_file_path + ) + + # Since some toolsets might have been defined conditionally, perform + # a second round of toolsets expansion now. + ProcessToolsetsInDict(build_file_data) + + # Look at each project's target_defaults dict, and merge settings into + # targets. + if "target_defaults" in build_file_data: + if "targets" not in build_file_data: + raise GypError("Unable to find targets in build file %s" % build_file_path) + + index = 0 + while index < len(build_file_data["targets"]): + # This procedure needs to give the impression that target_defaults is + # used as defaults, and the individual targets inherit from that. + # The individual targets need to be merged into the defaults. Make + # a deep copy of the defaults for each target, merge the target dict + # as found in the input file into that copy, and then hook up the + # copy with the target-specific data merged into it as the replacement + # target dict. 
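+            # For example, list values accumulate during the merge: 'defines':
+            # ['A'] in target_defaults plus 'defines': ['B'] in a target yield
+            # 'defines': ['A', 'B'] in the merged copy.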
+ old_target_dict = build_file_data["targets"][index] + new_target_dict = gyp.simple_copy.deepcopy( + build_file_data["target_defaults"] + ) + MergeDicts( + new_target_dict, old_target_dict, build_file_path, build_file_path + ) + build_file_data["targets"][index] = new_target_dict + index += 1 + + # No longer needed. + del build_file_data["target_defaults"] + + # Look for dependencies. This means that dependency resolution occurs + # after "pre" conditionals and variable expansion, but before "post" - + # in other words, you can't put a "dependencies" section inside a "post" + # conditional within a target. + + dependencies = [] + if "targets" in build_file_data: + for target_dict in build_file_data["targets"]: + if "dependencies" not in target_dict: + continue + for dependency in target_dict["dependencies"]: + dependencies.append( + gyp.common.ResolveTarget(build_file_path, dependency, None)[0] + ) + + if load_dependencies: + for dependency in dependencies: + try: + LoadTargetBuildFile( + dependency, + data, + aux_data, + variables, + includes, + depth, + check, + load_dependencies, + ) + except Exception as e: + gyp.common.ExceptionAppend( + e, "while loading dependencies of %s" % build_file_path + ) + raise + else: + return (build_file_path, dependencies) + + +def CallLoadTargetBuildFile( + global_flags, + build_file_path, + variables, + includes, + depth, + check, + generator_input_info, +): + """Wrapper around LoadTargetBuildFile for parallel processing. + + This wrapper is used when LoadTargetBuildFile is executed in + a worker process. + """ + + try: + signal.signal(signal.SIGINT, signal.SIG_IGN) + + # Apply globals so that the worker process behaves the same. + for key, value in global_flags.items(): + globals()[key] = value + + SetGeneratorGlobals(generator_input_info) + result = LoadTargetBuildFile( + build_file_path, + per_process_data, + per_process_aux_data, + variables, + includes, + depth, + check, + False, + ) + if not result: + return result + + (build_file_path, dependencies) = result + + # We can safely pop the build_file_data from per_process_data because it + # will never be referenced by this process again, so we don't need to keep + # it in the cache. + build_file_data = per_process_data.pop(build_file_path) + + # This gets serialized and sent back to the main process via a pipe. + # It's handled in LoadTargetBuildFileCallback. + return (build_file_path, build_file_data, dependencies) + except GypError as e: + sys.stderr.write("gyp: %s\n" % e) + return None + except Exception as e: + print("Exception:", e, file=sys.stderr) + print(traceback.format_exc(), file=sys.stderr) + return None + + +class ParallelProcessingError(Exception): + pass + + +class ParallelState: + """Class to keep track of state when processing input files in parallel. + + If build files are loaded in parallel, use this to keep track of + state during farming out and processing parallel jobs. It's stored + in a global so that the callback function can have access to it. + """ + + def __init__(self): + # The multiprocessing pool. + self.pool = None + # The condition variable used to protect this object and notify + # the main loop when there might be more data to process. + self.condition = None + # The "data" dict that was passed to LoadTargetBuildFileParallel + self.data = None + # The number of parallel calls outstanding; decremented when a response + # was received. + self.pending = 0 + # The set of all build files that have been scheduled, so we don't + # schedule the same one twice. 
+ self.scheduled = set() + # A list of dependency build file paths that haven't been scheduled yet. + self.dependencies = [] + # Flag to indicate if there was an error in a child process. + self.error = False + + def LoadTargetBuildFileCallback(self, result): + """Handle the results of running LoadTargetBuildFile in another process. + """ + self.condition.acquire() + if not result: + self.error = True + self.condition.notify() + self.condition.release() + return + (build_file_path0, build_file_data0, dependencies0) = result + self.data[build_file_path0] = build_file_data0 + self.data["target_build_files"].add(build_file_path0) + for new_dependency in dependencies0: + if new_dependency not in self.scheduled: + self.scheduled.add(new_dependency) + self.dependencies.append(new_dependency) + self.pending -= 1 + self.condition.notify() + self.condition.release() + + +def LoadTargetBuildFilesParallel( + build_files, data, variables, includes, depth, check, generator_input_info +): + parallel_state = ParallelState() + parallel_state.condition = threading.Condition() + # Make copies of the build_files argument that we can modify while working. + parallel_state.dependencies = list(build_files) + parallel_state.scheduled = set(build_files) + parallel_state.pending = 0 + parallel_state.data = data + + try: + parallel_state.condition.acquire() + while parallel_state.dependencies or parallel_state.pending: + if parallel_state.error: + break + if not parallel_state.dependencies: + parallel_state.condition.wait() + continue + + dependency = parallel_state.dependencies.pop() + + parallel_state.pending += 1 + global_flags = { + "path_sections": globals()["path_sections"], + "non_configuration_keys": globals()["non_configuration_keys"], + "multiple_toolsets": globals()["multiple_toolsets"], + } + + if not parallel_state.pool: + parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count()) + parallel_state.pool.apply_async( + CallLoadTargetBuildFile, + args=( + global_flags, + dependency, + variables, + includes, + depth, + check, + generator_input_info, + ), + callback=parallel_state.LoadTargetBuildFileCallback, + ) + except KeyboardInterrupt as e: + parallel_state.pool.terminate() + raise e + + parallel_state.condition.release() + + parallel_state.pool.close() + parallel_state.pool.join() + parallel_state.pool = None + + if parallel_state.error: + sys.exit(1) + + +# Look for the bracket that matches the first bracket seen in a +# string, and return the start and end as a tuple. For example, if +# the input is something like "<(foo <(bar)) blah", then it would +# return (1, 13), indicating the entire string except for the leading +# "<" and trailing " blah". +LBRACKETS = set("{[(") +BRACKETS = {"}": "{", "]": "[", ")": "("} + + +def FindEnclosingBracketGroup(input_str): + stack = [] + start = -1 + for index, char in enumerate(input_str): + if char in LBRACKETS: + stack.append(char) + if start == -1: + start = index + elif char in BRACKETS: + if not stack: + return (-1, -1) + if stack.pop() != BRACKETS[char]: + return (-1, -1) + if not stack: + return (start, index + 1) + return (-1, -1) + + +def IsStrCanonicalInt(string): + """Returns True if |string| is in its canonical integer form. + + The canonical form is such that str(int(string)) == string. + """ + if type(string) is str: + # This function is called a lot so for maximum performance, avoid + # involving regexps which would otherwise make the code much + # shorter. Regexps would need twice the time of this function. 
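+        # For example, "10" and "-3" are canonical, while "011", "1.0", and
+        # "+5" are not: str(int("011")) == "11", which differs from "011".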
+        if string:
+            if string == "0":
+                return True
+            if string[0] == "-":
+                string = string[1:]
+                if not string:
+                    return False
+            if "1" <= string[0] <= "9":
+                return string.isdigit()
+
+    return False
+
+
+# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", and "<|(list)".
+early_variable_re = re.compile(
+    r"(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)"
+    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
+    r"\((?P<is_array>\s*\[?)"
+    r"(?P<content>.*?)(\]?)\))"
+)
+
+# This matches the same as early_variable_re, but with '>' instead of '<'.
+late_variable_re = re.compile(
+    r"(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)"
+    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
+    r"\((?P<is_array>\s*\[?)"
+    r"(?P<content>.*?)(\]?)\))"
+)
+
+# This matches the same as early_variable_re, but with '^' instead of '<'.
+latelate_variable_re = re.compile(
+    r"(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)"
+    r"(?P<command_string>[-a-zA-Z0-9_.]+)?"
+    r"\((?P<is_array>\s*\[?)"
+    r"(?P<content>.*?)(\]?)\))"
+)
+
+# Global cache of results from running commands so they don't have to be run
+# more than once.
+cached_command_results = {}
+
+
+def FixupPlatformCommand(cmd):
+    if sys.platform == "win32":
+        if type(cmd) is list:
+            cmd = [re.sub("^cat ", "type ", cmd[0])] + cmd[1:]
+        else:
+            cmd = re.sub("^cat ", "type ", cmd)
+    return cmd
+
+
+PHASE_EARLY = 0
+PHASE_LATE = 1
+PHASE_LATELATE = 2
+
+
+def ExpandVariables(input, phase, variables, build_file):
+    # Look for the pattern that gets expanded into variables
+    if phase == PHASE_EARLY:
+        variable_re = early_variable_re
+        expansion_symbol = "<"
+    elif phase == PHASE_LATE:
+        variable_re = late_variable_re
+        expansion_symbol = ">"
+    elif phase == PHASE_LATELATE:
+        variable_re = latelate_variable_re
+        expansion_symbol = "^"
+    else:
+        assert False
+
+    input_str = str(input)
+    if IsStrCanonicalInt(input_str):
+        return int(input_str)
+
+    # Do a quick scan to determine if an expensive regex search is warranted.
+    if expansion_symbol not in input_str:
+        return input_str
+
+    # Get the entire list of matches as a list of MatchObject instances.
+    # (using findall here would return strings instead of MatchObjects).
+    matches = list(variable_re.finditer(input_str))
+    if not matches:
+        return input_str
+
+    output = input_str
+    # Reverse the list of matches so that replacements are done right-to-left.
+    # That ensures that earlier replacements won't mess up the string in a
+    # way that causes later calls to find the earlier substituted text instead
+    # of what's intended for replacement.
+    matches.reverse()
+    for match_group in matches:
+        match = match_group.groupdict()
+        gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
+        # match['replace'] is the substring to look for, match['type']
+        # is the character code for the replacement type (< > ! <| >| <@
+        # >@ !@), match['is_array'] contains a '[' for command
+        # arrays, and match['content'] is the name of the variable (< >)
+        # or command to run (!). match['command_string'] is an optional
+        # command string. Currently, only 'pymod_do_main' is supported.
+
+        # run_command is true if a ! variant is used.
+        run_command = "!" in match["type"]
+        command_string = match["command_string"]
+
+        # file_list is true if a | variant is used.
+        file_list = "|" in match["type"]
+
+        # Capture these now so we can adjust them later.
+        replace_start = match_group.start("replace")
+        replace_end = match_group.end("replace")
+
+        # Find the ending paren, and re-evaluate the contained string.
+        (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
+
+        # Adjust the replacement range to match the entire command
+        # found by FindEnclosingBracketGroup (since the variable_re
+        # probably doesn't match the entire command if it contained
+        # nested variables).
+ replace_end = replace_start + c_end + + # Find the "real" replacement, matching the appropriate closing + # paren, and adjust the replacement start and end. + replacement = input_str[replace_start:replace_end] + + # Figure out what the contents of the variable parens are. + contents_start = replace_start + c_start + 1 + contents_end = replace_end - 1 + contents = input_str[contents_start:contents_end] + + # Do filter substitution now for <|(). + # Admittedly, this is different than the evaluation order in other + # contexts. However, since filtration has no chance to run on <|(), + # this seems like the only obvious way to give them access to filters. + if file_list: + processed_variables = gyp.simple_copy.deepcopy(variables) + ProcessListFiltersInDict(contents, processed_variables) + # Recurse to expand variables in the contents + contents = ExpandVariables(contents, phase, processed_variables, build_file) + else: + # Recurse to expand variables in the contents + contents = ExpandVariables(contents, phase, variables, build_file) + + # Strip off leading/trailing whitespace so that variable matches are + # simpler below (and because they are rarely needed). + contents = contents.strip() + + # expand_to_list is true if an @ variant is used. In that case, + # the expansion should result in a list. Note that the caller + # is to be expecting a list in return, and not all callers do + # because not all are working in list context. Also, for list + # expansions, there can be no other text besides the variable + # expansion in the input string. + expand_to_list = "@" in match["type"] and input_str == replacement + + if run_command or file_list: + # Find the build file's directory, so commands can be run or file lists + # generated relative to it. + build_file_dir = os.path.dirname(build_file) + if build_file_dir == "" and not file_list: + # If build_file is just a leaf filename indicating a file in the + # current directory, build_file_dir might be an empty string. Set + # it to None to signal to subprocess.Popen that it should run the + # command in the current directory. + build_file_dir = None + + # Support <|(listfile.txt ...) which generates a file + # containing items from a gyp list, generated at gyp time. + # This works around actions/rules which have more inputs than will + # fit on the command line. + if file_list: + if type(contents) is list: + contents_list = contents + else: + contents_list = contents.split(" ") + replacement = contents_list[0] + if os.path.isabs(replacement): + raise GypError('| cannot handle absolute paths, got "%s"' % replacement) + + if not generator_filelist_paths: + path = os.path.join(build_file_dir, replacement) + else: + if os.path.isabs(build_file_dir): + toplevel = generator_filelist_paths["toplevel"] + rel_build_file_dir = gyp.common.RelativePath( + build_file_dir, toplevel + ) + else: + rel_build_file_dir = build_file_dir + qualified_out_dir = generator_filelist_paths["qualified_out_dir"] + path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement) + gyp.common.EnsureDirExists(path) + + replacement = gyp.common.RelativePath(path, build_file_dir) + f = gyp.common.WriteOnDiff(path) + for i in contents_list[1:]: + f.write("%s\n" % i) + f.close() + + elif run_command: + use_shell = True + if match["is_array"]: + contents = eval(contents) + use_shell = False + + # Check for a cached value to avoid executing commands, or generating + # file lists more than once. 
The cache key contains the command to be
+                # run as well as the directory to run it from, to account for
+                # commands that depend on their current directory.
+                # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In
+                # theory, someone could author a set of GYP files where each time
+                # the command is invoked it produces different output by design.
+                # When the need arises, the syntax should be extended to support
+                # no caching off a command's output so it is run every time.
+                cache_key = (str(contents), build_file_dir)
+                cached_value = cached_command_results.get(cache_key, None)
+                if cached_value is None:
+                    gyp.DebugOutput(
+                        gyp.DEBUG_VARIABLES,
+                        "Executing command '%s' in directory '%s'",
+                        contents,
+                        build_file_dir,
+                    )
+
+                    replacement = ""
+
+                    if command_string == "pymod_do_main":
+                        # <!pymod_do_main(modulename, param eters...) loads
+                        # |modulename| as a python module and then calls that
+                        # module's DoMain() function, passing ["param", "eters"]
+                        # as a single list string argument.
+                        oldwd = os.getcwd()  # Python doesn't like os.chdir('')
+                        if build_file_dir:  # build_file_dir may be None (see above)
+                            os.chdir(build_file_dir)
+                        sys.path.append(os.getcwd())
+                        try:
+                            parsed_contents = shlex.split(contents)
+                            try:
+                                py_module = __import__(parsed_contents[0])
+                            except ImportError as e:
+                                raise GypError(
+                                    "Error importing pymod_do_main module (%s): %s"
+                                    % (parsed_contents[0], e)
+                                )
+                            replacement = str(
+                                py_module.DoMain(parsed_contents[1:])
+                            ).rstrip()
+                        finally:
+                            sys.path.pop()
+                            os.chdir(oldwd)
+                        assert replacement is not None
+                    elif command_string:
+                        raise GypError(
+                            "Unknown command string '%s' in '%s'."
+                            % (command_string, contents)
+                        )
+                    else:
+                        # Fix up command with platform specific workarounds.
+                        contents = FixupPlatformCommand(contents)
+                        try:
+                            # stderr will be printed no matter what
+                            result = subprocess.run(
+                                contents,
+                                stdout=subprocess.PIPE,
+                                shell=use_shell,
+                                cwd=build_file_dir,
+                                check=False,
+                            )
+                        except Exception as e:
+                            raise GypError(
+                                "%s while executing command '%s' in %s"
+                                % (e, contents, build_file)
+                            )
+
+                        if result.returncode > 0:
+                            raise GypError(
+                                "Call to '%s' returned exit status %d while in %s."
+                                % (contents, result.returncode, build_file)
+                            )
+                        replacement = result.stdout.decode("utf-8").rstrip()
+
+                    cached_command_results[cache_key] = replacement
+                else:
+                    gyp.DebugOutput(
+                        gyp.DEBUG_VARIABLES,
+                        "Had cache value for command '%s' in directory '%s'",
+                        contents,
+                        build_file_dir,
+                    )
+                    replacement = cached_value
+
+        else:
+            if contents not in variables:
+                if contents[-1] in ["!", "/"]:
+                    # In order to allow cross-compiles (nacl) to happen more
+                    # naturally, we will allow references to >(sources/) etc. to
+                    # resolve to an empty list if undefined. This allows actions to:
+                    # 'action!': [
+                    #   '>@(_sources!)',
+                    # ],
+                    # 'action/': [
+                    #   '>@(_sources/)',
+                    # ],
+                    replacement = []
+                else:
+                    raise GypError(
+                        "Undefined variable " + contents + " in " + build_file
+                    )
+            else:
+                replacement = variables[contents]
+
+        if isinstance(replacement, bytes) and not isinstance(replacement, str):
+            replacement = replacement.decode("utf-8")  # done on Python 3 only
+        if type(replacement) is list:
+            for item in replacement:
+                if isinstance(item, bytes) and not isinstance(item, str):
+                    item = item.decode("utf-8")  # done on Python 3 only
+                if not contents[-1] == "/" and type(item) not in (str, int):
+                    raise GypError(
+                        "Variable "
+                        + contents
+                        + " must expand to a string or list of strings; "
+                        + "list contains a "
+                        + item.__class__.__name__
+                    )
+            # Run through the list and handle variable expansions in it. Since
+            # the list is guaranteed not to contain dicts, this won't do anything
+            # with conditions sections.
+            ProcessVariablesAndConditionsInList(
+                replacement, phase, variables, build_file
+            )
+        elif type(replacement) not in (str, int):
+            raise GypError(
+                "Variable "
+                + contents
+                + " must expand to a string or list of strings; "
+                + "found a "
+                + replacement.__class__.__name__
+            )
+
+        if expand_to_list:
+            # Expanding in list context. It's guaranteed that there's only one
+            # replacement to do in |input_str| and that it's this replacement. See
+            # above.
+            if type(replacement) is list:
+                # If it's already a list, make a copy.
+                output = replacement[:]
+            else:
+                # Split it the same way sh would split arguments.
+                output = shlex.split(str(replacement))
+        else:
+            # Expanding in string context.
+            encoded_replacement = ""
+            if type(replacement) is list:
+                # When expanding a list into string context, turn the list items
+                # into a string in a way that will work with a subprocess call.
+                #
+                # TODO(mark): This isn't completely correct. This should
+                # call a generator-provided function that observes the
+                # proper list-to-argument quoting rules on a specific
+                # platform instead of just calling the POSIX encoding
+                # routine.
+                encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
+            else:
+                encoded_replacement = replacement
+
+            output = (
+                output[:replace_start] + str(encoded_replacement) + output[replace_end:]
+            )
+        # Prepare for the next match iteration.
+ input_str = output + + if output == input: + gyp.DebugOutput( + gyp.DEBUG_VARIABLES, + "Found only identity matches on %r, avoiding infinite " "recursion.", + output, + ) + else: + # Look for more matches now that we've replaced some, to deal with + # expanding local variables (variables defined in the same + # variables block as this one). + gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output) + if type(output) is list: + if output and type(output[0]) is list: + # Leave output alone if it's a list of lists. + # We don't want such lists to be stringified. + pass + else: + new_output = [] + for item in output: + new_output.append( + ExpandVariables(item, phase, variables, build_file) + ) + output = new_output + else: + output = ExpandVariables(output, phase, variables, build_file) + + # Convert all strings that are canonically-represented integers into integers. + if type(output) is list: + for index, outstr in enumerate(output): + if IsStrCanonicalInt(outstr): + output[index] = int(outstr) + elif IsStrCanonicalInt(output): + output = int(output) + + return output + + +# The same condition is often evaluated over and over again so it +# makes sense to cache as much as possible between evaluations. +cached_conditions_asts = {} + + +def EvalCondition(condition, conditions_key, phase, variables, build_file): + """Returns the dict that should be used or None if the result was + that nothing should be used.""" + if type(condition) is not list: + raise GypError(conditions_key + " must be a list") + if len(condition) < 2: + # It's possible that condition[0] won't work in which case this + # attempt will raise its own IndexError. That's probably fine. + raise GypError( + conditions_key + + " " + + condition[0] + + " must be at least length 2, not " + + str(len(condition)) + ) + + i = 0 + result = None + while i < len(condition): + cond_expr = condition[i] + true_dict = condition[i + 1] + if type(true_dict) is not dict: + raise GypError( + "{} {} must be followed by a dictionary, not {}".format( + conditions_key, cond_expr, type(true_dict) + ) + ) + if len(condition) > i + 2 and type(condition[i + 2]) is dict: + false_dict = condition[i + 2] + i = i + 3 + if i != len(condition): + raise GypError( + "{} {} has {} unexpected trailing items".format( + conditions_key, cond_expr, len(condition) - i + ) + ) + else: + false_dict = None + i = i + 2 + if result is None: + result = EvalSingleCondition( + cond_expr, true_dict, false_dict, phase, variables, build_file + ) + + return result + + +def EvalSingleCondition(cond_expr, true_dict, false_dict, phase, variables, build_file): + """Returns true_dict if cond_expr evaluates to true, and false_dict + otherwise.""" + # Do expansions on the condition itself. Since the condition can naturally + # contain variable references without needing to resort to GYP expansion + # syntax, this is of dubious value for variables, but someone might want to + # use a command expansion directly inside a condition. 
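+
+    # Editor's note: illustrative sketch only, not upstream gyp code. It shows
+    # the shape of what happens below: a condition string is compiled once and
+    # evaluated with the variables dict as its namespace, with builtins
+    # stripped so a condition can't call arbitrary code. The helper name and
+    # variable values are hypothetical.
+    def _demo_condition_eval():
+        variables = {"OS": "mac", "target_arch": "x64"}
+        ast_code = compile('OS=="mac" and target_arch!="arm"', "<string>", "eval")
+        return eval(ast_code, {"__builtins__": {}}, variables)  # -> True
+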
+    cond_expr_expanded = ExpandVariables(cond_expr, phase, variables, build_file)
+    if type(cond_expr_expanded) not in (str, int):
+        raise ValueError(
+            "Variable expansion in this context permits str and int "
+            + "only, found "
+            + cond_expr_expanded.__class__.__name__
+        )
+
+    try:
+        if cond_expr_expanded in cached_conditions_asts:
+            ast_code = cached_conditions_asts[cond_expr_expanded]
+        else:
+            ast_code = compile(cond_expr_expanded, "<string>", "eval")
+            cached_conditions_asts[cond_expr_expanded] = ast_code
+        env = {"__builtins__": {}, "v": StrictVersion}
+        if eval(ast_code, env, variables):
+            return true_dict
+        return false_dict
+    except SyntaxError as e:
+        syntax_error = SyntaxError(
+            "%s while evaluating condition '%s' in %s "
+            "at character %d." % (str(e.args[0]), e.text, build_file, e.offset),
+            e.filename,
+            e.lineno,
+            e.offset,
+            e.text,
+        )
+        raise syntax_error
+    except NameError as e:
+        gyp.common.ExceptionAppend(
+            e,
+            f"while evaluating condition '{cond_expr_expanded}' in {build_file}",
+        )
+        raise GypError(e)
+
+
+def ProcessConditionsInDict(the_dict, phase, variables, build_file):
+    # Process a 'conditions' or 'target_conditions' section in the_dict,
+    # depending on phase.
+    # early -> conditions
+    # late -> target_conditions
+    # latelate -> no conditions
+    #
+    # Each item in a conditions list consists of cond_expr, a string expression
+    # evaluated as the condition, and true_dict, a dict that will be merged into
+    # the_dict if cond_expr evaluates to true. Optionally, a third item,
+    # false_dict, may be present. false_dict is merged into the_dict if
+    # cond_expr evaluates to false.
+    #
+    # Any dict merged into the_dict will be recursively processed for nested
+    # conditionals and other expansions, also according to phase, immediately
+    # prior to being merged.
+
+    if phase == PHASE_EARLY:
+        conditions_key = "conditions"
+    elif phase == PHASE_LATE:
+        conditions_key = "target_conditions"
+    elif phase == PHASE_LATELATE:
+        return
+    else:
+        assert False
+
+    if conditions_key not in the_dict:
+        return
+
+    conditions_list = the_dict[conditions_key]
+    # Unhook the conditions list, it's no longer needed.
+    del the_dict[conditions_key]
+
+    for condition in conditions_list:
+        merge_dict = EvalCondition(
+            condition, conditions_key, phase, variables, build_file
+        )
+
+        if merge_dict is not None:
+            # Expand variables and nested conditionals in the merge_dict before
+            # merging it.
+            ProcessVariablesAndConditionsInDict(
+                merge_dict, phase, variables, build_file
+            )
+
+            MergeDicts(the_dict, merge_dict, build_file, build_file)
+
+
+def LoadAutomaticVariablesFromDict(variables, the_dict):
+    # Any keys with plain string values in the_dict become automatic variables.
+    # The variable name is the key name with a "_" character prepended.
+    for key, value in the_dict.items():
+        if type(value) in (str, int, list):
+            variables["_" + key] = value
+
+
+def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
+    # Each key in the_dict's "variables" dict, if it has one, becomes a
+    # variable. The variable name is the key name in the "variables" dict.
+    # Variables that end with the % character are set only if they are unset in
+    # the variables dict. the_dict_key is the name of the key that accesses
+    # the_dict in the_dict's parent dict. If the_dict's parent is not a dict
+    # (it could be a list or it could be parentless because it is a root dict),
+    # the_dict_key will be None.
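+
+    # Editor's note: illustrative sketch only, not upstream gyp code. It shows
+    # the "%" default semantics implemented below: "use_goma%" is ignored
+    # because use_goma is already set, while "cc%" supplies a new default.
+    # The variable names are hypothetical.
+    def _demo_percent_defaults():
+        variables = {"use_goma": 1}
+        the_dict = {"variables": {"use_goma%": 0, "cc%": "clang"}}
+        LoadVariablesFromVariablesDict(variables, the_dict, None)
+        return variables  # -> {"use_goma": 1, "cc": "clang"}
+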
+ for key, value in the_dict.get("variables", {}).items(): + if type(value) not in (str, int, list): + continue + + if key.endswith("%"): + variable_name = key[:-1] + if variable_name in variables: + # If the variable is already set, don't set it. + continue + if the_dict_key == "variables" and variable_name in the_dict: + # If the variable is set without a % in the_dict, and the_dict is a + # variables dict (making |variables| a variables sub-dict of a + # variables dict), use the_dict's definition. + value = the_dict[variable_name] + else: + variable_name = key + + variables[variable_name] = value + + +def ProcessVariablesAndConditionsInDict( + the_dict, phase, variables_in, build_file, the_dict_key=None +): + """Handle all variable and command expansion and conditional evaluation. + + This function is the public entry point for all variable expansions and + conditional evaluations. The variables_in dictionary will not be modified + by this function. + """ + + # Make a copy of the variables_in dict that can be modified during the + # loading of automatics and the loading of the variables dict. + variables = variables_in.copy() + LoadAutomaticVariablesFromDict(variables, the_dict) + + if "variables" in the_dict: + # Make sure all the local variables are added to the variables + # list before we process them so that you can reference one + # variable from another. They will be fully expanded by recursion + # in ExpandVariables. + for key, value in the_dict["variables"].items(): + variables[key] = value + + # Handle the associated variables dict first, so that any variable + # references within can be resolved prior to using them as variables. + # Pass a copy of the variables dict to avoid having it be tainted. + # Otherwise, it would have extra automatics added for everything that + # should just be an ordinary variable in this scope. + ProcessVariablesAndConditionsInDict( + the_dict["variables"], phase, variables, build_file, "variables" + ) + + LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) + + for key, value in the_dict.items(): + # Skip "variables", which was already processed if present. + if key != "variables" and type(value) is str: + expanded = ExpandVariables(value, phase, variables, build_file) + if type(expanded) not in (str, int): + raise ValueError( + "Variable expansion in this context permits str and int " + + "only, found " + + expanded.__class__.__name__ + + " for " + + key + ) + the_dict[key] = expanded + + # Variable expansion may have resulted in changes to automatics. Reload. + # TODO(mark): Optimization: only reload if no changes were made. + variables = variables_in.copy() + LoadAutomaticVariablesFromDict(variables, the_dict) + LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) + + # Process conditions in this dict. This is done after variable expansion + # so that conditions may take advantage of expanded variables. For example, + # if the_dict contains: + # {'type': '<(library_type)', + # 'conditions': [['_type=="static_library"', { ... }]]}, + # _type, as used in the condition, will only be set to the value of + # library_type if variable expansion is performed before condition + # processing. However, condition processing should occur prior to recursion + # so that variables (both automatic and "variables" dict type) may be + # adjusted by conditions sections, merged into the_dict, and have the + # intended impact on contained dicts. 
+ # + # This arrangement means that a "conditions" section containing a "variables" + # section will only have those variables effective in subdicts, not in + # the_dict. The workaround is to put a "conditions" section within a + # "variables" section. For example: + # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]], + # 'defines': ['<(define)'], + # 'my_subdict': {'defines': ['<(define)']}}, + # will not result in "IS_MAC" being appended to the "defines" list in the + # current scope but would result in it being appended to the "defines" list + # within "my_subdict". By comparison: + # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]}, + # 'defines': ['<(define)'], + # 'my_subdict': {'defines': ['<(define)']}}, + # will append "IS_MAC" to both "defines" lists. + + # Evaluate conditions sections, allowing variable expansions within them + # as well as nested conditionals. This will process a 'conditions' or + # 'target_conditions' section, perform appropriate merging and recursive + # conditional and variable processing, and then remove the conditions section + # from the_dict if it is present. + ProcessConditionsInDict(the_dict, phase, variables, build_file) + + # Conditional processing may have resulted in changes to automatics or the + # variables dict. Reload. + variables = variables_in.copy() + LoadAutomaticVariablesFromDict(variables, the_dict) + LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) + + # Recurse into child dicts, or process child lists which may result in + # further recursion into descendant dicts. + for key, value in the_dict.items(): + # Skip "variables" and string values, which were already processed if + # present. + if key == "variables" or type(value) is str: + continue + if type(value) is dict: + # Pass a copy of the variables dict so that subdicts can't influence + # parents. + ProcessVariablesAndConditionsInDict( + value, phase, variables, build_file, key + ) + elif type(value) is list: + # The list itself can't influence the variables dict, and + # ProcessVariablesAndConditionsInList will make copies of the variables + # dict if it needs to pass it to something that can influence it. No + # copy is necessary here. + ProcessVariablesAndConditionsInList(value, phase, variables, build_file) + elif type(value) is not int: + raise TypeError("Unknown type " + value.__class__.__name__ + " for " + key) + + +def ProcessVariablesAndConditionsInList(the_list, phase, variables, build_file): + # Iterate using an index so that new values can be assigned into the_list. + index = 0 + while index < len(the_list): + item = the_list[index] + if type(item) is dict: + # Make a copy of the variables dict so that it won't influence anything + # outside of its own scope. + ProcessVariablesAndConditionsInDict(item, phase, variables, build_file) + elif type(item) is list: + ProcessVariablesAndConditionsInList(item, phase, variables, build_file) + elif type(item) is str: + expanded = ExpandVariables(item, phase, variables, build_file) + if type(expanded) in (str, int): + the_list[index] = expanded + elif type(expanded) is list: + the_list[index : index + 1] = expanded + index += len(expanded) + + # index now identifies the next item to examine. Continue right now + # without falling into the index increment below. 
+                continue
+            else:
+                raise ValueError(
+                    "Variable expansion in this context permits strings and "
+                    + "lists only, found "
+                    + expanded.__class__.__name__
+                    + " at "
+                    + str(index)
+                )
+        elif type(item) is not int:
+            raise TypeError(
+                "Unknown type " + item.__class__.__name__ + " at index " + str(index)
+            )
+        index = index + 1
+
+
+def BuildTargetsDict(data):
+    """Builds a dict mapping fully-qualified target names to their target dicts.
+
+    |data| is a dict mapping loaded build files by pathname relative to the
+    current directory. Values in |data| are build file contents. For each
+    |data| value with a "targets" key, the value of the "targets" key is taken
+    as a list containing target dicts. Each target's fully-qualified name is
+    constructed from the pathname of the build file (|data| key) and its
+    "target_name" property. These fully-qualified names are used as the keys
+    in the returned dict. These keys provide access to the target dicts,
+    the dicts in the "targets" lists.
+    """
+
+    targets = {}
+    for build_file in data["target_build_files"]:
+        for target in data[build_file].get("targets", []):
+            target_name = gyp.common.QualifiedTarget(
+                build_file, target["target_name"], target["toolset"]
+            )
+            if target_name in targets:
+                raise GypError("Duplicate target definitions for " + target_name)
+            targets[target_name] = target
+
+    return targets
+
+
+def QualifyDependencies(targets):
+    """Make dependency links fully-qualified relative to the current directory.
+
+    |targets| is a dict mapping fully-qualified target names to their target
+    dicts. For each target in this dict, keys known to contain dependency
+    links are examined, and any dependencies referenced will be rewritten
+    so that they are fully-qualified and relative to the current directory.
+    All rewritten dependencies are suitable for use as keys to |targets| or a
+    similar dict.
+    """
+
+    all_dependency_sections = [
+        dep + op for dep in dependency_sections for op in ("", "!", "/")
+    ]
+
+    for target, target_dict in targets.items():
+        target_build_file = gyp.common.BuildFile(target)
+        toolset = target_dict["toolset"]
+        for dependency_key in all_dependency_sections:
+            dependencies = target_dict.get(dependency_key, [])
+            for index, dep in enumerate(dependencies):
+                dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
+                    target_build_file, dep, toolset
+                )
+                if not multiple_toolsets:
+                    # Ignore toolset specification in the dependency if it is
+                    # specified.
+                    dep_toolset = toolset
+                dependency = gyp.common.QualifiedTarget(
+                    dep_file, dep_target, dep_toolset
+                )
+                dependencies[index] = dependency
+
+                # Make sure anything appearing in a list other than "dependencies"
+                # also appears in the "dependencies" list.
+                if (
+                    dependency_key != "dependencies"
+                    and dependency not in target_dict["dependencies"]
+                ):
+                    raise GypError(
+                        "Found "
+                        + dependency
+                        + " in "
+                        + dependency_key
+                        + " of "
+                        + target
+                        + ", but not in dependencies"
+                    )
+
+
+def ExpandWildcardDependencies(targets, data):
+    """Expands dependencies specified as build_file:*.
+
+    For each target in |targets|, examines sections containing links to other
+    targets. If any such section contains a link of the form build_file:*, it
+    is taken as a wildcard link, and is expanded to list each target in
+    build_file. The |data| dict provides access to build file dicts.
+
+    Any target that does not wish to be included by wildcard can provide an
+    optional "suppress_wildcard" key in its target dict. When present and
+    true, a wildcard dependency link will not include such targets.
+
+    All dependency names, including the keys to |targets| and the values in each
+    dependency list, must be qualified when this function is called.
+    """
+
+    for target, target_dict in targets.items():
+        target_build_file = gyp.common.BuildFile(target)
+        for dependency_key in dependency_sections:
+            dependencies = target_dict.get(dependency_key, [])
+
+            # Loop this way instead of "for dependency in" or "for index in range"
+            # because the dependencies list will be modified within the loop body.
+            index = 0
+            while index < len(dependencies):
+                (
+                    dependency_build_file,
+                    dependency_target,
+                    dependency_toolset,
+                ) = gyp.common.ParseQualifiedTarget(dependencies[index])
+                if dependency_target != "*" and dependency_toolset != "*":
+                    # Not a wildcard. Keep it moving.
+                    index = index + 1
+                    continue
+
+                if dependency_build_file == target_build_file:
+                    # It's an error for a target to depend on all other targets in
+                    # the same file, because a target cannot depend on itself.
+                    raise GypError(
+                        "Found wildcard in "
+                        + dependency_key
+                        + " of "
+                        + target
+                        + " referring to same build file"
+                    )
+
+                # Take the wildcard out and adjust the index so that the next
+                # dependency in the list will be processed the next time through
+                # the loop.
+                del dependencies[index]
+                index = index - 1
+
+                # Loop through the targets in the other build file, adding them to
+                # this target's list of dependencies in place of the removed
+                # wildcard.
+                dependency_target_dicts = data[dependency_build_file]["targets"]
+                for dependency_target_dict in dependency_target_dicts:
+                    if int(dependency_target_dict.get("suppress_wildcard", False)):
+                        continue
+                    dependency_target_name = dependency_target_dict["target_name"]
+                    if (
+                        dependency_target != "*"
+                        and dependency_target != dependency_target_name
+                    ):
+                        continue
+                    dependency_target_toolset = dependency_target_dict["toolset"]
+                    if (
+                        dependency_toolset != "*"
+                        and dependency_toolset != dependency_target_toolset
+                    ):
+                        continue
+                    dependency = gyp.common.QualifiedTarget(
+                        dependency_build_file,
+                        dependency_target_name,
+                        dependency_target_toolset,
+                    )
+                    index = index + 1
+                    dependencies.insert(index, dependency)
+
+                index = index + 1
+
+
+def Unify(items):
+    """Removes duplicate elements from items, keeping the first element."""
+    seen = {}
+    return [seen.setdefault(e, e) for e in items if e not in seen]
+
+
+def RemoveDuplicateDependencies(targets):
+    """Makes sure every dependency appears only once in all targets'
+    dependency lists."""
+    for target_name, target_dict in targets.items():
+        for dependency_key in dependency_sections:
+            dependencies = target_dict.get(dependency_key, [])
+            if dependencies:
+                target_dict[dependency_key] = Unify(dependencies)
+
+
+def Filter(items, item):
+    """Removes item from items."""
+    res = {}
+    return [res.setdefault(e, e) for e in items if e != item]
+
+
+def RemoveSelfDependencies(targets):
+    """Remove self dependencies from targets that have the prune_self_dependency
+    variable set."""
+    for target_name, target_dict in targets.items():
+        for dependency_key in dependency_sections:
+            dependencies = target_dict.get(dependency_key, [])
+            if dependencies:
+                for t in dependencies:
+                    if t == target_name:
+                        if (
+                            targets[t]
+                            .get("variables", {})
+                            .get("prune_self_dependency", 0)
+                        ):
+                            target_dict[dependency_key] = Filter(
+                                dependencies, target_name
+                            )
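+
+
+# Editor's note: illustrative sketch only, not upstream gyp code. Unify() and
+# Filter() above deduplicate and remove while preserving order, which is what
+# the dependency-cleanup passes here rely on. The dependency names are
+# hypothetical.
+def _demo_unify_and_filter():
+    deps = ["a.gyp:base", "a.gyp:net", "a.gyp:base", "b.gyp:ui"]
+    assert Unify(deps) == ["a.gyp:base", "a.gyp:net", "b.gyp:ui"]
+    assert Filter(deps, "a.gyp:base") == ["a.gyp:net", "b.gyp:ui"]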
+
+
+def RemoveLinkDependenciesFromNoneTargets(targets):
+    """Remove dependencies having the 'link_dependency' attribute from the
+    'none' targets."""
+    for target_name, target_dict in targets.items():
+        for dependency_key in dependency_sections:
+            dependencies = target_dict.get(dependency_key, [])
+            if dependencies:
+                for t in dependencies:
+                    if target_dict.get("type", None) == "none":
+                        if targets[t].get("variables", {}).get("link_dependency", 0):
+                            target_dict[dependency_key] = Filter(
+                                target_dict[dependency_key], t
+                            )
+
+
+class DependencyGraphNode:
+    """
+
+    Attributes:
+        ref: A reference to an object that this DependencyGraphNode represents.
+        dependencies: List of DependencyGraphNodes on which this one depends.
+        dependents: List of DependencyGraphNodes that depend on this one.
+    """
+
+    class CircularException(GypError):
+        pass
+
+    def __init__(self, ref):
+        self.ref = ref
+        self.dependencies = []
+        self.dependents = []
+
+    def __repr__(self):
+        return "<DependencyGraphNode: %r>" % self.ref
+
+    def FlattenToList(self):
+        # flat_list is the sorted list of dependencies - actually, the list items
+        # are the "ref" attributes of DependencyGraphNodes. Every target will
+        # appear in flat_list after all of its dependencies, and before all of
+        # its dependents.
+        flat_list = OrderedSet()
+
+        def ExtractNodeRef(node):
+            """Extracts the object that the node represents from the given node."""
+            return node.ref
+
+        # in_degree_zeros is the list of DependencyGraphNodes that have no
+        # dependencies not in flat_list. Initially, it is a copy of the children
+        # of this node, because when the graph was built, nodes with no
+        # dependencies were made implicit dependents of the root node.
+        in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef)
+
+        while in_degree_zeros:
+            # Nodes in in_degree_zeros have no dependencies not in flat_list, so
+            # they can be appended to flat_list. Take these nodes out of
+            # in_degree_zeros as work progresses, so that the next node to process
+            # from the list can always be accessed at a consistent position.
+            node = in_degree_zeros.pop()
+            flat_list.add(node.ref)
+
+            # Look at dependents of the node just added to flat_list. Some of
+            # them may now belong in in_degree_zeros.
+            for node_dependent in sorted(node.dependents, key=ExtractNodeRef):
+                is_in_degree_zero = True
+                # TODO: We want to check through the
+                # node_dependent.dependencies list but if it's long and we
+                # always start at the beginning, then we get O(n^2) behaviour.
+                for node_dependent_dependency in sorted(
+                    node_dependent.dependencies, key=ExtractNodeRef
+                ):
+                    if node_dependent_dependency.ref not in flat_list:
+                        # The dependent has one or more dependencies not in
+                        # flat_list. There will be more chances to add it to
+                        # flat_list when examining it again as a dependent of
+                        # those other dependencies, provided that there are no
+                        # cycles.
+                        is_in_degree_zero = False
+                        break
+
+                if is_in_degree_zero:
+                    # All of the dependent's dependencies are already in
+                    # flat_list. Add it to in_degree_zeros where it will be
+                    # processed in a future iteration of the outer loop.
+                    in_degree_zeros += [node_dependent]
+
+        return list(flat_list)
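+
+    # Editor's note: illustrative sketch only, not upstream gyp code. A minimal
+    # two-node graph showing FlattenToList()'s topological order: "b" depends
+    # on "a", so "a" is emitted first. The node names are hypothetical.
+    @staticmethod
+    def _demo_flatten_to_list():
+        root = DependencyGraphNode(None)
+        a = DependencyGraphNode("a")
+        b = DependencyGraphNode("b")
+        # "a" has no dependencies, so it hangs off the root, as
+        # BuildDependencyList would arrange; "b" depends on "a".
+        a.dependencies.append(root)
+        root.dependents.append(a)
+        b.dependencies.append(a)
+        a.dependents.append(b)
+        return root.FlattenToList()  # -> ["a", "b"]
+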
+ """ + results = [] + visited = set() + + def Visit(node, path): + for child in node.dependents: + if child in path: + results.append([child] + path[: path.index(child) + 1]) + elif child not in visited: + visited.add(child) + Visit(child, [child] + path) + + visited.add(self) + Visit(self, [self]) + + return results + + def DirectDependencies(self, dependencies=None): + """Returns a list of just direct dependencies.""" + if dependencies is None: + dependencies = [] + + for dependency in self.dependencies: + # Check for None, corresponding to the root node. + if dependency.ref and dependency.ref not in dependencies: + dependencies.append(dependency.ref) + + return dependencies + + def _AddImportedDependencies(self, targets, dependencies=None): + """Given a list of direct dependencies, adds indirect dependencies that + other dependencies have declared to export their settings. + + This method does not operate on self. Rather, it operates on the list + of dependencies in the |dependencies| argument. For each dependency in + that list, if any declares that it exports the settings of one of its + own dependencies, those dependencies whose settings are "passed through" + are added to the list. As new items are added to the list, they too will + be processed, so it is possible to import settings through multiple levels + of dependencies. + + This method is not terribly useful on its own, it depends on being + "primed" with a list of direct dependencies such as one provided by + DirectDependencies. DirectAndImportedDependencies is intended to be the + public entry point. + """ + + if dependencies is None: + dependencies = [] + + index = 0 + while index < len(dependencies): + dependency = dependencies[index] + dependency_dict = targets[dependency] + # Add any dependencies whose settings should be imported to the list + # if not already present. Newly-added items will be checked for + # their own imports when the list iteration reaches them. + # Rather than simply appending new items, insert them after the + # dependency that exported them. This is done to more closely match + # the depth-first method used by DeepDependencies. + add_index = 1 + for imported_dependency in dependency_dict.get( + "export_dependent_settings", [] + ): + if imported_dependency not in dependencies: + dependencies.insert(index + add_index, imported_dependency) + add_index = add_index + 1 + index = index + 1 + + return dependencies + + def DirectAndImportedDependencies(self, targets, dependencies=None): + """Returns a list of a target's direct dependencies and all indirect + dependencies that a dependency has advertised settings should be exported + through the dependency for. + """ + + dependencies = self.DirectDependencies(dependencies) + return self._AddImportedDependencies(targets, dependencies) + + def DeepDependencies(self, dependencies=None): + """Returns an OrderedSet of all of a target's dependencies, recursively.""" + if dependencies is None: + # Using a list to get ordered output and a set to do fast "is it + # already added" checks. + dependencies = OrderedSet() + + for dependency in self.dependencies: + # Check for None, corresponding to the root node. 
+ if dependency.ref is None: + continue + if dependency.ref not in dependencies: + dependency.DeepDependencies(dependencies) + dependencies.add(dependency.ref) + + return dependencies + + def _LinkDependenciesInternal( + self, targets, include_shared_libraries, dependencies=None, initial=True + ): + """Returns an OrderedSet of dependency targets that are linked + into this target. + + This function has a split personality, depending on the setting of + |initial|. Outside callers should always leave |initial| at its default + setting. + + When adding a target to the list of dependencies, this function will + recurse into itself with |initial| set to False, to collect dependencies + that are linked into the linkable target for which the list is being built. + + If |include_shared_libraries| is False, the resulting dependencies will not + include shared_library targets that are linked into this target. + """ + if dependencies is None: + # Using a list to get ordered output and a set to do fast "is it + # already added" checks. + dependencies = OrderedSet() + + # Check for None, corresponding to the root node. + if self.ref is None: + return dependencies + + # It's kind of sucky that |targets| has to be passed into this function, + # but that's presently the easiest way to access the target dicts so that + # this function can find target types. + + if "target_name" not in targets[self.ref]: + raise GypError("Missing 'target_name' field in target.") + + if "type" not in targets[self.ref]: + raise GypError( + "Missing 'type' field in target %s" % targets[self.ref]["target_name"] + ) + + target_type = targets[self.ref]["type"] + + is_linkable = target_type in linkable_types + + if initial and not is_linkable: + # If this is the first target being examined and it's not linkable, + # return an empty list of link dependencies, because the link + # dependencies are intended to apply to the target itself (initial is + # True) and this target won't be linked. + return dependencies + + # Don't traverse 'none' targets if explicitly excluded. + if target_type == "none" and not targets[self.ref].get( + "dependencies_traverse", True + ): + dependencies.add(self.ref) + return dependencies + + # Executables, mac kernel extensions, windows drivers and loadable modules + # are already fully and finally linked. Nothing else can be a link + # dependency of them, there can only be dependencies in the sense that a + # dependent target might run an executable or load the loadable_module. + if not initial and target_type in ( + "executable", + "loadable_module", + "mac_kernel_extension", + "windows_driver", + ): + return dependencies + + # Shared libraries are already fully linked. They should only be included + # in |dependencies| when adjusting static library dependencies (in order to + # link against the shared_library's import lib), but should not be included + # in |dependencies| when propagating link_settings. + # The |include_shared_libraries| flag controls which of these two cases we + # are handling. + if ( + not initial + and target_type == "shared_library" + and not include_shared_libraries + ): + return dependencies + + # The target is linkable, add it to the list of link dependencies. + if self.ref not in dependencies: + dependencies.add(self.ref) + if initial or not is_linkable: + # If this is a subsequent target and it's linkable, don't look any + # further for linkable dependencies, as they'll already be linked into + # this target linkable. 
Always look at dependencies of the initial + # target, and always look at dependencies of non-linkables. + for dependency in self.dependencies: + dependency._LinkDependenciesInternal( + targets, include_shared_libraries, dependencies, False + ) + + return dependencies + + def DependenciesForLinkSettings(self, targets): + """ + Returns a list of dependency targets whose link_settings should be merged + into this target. + """ + + # TODO(sbaig) Currently, chrome depends on the bug that shared libraries' + # link_settings are propagated. So for now, we will allow it, unless the + # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to + # False. Once chrome is fixed, we can remove this flag. + include_shared_libraries = targets[self.ref].get( + "allow_sharedlib_linksettings_propagation", True + ) + return self._LinkDependenciesInternal(targets, include_shared_libraries) + + def DependenciesToLinkAgainst(self, targets): + """ + Returns a list of dependency targets that are linked into this target. + """ + return self._LinkDependenciesInternal(targets, True) + + +def BuildDependencyList(targets): + # Create a DependencyGraphNode for each target. Put it into a dict for easy + # access. + dependency_nodes = {} + for target, spec in targets.items(): + if target not in dependency_nodes: + dependency_nodes[target] = DependencyGraphNode(target) + + # Set up the dependency links. Targets that have no dependencies are treated + # as dependent on root_node. + root_node = DependencyGraphNode(None) + for target, spec in targets.items(): + target_node = dependency_nodes[target] + dependencies = spec.get("dependencies") + if not dependencies: + target_node.dependencies = [root_node] + root_node.dependents.append(target_node) + else: + for dependency in dependencies: + dependency_node = dependency_nodes.get(dependency) + if not dependency_node: + raise GypError( + "Dependency '%s' not found while " + "trying to load target %s" % (dependency, target) + ) + target_node.dependencies.append(dependency_node) + dependency_node.dependents.append(target_node) + + flat_list = root_node.FlattenToList() + + # If there's anything left unvisited, there must be a circular dependency + # (cycle). + if len(flat_list) != len(targets): + if not root_node.dependents: + # If all targets have dependencies, add the first target as a dependent + # of root_node so that the cycle can be discovered from root_node. + target = next(iter(targets)) + target_node = dependency_nodes[target] + target_node.dependencies.append(root_node) + root_node.dependents.append(target_node) + + cycles = [] + for cycle in root_node.FindCycles(): + paths = [node.ref for node in cycle] + cycles.append("Cycle: %s" % " -> ".join(paths)) + raise DependencyGraphNode.CircularException( + "Cycles in dependency graph detected:\n" + "\n".join(cycles) + ) + + return [dependency_nodes, flat_list] + + +def VerifyNoGYPFileCircularDependencies(targets): + # Create a DependencyGraphNode for each gyp file containing a target. Put + # it into a dict for easy access. + dependency_nodes = {} + for target in targets: + build_file = gyp.common.BuildFile(target) + if build_file not in dependency_nodes: + dependency_nodes[build_file] = DependencyGraphNode(build_file) + + # Set up the dependency links. 
+    for target, spec in targets.items():
+        build_file = gyp.common.BuildFile(target)
+        build_file_node = dependency_nodes[build_file]
+        target_dependencies = spec.get("dependencies", [])
+        for dependency in target_dependencies:
+            try:
+                dependency_build_file = gyp.common.BuildFile(dependency)
+            except GypError as e:
+                gyp.common.ExceptionAppend(
+                    e, "while computing dependencies of .gyp file %s" % build_file
+                )
+                raise
+
+            if dependency_build_file == build_file:
+                # A .gyp file is allowed to refer back to itself.
+                continue
+            dependency_node = dependency_nodes.get(dependency_build_file)
+            if not dependency_node:
+                raise GypError("Dependency '%s' not found" % dependency_build_file)
+            if dependency_node not in build_file_node.dependencies:
+                build_file_node.dependencies.append(dependency_node)
+                dependency_node.dependents.append(build_file_node)
+
+    # Files that have no dependencies are treated as dependent on root_node.
+    root_node = DependencyGraphNode(None)
+    for build_file_node in dependency_nodes.values():
+        if len(build_file_node.dependencies) == 0:
+            build_file_node.dependencies.append(root_node)
+            root_node.dependents.append(build_file_node)
+
+    flat_list = root_node.FlattenToList()
+
+    # If there's anything left unvisited, there must be a circular dependency
+    # (cycle).
+    if len(flat_list) != len(dependency_nodes):
+        if not root_node.dependents:
+            # If all files have dependencies, add the first file as a dependent
+            # of root_node so that the cycle can be discovered from root_node.
+            file_node = next(iter(dependency_nodes.values()))
+            file_node.dependencies.append(root_node)
+            root_node.dependents.append(file_node)
+        cycles = []
+        for cycle in root_node.FindCycles():
+            paths = [node.ref for node in cycle]
+            cycles.append("Cycle: %s" % " -> ".join(paths))
+        raise DependencyGraphNode.CircularException(
+            "Cycles in .gyp file dependency graph detected:\n" + "\n".join(cycles)
+        )
+
+
+def DoDependentSettings(key, flat_list, targets, dependency_nodes):
+    # key should be one of all_dependent_settings, direct_dependent_settings,
+    # or link_settings.
+
+    for target in flat_list:
+        target_dict = targets[target]
+        build_file = gyp.common.BuildFile(target)
+
+        if key == "all_dependent_settings":
+            dependencies = dependency_nodes[target].DeepDependencies()
+        elif key == "direct_dependent_settings":
+            dependencies = dependency_nodes[target].DirectAndImportedDependencies(
+                targets
+            )
+        elif key == "link_settings":
+            dependencies = dependency_nodes[target].DependenciesForLinkSettings(targets)
+        else:
+            raise GypError(
+                "DoDependentSettings doesn't know how to determine "
+                "dependencies for " + key
+            )
+
+        for dependency in dependencies:
+            dependency_dict = targets[dependency]
+            if key not in dependency_dict:
+                continue
+            dependency_build_file = gyp.common.BuildFile(dependency)
+            MergeDicts(
+                target_dict, dependency_dict[key], build_file, dependency_build_file
+            )
+
+
+def AdjustStaticLibraryDependencies(
+    flat_list, targets, dependency_nodes, sort_dependencies
+):
+    # Recompute target "dependencies" properties. For each static library
+    # target, remove "dependencies" entries referring to other static libraries,
+    # unless the dependency has the "hard_dependency" attribute set. For each
+    # linkable target, add a "dependencies" entry referring to all of the
+    # target's computed list of link dependencies (including static libraries)
+    # if no such entry is already present.
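+    # Editor's note: illustrative sketch only, not upstream gyp code. A
+    # hypothetical mini-model of the filtering rule described above, with each
+    # dependency as a (name, type, hard_dependency) tuple: a static library
+    # keeps hard static-library deps and its direct non-static deps.
+    def _demo_static_dep_filter():
+        deps = [
+            ("base", "static_library", False),
+            ("gen_headers", "static_library", True),
+            ("zlib", "shared_library", False),
+        ]
+        kept = [name for (name, dep_type, hard) in deps
+                if dep_type != "static_library" or hard]
+        return kept  # -> ["gen_headers", "zlib"]
+
+    # The pass below implements this on the real |targets| graph.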
+ for target in flat_list: + target_dict = targets[target] + target_type = target_dict["type"] + + if target_type == "static_library": + if "dependencies" not in target_dict: + continue + + target_dict["dependencies_original"] = target_dict.get("dependencies", [])[ + : + ] + + # A static library should not depend on another static library unless + # the dependency relationship is "hard," which should only be done when + # a dependent relies on some side effect other than just the build + # product, like a rule or action output. Further, if a target has a + # non-hard dependency, but that dependency exports a hard dependency, + # the non-hard dependency can safely be removed, but the exported hard + # dependency must be added to the target to keep the same dependency + # ordering. + dependencies = dependency_nodes[target].DirectAndImportedDependencies( + targets + ) + index = 0 + while index < len(dependencies): + dependency = dependencies[index] + dependency_dict = targets[dependency] + + # Remove every non-hard static library dependency and remove every + # non-static library dependency that isn't a direct dependency. + if ( + dependency_dict["type"] == "static_library" + and not dependency_dict.get("hard_dependency", False) + ) or ( + dependency_dict["type"] != "static_library" + and dependency not in target_dict["dependencies"] + ): + # Take the dependency out of the list, and don't increment index + # because the next dependency to analyze will shift into the index + # formerly occupied by the one being removed. + del dependencies[index] + else: + index = index + 1 + + # Update the dependencies. If the dependencies list is empty, it's not + # needed, so unhook it. + if len(dependencies) > 0: + target_dict["dependencies"] = dependencies + else: + del target_dict["dependencies"] + + elif target_type in linkable_types: + # Get a list of dependency targets that should be linked into this + # target. Add them to the dependencies list if they're not already + # present. + + link_dependencies = dependency_nodes[target].DependenciesToLinkAgainst( + targets + ) + for dependency in link_dependencies: + if dependency == target: + continue + if "dependencies" not in target_dict: + target_dict["dependencies"] = [] + if dependency not in target_dict["dependencies"]: + target_dict["dependencies"].append(dependency) + # Sort the dependencies list in the order from dependents to dependencies. + # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D. + # Note: flat_list is already sorted in the order from dependencies to + # dependents. + if sort_dependencies and "dependencies" in target_dict: + target_dict["dependencies"] = [ + dep + for dep in reversed(flat_list) + if dep in target_dict["dependencies"] + ] + + +# Initialize this here to speed up MakePathRelative. +exception_re = re.compile(r"""["']?[-/$<>^]""") + + +def MakePathRelative(to_file, fro_file, item): + # If item is a relative path, it's relative to the build file dict that it's + # coming from. Fix it up to make it relative to the build file dict that + # it's going into. + # Exception: any |item| that begins with these special characters is + # returned without modification. 
+    #   /   Used when a path is already absolute (shortcut optimization;
+    #       such paths would be returned as absolute anyway)
+    #   $   Used for build environment variables
+    #   -   Used for some build environment flags (such as -lapr-1 in a
+    #       "libraries" section)
+    #   <   Used for our own variable and command expansions (see ExpandVariables)
+    #   >   Used for our own variable and command expansions (see ExpandVariables)
+    #   ^   Used for our own variable and command expansions (see ExpandVariables)
+    #
+    #   "/' Used when a value is quoted. If these are present, then we
+    #       check the second character instead.
+    #
+    if to_file == fro_file or exception_re.match(item):
+        return item
+    else:
+        # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
+        # temporary measure. This should really be addressed by keeping all paths
+        # in POSIX until actual project generation.
+        ret = os.path.normpath(
+            os.path.join(
+                gyp.common.RelativePath(
+                    os.path.dirname(fro_file), os.path.dirname(to_file)
+                ),
+                item,
+            )
+        ).replace("\\", "/")
+        if item.endswith("/"):
+            ret += "/"
+        return ret
+
+
+def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
+    # The Python documentation recommends that objects which do not support
+    # hashing set __hash__ to None. Python library objects follow this rule.
+    def is_hashable(val):
+        return val.__hash__
+
+    # If x is hashable, returns whether x is in s. Else returns whether x is in
+    # items.
+    def is_in_set_or_list(x, s, items):
+        if is_hashable(x):
+            return x in s
+        return x in items
+
+    prepend_index = 0
+
+    # Make membership testing of hashables in |to| (in particular, strings)
+    # faster.
+    hashable_to_set = {x for x in to if is_hashable(x)}
+    for item in fro:
+        singleton = False
+        if type(item) in (str, int):
+            # The cheap and easy case.
+            if is_paths:
+                to_item = MakePathRelative(to_file, fro_file, item)
+            else:
+                to_item = item
+
+            if not (type(item) is str and item.startswith("-")):
+                # Any string that doesn't begin with a "-" is a singleton - it can
+                # only appear once in a list, to be enforced by the list merge
+                # append or prepend.
+                singleton = True
+        elif type(item) is dict:
+            # Make a copy of the dictionary, continuing to look for paths to fix.
+            # The other intelligent aspects of merge processing won't apply
+            # because item is being merged into an empty dict.
+            to_item = {}
+            MergeDicts(to_item, item, to_file, fro_file)
+        elif type(item) is list:
+            # Recurse, making a copy of the list. If the list contains any
+            # descendant dicts, path fixing will occur. Note that here, custom
+            # values for is_paths and append are dropped; those are only to be
+            # applied to |to| and |fro|, not sublists of |fro|. append shouldn't
+            # matter anyway because the new |to_item| list is empty.
+            to_item = []
+            MergeLists(to_item, item, to_file, fro_file)
+        else:
+            raise TypeError(
+                "Attempt to merge list item of unsupported type "
+                + item.__class__.__name__
+            )
+
+        if append:
+            # If appending a singleton that's already in the list, don't append.
+            # This ensures that the earliest occurrence of the item will stay put.
+            if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
+                to.append(to_item)
+                if is_hashable(to_item):
+                    hashable_to_set.add(to_item)
+        else:
+            # If prepending a singleton that's already in the list, remove the
+            # existing instance and proceed with the prepend. This ensures that
+            # the item appears at the earliest possible position in the list.
+            while singleton and to_item in to:
+                to.remove(to_item)
+
+            # Don't just insert everything at index 0. That would prepend the new
+            # items to the list in reverse order, which would be an unwelcome
+            # surprise.
+            to.insert(prepend_index, to_item)
+            if is_hashable(to_item):
+                hashable_to_set.add(to_item)
+            prepend_index = prepend_index + 1
+
+
+def MergeDicts(to, fro, to_file, fro_file):
+    # I wanted to name the parameter "from" but it's a Python keyword...
+    for k, v in fro.items():
+        # It would be nice to do "if not k in to: to[k] = v" but that wouldn't
+        # give copy semantics. Something else may want to merge from the |fro|
+        # dict later, and having the same dict ref pointed to twice in the tree
+        # isn't what anyone wants considering that the dicts may subsequently be
+        # modified.
+        if k in to:
+            bad_merge = False
+            if type(v) in (str, int):
+                if type(to[k]) not in (str, int):
+                    bad_merge = True
+            elif not isinstance(v, type(to[k])):
+                bad_merge = True
+
+            if bad_merge:
+                raise TypeError(
+                    "Attempt to merge dict value of type "
+                    + v.__class__.__name__
+                    + " into incompatible type "
+                    + to[k].__class__.__name__
+                    + " for key "
+                    + k
+                )
+        if type(v) in (str, int):
+            # Overwrite the existing value, if any. Cheap and easy.
+            is_path = IsPathSection(k)
+            if is_path:
+                to[k] = MakePathRelative(to_file, fro_file, v)
+            else:
+                to[k] = v
+        elif type(v) is dict:
+            # Recurse, guaranteeing copies will be made of objects that require
+            # it.
+            if k not in to:
+                to[k] = {}
+            MergeDicts(to[k], v, to_file, fro_file)
+        elif type(v) is list:
+            # Lists in dicts can be merged with different policies, depending on
+            # how the key in the "from" dict (k, the from-key) is written.
+            #
+            # If the from-key has this     ...the to-list will have this action
+            # character appended:...       applied when receiving the from-list:
+            #   =                          replace
+            #   +                          prepend
+            #   ?                          set, only if to-list does not yet exist
+            #   (none)                     append
+            #
+            # This logic is list-specific, but since it relies on the associated
+            # dict key, it's checked in this dict-oriented function.
+            ext = k[-1]
+            append = True
+            if ext == "=":
+                list_base = k[:-1]
+                lists_incompatible = [list_base, list_base + "?"]
+                to[list_base] = []
+            elif ext == "+":
+                list_base = k[:-1]
+                lists_incompatible = [list_base + "=", list_base + "?"]
+                append = False
+            elif ext == "?":
+                list_base = k[:-1]
+                lists_incompatible = [list_base, list_base + "=", list_base + "+"]
+            else:
+                list_base = k
+                lists_incompatible = [list_base + "=", list_base + "?"]
+
+            # Some combinations of merge policies appearing together are
+            # meaningless. It's stupid to replace and append simultaneously, for
+            # example. Append and prepend are the only policies that can coexist.
+            for list_incompatible in lists_incompatible:
+                if list_incompatible in fro:
+                    raise GypError(
+                        "Incompatible list policies " + k + " and " + list_incompatible
+                    )
+
+            if list_base in to:
+                if ext == "?":
+                    # If the key ends in "?", the list will only be merged if it
+                    # doesn't already exist.
+                    continue
+                elif type(to[list_base]) is not list:
+                    # This may not have been checked above if merging in a list
+                    # with an extension character.
+                    raise TypeError(
+                        "Attempt to merge dict value of type "
+                        + v.__class__.__name__
+                        + " into incompatible type "
+                        + to[list_base].__class__.__name__
+                        + " for key "
+                        + list_base
+                        + " ("
+                        + k
+                        + ")"
+                    )
+            else:
+                to[list_base] = []
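+
+            # Editor's note: illustrative sketch only, not upstream gyp code. It
+            # demonstrates the four list policies documented above; the key
+            # names and file names are hypothetical.
+            def _demo_list_merge_policies():
+                to_demo = {"defines": ["A", "B"]}
+                MergeDicts(to_demo, {"defines": ["C"]}, "a.gyp", "a.gyp")  # append
+                MergeDicts(to_demo, {"defines+": ["Z"]}, "a.gyp", "a.gyp")  # prepend
+                MergeDicts(to_demo, {"defines?": ["X"]}, "a.gyp", "a.gyp")  # no-op, exists
+                MergeDicts(to_demo, {"defines=": ["ONLY"]}, "a.gyp", "a.gyp")  # replace
+                return to_demo  # -> {"defines": ["ONLY"]}
+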
+            # Call MergeLists, which will make copies of objects that require it.
+            # MergeLists can recurse back into MergeDicts, but only to make
+            # copies of dicts (with paths fixed); there will be no subsequent
+            # dict "merging" once entering a list, because lists are always
+            # replaced, appended to, or prepended to.
+            is_paths = IsPathSection(list_base)
+            MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
+        else:
+            raise TypeError(
+                "Attempt to merge dict value of unsupported type "
+                + v.__class__.__name__
+                + " for key "
+                + k
+            )
+
+
+def MergeConfigWithInheritance(
+    new_configuration_dict, build_file, target_dict, configuration, visited
+):
+    # Skip if previously visited.
+    if configuration in visited:
+        return
+
+    # Look at this configuration.
+    configuration_dict = target_dict["configurations"][configuration]
+
+    # Merge in parents.
+    for parent in configuration_dict.get("inherit_from", []):
+        MergeConfigWithInheritance(
+            new_configuration_dict,
+            build_file,
+            target_dict,
+            parent,
+            visited + [configuration],
+        )
+
+    # Merge it into the new config.
+    MergeDicts(new_configuration_dict, configuration_dict, build_file, build_file)
+
+    # Drop abstract.
+    if "abstract" in new_configuration_dict:
+        del new_configuration_dict["abstract"]
+
+
+def SetUpConfigurations(target, target_dict):
+    # key_suffixes is a list of key suffixes that might appear on key names.
+    # These suffixes are handled in conditional evaluations (for =, +, and ?)
+    # and rules/exclude processing (for ! and /). Keys with these suffixes
+    # should be treated the same as keys without.
+    key_suffixes = ["=", "+", "?", "!", "/"]
+
+    build_file = gyp.common.BuildFile(target)
+
+    # Provide a single configuration by default if none exists.
+    # TODO(mark): Signal an error if default_configurations exists but
+    # configurations does not.
+    if "configurations" not in target_dict:
+        target_dict["configurations"] = {"Default": {}}
+    if "default_configuration" not in target_dict:
+        concrete = [
+            i
+            for (i, config) in target_dict["configurations"].items()
+            if not config.get("abstract")
+        ]
+        target_dict["default_configuration"] = sorted(concrete)[0]
+
+    merged_configurations = {}
+    configs = target_dict["configurations"]
+    for (configuration, old_configuration_dict) in configs.items():
+        # Skip abstract configurations (saves work only).
+        if old_configuration_dict.get("abstract"):
+            continue
+        # Configurations inherit (most) settings from the enclosing target scope.
+        # Get the inheritance relationship right by making a copy of the target
+        # dict.
+        new_configuration_dict = {}
+        for (key, target_val) in target_dict.items():
+            key_ext = key[-1:]
+            if key_ext in key_suffixes:
+                key_base = key[:-1]
+            else:
+                key_base = key
+            if key_base not in non_configuration_keys:
+                new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
+
+        # Merge in configuration (with all its parents first).
+        MergeConfigWithInheritance(
+            new_configuration_dict, build_file, target_dict, configuration, []
+        )
+
+        merged_configurations[configuration] = new_configuration_dict
+
+    # Put the new configurations back into the target dict as a configuration.
+    for configuration in merged_configurations.keys():
+        target_dict["configurations"][configuration] = merged_configurations[
+            configuration
+        ]
+
+    # Now drop all the abstract ones.
+    configs = target_dict["configurations"]
+    target_dict["configurations"] = {
+        k: v for k, v in configs.items() if not v.get("abstract")
+    }
+
+    # Now that all of the target's configurations have been built, go through
+    # the target dict's keys and remove everything that's been moved into a
+    # "configurations" section.
+    delete_keys = []
+    for key in target_dict:
+        key_ext = key[-1:]
+        if key_ext in key_suffixes:
+            key_base = key[:-1]
+        else:
+            key_base = key
+        if key_base not in non_configuration_keys:
+            delete_keys.append(key)
+    for key in delete_keys:
+        del target_dict[key]
+
+    # Check the configurations to see if they contain invalid keys.
+    for configuration in target_dict["configurations"].keys():
+        configuration_dict = target_dict["configurations"][configuration]
+        for key in configuration_dict.keys():
+            if key in invalid_configuration_keys:
+                raise GypError(
+                    "%s not allowed in the %s configuration, found in "
+                    "target %s" % (key, configuration, target)
+                )
+
+
+def ProcessListFiltersInDict(name, the_dict):
+    """Process regular expression and exclusion-based filters on lists.
+
+    An exclusion list is in a dict key named with a trailing "!", like
+    "sources!". Every item in such a list is removed from the associated
+    main list, which in this example would be "sources". Removed items are
+    placed into a "sources_excluded" list in the dict.
+
+    Regular expression (regex) filters are contained in dict keys named with a
+    trailing "/", such as "sources/" to operate on the "sources" list. Regex
+    filters in a dict take the form:
+        'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
+                      ['include', '_mac\\.cc$'] ],
+    The first filter says to exclude all files ending in _linux.cc, _mac.cc,
+    and _win.cc. The second filter then includes all files ending in _mac.cc
+    that are now or were once in the "sources" list. Items matching an
+    "exclude" filter are subject to the same processing as would occur if they
+    were listed by name in an exclusion list (ending in "!"). Items matching
+    an "include" filter are brought back into the main list if previously
+    excluded by an exclusion list or exclusion regex filter. Subsequent
+    matching "exclude" patterns can still cause items to be excluded after
+    matching an "include".
+    """
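+
+    # Editor's note: illustrative sketch only, not upstream gyp code. A worked
+    # example of the semantics described in the docstring above; the file
+    # names are hypothetical.
+    def _demo_list_filters():
+        d = {
+            "sources": ["a_linux.cc", "a_mac.cc", "a_win.cc", "common.cc"],
+            "sources!": ["common.cc"],
+            "sources/": [["exclude", "_(linux|win)\\.cc$"]],
+        }
+        ProcessListFiltersInDict("demo", d)
+        # d["sources"] -> ["a_mac.cc"]
+        # d["sources_excluded"] -> ["a_linux.cc", "a_win.cc", "common.cc"]
+        return d
+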
+    # Look through the dictionary for any lists whose keys end in "!" or "/".
+    # These are lists that will be treated as exclude lists and regular
+    # expression-based exclude/include lists. Collect the lists that are
+    # needed first, looking for the lists that they operate on, and assemble
+    # them into |lists|. This is done in a separate loop up front, because
+    # the _included and _excluded keys need to be added to the_dict, and that
+    # can't be done while iterating through it.
+
+    lists = []
+    del_lists = []
+    for key, value in the_dict.items():
+        operation = key[-1]
+        if operation != "!" and operation != "/":
+            continue
+
+        if type(value) is not list:
+            raise ValueError(
+                name + " key " + key + " must be list, not " + value.__class__.__name__
+            )
+
+        list_key = key[:-1]
+        if list_key not in the_dict:
+            # This happens when there's a list like "sources!" but no
+            # corresponding "sources" list. Since there's nothing for it to
+            # operate on, queue up the "sources!" list for deletion now.
+            del_lists.append(key)
+            continue
+
+        if type(the_dict[list_key]) is not list:
+            value = the_dict[list_key]
+            raise ValueError(
+                name
+                + " key "
+                + list_key
+                + " must be list, not "
+                + value.__class__.__name__
+                + " when applying "
+                + {"!": "exclusion", "/": "regex"}[operation]
+            )
+
+        if list_key not in lists:
+            lists.append(list_key)
+
+    # Delete the lists that are known to be unneeded at this point.
+    for del_list in del_lists:
+        del the_dict[del_list]
+
+    for list_key in lists:
+        the_list = the_dict[list_key]
+
+        # Initialize the list_actions list, which is parallel to the_list. Each
+        # item in list_actions identifies whether the corresponding item in
+        # the_list should be excluded, unconditionally preserved (included), or
+        # whether no exclusion or inclusion has been applied. Items for which
+        # no exclusion or inclusion has been applied (yet) have value -1, items
+        # excluded have value 0, and items included have value 1. Includes and
+        # excludes override previous actions. All items in list_actions are
+        # initialized to -1 because no excludes or includes have been processed
+        # yet.
+        list_actions = list((-1,) * len(the_list))
+
+        exclude_key = list_key + "!"
+        if exclude_key in the_dict:
+            for exclude_item in the_dict[exclude_key]:
+                for index, list_item in enumerate(the_list):
+                    if exclude_item == list_item:
+                        # This item matches the exclude_item, so set its action
+                        # to 0 (exclude).
+                        list_actions[index] = 0
+
+            # The "whatever!" list is no longer needed, dump it.
+            del the_dict[exclude_key]
+
+        regex_key = list_key + "/"
+        if regex_key in the_dict:
+            for regex_item in the_dict[regex_key]:
+                [action, pattern] = regex_item
+                pattern_re = re.compile(pattern)
+
+                if action == "exclude":
+                    # This item matches an exclude regex, set its value to 0
+                    # (exclude).
+                    action_value = 0
+                elif action == "include":
+                    # This item matches an include regex, set its value to 1
+                    # (include).
+                    action_value = 1
+                else:
+                    # This is an action that doesn't make any sense.
+                    raise ValueError(
+                        "Unrecognized action "
+                        + action
+                        + " in "
+                        + name
+                        + " key "
+                        + regex_key
+                    )
+
+                for index, list_item in enumerate(the_list):
+                    if list_actions[index] == action_value:
+                        # Even if the regex matches, nothing will change so
+                        # continue (regex searches are expensive).
+                        continue
+                    if pattern_re.search(list_item):
+                        # Regular expression match.
+                        list_actions[index] = action_value
+
+            # The "whatever/" list is no longer needed, dump it.
+            del the_dict[regex_key]
+
+        # Add excluded items to the excluded list.
+        #
+        # Note that exclude_key ("sources!") is different from excluded_key
+        # ("sources_excluded"). The exclude_key list is input and it was already
+        # processed and deleted; the excluded_key list is output and it's about
+        # to be created.
+        excluded_key = list_key + "_excluded"
+        if excluded_key in the_dict:
+            raise GypError(
+                name + " key " + excluded_key + " must not be present prior"
+                " to applying exclusion/regex filters for " + list_key
+            )
+
+        excluded_list = []
+
+        # Go backwards through the list_actions list so that as items are
+        # deleted, the indices of items that haven't been seen yet don't shift.
+        # That means that things need to be prepended to excluded_list to
+        # maintain them in the same order that they existed in the_list.
+        for index in range(len(list_actions) - 1, -1, -1):
+            if list_actions[index] == 0:
+                # Dump anything with action 0 (exclude). Keep anything with action 1
+                # (include) or -1 (no include or exclude seen for the item).
+ excluded_list.insert(0, the_list[index]) + del the_list[index] + + # If anything was excluded, put the excluded list into the_dict at + # excluded_key. + if len(excluded_list) > 0: + the_dict[excluded_key] = excluded_list + + # Now recurse into subdicts and lists that may contain dicts. + for key, value in the_dict.items(): + if type(value) is dict: + ProcessListFiltersInDict(key, value) + elif type(value) is list: + ProcessListFiltersInList(key, value) + + +def ProcessListFiltersInList(name, the_list): + for item in the_list: + if type(item) is dict: + ProcessListFiltersInDict(name, item) + elif type(item) is list: + ProcessListFiltersInList(name, item) + + +def ValidateTargetType(target, target_dict): + """Ensures the 'type' field on the target is one of the known types. + + Arguments: + target: string, name of target. + target_dict: dict, target spec. + + Raises an exception on error. + """ + VALID_TARGET_TYPES = ( + "executable", + "loadable_module", + "static_library", + "shared_library", + "mac_kernel_extension", + "none", + "windows_driver", + ) + target_type = target_dict.get("type", None) + if target_type not in VALID_TARGET_TYPES: + raise GypError( + "Target %s has an invalid target type '%s'. " + "Must be one of %s." % (target, target_type, "/".join(VALID_TARGET_TYPES)) + ) + if ( + target_dict.get("standalone_static_library", 0) + and not target_type == "static_library" + ): + raise GypError( + "Target %s has type %s but standalone_static_library flag is" + " only valid for static_library type." % (target, target_type) + ) + + +def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): + """Ensures that the rules sections in target_dict are valid and consistent, + and determines which sources they apply to. + + Arguments: + target: string, name of target. + target_dict: dict, target spec containing "rules" and "sources" lists. + extra_sources_for_rules: a list of keys to scan for rule matches in + addition to 'sources'. + """ + + # Dicts to map between values found in rules' 'rule_name' and 'extension' + # keys and the rule dicts themselves. + rule_names = {} + rule_extensions = {} + + rules = target_dict.get("rules", []) + for rule in rules: + # Make sure that there's no conflict among rule names and extensions. + rule_name = rule["rule_name"] + if rule_name in rule_names: + raise GypError( + f"rule {rule_name} exists in duplicate, target {target}" + ) + rule_names[rule_name] = rule + + rule_extension = rule["extension"] + if rule_extension.startswith("."): + rule_extension = rule_extension[1:] + if rule_extension in rule_extensions: + raise GypError( + ( + "extension %s associated with multiple rules, " + + "target %s rules %s and %s" + ) + % ( + rule_extension, + target, + rule_extensions[rule_extension]["rule_name"], + rule_name, + ) + ) + rule_extensions[rule_extension] = rule + + # Make sure rule_sources isn't already there. It's going to be + # created below if needed. 
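+        # For example (hypothetical rule): given
+        #   {"rule_name": "protoc", "extension": "proto", ...}
+        # and a "sources" list containing "a.proto" and "b.cc", the code below
+        # attaches "rule_sources": ["a.proto"] to the rule.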
+ if "rule_sources" in rule: + raise GypError( + "rule_sources must not exist in input, target %s rule %s" + % (target, rule_name) + ) + + rule_sources = [] + source_keys = ["sources"] + source_keys.extend(extra_sources_for_rules) + for source_key in source_keys: + for source in target_dict.get(source_key, []): + (source_root, source_extension) = os.path.splitext(source) + if source_extension.startswith("."): + source_extension = source_extension[1:] + if source_extension == rule_extension: + rule_sources.append(source) + + if len(rule_sources) > 0: + rule["rule_sources"] = rule_sources + + +def ValidateRunAsInTarget(target, target_dict, build_file): + target_name = target_dict.get("target_name") + run_as = target_dict.get("run_as") + if not run_as: + return + if type(run_as) is not dict: + raise GypError( + "The 'run_as' in target %s from file %s should be a " + "dictionary." % (target_name, build_file) + ) + action = run_as.get("action") + if not action: + raise GypError( + "The 'run_as' in target %s from file %s must have an " + "'action' section." % (target_name, build_file) + ) + if type(action) is not list: + raise GypError( + "The 'action' for 'run_as' in target %s from file %s " + "must be a list." % (target_name, build_file) + ) + working_directory = run_as.get("working_directory") + if working_directory and type(working_directory) is not str: + raise GypError( + "The 'working_directory' for 'run_as' in target %s " + "in file %s should be a string." % (target_name, build_file) + ) + environment = run_as.get("environment") + if environment and type(environment) is not dict: + raise GypError( + "The 'environment' for 'run_as' in target %s " + "in file %s should be a dictionary." % (target_name, build_file) + ) + + +def ValidateActionsInTarget(target, target_dict, build_file): + """Validates the inputs to the actions in a target.""" + target_name = target_dict.get("target_name") + actions = target_dict.get("actions", []) + for action in actions: + action_name = action.get("action_name") + if not action_name: + raise GypError( + "Anonymous action in target %s. " + "An action must have an 'action_name' field." % target_name + ) + inputs = action.get("inputs", None) + if inputs is None: + raise GypError("Action in target %s has no inputs." % target_name) + action_command = action.get("action") + if action_command and not action_command[0]: + raise GypError("Empty action as command in target %s." % target_name) + + +def TurnIntIntoStrInDict(the_dict): + """Given dict the_dict, recursively converts all integers into strings. + """ + # Use items instead of iteritems because there's no need to try to look at + # reinserted keys and their associated values. + for k, v in the_dict.items(): + if type(v) is int: + v = str(v) + the_dict[k] = v + elif type(v) is dict: + TurnIntIntoStrInDict(v) + elif type(v) is list: + TurnIntIntoStrInList(v) + + if type(k) is int: + del the_dict[k] + the_dict[str(k)] = v + + +def TurnIntIntoStrInList(the_list): + """Given list the_list, recursively converts all integers into strings. 
+ """ + for index, item in enumerate(the_list): + if type(item) is int: + the_list[index] = str(item) + elif type(item) is dict: + TurnIntIntoStrInDict(item) + elif type(item) is list: + TurnIntIntoStrInList(item) + + +def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, data): + """Return only the targets that are deep dependencies of |root_targets|.""" + qualified_root_targets = [] + for target in root_targets: + target = target.strip() + qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list) + if not qualified_targets: + raise GypError("Could not find target %s" % target) + qualified_root_targets.extend(qualified_targets) + + wanted_targets = {} + for target in qualified_root_targets: + wanted_targets[target] = targets[target] + for dependency in dependency_nodes[target].DeepDependencies(): + wanted_targets[dependency] = targets[dependency] + + wanted_flat_list = [t for t in flat_list if t in wanted_targets] + + # Prune unwanted targets from each build_file's data dict. + for build_file in data["target_build_files"]: + if "targets" not in data[build_file]: + continue + new_targets = [] + for target in data[build_file]["targets"]: + qualified_name = gyp.common.QualifiedTarget( + build_file, target["target_name"], target["toolset"] + ) + if qualified_name in wanted_targets: + new_targets.append(target) + data[build_file]["targets"] = new_targets + + return wanted_targets, wanted_flat_list + + +def VerifyNoCollidingTargets(targets): + """Verify that no two targets in the same directory share the same name. + + Arguments: + targets: A list of targets in the form 'path/to/file.gyp:target_name'. + """ + # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'. + used = {} + for target in targets: + # Separate out 'path/to/file.gyp, 'target_name' from + # 'path/to/file.gyp:target_name'. + path, name = target.rsplit(":", 1) + # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'. + subdir, gyp = os.path.split(path) + # Use '.' for the current directory '', so that the error messages make + # more sense. + if not subdir: + subdir = "." + # Prepare a key like 'path/to:target_name'. + key = subdir + ":" + name + if key in used: + # Complain if this target is already used. + raise GypError( + 'Duplicate target name "%s" in directory "%s" used both ' + 'in "%s" and "%s".' % (name, subdir, gyp, used[key]) + ) + used[key] = gyp + + +def SetGeneratorGlobals(generator_input_info): + # Set up path_sections and non_configuration_keys with the default data plus + # the generator-specific data. + global path_sections + path_sections = set(base_path_sections) + path_sections.update(generator_input_info["path_sections"]) + + global non_configuration_keys + non_configuration_keys = base_non_configuration_keys[:] + non_configuration_keys.extend(generator_input_info["non_configuration_keys"]) + + global multiple_toolsets + multiple_toolsets = generator_input_info["generator_supports_multiple_toolsets"] + + global generator_filelist_paths + generator_filelist_paths = generator_input_info["generator_filelist_paths"] + + +def Load( + build_files, + variables, + includes, + depth, + generator_input_info, + check, + circular_check, + parallel, + root_targets, +): + SetGeneratorGlobals(generator_input_info) + # A generator can have other lists (in addition to sources) be processed + # for rules. + extra_sources_for_rules = generator_input_info["extra_sources_for_rules"] + + # Load build files. 
This loads every target-containing build file into + # the |data| dictionary such that the keys to |data| are build file names, + # and the values are the entire build file contents after "early" or "pre" + # processing has been done and includes have been resolved. + # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as + # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps + # track of the keys corresponding to "target" files. + data = {"target_build_files": set()} + # Normalize paths everywhere. This is important because paths will be + # used as keys to the data dict and for references between input files. + build_files = set(map(os.path.normpath, build_files)) + if parallel: + LoadTargetBuildFilesParallel( + build_files, data, variables, includes, depth, check, generator_input_info + ) + else: + aux_data = {} + for build_file in build_files: + try: + LoadTargetBuildFile( + build_file, data, aux_data, variables, includes, depth, check, True + ) + except Exception as e: + gyp.common.ExceptionAppend(e, "while trying to load %s" % build_file) + raise + + # Build a dict to access each target's subdict by qualified name. + targets = BuildTargetsDict(data) + + # Fully qualify all dependency links. + QualifyDependencies(targets) + + # Remove self-dependencies from targets that have 'prune_self_dependencies' + # set to 1. + RemoveSelfDependencies(targets) + + # Expand dependencies specified as build_file:*. + ExpandWildcardDependencies(targets, data) + + # Remove all dependencies marked as 'link_dependency' from the targets of + # type 'none'. + RemoveLinkDependenciesFromNoneTargets(targets) + + # Apply exclude (!) and regex (/) list filters only for dependency_sections. + for target_name, target_dict in targets.items(): + tmp_dict = {} + for key_base in dependency_sections: + for op in ("", "!", "/"): + key = key_base + op + if key in target_dict: + tmp_dict[key] = target_dict[key] + del target_dict[key] + ProcessListFiltersInDict(target_name, tmp_dict) + # Write the results back to |target_dict|. + for key in tmp_dict: + target_dict[key] = tmp_dict[key] + + # Make sure every dependency appears at most once. + RemoveDuplicateDependencies(targets) + + if circular_check: + # Make sure that any targets in a.gyp don't contain dependencies in other + # .gyp files that further depend on a.gyp. + VerifyNoGYPFileCircularDependencies(targets) + + [dependency_nodes, flat_list] = BuildDependencyList(targets) + + if root_targets: + # Remove, from |targets| and |flat_list|, the targets that are not deep + # dependencies of the targets specified in |root_targets|. + targets, flat_list = PruneUnwantedTargets( + targets, flat_list, dependency_nodes, root_targets, data + ) + + # Check that no two targets in the same directory have the same name. + VerifyNoCollidingTargets(flat_list) + + # Handle dependent settings of various types. + for settings_type in [ + "all_dependent_settings", + "direct_dependent_settings", + "link_settings", + ]: + DoDependentSettings(settings_type, flat_list, targets, dependency_nodes) + + # Take out the dependent settings now that they've been published to all + # of the targets that require them. + for target in flat_list: + if settings_type in targets[target]: + del targets[target][settings_type] + + # Make sure static libraries don't declare dependencies on other static + # libraries, but that linkables depend on all unlinked static libraries + # that they need so that their link steps will be correct. 
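+    # For example (hypothetical graph): if executable E depends on static
+    # library A, and A depends on static library B, the adjustment drops the
+    # A -> B link-time edge and makes E depend on both A and B directly, so
+    # E's link line names every static library it actually needs.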
+ gii = generator_input_info + if gii["generator_wants_static_library_dependencies_adjusted"]: + AdjustStaticLibraryDependencies( + flat_list, + targets, + dependency_nodes, + gii["generator_wants_sorted_dependencies"], + ) + + # Apply "post"/"late"/"target" variable expansions and condition evaluations. + for target in flat_list: + target_dict = targets[target] + build_file = gyp.common.BuildFile(target) + ProcessVariablesAndConditionsInDict( + target_dict, PHASE_LATE, variables, build_file + ) + + # Move everything that can go into a "configurations" section into one. + for target in flat_list: + target_dict = targets[target] + SetUpConfigurations(target, target_dict) + + # Apply exclude (!) and regex (/) list filters. + for target in flat_list: + target_dict = targets[target] + ProcessListFiltersInDict(target, target_dict) + + # Apply "latelate" variable expansions and condition evaluations. + for target in flat_list: + target_dict = targets[target] + build_file = gyp.common.BuildFile(target) + ProcessVariablesAndConditionsInDict( + target_dict, PHASE_LATELATE, variables, build_file + ) + + # Make sure that the rules make sense, and build up rule_sources lists as + # needed. Not all generators will need to use the rule_sources lists, but + # some may, and it seems best to build the list in a common spot. + # Also validate actions and run_as elements in targets. + for target in flat_list: + target_dict = targets[target] + build_file = gyp.common.BuildFile(target) + ValidateTargetType(target, target_dict) + ValidateRulesInTarget(target, target_dict, extra_sources_for_rules) + ValidateRunAsInTarget(target, target_dict, build_file) + ValidateActionsInTarget(target, target_dict, build_file) + + # Generators might not expect ints. Turn them into strs. + TurnIntIntoStrInDict(data) + + # TODO(mark): Return |data| for now because the generator needs a list of + # build files that came in. In the future, maybe it should just accept + # a list, and not the whole data dict. + return [flat_list, targets, data] diff --git a/node_modules/node-gyp/gyp/pylib/gyp/input_test.py b/node_modules/node-gyp/gyp/pylib/gyp/input_test.py new file mode 100644 index 0000000..a18f72e --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/input_test.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python3 + +# Copyright 2013 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
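+
+# The tests below can be run directly (assuming gyp/pylib is on PYTHONPATH):
+#   python3 input_test.py -v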
+ +"""Unit tests for the input.py file.""" + +import gyp.input +import unittest + + +class TestFindCycles(unittest.TestCase): + def setUp(self): + self.nodes = {} + for x in ("a", "b", "c", "d", "e"): + self.nodes[x] = gyp.input.DependencyGraphNode(x) + + def _create_dependency(self, dependent, dependency): + dependent.dependencies.append(dependency) + dependency.dependents.append(dependent) + + def test_no_cycle_empty_graph(self): + for label, node in self.nodes.items(): + self.assertEqual([], node.FindCycles()) + + def test_no_cycle_line(self): + self._create_dependency(self.nodes["a"], self.nodes["b"]) + self._create_dependency(self.nodes["b"], self.nodes["c"]) + self._create_dependency(self.nodes["c"], self.nodes["d"]) + + for label, node in self.nodes.items(): + self.assertEqual([], node.FindCycles()) + + def test_no_cycle_dag(self): + self._create_dependency(self.nodes["a"], self.nodes["b"]) + self._create_dependency(self.nodes["a"], self.nodes["c"]) + self._create_dependency(self.nodes["b"], self.nodes["c"]) + + for label, node in self.nodes.items(): + self.assertEqual([], node.FindCycles()) + + def test_cycle_self_reference(self): + self._create_dependency(self.nodes["a"], self.nodes["a"]) + + self.assertEqual( + [[self.nodes["a"], self.nodes["a"]]], self.nodes["a"].FindCycles() + ) + + def test_cycle_two_nodes(self): + self._create_dependency(self.nodes["a"], self.nodes["b"]) + self._create_dependency(self.nodes["b"], self.nodes["a"]) + + self.assertEqual( + [[self.nodes["a"], self.nodes["b"], self.nodes["a"]]], + self.nodes["a"].FindCycles(), + ) + self.assertEqual( + [[self.nodes["b"], self.nodes["a"], self.nodes["b"]]], + self.nodes["b"].FindCycles(), + ) + + def test_two_cycles(self): + self._create_dependency(self.nodes["a"], self.nodes["b"]) + self._create_dependency(self.nodes["b"], self.nodes["a"]) + + self._create_dependency(self.nodes["b"], self.nodes["c"]) + self._create_dependency(self.nodes["c"], self.nodes["b"]) + + cycles = self.nodes["a"].FindCycles() + self.assertTrue([self.nodes["a"], self.nodes["b"], self.nodes["a"]] in cycles) + self.assertTrue([self.nodes["b"], self.nodes["c"], self.nodes["b"]] in cycles) + self.assertEqual(2, len(cycles)) + + def test_big_cycle(self): + self._create_dependency(self.nodes["a"], self.nodes["b"]) + self._create_dependency(self.nodes["b"], self.nodes["c"]) + self._create_dependency(self.nodes["c"], self.nodes["d"]) + self._create_dependency(self.nodes["d"], self.nodes["e"]) + self._create_dependency(self.nodes["e"], self.nodes["a"]) + + self.assertEqual( + [ + [ + self.nodes["a"], + self.nodes["b"], + self.nodes["c"], + self.nodes["d"], + self.nodes["e"], + self.nodes["a"], + ] + ], + self.nodes["a"].FindCycles(), + ) + + +if __name__ == "__main__": + unittest.main() diff --git a/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py new file mode 100644 index 0000000..59647c9 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py @@ -0,0 +1,771 @@ +#!/usr/bin/env python3 +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Utility functions to perform Xcode-style build steps. + +These functions are executed via gyp-mac-tool when using the Makefile generator. 
+""" + + +import fcntl +import fnmatch +import glob +import json +import os +import plistlib +import re +import shutil +import struct +import subprocess +import sys +import tempfile + + +def main(args): + executor = MacTool() + exit_code = executor.Dispatch(args) + if exit_code is not None: + sys.exit(exit_code) + + +class MacTool: + """This class performs all the Mac tooling steps. The methods can either be + executed directly, or dispatched from an argument list.""" + + def Dispatch(self, args): + """Dispatches a string command to a method.""" + if len(args) < 1: + raise Exception("Not enough arguments") + + method = "Exec%s" % self._CommandifyName(args[0]) + return getattr(self, method)(*args[1:]) + + def _CommandifyName(self, name_string): + """Transforms a tool name like copy-info-plist to CopyInfoPlist""" + return name_string.title().replace("-", "") + + def ExecCopyBundleResource(self, source, dest, convert_to_binary): + """Copies a resource file to the bundle/Resources directory, performing any + necessary compilation on each resource.""" + convert_to_binary = convert_to_binary == "True" + extension = os.path.splitext(source)[1].lower() + if os.path.isdir(source): + # Copy tree. + # TODO(thakis): This copies file attributes like mtime, while the + # single-file branch below doesn't. This should probably be changed to + # be consistent with the single-file branch. + if os.path.exists(dest): + shutil.rmtree(dest) + shutil.copytree(source, dest) + elif extension == ".xib": + return self._CopyXIBFile(source, dest) + elif extension == ".storyboard": + return self._CopyXIBFile(source, dest) + elif extension == ".strings" and not convert_to_binary: + self._CopyStringsFile(source, dest) + else: + if os.path.exists(dest): + os.unlink(dest) + shutil.copy(source, dest) + + if convert_to_binary and extension in (".plist", ".strings"): + self._ConvertToBinary(dest) + + def _CopyXIBFile(self, source, dest): + """Compiles a XIB file with ibtool into a binary plist in the bundle.""" + + # ibtool sometimes crashes with relative paths. See crbug.com/314728. 
+        base = os.path.dirname(os.path.realpath(__file__))
+        if os.path.relpath(source):
+            source = os.path.join(base, source)
+        if os.path.relpath(dest):
+            dest = os.path.join(base, dest)
+
+        args = ["xcrun", "ibtool", "--errors", "--warnings", "--notices"]
+
+        if os.environ["XCODE_VERSION_ACTUAL"] > "0700":
+            args.extend(["--auto-activate-custom-fonts"])
+        if "IPHONEOS_DEPLOYMENT_TARGET" in os.environ:
+            args.extend(
+                [
+                    "--target-device",
+                    "iphone",
+                    "--target-device",
+                    "ipad",
+                    "--minimum-deployment-target",
+                    os.environ["IPHONEOS_DEPLOYMENT_TARGET"],
+                ]
+            )
+        else:
+            args.extend(
+                [
+                    "--target-device",
+                    "mac",
+                    "--minimum-deployment-target",
+                    os.environ["MACOSX_DEPLOYMENT_TARGET"],
+                ]
+            )
+
+        args.extend(
+            ["--output-format", "human-readable-text", "--compile", dest, source]
+        )
+
+        ibtool_section_re = re.compile(r"/\*.*\*/")
+        ibtool_re = re.compile(r".*note:.*is clipping its content")
+        try:
+            # check_output returns bytes; decode so the str regexes above match.
+            stdout = subprocess.check_output(args).decode("utf-8")
+        except subprocess.CalledProcessError as e:
+            print(e.output)
+            raise
+        current_section_header = None
+        for line in stdout.splitlines():
+            if ibtool_section_re.match(line):
+                current_section_header = line
+            elif not ibtool_re.match(line):
+                if current_section_header:
+                    print(current_section_header)
+                    current_section_header = None
+                print(line)
+        return 0
+
+    def _ConvertToBinary(self, dest):
+        subprocess.check_call(
+            ["xcrun", "plutil", "-convert", "binary1", "-o", dest, dest]
+        )
+
+    def _CopyStringsFile(self, source, dest):
+        """Copies a .strings file, re-encoding the input into UTF-16."""
+        input_code = self._DetectInputEncoding(source) or "UTF-8"
+
+        # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call
+        # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints
+        #     CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
+        #     semicolon in dictionary.
+        # on invalid files. Do the same kind of validation.
+        import CoreFoundation
+
+        with open(source, "rb") as in_file:
+            s = in_file.read()
+        d = CoreFoundation.CFDataCreate(None, s, len(s))
+        _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
+        if error:
+            return
+
+        with open(dest, "wb") as fp:
+            fp.write(s.decode(input_code).encode("UTF-16"))
+
+    def _DetectInputEncoding(self, file_name):
+        """Reads the first few bytes from file_name and tries to guess the text
+        encoding. Returns None as a guess if it can't detect it."""
+        with open(file_name, "rb") as fp:
+            try:
+                header = fp.read(3)
+            except Exception:
+                return None
+        if header.startswith(b"\xFE\xFF"):
+            return "UTF-16"
+        elif header.startswith(b"\xFF\xFE"):
+            return "UTF-16"
+        elif header.startswith(b"\xEF\xBB\xBF"):
+            return "UTF-8"
+        else:
+            return None
+
+    def ExecCopyInfoPlist(self, source, dest, convert_to_binary, *keys):
+        """Copies the |source| Info.plist to the destination directory |dest|."""
+        # Read the source Info.plist into memory.
+        with open(source) as fd:
+            lines = fd.read()
+
+        # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
+        # (plistlib.readPlistFromString/writePlistToString are Python 2-only
+        # names; use loads/dumps, which exist on Python 3.)
+        plist = plistlib.loads(lines.encode("utf-8"))
+        if keys:
+            plist.update(json.loads(keys[0]))
+        lines = plistlib.dumps(plist).decode("utf-8")
+
+        # Go through all the environment variables and replace them as variables in
+        # the file.
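+        # e.g. (hypothetical) with PRODUCT_NAME=MyApp, every literal
+        # "${PRODUCT_NAME}" below becomes "MyApp", and
+        # "${PRODUCT_NAME:rfc1034identifier}" becomes the same value with
+        # non-identifier characters replaced by "-".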
+        IDENT_RE = re.compile(r"[_/\s]")
+        for key in os.environ:
+            if key.startswith("_"):
+                continue
+            evar = "${%s}" % key
+            evalue = os.environ[key]
+            lines = lines.replace(evar, evalue)
+
+            # Xcode supports various suffixes on environment variables, which are
+            # all undocumented. :rfc1034identifier is used in the standard project
+            # template these days, and :identifier was used earlier. They are used to
+            # convert non-url characters into things that look like valid urls --
+            # except that the replacement character for :identifier, '_' isn't valid
+            # in a URL either -- oops, hence :rfc1034identifier was born.
+            evar = "${%s:identifier}" % key
+            evalue = IDENT_RE.sub("_", os.environ[key])
+            lines = lines.replace(evar, evalue)
+
+            evar = "${%s:rfc1034identifier}" % key
+            evalue = IDENT_RE.sub("-", os.environ[key])
+            lines = lines.replace(evar, evalue)
+
+        # Remove any keys with values that haven't been replaced.
+        lines = lines.splitlines()
+        for i in range(len(lines)):
+            if lines[i].strip().startswith("${"):
+                lines[i] = None
+                lines[i - 1] = None
+        lines = "\n".join(line for line in lines if line is not None)
+
+        # Write out the file with variables replaced.
+        with open(dest, "w") as fd:
+            fd.write(lines)
+
+        # Now that the Info.plist file has been "compiled", write out the
+        # PkgInfo file.
+        self._WritePkgInfo(dest)
+
+        if convert_to_binary == "True":
+            self._ConvertToBinary(dest)
+
+    def _WritePkgInfo(self, info_plist):
+        """This writes the PkgInfo file from the data stored in Info.plist."""
+        plist = plistlib.readPlist(info_plist)
+        if not plist:
+            return
+
+        # Only create PkgInfo for executable types.
+        package_type = plist["CFBundlePackageType"]
+        if package_type != "APPL":
+            return
+
+        # The format of PkgInfo is eight characters, representing the bundle type
+        # and bundle signature, each four characters. If that is missing, four
+        # '?' characters are used instead.
+        signature_code = plist.get("CFBundleSignature", "????")
+        if len(signature_code) != 4:  # Wrong length resets everything, too.
+            signature_code = "?" * 4
+
+        dest = os.path.join(os.path.dirname(info_plist), "PkgInfo")
+        with open(dest, "w") as fp:
+            fp.write(f"{package_type}{signature_code}")
+
+    def ExecFlock(self, lockfile, *cmd_list):
+        """Emulates the most basic behavior of Linux's flock(1)."""
+        # Rely on exception handling to report errors.
+        fd = os.open(lockfile, os.O_RDONLY | os.O_NOCTTY | os.O_CREAT, 0o666)
+        fcntl.flock(fd, fcntl.LOCK_EX)
+        return subprocess.call(cmd_list)
+
+    def ExecFilterLibtool(self, *cmd_list):
+        """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
+        symbols'."""
+        libtool_re = re.compile(
+            r"^.*libtool: (?:for architecture: \S* )?" r"file: .* has no symbols$"
+        )
+        libtool_re5 = re.compile(
+            r"^.*libtool: warning for library: "
+            + r".* the table of contents is empty "
+            + r"\(no object file members in the library define global symbols\)$"
+        )
+        env = os.environ.copy()
+        # Ref:
+        # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
+        # The problem with this flag is that it resets the file mtime on the file to
+        # epoch=0, e.g. 1970-1-1 or 1969-12-31 depending on timezone.
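+        # (The os.utime() call below restores a sane mtime on the output .a
+        # once the command succeeds.)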
+ env["ZERO_AR_DATE"] = "1" + libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env) + err = libtoolout.communicate()[1].decode("utf-8") + for line in err.splitlines(): + if not libtool_re.match(line) and not libtool_re5.match(line): + print(line, file=sys.stderr) + # Unconditionally touch the output .a file on the command line if present + # and the command succeeded. A bit hacky. + if not libtoolout.returncode: + for i in range(len(cmd_list) - 1): + if cmd_list[i] == "-o" and cmd_list[i + 1].endswith(".a"): + os.utime(cmd_list[i + 1], None) + break + return libtoolout.returncode + + def ExecPackageIosFramework(self, framework): + # Find the name of the binary based on the part before the ".framework". + binary = os.path.basename(framework).split(".")[0] + module_path = os.path.join(framework, "Modules") + if not os.path.exists(module_path): + os.mkdir(module_path) + module_template = ( + "framework module %s {\n" + ' umbrella header "%s.h"\n' + "\n" + " export *\n" + " module * { export * }\n" + "}\n" % (binary, binary) + ) + + with open(os.path.join(module_path, "module.modulemap"), "w") as module_file: + module_file.write(module_template) + + def ExecPackageFramework(self, framework, version): + """Takes a path to Something.framework and the Current version of that and + sets up all the symlinks.""" + # Find the name of the binary based on the part before the ".framework". + binary = os.path.basename(framework).split(".")[0] + + CURRENT = "Current" + RESOURCES = "Resources" + VERSIONS = "Versions" + + if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): + # Binary-less frameworks don't seem to contain symlinks (see e.g. + # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). + return + + # Move into the framework directory to set the symlinks correctly. + pwd = os.getcwd() + os.chdir(framework) + + # Set up the Current version. + self._Relink(version, os.path.join(VERSIONS, CURRENT)) + + # Set up the root symlinks. + self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) + self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) + + # Back to where we were before! + os.chdir(pwd) + + def _Relink(self, dest, link): + """Creates a symlink to |dest| named |link|. If |link| already exists, + it is overwritten.""" + if os.path.lexists(link): + os.remove(link) + os.symlink(dest, link) + + def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers): + framework_name = os.path.basename(framework).split(".")[0] + all_headers = [os.path.abspath(header) for header in all_headers] + filelist = {} + for header in all_headers: + filename = os.path.basename(header) + filelist[filename] = header + filelist[os.path.join(framework_name, filename)] = header + WriteHmap(out, filelist) + + def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers): + header_path = os.path.join(framework, "Headers") + if not os.path.exists(header_path): + os.makedirs(header_path) + for header in copy_headers: + shutil.copy(header, os.path.join(header_path, os.path.basename(header))) + + def ExecCompileXcassets(self, keys, *inputs): + """Compiles multiple .xcassets files into a single .car file. + + This invokes 'actool' to compile all the inputs .xcassets files. The + |keys| arguments is a json-encoded dictionary of extra arguments to + pass to 'actool' when the asset catalogs contains an application icon + or a launch image. + + Note that 'actool' does not create the Assets.car file if the asset + catalogs does not contains imageset. 
+ """ + command_line = [ + "xcrun", + "actool", + "--output-format", + "human-readable-text", + "--compress-pngs", + "--notices", + "--warnings", + "--errors", + ] + is_iphone_target = "IPHONEOS_DEPLOYMENT_TARGET" in os.environ + if is_iphone_target: + platform = os.environ["CONFIGURATION"].split("-")[-1] + if platform not in ("iphoneos", "iphonesimulator"): + platform = "iphonesimulator" + command_line.extend( + [ + "--platform", + platform, + "--target-device", + "iphone", + "--target-device", + "ipad", + "--minimum-deployment-target", + os.environ["IPHONEOS_DEPLOYMENT_TARGET"], + "--compile", + os.path.abspath(os.environ["CONTENTS_FOLDER_PATH"]), + ] + ) + else: + command_line.extend( + [ + "--platform", + "macosx", + "--target-device", + "mac", + "--minimum-deployment-target", + os.environ["MACOSX_DEPLOYMENT_TARGET"], + "--compile", + os.path.abspath(os.environ["UNLOCALIZED_RESOURCES_FOLDER_PATH"]), + ] + ) + if keys: + keys = json.loads(keys) + for key, value in keys.items(): + arg_name = "--" + key + if isinstance(value, bool): + if value: + command_line.append(arg_name) + elif isinstance(value, list): + for v in value: + command_line.append(arg_name) + command_line.append(str(v)) + else: + command_line.append(arg_name) + command_line.append(str(value)) + # Note: actool crashes if inputs path are relative, so use os.path.abspath + # to get absolute path name for inputs. + command_line.extend(map(os.path.abspath, inputs)) + subprocess.check_call(command_line) + + def ExecMergeInfoPlist(self, output, *inputs): + """Merge multiple .plist files into a single .plist file.""" + merged_plist = {} + for path in inputs: + plist = self._LoadPlistMaybeBinary(path) + self._MergePlist(merged_plist, plist) + plistlib.writePlist(merged_plist, output) + + def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve): + """Code sign a bundle. + + This function tries to code sign an iOS bundle, following the same + algorithm as Xcode: + 1. pick the provisioning profile that best match the bundle identifier, + and copy it into the bundle as embedded.mobileprovision, + 2. copy Entitlements.plist from user or SDK next to the bundle, + 3. code sign the bundle. + """ + substitutions, overrides = self._InstallProvisioningProfile( + provisioning, self._GetCFBundleIdentifier() + ) + entitlements_path = self._InstallEntitlements( + entitlements, substitutions, overrides + ) + + args = ["codesign", "--force", "--sign", key] + if preserve == "True": + args.extend(["--deep", "--preserve-metadata=identifier,entitlements"]) + else: + args.extend(["--entitlements", entitlements_path]) + args.extend(["--timestamp=none", path]) + subprocess.check_call(args) + + def _InstallProvisioningProfile(self, profile, bundle_identifier): + """Installs embedded.mobileprovision into the bundle. + + Args: + profile: string, optional, short name of the .mobileprovision file + to use, if empty or the file is missing, the best file installed + will be used + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + + Returns: + A tuple containing two dictionary: variables substitutions and values + to overrides when generating the entitlements file. 
+ """ + source_path, provisioning_data, team_id = self._FindProvisioningProfile( + profile, bundle_identifier + ) + target_path = os.path.join( + os.environ["BUILT_PRODUCTS_DIR"], + os.environ["CONTENTS_FOLDER_PATH"], + "embedded.mobileprovision", + ) + shutil.copy2(source_path, target_path) + substitutions = self._GetSubstitutions(bundle_identifier, team_id + ".") + return substitutions, provisioning_data["Entitlements"] + + def _FindProvisioningProfile(self, profile, bundle_identifier): + """Finds the .mobileprovision file to use for signing the bundle. + + Checks all the installed provisioning profiles (or if the user specified + the PROVISIONING_PROFILE variable, only consult it) and select the most + specific that correspond to the bundle identifier. + + Args: + profile: string, optional, short name of the .mobileprovision file + to use, if empty or the file is missing, the best file installed + will be used + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + + Returns: + A tuple of the path to the selected provisioning profile, the data of + the embedded plist in the provisioning profile and the team identifier + to use for code signing. + + Raises: + SystemExit: if no .mobileprovision can be used to sign the bundle. + """ + profiles_dir = os.path.join( + os.environ["HOME"], "Library", "MobileDevice", "Provisioning Profiles" + ) + if not os.path.isdir(profiles_dir): + print( + "cannot find mobile provisioning for %s" % (bundle_identifier), + file=sys.stderr, + ) + sys.exit(1) + provisioning_profiles = None + if profile: + profile_path = os.path.join(profiles_dir, profile + ".mobileprovision") + if os.path.exists(profile_path): + provisioning_profiles = [profile_path] + if not provisioning_profiles: + provisioning_profiles = glob.glob( + os.path.join(profiles_dir, "*.mobileprovision") + ) + valid_provisioning_profiles = {} + for profile_path in provisioning_profiles: + profile_data = self._LoadProvisioningProfile(profile_path) + app_id_pattern = profile_data.get("Entitlements", {}).get( + "application-identifier", "" + ) + for team_identifier in profile_data.get("TeamIdentifier", []): + app_id = f"{team_identifier}.{bundle_identifier}" + if fnmatch.fnmatch(app_id, app_id_pattern): + valid_provisioning_profiles[app_id_pattern] = ( + profile_path, + profile_data, + team_identifier, + ) + if not valid_provisioning_profiles: + print( + "cannot find mobile provisioning for %s" % (bundle_identifier), + file=sys.stderr, + ) + sys.exit(1) + # If the user has multiple provisioning profiles installed that can be + # used for ${bundle_identifier}, pick the most specific one (ie. the + # provisioning profile whose pattern is the longest). + selected_key = max(valid_provisioning_profiles, key=lambda v: len(v)) + return valid_provisioning_profiles[selected_key] + + def _LoadProvisioningProfile(self, profile_path): + """Extracts the plist embedded in a provisioning profile. + + Args: + profile_path: string, path to the .mobileprovision file + + Returns: + Content of the plist embedded in the provisioning profile as a dictionary. 
+ """ + with tempfile.NamedTemporaryFile() as temp: + subprocess.check_call( + ["security", "cms", "-D", "-i", profile_path, "-o", temp.name] + ) + return self._LoadPlistMaybeBinary(temp.name) + + def _MergePlist(self, merged_plist, plist): + """Merge |plist| into |merged_plist|.""" + for key, value in plist.items(): + if isinstance(value, dict): + merged_value = merged_plist.get(key, {}) + if isinstance(merged_value, dict): + self._MergePlist(merged_value, value) + merged_plist[key] = merged_value + else: + merged_plist[key] = value + else: + merged_plist[key] = value + + def _LoadPlistMaybeBinary(self, plist_path): + """Loads into a memory a plist possibly encoded in binary format. + + This is a wrapper around plistlib.readPlist that tries to convert the + plist to the XML format if it can't be parsed (assuming that it is in + the binary format). + + Args: + plist_path: string, path to a plist file, in XML or binary format + + Returns: + Content of the plist as a dictionary. + """ + try: + # First, try to read the file using plistlib that only supports XML, + # and if an exception is raised, convert a temporary copy to XML and + # load that copy. + return plistlib.readPlist(plist_path) + except Exception: + pass + with tempfile.NamedTemporaryFile() as temp: + shutil.copy2(plist_path, temp.name) + subprocess.check_call(["plutil", "-convert", "xml1", temp.name]) + return plistlib.readPlist(temp.name) + + def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix): + """Constructs a dictionary of variable substitutions for Entitlements.plist. + + Args: + bundle_identifier: string, value of CFBundleIdentifier from Info.plist + app_identifier_prefix: string, value for AppIdentifierPrefix + + Returns: + Dictionary of substitutions to apply when generating Entitlements.plist. + """ + return { + "CFBundleIdentifier": bundle_identifier, + "AppIdentifierPrefix": app_identifier_prefix, + } + + def _GetCFBundleIdentifier(self): + """Extracts CFBundleIdentifier value from Info.plist in the bundle. + + Returns: + Value of CFBundleIdentifier in the Info.plist located in the bundle. + """ + info_plist_path = os.path.join( + os.environ["TARGET_BUILD_DIR"], os.environ["INFOPLIST_PATH"] + ) + info_plist_data = self._LoadPlistMaybeBinary(info_plist_path) + return info_plist_data["CFBundleIdentifier"] + + def _InstallEntitlements(self, entitlements, substitutions, overrides): + """Generates and install the ${BundleName}.xcent entitlements file. + + Expands variables "$(variable)" pattern in the source entitlements file, + add extra entitlements defined in the .mobileprovision file and the copy + the generated plist to "${BundlePath}.xcent". + + Args: + entitlements: string, optional, path to the Entitlements.plist template + to use, defaults to "${SDKROOT}/Entitlements.plist" + substitutions: dictionary, variable substitutions + overrides: dictionary, values to add to the entitlements + + Returns: + Path to the generated entitlements file. 
+ """ + source_path = entitlements + target_path = os.path.join( + os.environ["BUILT_PRODUCTS_DIR"], os.environ["PRODUCT_NAME"] + ".xcent" + ) + if not source_path: + source_path = os.path.join(os.environ["SDKROOT"], "Entitlements.plist") + shutil.copy2(source_path, target_path) + data = self._LoadPlistMaybeBinary(target_path) + data = self._ExpandVariables(data, substitutions) + if overrides: + for key in overrides: + if key not in data: + data[key] = overrides[key] + plistlib.writePlist(data, target_path) + return target_path + + def _ExpandVariables(self, data, substitutions): + """Expands variables "$(variable)" in data. + + Args: + data: object, can be either string, list or dictionary + substitutions: dictionary, variable substitutions to perform + + Returns: + Copy of data where each references to "$(variable)" has been replaced + by the corresponding value found in substitutions, or left intact if + the key was not found. + """ + if isinstance(data, str): + for key, value in substitutions.items(): + data = data.replace("$(%s)" % key, value) + return data + if isinstance(data, list): + return [self._ExpandVariables(v, substitutions) for v in data] + if isinstance(data, dict): + return {k: self._ExpandVariables(data[k], substitutions) for k in data} + return data + + +def NextGreaterPowerOf2(x): + return 2 ** (x).bit_length() + + +def WriteHmap(output_name, filelist): + """Generates a header map based on |filelist|. + + Per Mark Mentovai: + A header map is structured essentially as a hash table, keyed by names used + in #includes, and providing pathnames to the actual files. + + The implementation below and the comment above comes from inspecting: + http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt + while also looking at the implementation in clang in: + https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp + """ + magic = 1751998832 + version = 1 + _reserved = 0 + count = len(filelist) + capacity = NextGreaterPowerOf2(count) + strings_offset = 24 + (12 * capacity) + max_value_length = max(len(value) for value in filelist.values()) + + out = open(output_name, "wb") + out.write( + struct.pack( + " 0 or arg.count("/") > 1: + arg = os.path.normpath(arg) + + # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes + # preceding it, and results in n backslashes + the quote. So we substitute + # in 2* what we match, +1 more, plus the quote. + if quote_cmd: + arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg) + + # %'s also need to be doubled otherwise they're interpreted as batch + # positional arguments. Also make sure to escape the % so that they're + # passed literally through escaping so they can be singled to just the + # original %. Otherwise, trying to pass the literal representation that + # looks like an environment variable to the shell (e.g. %PATH%) would fail. + arg = arg.replace("%", "%%") + + # These commands are used in rsp files, so no escaping for the shell (via ^) + # is necessary. + + # As a workaround for programs that don't use CommandLineToArgvW, gyp + # supports msvs_quote_cmd=0, which simply disables all quoting. + if quote_cmd: + # Finally, wrap the whole thing in quotes so that the above quote rule + # applies and whitespace isn't a word break. + return f'"{arg}"' + + return arg + + +def EncodeRspFileList(args, quote_cmd): + """Process a list of arguments using QuoteCmdExeArgument.""" + # Note that the first argument is assumed to be the command. 
Don't add + # quotes around it because then built-ins like 'echo', etc. won't work. + # Take care to normpath only the path in the case of 'call ../x.bat' because + # otherwise the whole thing is incorrectly interpreted as a path and not + # normalized correctly. + if not args: + return "" + if args[0].startswith("call "): + call, program = args[0].split(" ", 1) + program = call + " " + os.path.normpath(program) + else: + program = os.path.normpath(args[0]) + return (program + " " + + " ".join(QuoteForRspFile(arg, quote_cmd) for arg in args[1:])) + + +def _GenericRetrieve(root, default, path): + """Given a list of dictionary keys |path| and a tree of dicts |root|, find + value at path, or return |default| if any of the path doesn't exist.""" + if not root: + return default + if not path: + return root + return _GenericRetrieve(root.get(path[0]), default, path[1:]) + + +def _AddPrefix(element, prefix): + """Add |prefix| to |element| or each subelement if element is iterable.""" + if element is None: + return element + # Note, not Iterable because we don't want to handle strings like that. + if isinstance(element, list) or isinstance(element, tuple): + return [prefix + e for e in element] + else: + return prefix + element + + +def _DoRemapping(element, map): + """If |element| then remap it through |map|. If |element| is iterable then + each item will be remapped. Any elements not found will be removed.""" + if map is not None and element is not None: + if not callable(map): + map = map.get # Assume it's a dict, otherwise a callable to do the remap. + if isinstance(element, list) or isinstance(element, tuple): + element = filter(None, [map(elem) for elem in element]) + else: + element = map(element) + return element + + +def _AppendOrReturn(append, element): + """If |append| is None, simply return |element|. If |append| is not None, + then add |element| to it, adding each item in |element| if it's a list or + tuple.""" + if append is not None and element is not None: + if isinstance(element, list) or isinstance(element, tuple): + append.extend(element) + else: + append.append(element) + else: + return element + + +def _FindDirectXInstallation(): + """Try to find an installation location for the DirectX SDK. Check for the + standard environment variable, and if that doesn't exist, try to find + via the registry. May return None if not found in either location.""" + # Return previously calculated value, if there is one + if hasattr(_FindDirectXInstallation, "dxsdk_dir"): + return _FindDirectXInstallation.dxsdk_dir + + dxsdk_dir = os.environ.get("DXSDK_DIR") + if not dxsdk_dir: + # Setup params to pass to and attempt to launch reg.exe. + cmd = ["reg.exe", "query", r"HKLM\Software\Microsoft\DirectX", "/s"] + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout = p.communicate()[0].decode("utf-8") + for line in stdout.splitlines(): + if "InstallPath" in line: + dxsdk_dir = line.split(" ")[3] + "\\" + + # Cache return value + _FindDirectXInstallation.dxsdk_dir = dxsdk_dir + return dxsdk_dir + + +def GetGlobalVSMacroEnv(vs_version): + """Get a dict of variables mapping internal VS macro names to their gyp + equivalents. Returns all variables that are independent of the target.""" + env = {} + # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when + # Visual Studio is actually installed. 
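+    # e.g. (hypothetical install): env["$(VSInstallDir)"] might map to
+    # r"C:\Program Files (x86)\Microsoft Visual Studio 14.0" and
+    # env["$(VCInstallDir)"] to the same path plus "\VC\".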
+ if vs_version.Path(): + env["$(VSInstallDir)"] = vs_version.Path() + env["$(VCInstallDir)"] = os.path.join(vs_version.Path(), "VC") + "\\" + # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be + # set. This happens when the SDK is sync'd via src-internal, rather than + # by typical end-user installation of the SDK. If it's not set, we don't + # want to leave the unexpanded variable in the path, so simply strip it. + dxsdk_dir = _FindDirectXInstallation() + env["$(DXSDK_DIR)"] = dxsdk_dir if dxsdk_dir else "" + # Try to find an installation location for the Windows DDK by checking + # the WDK_DIR environment variable, may be None. + env["$(WDK_DIR)"] = os.environ.get("WDK_DIR", "") + return env + + +def ExtractSharedMSVSSystemIncludes(configs, generator_flags): + """Finds msvs_system_include_dirs that are common to all targets, removes + them from all targets, and returns an OrderedSet containing them.""" + all_system_includes = OrderedSet(configs[0].get("msvs_system_include_dirs", [])) + for config in configs[1:]: + system_includes = config.get("msvs_system_include_dirs", []) + all_system_includes = all_system_includes & OrderedSet(system_includes) + if not all_system_includes: + return None + # Expand macros in all_system_includes. + env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags)) + expanded_system_includes = OrderedSet( + [ExpandMacros(include, env) for include in all_system_includes] + ) + if any(["$" in include for include in expanded_system_includes]): + # Some path relies on target-specific variables, bail. + return None + + # Remove system includes shared by all targets from the targets. + for config in configs: + includes = config.get("msvs_system_include_dirs", []) + if includes: # Don't insert a msvs_system_include_dirs key if not needed. + # This must check the unexpanded includes list: + new_includes = [i for i in includes if i not in all_system_includes] + config["msvs_system_include_dirs"] = new_includes + return expanded_system_includes + + +class MsvsSettings: + """A class that understands the gyp 'msvs_...' values (especially the + msvs_settings field). They largely correpond to the VS2008 IDE DOM. This + class helps map those settings to command line options.""" + + def __init__(self, spec, generator_flags): + self.spec = spec + self.vs_version = GetVSVersion(generator_flags) + + supported_fields = [ + ("msvs_configuration_attributes", dict), + ("msvs_settings", dict), + ("msvs_system_include_dirs", list), + ("msvs_disabled_warnings", list), + ("msvs_precompiled_header", str), + ("msvs_precompiled_source", str), + ("msvs_configuration_platform", str), + ("msvs_target_platform", str), + ] + configs = spec["configurations"] + for field, default in supported_fields: + setattr(self, field, {}) + for configname, config in configs.items(): + getattr(self, field)[configname] = config.get(field, default()) + + self.msvs_cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."]) + + unsupported_fields = [ + "msvs_prebuild", + "msvs_postbuild", + ] + unsupported = [] + for field in unsupported_fields: + for config in configs.values(): + if field in config: + unsupported += [ + "{} not supported (target {}).".format( + field, spec["target_name"] + ) + ] + if unsupported: + raise Exception("\n".join(unsupported)) + + def GetExtension(self): + """Returns the extension for the target, with no leading dot. + + Uses 'product_extension' if specified, otherwise uses MSVS defaults based on + the target type. 
+ """ + ext = self.spec.get("product_extension", None) + if ext: + return ext + return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec["type"], "") + + def GetVSMacroEnv(self, base_to_build=None, config=None): + """Get a dict of variables mapping internal VS macro names to their gyp + equivalents.""" + target_arch = self.GetArch(config) + if target_arch == "x86": + target_platform = "Win32" + else: + target_platform = target_arch + target_name = self.spec.get("product_prefix", "") + self.spec.get( + "product_name", self.spec["target_name"] + ) + target_dir = base_to_build + "\\" if base_to_build else "" + target_ext = "." + self.GetExtension() + target_file_name = target_name + target_ext + + replacements = { + "$(InputName)": "${root}", + "$(InputPath)": "${source}", + "$(IntDir)": "$!INTERMEDIATE_DIR", + "$(OutDir)\\": target_dir, + "$(PlatformName)": target_platform, + "$(ProjectDir)\\": "", + "$(ProjectName)": self.spec["target_name"], + "$(TargetDir)\\": target_dir, + "$(TargetExt)": target_ext, + "$(TargetFileName)": target_file_name, + "$(TargetName)": target_name, + "$(TargetPath)": os.path.join(target_dir, target_file_name), + } + replacements.update(GetGlobalVSMacroEnv(self.vs_version)) + return replacements + + def ConvertVSMacros(self, s, base_to_build=None, config=None): + """Convert from VS macro names to something equivalent.""" + env = self.GetVSMacroEnv(base_to_build, config=config) + return ExpandMacros(s, env) + + def AdjustLibraries(self, libraries): + """Strip -l from library if it's specified with that.""" + libs = [lib[2:] if lib.startswith("-l") else lib for lib in libraries] + return [ + lib + ".lib" + if not lib.lower().endswith(".lib") and not lib.lower().endswith(".obj") + else lib + for lib in libs + ] + + def _GetAndMunge(self, field, path, default, prefix, append, map): + """Retrieve a value from |field| at |path| or return |default|. If + |append| is specified, and the item is found, it will be appended to that + object instead of returned. If |map| is specified, results will be + remapped through |map| before being returned or appended.""" + result = _GenericRetrieve(field, default, path) + result = _DoRemapping(result, map) + result = _AddPrefix(result, prefix) + return _AppendOrReturn(append, result) + + class _GetWrapper: + def __init__(self, parent, field, base_path, append=None): + self.parent = parent + self.field = field + self.base_path = [base_path] + self.append = append + + def __call__(self, name, map=None, prefix="", default=None): + return self.parent._GetAndMunge( + self.field, + self.base_path + [name], + default=default, + prefix=prefix, + append=self.append, + map=map, + ) + + def GetArch(self, config): + """Get architecture based on msvs_configuration_platform and + msvs_target_platform. Returns either 'x86' or 'x64'.""" + configuration_platform = self.msvs_configuration_platform.get(config, "") + platform = self.msvs_target_platform.get(config, "") + if not platform: # If no specific override, use the configuration's. + platform = configuration_platform + # Map from platform to architecture. + return {"Win32": "x86", "x64": "x64", "ARM64": "arm64"}.get(platform, "x86") + + def _TargetConfig(self, config): + """Returns the target-specific configuration.""" + # There's two levels of architecture/platform specification in VS. 
The + # first level is globally for the configuration (this is what we consider + # "the" config at the gyp level, which will be something like 'Debug' or + # 'Release'), VS2015 and later only use this level + if int(self.vs_version.short_name) >= 2015: + return config + # and a second target-specific configuration, which is an + # override for the global one. |config| is remapped here to take into + # account the local target-specific overrides to the global configuration. + arch = self.GetArch(config) + if arch == "x64" and not config.endswith("_x64"): + config += "_x64" + if arch == "x86" and config.endswith("_x64"): + config = config.rsplit("_", 1)[0] + return config + + def _Setting(self, path, config, default=None, prefix="", append=None, map=None): + """_GetAndMunge for msvs_settings.""" + return self._GetAndMunge( + self.msvs_settings[config], path, default, prefix, append, map + ) + + def _ConfigAttrib( + self, path, config, default=None, prefix="", append=None, map=None + ): + """_GetAndMunge for msvs_configuration_attributes.""" + return self._GetAndMunge( + self.msvs_configuration_attributes[config], + path, + default, + prefix, + append, + map, + ) + + def AdjustIncludeDirs(self, include_dirs, config): + """Updates include_dirs to expand VS specific paths, and adds the system + include dirs used for platform SDK and similar.""" + config = self._TargetConfig(config) + includes = include_dirs + self.msvs_system_include_dirs[config] + includes.extend( + self._Setting( + ("VCCLCompilerTool", "AdditionalIncludeDirectories"), config, default=[] + ) + ) + return [self.ConvertVSMacros(p, config=config) for p in includes] + + def AdjustMidlIncludeDirs(self, midl_include_dirs, config): + """Updates midl_include_dirs to expand VS specific paths, and adds the + system include dirs used for platform SDK and similar.""" + config = self._TargetConfig(config) + includes = midl_include_dirs + self.msvs_system_include_dirs[config] + includes.extend( + self._Setting( + ("VCMIDLTool", "AdditionalIncludeDirectories"), config, default=[] + ) + ) + return [self.ConvertVSMacros(p, config=config) for p in includes] + + def GetComputedDefines(self, config): + """Returns the set of defines that are injected to the defines list based + on other VS settings.""" + config = self._TargetConfig(config) + defines = [] + if self._ConfigAttrib(["CharacterSet"], config) == "1": + defines.extend(("_UNICODE", "UNICODE")) + if self._ConfigAttrib(["CharacterSet"], config) == "2": + defines.append("_MBCS") + defines.extend( + self._Setting( + ("VCCLCompilerTool", "PreprocessorDefinitions"), config, default=[] + ) + ) + return defines + + def GetCompilerPdbName(self, config, expand_special): + """Get the pdb file name that should be used for compiler invocations, or + None if there's no explicit name specified.""" + config = self._TargetConfig(config) + pdbname = self._Setting(("VCCLCompilerTool", "ProgramDataBaseFileName"), config) + if pdbname: + pdbname = expand_special(self.ConvertVSMacros(pdbname)) + return pdbname + + def GetMapFileName(self, config, expand_special): + """Gets the explicitly overridden map file name for a target or returns None + if it's not set.""" + config = self._TargetConfig(config) + map_file = self._Setting(("VCLinkerTool", "MapFileName"), config) + if map_file: + map_file = expand_special(self.ConvertVSMacros(map_file, config=config)) + return map_file + + def GetOutputName(self, config, expand_special): + """Gets the explicitly overridden output name for a target or returns None + if it's 
not overridden.""" + config = self._TargetConfig(config) + type = self.spec["type"] + root = "VCLibrarianTool" if type == "static_library" else "VCLinkerTool" + # TODO(scottmg): Handle OutputDirectory without OutputFile. + output_file = self._Setting((root, "OutputFile"), config) + if output_file: + output_file = expand_special( + self.ConvertVSMacros(output_file, config=config) + ) + return output_file + + def GetPDBName(self, config, expand_special, default): + """Gets the explicitly overridden pdb name for a target or returns + default if it's not overridden, or if no pdb will be generated.""" + config = self._TargetConfig(config) + output_file = self._Setting(("VCLinkerTool", "ProgramDatabaseFile"), config) + generate_debug_info = self._Setting( + ("VCLinkerTool", "GenerateDebugInformation"), config + ) + if generate_debug_info == "true": + if output_file: + return expand_special(self.ConvertVSMacros(output_file, config=config)) + else: + return default + else: + return None + + def GetNoImportLibrary(self, config): + """If NoImportLibrary: true, ninja will not expect the output to include + an import library.""" + config = self._TargetConfig(config) + noimplib = self._Setting(("NoImportLibrary",), config) + return noimplib == "true" + + def GetAsmflags(self, config): + """Returns the flags that need to be added to ml invocations.""" + config = self._TargetConfig(config) + asmflags = [] + safeseh = self._Setting(("MASM", "UseSafeExceptionHandlers"), config) + if safeseh == "true": + asmflags.append("/safeseh") + return asmflags + + def GetCflags(self, config): + """Returns the flags that need to be added to .c and .cc compilations.""" + config = self._TargetConfig(config) + cflags = [] + cflags.extend(["/wd" + w for w in self.msvs_disabled_warnings[config]]) + cl = self._GetWrapper( + self, self.msvs_settings[config], "VCCLCompilerTool", append=cflags + ) + cl( + "Optimization", + map={"0": "d", "1": "1", "2": "2", "3": "x"}, + prefix="/O", + default="2", + ) + cl("InlineFunctionExpansion", prefix="/Ob") + cl("DisableSpecificWarnings", prefix="/wd") + cl("StringPooling", map={"true": "/GF"}) + cl("EnableFiberSafeOptimizations", map={"true": "/GT"}) + cl("OmitFramePointers", map={"false": "-", "true": ""}, prefix="/Oy") + cl("EnableIntrinsicFunctions", map={"false": "-", "true": ""}, prefix="/Oi") + cl("FavorSizeOrSpeed", map={"1": "t", "2": "s"}, prefix="/O") + cl( + "FloatingPointModel", + map={"0": "precise", "1": "strict", "2": "fast"}, + prefix="/fp:", + default="0", + ) + cl("CompileAsManaged", map={"false": "", "true": "/clr"}) + cl("WholeProgramOptimization", map={"true": "/GL"}) + cl("WarningLevel", prefix="/W") + cl("WarnAsError", map={"true": "/WX"}) + cl( + "CallingConvention", + map={"0": "d", "1": "r", "2": "z", "3": "v"}, + prefix="/G", + ) + cl("DebugInformationFormat", map={"1": "7", "3": "i", "4": "I"}, prefix="/Z") + cl("RuntimeTypeInfo", map={"true": "/GR", "false": "/GR-"}) + cl("EnableFunctionLevelLinking", map={"true": "/Gy", "false": "/Gy-"}) + cl("MinimalRebuild", map={"true": "/Gm"}) + cl("BufferSecurityCheck", map={"true": "/GS", "false": "/GS-"}) + cl("BasicRuntimeChecks", map={"1": "s", "2": "u", "3": "1"}, prefix="/RTC") + cl( + "RuntimeLibrary", + map={"0": "T", "1": "Td", "2": "D", "3": "Dd"}, + prefix="/M", + ) + cl("ExceptionHandling", map={"1": "sc", "2": "a"}, prefix="/EH") + cl("DefaultCharIsUnsigned", map={"true": "/J"}) + cl( + "TreatWChar_tAsBuiltInType", + map={"false": "-", "true": ""}, + prefix="/Zc:wchar_t", + ) + cl("EnablePREfast", 
map={"true": "/analyze"}) + cl("AdditionalOptions", prefix="") + cl( + "EnableEnhancedInstructionSet", + map={"1": "SSE", "2": "SSE2", "3": "AVX", "4": "IA32", "5": "AVX2"}, + prefix="/arch:", + ) + cflags.extend( + [ + "/FI" + f + for f in self._Setting( + ("VCCLCompilerTool", "ForcedIncludeFiles"), config, default=[] + ) + ] + ) + if float(self.vs_version.project_version) >= 12.0: + # New flag introduced in VS2013 (project version 12.0) Forces writes to + # the program database (PDB) to be serialized through MSPDBSRV.EXE. + # https://msdn.microsoft.com/en-us/library/dn502518.aspx + cflags.append("/FS") + # ninja handles parallelism by itself, don't have the compiler do it too. + cflags = [x for x in cflags if not x.startswith("/MP")] + return cflags + + def _GetPchFlags(self, config, extension): + """Get the flags to be added to the cflags for precompiled header support.""" + config = self._TargetConfig(config) + # The PCH is only built once by a particular source file. Usage of PCH must + # only be for the same language (i.e. C vs. C++), so only include the pch + # flags when the language matches. + if self.msvs_precompiled_header[config]: + source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1] + if _LanguageMatchesForPch(source_ext, extension): + pch = self.msvs_precompiled_header[config] + pchbase = os.path.split(pch)[1] + return ["/Yu" + pch, "/FI" + pch, "/Fp${pchprefix}." + pchbase + ".pch"] + return [] + + def GetCflagsC(self, config): + """Returns the flags that need to be added to .c compilations.""" + config = self._TargetConfig(config) + return self._GetPchFlags(config, ".c") + + def GetCflagsCC(self, config): + """Returns the flags that need to be added to .cc compilations.""" + config = self._TargetConfig(config) + return ["/TP"] + self._GetPchFlags(config, ".cc") + + def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path): + """Get and normalize the list of paths in AdditionalLibraryDirectories + setting.""" + config = self._TargetConfig(config) + libpaths = self._Setting( + (root, "AdditionalLibraryDirectories"), config, default=[] + ) + libpaths = [ + os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(p, config=config))) + for p in libpaths + ] + return ['/LIBPATH:"' + p + '"' for p in libpaths] + + def GetLibFlags(self, config, gyp_to_build_path): + """Returns the flags that need to be added to lib commands.""" + config = self._TargetConfig(config) + libflags = [] + lib = self._GetWrapper( + self, self.msvs_settings[config], "VCLibrarianTool", append=libflags + ) + libflags.extend( + self._GetAdditionalLibraryDirectories( + "VCLibrarianTool", config, gyp_to_build_path + ) + ) + lib("LinkTimeCodeGeneration", map={"true": "/LTCG"}) + lib( + "TargetMachine", + map={"1": "X86", "17": "X64", "3": "ARM"}, + prefix="/MACHINE:", + ) + lib("AdditionalOptions") + return libflags + + def GetDefFile(self, gyp_to_build_path): + """Returns the .def file from sources, if any. Otherwise returns None.""" + spec = self.spec + if spec["type"] in ("shared_library", "loadable_module", "executable"): + def_files = [ + s for s in spec.get("sources", []) if s.lower().endswith(".def") + ] + if len(def_files) == 1: + return gyp_to_build_path(def_files[0]) + elif len(def_files) > 1: + raise Exception("Multiple .def files") + return None + + def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path): + """.def files get implicitly converted to a ModuleDefinitionFile for the + linker in the VS generator. 
Emulate that behaviour here."""
+        def_file = self.GetDefFile(gyp_to_build_path)
+        if def_file:
+            ldflags.append('/DEF:"%s"' % def_file)
+
+    def GetPGDName(self, config, expand_special):
+        """Gets the explicitly overridden pgd name for a target or returns None
+        if it's not overridden."""
+        config = self._TargetConfig(config)
+        output_file = self._Setting(("VCLinkerTool", "ProfileGuidedDatabase"), config)
+        if output_file:
+            output_file = expand_special(
+                self.ConvertVSMacros(output_file, config=config)
+            )
+        return output_file
+
+    def GetLdflags(
+        self,
+        config,
+        gyp_to_build_path,
+        expand_special,
+        manifest_base_name,
+        output_name,
+        is_executable,
+        build_dir,
+    ):
+        """Returns the flags that need to be added to link commands, and the
+        manifest files."""
+        config = self._TargetConfig(config)
+        ldflags = []
+        ld = self._GetWrapper(
+            self, self.msvs_settings[config], "VCLinkerTool", append=ldflags
+        )
+        self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
+        ld("GenerateDebugInformation", map={"true": "/DEBUG"})
+        # TODO: These 'map' values come from the machineTypeOption enum, which
+        # does not yet have an official value for ARM64 in VS2017. The ARM64
+        # value needs to be verified once machineTypeOption is updated.
+        ld(
+            "TargetMachine",
+            map={"1": "X86", "17": "X64", "3": "ARM", "18": "ARM64"},
+            prefix="/MACHINE:",
+        )
+        ldflags.extend(
+            self._GetAdditionalLibraryDirectories(
+                "VCLinkerTool", config, gyp_to_build_path
+            )
+        )
+        ld("DelayLoadDLLs", prefix="/DELAYLOAD:")
+        ld("TreatLinkerWarningAsErrors", prefix="/WX", map={"true": "", "false": ":NO"})
+        out = self.GetOutputName(config, expand_special)
+        if out:
+            ldflags.append("/OUT:" + out)
+        pdb = self.GetPDBName(config, expand_special, output_name + ".pdb")
+        if pdb:
+            ldflags.append("/PDB:" + pdb)
+        pgd = self.GetPGDName(config, expand_special)
+        if pgd:
+            ldflags.append("/PGD:" + pgd)
+        map_file = self.GetMapFileName(config, expand_special)
+        ld("GenerateMapFile", map={"true": "/MAP:" + map_file if map_file else "/MAP"})
+        ld("MapExports", map={"true": "/MAPINFO:EXPORTS"})
+        ld("AdditionalOptions", prefix="")
+
+        minimum_required_version = self._Setting(
+            ("VCLinkerTool", "MinimumRequiredVersion"), config, default=""
+        )
+        if minimum_required_version:
+            minimum_required_version = "," + minimum_required_version
+        ld(
+            "SubSystem",
+            map={
+                "1": "CONSOLE%s" % minimum_required_version,
+                "2": "WINDOWS%s" % minimum_required_version,
+            },
+            prefix="/SUBSYSTEM:",
+        )
+
+        stack_reserve_size = self._Setting(
+            ("VCLinkerTool", "StackReserveSize"), config, default=""
+        )
+        if stack_reserve_size:
+            stack_commit_size = self._Setting(
+                ("VCLinkerTool", "StackCommitSize"), config, default=""
+            )
+            if stack_commit_size:
+                stack_commit_size = "," + stack_commit_size
+            ldflags.append(f"/STACK:{stack_reserve_size}{stack_commit_size}")
+
+        ld("TerminalServerAware", map={"1": ":NO", "2": ""}, prefix="/TSAWARE")
+        ld("LinkIncremental", map={"1": ":NO", "2": ""}, prefix="/INCREMENTAL")
+        ld("BaseAddress", prefix="/BASE:")
+        ld("FixedBaseAddress", map={"1": ":NO", "2": ""}, prefix="/FIXED")
+        ld("RandomizedBaseAddress", map={"1": ":NO", "2": ""}, prefix="/DYNAMICBASE")
+        ld("DataExecutionPrevention", map={"1": ":NO", "2": ""}, prefix="/NXCOMPAT")
+        ld("OptimizeReferences", map={"1": "NOREF", "2": "REF"}, prefix="/OPT:")
+        ld("ForceSymbolReferences", prefix="/INCLUDE:")
+        ld("EnableCOMDATFolding", map={"1": "NOICF", "2": "ICF"}, prefix="/OPT:")
+        ld(
+            "LinkTimeCodeGeneration",
+            map={"1": "", "2": ":PGINSTRUMENT", "3": ":PGOPTIMIZE", "4":
":PGUPDATE"}, + prefix="/LTCG", + ) + ld("IgnoreDefaultLibraryNames", prefix="/NODEFAULTLIB:") + ld("ResourceOnlyDLL", map={"true": "/NOENTRY"}) + ld("EntryPointSymbol", prefix="/ENTRY:") + ld("Profile", map={"true": "/PROFILE"}) + ld("LargeAddressAware", map={"1": ":NO", "2": ""}, prefix="/LARGEADDRESSAWARE") + # TODO(scottmg): This should sort of be somewhere else (not really a flag). + ld("AdditionalDependencies", prefix="") + + if self.GetArch(config) == "x86": + safeseh_default = "true" + else: + safeseh_default = None + ld( + "ImageHasSafeExceptionHandlers", + map={"false": ":NO", "true": ""}, + prefix="/SAFESEH", + default=safeseh_default, + ) + + # If the base address is not specifically controlled, DYNAMICBASE should + # be on by default. + if not any("DYNAMICBASE" in flag or flag == "/FIXED" for flag in ldflags): + ldflags.append("/DYNAMICBASE") + + # If the NXCOMPAT flag has not been specified, default to on. Despite the + # documentation that says this only defaults to on when the subsystem is + # Vista or greater (which applies to the linker), the IDE defaults it on + # unless it's explicitly off. + if not any("NXCOMPAT" in flag for flag in ldflags): + ldflags.append("/NXCOMPAT") + + have_def_file = any(flag.startswith("/DEF:") for flag in ldflags) + ( + manifest_flags, + intermediate_manifest, + manifest_files, + ) = self._GetLdManifestFlags( + config, + manifest_base_name, + gyp_to_build_path, + is_executable and not have_def_file, + build_dir, + ) + ldflags.extend(manifest_flags) + return ldflags, intermediate_manifest, manifest_files + + def _GetLdManifestFlags( + self, config, name, gyp_to_build_path, allow_isolation, build_dir + ): + """Returns a 3-tuple: + - the set of flags that need to be added to the link to generate + a default manifest + - the intermediate manifest that the linker will generate that should be + used to assert it doesn't add anything to the merged one. + - the list of all the manifest files to be merged by the manifest tool and + included into the link.""" + generate_manifest = self._Setting( + ("VCLinkerTool", "GenerateManifest"), config, default="true" + ) + if generate_manifest != "true": + # This means not only that the linker should not generate the intermediate + # manifest but also that the manifest tool should do nothing even when + # additional manifests are specified. + return ["/MANIFEST:NO"], [], [] + + output_name = name + ".intermediate.manifest" + flags = [ + "/MANIFEST", + "/ManifestFile:" + output_name, + ] + + # Instead of using the MANIFESTUAC flags, we generate a .manifest to + # include into the list of manifests. This allows us to avoid the need to + # do two passes during linking. The /MANIFEST flag and /ManifestFile are + # still used, and the intermediate manifest is used to assert that the + # final manifest we get from merging all the additional manifest files + # (plus the one we generate here) isn't modified by merging the + # intermediate into it. + + # Always NO, because we generate a manifest file that has what we want. 
+ flags.append("/MANIFESTUAC:NO") + + config = self._TargetConfig(config) + enable_uac = self._Setting( + ("VCLinkerTool", "EnableUAC"), config, default="true" + ) + manifest_files = [] + generated_manifest_outer = ( + "" + "" + "%s" + ) + if enable_uac == "true": + execution_level = self._Setting( + ("VCLinkerTool", "UACExecutionLevel"), config, default="0" + ) + execution_level_map = { + "0": "asInvoker", + "1": "highestAvailable", + "2": "requireAdministrator", + } + + ui_access = self._Setting( + ("VCLinkerTool", "UACUIAccess"), config, default="false" + ) + + inner = """ + + + + + + +""".format( + execution_level_map[execution_level], + ui_access, + ) + else: + inner = "" + + generated_manifest_contents = generated_manifest_outer % inner + generated_name = name + ".generated.manifest" + # Need to join with the build_dir here as we're writing it during + # generation time, but we return the un-joined version because the build + # will occur in that directory. We only write the file if the contents + # have changed so that simply regenerating the project files doesn't + # cause a relink. + build_dir_generated_name = os.path.join(build_dir, generated_name) + gyp.common.EnsureDirExists(build_dir_generated_name) + f = gyp.common.WriteOnDiff(build_dir_generated_name) + f.write(generated_manifest_contents) + f.close() + manifest_files = [generated_name] + + if allow_isolation: + flags.append("/ALLOWISOLATION") + + manifest_files += self._GetAdditionalManifestFiles(config, gyp_to_build_path) + return flags, output_name, manifest_files + + def _GetAdditionalManifestFiles(self, config, gyp_to_build_path): + """Gets additional manifest files that are added to the default one + generated by the linker.""" + files = self._Setting( + ("VCManifestTool", "AdditionalManifestFiles"), config, default=[] + ) + if isinstance(files, str): + files = files.split(";") + return [ + os.path.normpath(gyp_to_build_path(self.ConvertVSMacros(f, config=config))) + for f in files + ] + + def IsUseLibraryDependencyInputs(self, config): + """Returns whether the target should be linked via Use Library Dependency + Inputs (using component .objs of a given .lib).""" + config = self._TargetConfig(config) + uldi = self._Setting(("VCLinkerTool", "UseLibraryDependencyInputs"), config) + return uldi == "true" + + def IsEmbedManifest(self, config): + """Returns whether manifest should be linked into binary.""" + config = self._TargetConfig(config) + embed = self._Setting( + ("VCManifestTool", "EmbedManifest"), config, default="true" + ) + return embed == "true" + + def IsLinkIncremental(self, config): + """Returns whether the target should be linked incrementally.""" + config = self._TargetConfig(config) + link_inc = self._Setting(("VCLinkerTool", "LinkIncremental"), config) + return link_inc != "1" + + def GetRcflags(self, config, gyp_to_ninja_path): + """Returns the flags that need to be added to invocations of the resource + compiler.""" + config = self._TargetConfig(config) + rcflags = [] + rc = self._GetWrapper( + self, self.msvs_settings[config], "VCResourceCompilerTool", append=rcflags + ) + rc("AdditionalIncludeDirectories", map=gyp_to_ninja_path, prefix="/I") + rcflags.append("/I" + gyp_to_ninja_path(".")) + rc("PreprocessorDefinitions", prefix="/d") + # /l arg must be in hex without leading '0x' + rc("Culture", prefix="/l", map=lambda x: hex(int(x))[2:]) + return rcflags + + def BuildCygwinBashCommandLine(self, args, path_to_base): + """Build a command line that runs args via cygwin bash. 
We assume that all + incoming paths are in Windows normpath'd form, so they need to be + converted to posix style for the part of the command line that's passed to + bash. We also have to do some Visual Studio macro emulation here because + various rules use magic VS names for things. Also note that rules that + contain ninja variables cannot be fixed here (for example ${source}), so + the outer generator needs to make sure that the paths that are written out + are in posix style, if the command line will be used here.""" + cygwin_dir = os.path.normpath( + os.path.join(path_to_base, self.msvs_cygwin_dirs[0]) + ) + cd = ("cd %s" % path_to_base).replace("\\", "/") + args = [a.replace("\\", "/").replace('"', '\\"') for a in args] + args = ["'%s'" % a.replace("'", "'\\''") for a in args] + bash_cmd = " ".join(args) + cmd = ( + 'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir + + f'bash -c "{cd} ; {bash_cmd}"' + ) + return cmd + + RuleShellFlags = collections.namedtuple("RuleShellFlags", ["cygwin", "quote"]) + + def GetRuleShellFlags(self, rule): + """Return RuleShellFlags about how the given rule should be run. This + includes whether it should run under cygwin (msvs_cygwin_shell), and + whether the commands should be quoted (msvs_quote_cmd).""" + # If the variable is unset, or set to 1 we use cygwin + cygwin = int(rule.get("msvs_cygwin_shell", + self.spec.get("msvs_cygwin_shell", 1))) != 0 + # Default to quoting. There's only a few special instances where the + # target command uses non-standard command line parsing and handle quotes + # and quote escaping differently. + quote_cmd = int(rule.get("msvs_quote_cmd", 1)) + assert quote_cmd != 0 or cygwin != 1, \ + "msvs_quote_cmd=0 only applicable for msvs_cygwin_shell=0" + return MsvsSettings.RuleShellFlags(cygwin, quote_cmd) + + def _HasExplicitRuleForExtension(self, spec, extension): + """Determine if there's an explicit rule for a particular extension.""" + for rule in spec.get("rules", []): + if rule["extension"] == extension: + return True + return False + + def _HasExplicitIdlActions(self, spec): + """Determine if an action should not run midl for .idl files.""" + return any( + [action.get("explicit_idl_action", 0) for action in spec.get("actions", [])] + ) + + def HasExplicitIdlRulesOrActions(self, spec): + """Determine if there's an explicit rule or action for idl files. When + there isn't we need to generate implicit rules to build MIDL .idl files.""" + return self._HasExplicitRuleForExtension( + spec, "idl" + ) or self._HasExplicitIdlActions(spec) + + def HasExplicitAsmRules(self, spec): + """Determine if there's an explicit rule for asm files. When there isn't we + need to generate implicit rules to assemble .asm files.""" + return self._HasExplicitRuleForExtension(spec, "asm") + + def GetIdlBuildData(self, source, config): + """Determine the implicit outputs for an idl file. 
Returns output + directory, outputs, and variables and flags that are required.""" + config = self._TargetConfig(config) + midl_get = self._GetWrapper(self, self.msvs_settings[config], "VCMIDLTool") + + def midl(name, default=None): + return self.ConvertVSMacros(midl_get(name, default=default), config=config) + + tlb = midl("TypeLibraryName", default="${root}.tlb") + header = midl("HeaderFileName", default="${root}.h") + dlldata = midl("DLLDataFileName", default="dlldata.c") + iid = midl("InterfaceIdentifierFileName", default="${root}_i.c") + proxy = midl("ProxyFileName", default="${root}_p.c") + # Note that .tlb is not included in the outputs as it is not always + # generated depending on the content of the input idl file. + outdir = midl("OutputDirectory", default="") + output = [header, dlldata, iid, proxy] + variables = [ + ("tlb", tlb), + ("h", header), + ("dlldata", dlldata), + ("iid", iid), + ("proxy", proxy), + ] + # TODO(scottmg): Are there configuration settings to set these flags? + target_platform = self.GetArch(config) + if target_platform == "x86": + target_platform = "win32" + flags = ["/char", "signed", "/env", target_platform, "/Oicf"] + return outdir, output, variables, flags + + +def _LanguageMatchesForPch(source_ext, pch_source_ext): + c_exts = (".c",) + cc_exts = (".cc", ".cxx", ".cpp") + return (source_ext in c_exts and pch_source_ext in c_exts) or ( + source_ext in cc_exts and pch_source_ext in cc_exts + ) + + +class PrecompiledHeader: + """Helper to generate dependencies and build rules to handle generation of + precompiled headers. Interface matches the GCH handler in xcode_emulation.py. + """ + + def __init__( + self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext + ): + self.settings = settings + self.config = config + pch_source = self.settings.msvs_precompiled_source[self.config] + self.pch_source = gyp_to_build_path(pch_source) + filename, _ = os.path.splitext(pch_source) + self.output_obj = gyp_to_unique_output(filename + obj_ext).lower() + + def _PchHeader(self): + """Get the header that will appear in an #include line for all source + files.""" + return self.settings.msvs_precompiled_header[self.config] + + def GetObjDependencies(self, sources, objs, arch): + """Given a list of sources files and the corresponding object files, + returns a list of the pch files that should be depended upon. 
The + additional wrapping in the return value is for interface compatibility + with make.py on Mac, and xcode_emulation.py.""" + assert arch is None + if not self._PchHeader(): + return [] + pch_ext = os.path.splitext(self.pch_source)[1] + for source in sources: + if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext): + return [(None, None, self.output_obj)] + return [] + + def GetPchBuildCommands(self, arch): + """Not used on Windows as there are no additional build steps required + (instead, existing steps are modified in GetFlagsModifications below).""" + return [] + + def GetFlagsModifications( + self, input, output, implicit, command, cflags_c, cflags_cc, expand_special + ): + """Get the modified cflags and implicit dependencies that should be used + for the pch compilation step.""" + if input == self.pch_source: + pch_output = ["/Yc" + self._PchHeader()] + if command == "cxx": + return ( + [("cflags_cc", map(expand_special, cflags_cc + pch_output))], + self.output_obj, + [], + ) + elif command == "cc": + return ( + [("cflags_c", map(expand_special, cflags_c + pch_output))], + self.output_obj, + [], + ) + return [], output, implicit + + +vs_version = None + + +def GetVSVersion(generator_flags): + global vs_version + if not vs_version: + vs_version = gyp.MSVSVersion.SelectVisualStudioVersion( + generator_flags.get("msvs_version", "auto"), allow_fallback=False + ) + return vs_version + + +def _GetVsvarsSetupArgs(generator_flags, arch): + vs = GetVSVersion(generator_flags) + return vs.SetupScript() + + +def ExpandMacros(string, expansions): + """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv + for the canonical way to retrieve a suitable dict.""" + if "$" in string: + for old, new in expansions.items(): + assert "$(" not in new, new + string = string.replace(old, new) + return string + + +def _ExtractImportantEnvironment(output_of_set): + """Extracts environment variables required for the toolchain to run from + a textual dump output by the cmd.exe 'set' command.""" + envvars_to_save = ( + "goma_.*", # TODO(scottmg): This is ugly, but needed for goma. + "include", + "lib", + "libpath", + "path", + "pathext", + "systemroot", + "temp", + "tmp", + ) + env = {} + # This occasionally happens and leads to misleading SYSTEMROOT error messages + # if not caught here. + if output_of_set.count("=") == 0: + raise Exception("Invalid output_of_set. Value is:\n%s" % output_of_set) + for line in output_of_set.splitlines(): + for envvar in envvars_to_save: + if re.match(envvar + "=", line.lower()): + var, setting = line.split("=", 1) + if envvar == "path": + # Our own rules (for running gyp-win-tool) and other actions in + # Chromium rely on python being in the path. Add the path to this + # python here so that if it's not in the path when ninja is run + # later, python will still be found. + setting = os.path.dirname(sys.executable) + os.pathsep + setting + env[var.upper()] = setting + break + for required in ("SYSTEMROOT", "TEMP", "TMP"): + if required not in env: + raise Exception( + 'Environment variable "%s" ' + "required to be set to valid path" % required + ) + return env + + +def _FormatAsEnvironmentBlock(envvar_dict): + """Format as an 'environment block' directly suitable for CreateProcess. + Briefly this is a list of key=value\0, terminated by an additional \0. 
See + CreateProcess documentation for more details.""" + block = "" + nul = "\0" + for key, value in envvar_dict.items(): + block += key + "=" + value + nul + block += nul + return block + + +def _ExtractCLPath(output_of_where): + """Gets the path to cl.exe based on the output of calling the environment + setup batch file, followed by the equivalent of `where`.""" + # Take the first line, as that's the first found in the PATH. + for line in output_of_where.strip().splitlines(): + if line.startswith("LOC:"): + return line[len("LOC:") :].strip() + + +def GenerateEnvironmentFiles( + toplevel_build_dir, generator_flags, system_includes, open_out +): + """It's not sufficient to have the absolute path to the compiler, linker, + etc. on Windows, as those tools rely on .dlls being in the PATH. We also + need to support both x86 and x64 compilers within the same build (to support + msvs_target_platform hackery). Different architectures require a different + compiler binary, and different supporting environment variables (INCLUDE, + LIB, LIBPATH). So, we extract the environment here, wrap all invocations + of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which + sets up the environment, and then we do not prefix the compiler with + an absolute path, instead preferring something like "cl.exe" in the rule + which will then run whichever the environment setup has put in the path. + When the following procedure to generate environment files does not + meet your requirement (e.g. for custom toolchains), you can pass + "-G ninja_use_custom_environment_files" to the gyp to suppress file + generation and use custom environment files prepared by yourself.""" + archs = ("x86", "x64") + if generator_flags.get("ninja_use_custom_environment_files", 0): + cl_paths = {} + for arch in archs: + cl_paths[arch] = "cl.exe" + return cl_paths + vs = GetVSVersion(generator_flags) + cl_paths = {} + for arch in archs: + # Extract environment variables for subprocesses. + args = vs.SetupScript(arch) + args.extend(("&&", "set")) + popen = subprocess.Popen( + args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT + ) + variables = popen.communicate()[0].decode("utf-8") + if popen.returncode != 0: + raise Exception('"%s" failed with error %d' % (args, popen.returncode)) + env = _ExtractImportantEnvironment(variables) + + # Inject system includes from gyp files into INCLUDE. + if system_includes: + system_includes = system_includes | OrderedSet( + env.get("INCLUDE", "").split(";") + ) + env["INCLUDE"] = ";".join(system_includes) + + env_block = _FormatAsEnvironmentBlock(env) + f = open_out(os.path.join(toplevel_build_dir, "environment." + arch), "w") + f.write(env_block) + f.close() + + # Find cl.exe location for this architecture. + args = vs.SetupScript(arch) + args.extend( + ("&&", "for", "%i", "in", "(cl.exe)", "do", "@echo", "LOC:%~$PATH:i") + ) + popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE) + output = popen.communicate()[0].decode("utf-8") + cl_paths[arch] = _ExtractCLPath(output) + return cl_paths + + +def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja): + """Emulate behavior of msvs_error_on_missing_sources present in the msvs + generator: Check that all regular source files, i.e. not created at run time, + exist on disk. 
Missing files cause needless recompilation when building via + VS, and we want this check to match for people/bots that build using ninja, + so they're not surprised when the VS build fails.""" + if int(generator_flags.get("msvs_error_on_missing_sources", 0)): + no_specials = filter(lambda x: "$" not in x, sources) + relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials] + missing = [x for x in relative if not os.path.exists(x)] + if missing: + # They'll look like out\Release\..\..\stuff\things.cc, so normalize the + # path for a slightly less crazy looking output. + cleaned_up = [os.path.normpath(x) for x in missing] + raise Exception("Missing input files:\n%s" % "\n".join(cleaned_up)) + + +# Sets some values in default_variables, which are required for many +# generators, run on Windows. +def CalculateCommonVariables(default_variables, params): + generator_flags = params.get("generator_flags", {}) + + # Set a variable so conditions can be based on msvs_version. + msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags) + default_variables["MSVS_VERSION"] = msvs_version.ShortName() + + # To determine processor word size on Windows, in addition to checking + # PROCESSOR_ARCHITECTURE (which reflects the word size of the current + # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which + # contains the actual word size of the system when running thru WOW64). + if "64" in os.environ.get("PROCESSOR_ARCHITECTURE", "") or "64" in os.environ.get( + "PROCESSOR_ARCHITEW6432", "" + ): + default_variables["MSVS_OS_BITS"] = 64 + else: + default_variables["MSVS_OS_BITS"] = 32 diff --git a/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py b/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py new file mode 100644 index 0000000..0e3e86c --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py @@ -0,0 +1,174 @@ +# This file comes from +# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py +# Do not edit! Edit the upstream one instead. + +"""Python module for generating .ninja files. + +Note that this is emphatically not a required piece of Ninja; it's +just a helpful utility for build-file-generation systems that already +use Python. +""" + +import textwrap + + +def escape_path(word): + return word.replace("$ ", "$$ ").replace(" ", "$ ").replace(":", "$:") + + +class Writer: + def __init__(self, output, width=78): + self.output = output + self.width = width + + def newline(self): + self.output.write("\n") + + def comment(self, text): + for line in textwrap.wrap(text, self.width - 2): + self.output.write("# " + line + "\n") + + def variable(self, key, value, indent=0): + if value is None: + return + if isinstance(value, list): + value = " ".join(filter(None, value)) # Filter out empty strings. 
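+        # e.g. variable("cflags", ["-O2", "", "-g"]) emits "cflags = -O2 -g".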
+ self._line(f"{key} = {value}", indent) + + def pool(self, name, depth): + self._line("pool %s" % name) + self.variable("depth", depth, indent=1) + + def rule( + self, + name, + command, + description=None, + depfile=None, + generator=False, + pool=None, + restat=False, + rspfile=None, + rspfile_content=None, + deps=None, + ): + self._line("rule %s" % name) + self.variable("command", command, indent=1) + if description: + self.variable("description", description, indent=1) + if depfile: + self.variable("depfile", depfile, indent=1) + if generator: + self.variable("generator", "1", indent=1) + if pool: + self.variable("pool", pool, indent=1) + if restat: + self.variable("restat", "1", indent=1) + if rspfile: + self.variable("rspfile", rspfile, indent=1) + if rspfile_content: + self.variable("rspfile_content", rspfile_content, indent=1) + if deps: + self.variable("deps", deps, indent=1) + + def build( + self, outputs, rule, inputs=None, implicit=None, order_only=None, variables=None + ): + outputs = self._as_list(outputs) + all_inputs = self._as_list(inputs)[:] + out_outputs = list(map(escape_path, outputs)) + all_inputs = list(map(escape_path, all_inputs)) + + if implicit: + implicit = map(escape_path, self._as_list(implicit)) + all_inputs.append("|") + all_inputs.extend(implicit) + if order_only: + order_only = map(escape_path, self._as_list(order_only)) + all_inputs.append("||") + all_inputs.extend(order_only) + + self._line( + "build {}: {}".format(" ".join(out_outputs), " ".join([rule] + all_inputs)) + ) + + if variables: + if isinstance(variables, dict): + iterator = iter(variables.items()) + else: + iterator = iter(variables) + + for key, val in iterator: + self.variable(key, val, indent=1) + + return outputs + + def include(self, path): + self._line("include %s" % path) + + def subninja(self, path): + self._line("subninja %s" % path) + + def default(self, paths): + self._line("default %s" % " ".join(self._as_list(paths))) + + def _count_dollars_before_index(self, s, i): + """Returns the number of '$' characters right in front of s[i].""" + dollar_count = 0 + dollar_index = i - 1 + while dollar_index > 0 and s[dollar_index] == "$": + dollar_count += 1 + dollar_index -= 1 + return dollar_count + + def _line(self, text, indent=0): + """Write 'text' word-wrapped at self.width characters.""" + leading_space = " " * indent + while len(leading_space) + len(text) > self.width: + # The text is too wide; wrap if possible. + + # Find the rightmost space that would obey our width constraint and + # that's not an escaped space. + available_space = self.width - len(leading_space) - len(" $") + space = available_space + while True: + space = text.rfind(" ", 0, space) + if space < 0 or self._count_dollars_before_index(text, space) % 2 == 0: + break + + if space < 0: + # No such space; just use the first unescaped space we can find. + space = available_space - 1 + while True: + space = text.find(" ", space + 1) + if ( + space < 0 + or self._count_dollars_before_index(text, space) % 2 == 0 + ): + break + if space < 0: + # Give up on breaking. + break + + self.output.write(leading_space + text[0:space] + " $\n") + text = text[space + 1 :] + + # Subsequent lines are continuations, so indent them. 
+                leading_space = "  " * (indent + 2)
+
+        self.output.write(leading_space + text + "\n")
+
+    def _as_list(self, input):
+        if input is None:
+            return []
+        if isinstance(input, list):
+            return input
+        return [input]
+
+
+def escape(string):
+    """Escape a string such that it can be embedded into a Ninja file without
+    further interpretation."""
+    assert "\n" not in string, "Ninja syntax does not allow newlines"
+    # We only have one special metacharacter: '$'.
+    return string.replace("$", "$$")
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py b/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
new file mode 100644
index 0000000..729cec0
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
@@ -0,0 +1,61 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A clone of the default copy.deepcopy that doesn't handle cyclic
+structures or complex types except for dicts and lists. This is
+because gyp copies structures so large that the small per-copy overhead
+ends up taking seconds in a project the size of Chromium."""
+
+
+class Error(Exception):
+    pass
+
+
+__all__ = ["Error", "deepcopy"]
+
+
+def deepcopy(x):
+    """Deep copy operation on gyp objects such as strings, ints, dicts
+    and lists. More than twice as fast as copy.deepcopy but much less
+    generic."""
+
+    try:
+        return _deepcopy_dispatch[type(x)](x)
+    except KeyError:
+        raise Error(
+            "Unsupported type %s for deepcopy. Use copy.deepcopy "
+            "or expand simple_copy support." % type(x)
+        )
+
+
+_deepcopy_dispatch = d = {}
+
+
+def _deepcopy_atomic(x):
+    return x
+
+
+types = bool, float, int, str, type, type(None)
+
+for x in types:
+    d[x] = _deepcopy_atomic
+
+
+def _deepcopy_list(x):
+    return [deepcopy(a) for a in x]
+
+
+d[list] = _deepcopy_list
+
+
+def _deepcopy_dict(x):
+    y = {}
+    for key, value in x.items():
+        y[deepcopy(key)] = deepcopy(value)
+    return y
+
+
+d[dict] = _deepcopy_dict
+
+del d
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
new file mode 100644
index 0000000..638eee4
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
@@ -0,0 +1,374 @@
+#!/usr/bin/env python3
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions for Windows builds.
+
+These functions are executed via gyp-win-tool when using the ninja generator.
+"""
+
+
+import os
+import re
+import shutil
+import subprocess
+import stat
+import string
+import sys
+
+BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+
+# A regex matching an argument corresponding to the output filename passed to
+# link.exe.
+_LINK_EXE_OUT_ARG = re.compile("/OUT:(?P<out>.+)$", re.IGNORECASE)
+
+
+def main(args):
+    executor = WinTool()
+    exit_code = executor.Dispatch(args)
+    if exit_code is not None:
+        sys.exit(exit_code)
+
+
+class WinTool:
+    """This class performs all the Windows tooling steps. The methods can either
+    be executed directly, or dispatched from an argument list."""
+
+    def _UseSeparateMspdbsrv(self, env, args):
+        """Allows using a unique instance of mspdbsrv.exe per linker instead of a
+        shared one."""
+        if len(args) < 1:
+            raise Exception("Not enough arguments")
+
+        if args[0] != "link.exe":
+            return
+
+        # Use the output filename passed to the linker to generate an endpoint name
+        # for mspdbsrv.exe.
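+        # e.g. "/OUT:chrome.dll" in process 1234 yields the endpoint
+        # "chromedll_1234" (the re.sub below strips non-word characters,
+        # including the dot).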
+ endpoint_name = None + for arg in args: + m = _LINK_EXE_OUT_ARG.match(arg) + if m: + endpoint_name = re.sub( + r"\W+", "", "%s_%d" % (m.group("out"), os.getpid()) + ) + break + + if endpoint_name is None: + return + + # Adds the appropriate environment variable. This will be read by link.exe + # to know which instance of mspdbsrv.exe it should connect to (if it's + # not set then the default endpoint is used). + env["_MSPDBSRV_ENDPOINT_"] = endpoint_name + + def Dispatch(self, args): + """Dispatches a string command to a method.""" + if len(args) < 1: + raise Exception("Not enough arguments") + + method = "Exec%s" % self._CommandifyName(args[0]) + return getattr(self, method)(*args[1:]) + + def _CommandifyName(self, name_string): + """Transforms a tool name like recursive-mirror to RecursiveMirror.""" + return name_string.title().replace("-", "") + + def _GetEnv(self, arch): + """Gets the saved environment from a file for a given architecture.""" + # The environment is saved as an "environment block" (see CreateProcess + # and msvs_emulation for details). We convert to a dict here. + # Drop last 2 NULs, one for list terminator, one for trailing vs. separator. + pairs = open(arch).read()[:-2].split("\0") + kvs = [item.split("=", 1) for item in pairs] + return dict(kvs) + + def ExecStamp(self, path): + """Simple stamp command.""" + open(path, "w").close() + + def ExecRecursiveMirror(self, source, dest): + """Emulation of rm -rf out && cp -af in out.""" + if os.path.exists(dest): + if os.path.isdir(dest): + + def _on_error(fn, path, excinfo): + # The operation failed, possibly because the file is set to + # read-only. If that's why, make it writable and try the op again. + if not os.access(path, os.W_OK): + os.chmod(path, stat.S_IWRITE) + fn(path) + + shutil.rmtree(dest, onerror=_on_error) + else: + if not os.access(dest, os.W_OK): + # Attempt to make the file writable before deleting it. + os.chmod(dest, stat.S_IWRITE) + os.unlink(dest) + + if os.path.isdir(source): + shutil.copytree(source, dest) + else: + shutil.copy2(source, dest) + + def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args): + """Filter diagnostic output from link that looks like: + ' Creating library ui.dll.lib and object ui.dll.exp' + This happens when there are exports from the dll or exe. + """ + env = self._GetEnv(arch) + if use_separate_mspdbsrv == "True": + self._UseSeparateMspdbsrv(env, args) + if sys.platform == "win32": + args = list(args) # *args is a tuple by default, which is read-only. + args[0] = args[0].replace("/", "\\") + # https://docs.python.org/2/library/subprocess.html: + # "On Unix with shell=True [...] if args is a sequence, the first item + # specifies the command string, and any additional items will be treated as + # additional arguments to the shell itself. That is to say, Popen does the + # equivalent of: + # Popen(['/bin/sh', '-c', args[0], args[1], ...])" + # For that reason, since going through the shell doesn't seem necessary on + # non-Windows don't do that there. 
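+        # (Hence shell is only True when sys.platform == "win32" in the Popen
+        # call below; elsewhere the argv list is executed directly.)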
+ link = subprocess.Popen( + args, + shell=sys.platform == "win32", + env=env, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) + out = link.communicate()[0].decode("utf-8") + for line in out.splitlines(): + if ( + not line.startswith(" Creating library ") + and not line.startswith("Generating code") + and not line.startswith("Finished generating code") + ): + print(line) + return link.returncode + + def ExecLinkWithManifests( + self, + arch, + embed_manifest, + out, + ldcmd, + resname, + mt, + rc, + intermediate_manifest, + *manifests + ): + """A wrapper for handling creating a manifest resource and then executing + a link command.""" + # The 'normal' way to do manifests is to have link generate a manifest + # based on gathering dependencies from the object files, then merge that + # manifest with other manifests supplied as sources, convert the merged + # manifest to a resource, and then *relink*, including the compiled + # version of the manifest resource. This breaks incremental linking, and + # is generally overly complicated. Instead, we merge all the manifests + # provided (along with one that includes what would normally be in the + # linker-generated one, see msvs_emulation.py), and include that into the + # first and only link. We still tell link to generate a manifest, but we + # only use that to assert that our simpler process did not miss anything. + variables = { + "python": sys.executable, + "arch": arch, + "out": out, + "ldcmd": ldcmd, + "resname": resname, + "mt": mt, + "rc": rc, + "intermediate_manifest": intermediate_manifest, + "manifests": " ".join(manifests), + } + add_to_ld = "" + if manifests: + subprocess.check_call( + "%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo " + "-manifest %(manifests)s -out:%(out)s.manifest" % variables + ) + if embed_manifest == "True": + subprocess.check_call( + "%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest" + " %(out)s.manifest.rc %(resname)s" % variables + ) + subprocess.check_call( + "%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s " + "%(out)s.manifest.rc" % variables + ) + add_to_ld = " %(out)s.manifest.res" % variables + subprocess.check_call(ldcmd + add_to_ld) + + # Run mt.exe on the theoretically complete manifest we generated, merging + # it with the one the linker generated to confirm that the linker + # generated one does not add anything. This is strictly unnecessary for + # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not + # used in a #pragma comment. + if manifests: + # Merge the intermediate one with ours to .assert.manifest, then check + # that .assert.manifest is identical to ours. + subprocess.check_call( + "%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo " + "-manifest %(out)s.manifest %(intermediate_manifest)s " + "-out:%(out)s.assert.manifest" % variables + ) + assert_manifest = "%(out)s.assert.manifest" % variables + our_manifest = "%(out)s.manifest" % variables + # Load and normalize the manifests. mt.exe sometimes removes whitespace, + # and sometimes doesn't unfortunately. 
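+            # str.maketrans('', '', string.whitespace) below builds a table
+            # that deletes every whitespace character, so the two manifests
+            # are compared whitespace-insensitively.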
+            with open(our_manifest) as our_f:
+                with open(assert_manifest) as assert_f:
+                    translator = str.maketrans('', '', string.whitespace)
+                    our_data = our_f.read().translate(translator)
+                    assert_data = assert_f.read().translate(translator)
+            if our_data != assert_data:
+                os.unlink(out)
+
+                def dump(filename):
+                    print(filename, file=sys.stderr)
+                    print("-----", file=sys.stderr)
+                    with open(filename) as f:
+                        print(f.read(), file=sys.stderr)
+                    print("-----", file=sys.stderr)
+
+                dump(intermediate_manifest)
+                dump(our_manifest)
+                dump(assert_manifest)
+                sys.stderr.write(
+                    'Linker generated manifest "%s" added to final manifest "%s" '
+                    '(result in "%s"). '
+                    "Were /MANIFEST switches used in #pragma statements? "
+                    % (intermediate_manifest, our_manifest, assert_manifest)
+                )
+                return 1
+
+    def ExecManifestWrapper(self, arch, *args):
+        """Run manifest tool with environment set. Strip out undesirable warning
+        (some XML blocks are recognized by the OS loader, but not the manifest
+        tool)."""
+        env = self._GetEnv(arch)
+        popen = subprocess.Popen(
+            args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+        )
+        out = popen.communicate()[0].decode("utf-8")
+        for line in out.splitlines():
+            if line and "manifest authoring warning 81010002" not in line:
+                print(line)
+        return popen.returncode
+
+    def ExecManifestToRc(self, arch, *args):
+        """Creates a resource file pointing to a SxS assembly manifest.
+        |args| is a tuple containing the path to the manifest file, the path to
+        the resource file, and the resource name, which can be "1" (for
+        executables) or "2" (for DLLs)."""
+        manifest_path, resource_path, resource_name = args
+        with open(resource_path, "w") as output:
+            output.write(
+                '#include<windows.h>\n%s RT_MANIFEST "%s"'
+                % (resource_name, os.path.abspath(manifest_path).replace("\\", "/"))
+            )
+
+    def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, *flags):
+        """Filter noisy filenames output from MIDL compile step that isn't
+        quietable via command line flags.
+        """
+        args = (
+            ["midl", "/nologo"]
+            + list(flags)
+            + [
+                "/out",
+                outdir,
+                "/tlb",
+                tlb,
+                "/h",
+                h,
+                "/dlldata",
+                dlldata,
+                "/iid",
+                iid,
+                "/proxy",
+                proxy,
+                idl,
+            ]
+        )
+        env = self._GetEnv(arch)
+        popen = subprocess.Popen(
+            args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+        )
+        out = popen.communicate()[0].decode("utf-8")
+        # Filter junk out of stdout, and write filtered versions. Output we want
+        # to filter is pairs of lines that look like this:
+        # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
+        # objidl.idl
+        lines = out.splitlines()
+        prefixes = ("Processing ", "64 bit Processing ")
+        processing = {os.path.basename(x) for x in lines if x.startswith(prefixes)}
+        for line in lines:
+            if not line.startswith(prefixes) and line not in processing:
+                print(line)
+        return popen.returncode
+
+    def ExecAsmWrapper(self, arch, *args):
+        """Filter logo banner from invocations of asm.exe."""
+        env = self._GetEnv(arch)
+        popen = subprocess.Popen(
+            args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+        )
+        out = popen.communicate()[0].decode("utf-8")
+        for line in out.splitlines():
+            if (
+                not line.startswith("Copyright (C) Microsoft Corporation")
+                and not line.startswith("Microsoft (R) Macro Assembler")
+                and not line.startswith(" Assembling: ")
+                and line
+            ):
+                print(line)
+        return popen.returncode
+
+    def ExecRcWrapper(self, arch, *args):
+        """Filter logo banner from invocations of rc.exe.
Older versions of RC + don't support the /nologo flag.""" + env = self._GetEnv(arch) + popen = subprocess.Popen( + args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT + ) + out = popen.communicate()[0].decode("utf-8") + for line in out.splitlines(): + if ( + not line.startswith("Microsoft (R) Windows (R) Resource Compiler") + and not line.startswith("Copyright (C) Microsoft Corporation") + and line + ): + print(line) + return popen.returncode + + def ExecActionWrapper(self, arch, rspfile, *dir): + """Runs an action command line from a response file using the environment + for |arch|. If |dir| is supplied, use that as the working directory.""" + env = self._GetEnv(arch) + # TODO(scottmg): This is a temporary hack to get some specific variables + # through to actions that are set after gyp-time. http://crbug.com/333738. + for k, v in os.environ.items(): + if k not in env: + env[k] = v + args = open(rspfile).read() + dir = dir[0] if dir else None + return subprocess.call(args, shell=True, env=env, cwd=dir) + + def ExecClCompile(self, project_dir, selected_files): + """Executed by msvs-ninja projects when the 'ClCompile' target is used to + build selected C/C++ files.""" + project_dir = os.path.relpath(project_dir, BASE_DIR) + selected_files = selected_files.split(";") + ninja_targets = [ + os.path.join(project_dir, filename) + "^^" for filename in selected_files + ] + cmd = ["ninja.exe"] + cmd.extend(ninja_targets) + return subprocess.call(cmd, shell=True, cwd=BASE_DIR) + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py new file mode 100644 index 0000000..a75d8ee --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py @@ -0,0 +1,1939 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +This module contains classes that help to emulate xcodebuild behavior on top of +other build systems, such as make and ninja. +""" + + +import copy +import gyp.common +import os +import os.path +import re +import shlex +import subprocess +import sys +from gyp.common import GypError + +# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when +# "xcodebuild" is called too quickly (it has been found to return incorrect +# version number). +XCODE_VERSION_CACHE = None + +# Populated lazily by GetXcodeArchsDefault, to an |XcodeArchsDefault| instance +# corresponding to the installed version of Xcode. +XCODE_ARCHS_DEFAULT_CACHE = None + + +def XcodeArchsVariableMapping(archs, archs_including_64_bit=None): + """Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable, + and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT).""" + mapping = {"$(ARCHS_STANDARD)": archs} + if archs_including_64_bit: + mapping["$(ARCHS_STANDARD_INCLUDING_64_BIT)"] = archs_including_64_bit + return mapping + + +class XcodeArchsDefault: + """A class to resolve ARCHS variable from xcode_settings, resolving Xcode + macros and implementing filtering by VALID_ARCHS. The expansion of macros + depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and + on the version of Xcode. + """ + + # Match variable like $(ARCHS_STANDARD). 
+ variable_pattern = re.compile(r"\$\([a-zA-Z_][a-zA-Z0-9_]*\)$") + + def __init__(self, default, mac, iphonesimulator, iphoneos): + self._default = (default,) + self._archs = {"mac": mac, "ios": iphoneos, "iossim": iphonesimulator} + + def _VariableMapping(self, sdkroot): + """Returns the dictionary of variable mapping depending on the SDKROOT.""" + sdkroot = sdkroot.lower() + if "iphoneos" in sdkroot: + return self._archs["ios"] + elif "iphonesimulator" in sdkroot: + return self._archs["iossim"] + else: + return self._archs["mac"] + + def _ExpandArchs(self, archs, sdkroot): + """Expands variables references in ARCHS, and remove duplicates.""" + variable_mapping = self._VariableMapping(sdkroot) + expanded_archs = [] + for arch in archs: + if self.variable_pattern.match(arch): + variable = arch + try: + variable_expansion = variable_mapping[variable] + for arch in variable_expansion: + if arch not in expanded_archs: + expanded_archs.append(arch) + except KeyError: + print('Warning: Ignoring unsupported variable "%s".' % variable) + elif arch not in expanded_archs: + expanded_archs.append(arch) + return expanded_archs + + def ActiveArchs(self, archs, valid_archs, sdkroot): + """Expands variables references in ARCHS, and filter by VALID_ARCHS if it + is defined (if not set, Xcode accept any value in ARCHS, otherwise, only + values present in VALID_ARCHS are kept).""" + expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or "") + if valid_archs: + filtered_archs = [] + for arch in expanded_archs: + if arch in valid_archs: + filtered_archs.append(arch) + expanded_archs = filtered_archs + return expanded_archs + + +def GetXcodeArchsDefault(): + """Returns the |XcodeArchsDefault| object to use to expand ARCHS for the + installed version of Xcode. The default values used by Xcode for ARCHS + and the expansion of the variables depends on the version of Xcode used. + + For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included + uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses + $(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added to Xcode 5.0 + and deprecated with Xcode 5.1. + + For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit + architecture as part of $(ARCHS_STANDARD) and default to only building it. + + For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part + of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they + are also part of $(ARCHS_STANDARD). + + All these rules are coded in the construction of the |XcodeArchsDefault| + object to use depending on the version of Xcode detected. 
The object is + for performance reason.""" + global XCODE_ARCHS_DEFAULT_CACHE + if XCODE_ARCHS_DEFAULT_CACHE: + return XCODE_ARCHS_DEFAULT_CACHE + xcode_version, _ = XcodeVersion() + if xcode_version < "0500": + XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault( + "$(ARCHS_STANDARD)", + XcodeArchsVariableMapping(["i386"]), + XcodeArchsVariableMapping(["i386"]), + XcodeArchsVariableMapping(["armv7"]), + ) + elif xcode_version < "0510": + XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault( + "$(ARCHS_STANDARD_INCLUDING_64_BIT)", + XcodeArchsVariableMapping(["x86_64"], ["x86_64"]), + XcodeArchsVariableMapping(["i386"], ["i386", "x86_64"]), + XcodeArchsVariableMapping( + ["armv7", "armv7s"], ["armv7", "armv7s", "arm64"] + ), + ) + else: + XCODE_ARCHS_DEFAULT_CACHE = XcodeArchsDefault( + "$(ARCHS_STANDARD)", + XcodeArchsVariableMapping(["x86_64"], ["x86_64"]), + XcodeArchsVariableMapping(["i386", "x86_64"], ["i386", "x86_64"]), + XcodeArchsVariableMapping( + ["armv7", "armv7s", "arm64"], ["armv7", "armv7s", "arm64"] + ), + ) + return XCODE_ARCHS_DEFAULT_CACHE + + +class XcodeSettings: + """A class that understands the gyp 'xcode_settings' object.""" + + # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached + # at class-level for efficiency. + _sdk_path_cache = {} + _platform_path_cache = {} + _sdk_root_cache = {} + + # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so + # cached at class-level for efficiency. + _plist_cache = {} + + # Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so + # cached at class-level for efficiency. + _codesigning_key_cache = {} + + def __init__(self, spec): + self.spec = spec + + self.isIOS = False + self.mac_toolchain_dir = None + self.header_map_path = None + + # Per-target 'xcode_settings' are pushed down into configs earlier by gyp. + # This means self.xcode_settings[config] always contains all settings + # for that config -- the per-target settings as well. Settings that are + # the same for all configs are implicitly per-target settings. + self.xcode_settings = {} + configs = spec["configurations"] + for configname, config in configs.items(): + self.xcode_settings[configname] = config.get("xcode_settings", {}) + self._ConvertConditionalKeys(configname) + if self.xcode_settings[configname].get("IPHONEOS_DEPLOYMENT_TARGET", None): + self.isIOS = True + + # This is only non-None temporarily during the execution of some methods. + self.configname = None + + # Used by _AdjustLibrary to match .a and .dylib entries in libraries. + self.library_re = re.compile(r"^lib([^/]+)\.(a|dylib)$") + + def _ConvertConditionalKeys(self, configname): + """Converts or warns on conditional keys. Xcode supports conditional keys, + such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. 
This is a partial implementation + with some keys converted while the rest force a warning.""" + settings = self.xcode_settings[configname] + conditional_keys = [key for key in settings if key.endswith("]")] + for key in conditional_keys: + # If you need more, speak up at http://crbug.com/122592 + if key.endswith("[sdk=iphoneos*]"): + if configname.endswith("iphoneos"): + new_key = key.split("[")[0] + settings[new_key] = settings[key] + else: + print( + "Warning: Conditional keys not implemented, ignoring:", + " ".join(conditional_keys), + ) + del settings[key] + + def _Settings(self): + assert self.configname + return self.xcode_settings[self.configname] + + def _Test(self, test_key, cond_key, default): + return self._Settings().get(test_key, default) == cond_key + + def _Appendf(self, lst, test_key, format_str, default=None): + if test_key in self._Settings(): + lst.append(format_str % str(self._Settings()[test_key])) + elif default: + lst.append(format_str % str(default)) + + def _WarnUnimplemented(self, test_key): + if test_key in self._Settings(): + print('Warning: Ignoring not yet implemented key "%s".' % test_key) + + def IsBinaryOutputFormat(self, configname): + default = "binary" if self.isIOS else "xml" + format = self.xcode_settings[configname].get("INFOPLIST_OUTPUT_FORMAT", default) + return format == "binary" + + def IsIosFramework(self): + return self.spec["type"] == "shared_library" and self._IsBundle() and self.isIOS + + def _IsBundle(self): + return ( + int(self.spec.get("mac_bundle", 0)) != 0 + or self._IsXCTest() + or self._IsXCUiTest() + ) + + def _IsXCTest(self): + return int(self.spec.get("mac_xctest_bundle", 0)) != 0 + + def _IsXCUiTest(self): + return int(self.spec.get("mac_xcuitest_bundle", 0)) != 0 + + def _IsIosAppExtension(self): + return int(self.spec.get("ios_app_extension", 0)) != 0 + + def _IsIosWatchKitExtension(self): + return int(self.spec.get("ios_watchkit_extension", 0)) != 0 + + def _IsIosWatchApp(self): + return int(self.spec.get("ios_watch_app", 0)) != 0 + + def GetFrameworkVersion(self): + """Returns the framework version of the current target. Only valid for + bundles.""" + assert self._IsBundle() + return self.GetPerTargetSetting("FRAMEWORK_VERSION", default="A") + + def GetWrapperExtension(self): + """Returns the bundle extension (.app, .framework, .plugin, etc). Only + valid for bundles.""" + assert self._IsBundle() + if self.spec["type"] in ("loadable_module", "shared_library"): + default_wrapper_extension = { + "loadable_module": "bundle", + "shared_library": "framework", + }[self.spec["type"]] + wrapper_extension = self.GetPerTargetSetting( + "WRAPPER_EXTENSION", default=default_wrapper_extension + ) + return "." + self.spec.get("product_extension", wrapper_extension) + elif self.spec["type"] == "executable": + if self._IsIosAppExtension() or self._IsIosWatchKitExtension(): + return "." + self.spec.get("product_extension", "appex") + else: + return "." + self.spec.get("product_extension", "app") + else: + assert False, "Don't know extension for '{}', target '{}'".format( + self.spec["type"], + self.spec["target_name"], + ) + + def GetProductName(self): + """Returns PRODUCT_NAME.""" + return self.spec.get("product_name", self.spec["target_name"]) + + def GetFullProductName(self): + """Returns FULL_PRODUCT_NAME.""" + if self._IsBundle(): + return self.GetWrapperName() + else: + return self._GetStandaloneBinaryPath() + + def GetWrapperName(self): + """Returns the directory name of the bundle represented by this target. 
+        Only valid for bundles."""
+        assert self._IsBundle()
+        return self.GetProductName() + self.GetWrapperExtension()
+
+    def GetBundleContentsFolderPath(self):
+        """Returns the qualified path to the bundle's contents folder. E.g.
+        Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
+        if self.isIOS:
+            return self.GetWrapperName()
+        assert self._IsBundle()
+        if self.spec["type"] == "shared_library":
+            return os.path.join(
+                self.GetWrapperName(), "Versions", self.GetFrameworkVersion()
+            )
+        else:
+            # loadable_modules have a 'Contents' folder like executables.
+            return os.path.join(self.GetWrapperName(), "Contents")
+
+    def GetBundleResourceFolder(self):
+        """Returns the qualified path to the bundle's resource folder. E.g.
+        Chromium.app/Contents/Resources. Only valid for bundles."""
+        assert self._IsBundle()
+        if self.isIOS:
+            return self.GetBundleContentsFolderPath()
+        return os.path.join(self.GetBundleContentsFolderPath(), "Resources")
+
+    def GetBundleExecutableFolderPath(self):
+        """Returns the qualified path to the bundle's executables folder. E.g.
+        Chromium.app/Contents/MacOS. Only valid for bundles."""
+        assert self._IsBundle()
+        if self.spec["type"] == "shared_library" or self.isIOS:
+            return self.GetBundleContentsFolderPath()
+        elif self.spec["type"] in ("executable", "loadable_module"):
+            return os.path.join(self.GetBundleContentsFolderPath(), "MacOS")
+
+    def GetBundleJavaFolderPath(self):
+        """Returns the qualified path to the bundle's Java resource folder.
+        E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles."""
+        assert self._IsBundle()
+        return os.path.join(self.GetBundleResourceFolder(), "Java")
+
+    def GetBundleFrameworksFolderPath(self):
+        """Returns the qualified path to the bundle's frameworks folder. E.g.,
+        Chromium.app/Contents/Frameworks. Only valid for bundles."""
+        assert self._IsBundle()
+        return os.path.join(self.GetBundleContentsFolderPath(), "Frameworks")
+
+    def GetBundleSharedFrameworksFolderPath(self):
+        """Returns the qualified path to the bundle's shared frameworks folder.
+        E.g., Chromium.app/Contents/SharedFrameworks. Only valid for bundles."""
+        assert self._IsBundle()
+        return os.path.join(self.GetBundleContentsFolderPath(), "SharedFrameworks")
+
+    def GetBundleSharedSupportFolderPath(self):
+        """Returns the qualified path to the bundle's shared support folder.
+        E.g., Chromium.app/Contents/SharedSupport. Only valid for bundles."""
+        assert self._IsBundle()
+        if self.spec["type"] == "shared_library":
+            return self.GetBundleResourceFolder()
+        else:
+            return os.path.join(self.GetBundleContentsFolderPath(), "SharedSupport")
+
+    def GetBundlePlugInsFolderPath(self):
+        """Returns the qualified path to the bundle's plugins folder. E.g.,
+        Chromium.app/Contents/PlugIns. Only valid for bundles."""
+        assert self._IsBundle()
+        return os.path.join(self.GetBundleContentsFolderPath(), "PlugIns")
+
+    def GetBundleXPCServicesFolderPath(self):
+        """Returns the qualified path to the bundle's XPC services folder. E.g.,
+        Chromium.app/Contents/XPCServices. Only valid for bundles."""
+        assert self._IsBundle()
+        return os.path.join(self.GetBundleContentsFolderPath(), "XPCServices")
+
+    def GetBundlePlistPath(self):
+        """Returns the qualified path to the bundle's plist file. E.g.
+        Chromium.app/Contents/Info.plist. Only valid for bundles."""
+        assert self._IsBundle()
+        if (
+            self.spec["type"] in ("executable", "loadable_module")
+            or self.IsIosFramework()
+        ):
+            return os.path.join(self.GetBundleContentsFolderPath(), "Info.plist")
+        else:
+            return os.path.join(
+                self.GetBundleContentsFolderPath(), "Resources", "Info.plist"
+            )
+
+    def GetProductType(self):
+        """Returns the PRODUCT_TYPE of this target."""
+        if self._IsIosAppExtension():
+            assert self._IsBundle(), (
+                "ios_app_extension flag requires mac_bundle "
+                "(target %s)" % self.spec["target_name"]
+            )
+            return "com.apple.product-type.app-extension"
+        if self._IsIosWatchKitExtension():
+            assert self._IsBundle(), (
+                "ios_watchkit_extension flag requires "
+                "mac_bundle (target %s)" % self.spec["target_name"]
+            )
+            return "com.apple.product-type.watchkit-extension"
+        if self._IsIosWatchApp():
+            assert self._IsBundle(), (
+                "ios_watch_app flag requires mac_bundle "
+                "(target %s)" % self.spec["target_name"]
+            )
+            return "com.apple.product-type.application.watchapp"
+        if self._IsXCUiTest():
+            assert self._IsBundle(), (
+                "mac_xcuitest_bundle flag requires mac_bundle "
+                "(target %s)" % self.spec["target_name"]
+            )
+            return "com.apple.product-type.bundle.ui-testing"
+        if self._IsBundle():
+            return {
+                "executable": "com.apple.product-type.application",
+                "loadable_module": "com.apple.product-type.bundle",
+                "shared_library": "com.apple.product-type.framework",
+            }[self.spec["type"]]
+        else:
+            return {
+                "executable": "com.apple.product-type.tool",
+                "loadable_module": "com.apple.product-type.library.dynamic",
+                "shared_library": "com.apple.product-type.library.dynamic",
+                "static_library": "com.apple.product-type.library.static",
+            }[self.spec["type"]]
+
+    def GetMachOType(self):
+        """Returns the MACH_O_TYPE of this target."""
+        # Weird, but matches Xcode.
+        if not self._IsBundle() and self.spec["type"] == "executable":
+            return ""
+        return {
+            "executable": "mh_execute",
+            "static_library": "staticlib",
+            "shared_library": "mh_dylib",
+            "loadable_module": "mh_bundle",
+        }[self.spec["type"]]
+
+    def _GetBundleBinaryPath(self):
+        """Returns the name of the bundle binary produced by this target.
+        E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
+        assert self._IsBundle()
+        return os.path.join(
+            self.GetBundleExecutableFolderPath(), self.GetExecutableName()
+        )
+
+    def _GetStandaloneExecutableSuffix(self):
+        if "product_extension" in self.spec:
+            return "." + self.spec["product_extension"]
+        return {
+            "executable": "",
+            "static_library": ".a",
+            "shared_library": ".dylib",
+            "loadable_module": ".so",
+        }[self.spec["type"]]
+
+    def _GetStandaloneExecutablePrefix(self):
+        return self.spec.get(
+            "product_prefix",
+            {
+                "executable": "",
+                "static_library": "lib",
+                "shared_library": "lib",
+                # Non-bundled loadable_modules are called foo.so for some reason
+                # (that is, .so and no prefix) with the Xcode build -- match that.
+                "loadable_module": "",
+            }[self.spec["type"]],
+        )
+
+    def _GetStandaloneBinaryPath(self):
+        """Returns the name of the non-bundle binary represented by this target.
+        E.g. hello_world.
Only valid for non-bundles.""" + assert not self._IsBundle() + assert self.spec["type"] in ( + "executable", + "shared_library", + "static_library", + "loadable_module", + ), ("Unexpected type %s" % self.spec["type"]) + target = self.spec["target_name"] + if self.spec["type"] == "static_library": + if target[:3] == "lib": + target = target[3:] + elif self.spec["type"] in ("loadable_module", "shared_library"): + if target[:3] == "lib": + target = target[3:] + + target_prefix = self._GetStandaloneExecutablePrefix() + target = self.spec.get("product_name", target) + target_ext = self._GetStandaloneExecutableSuffix() + return target_prefix + target + target_ext + + def GetExecutableName(self): + """Returns the executable name of the bundle represented by this target. + E.g. Chromium.""" + if self._IsBundle(): + return self.spec.get("product_name", self.spec["target_name"]) + else: + return self._GetStandaloneBinaryPath() + + def GetExecutablePath(self): + """Returns the qualified path to the primary executable of the bundle + represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium.""" + if self._IsBundle(): + return self._GetBundleBinaryPath() + else: + return self._GetStandaloneBinaryPath() + + def GetActiveArchs(self, configname): + """Returns the architectures this target should be built for.""" + config_settings = self.xcode_settings[configname] + xcode_archs_default = GetXcodeArchsDefault() + return xcode_archs_default.ActiveArchs( + config_settings.get("ARCHS"), + config_settings.get("VALID_ARCHS"), + config_settings.get("SDKROOT"), + ) + + def _GetSdkVersionInfoItem(self, sdk, infoitem): + # xcodebuild requires Xcode and can't run on Command Line Tools-only + # systems from 10.7 onward. + # Since the CLT has no SDK paths anyway, returning None is the + # most sensible route and should still do the right thing. + try: + return GetStdoutQuiet(["xcrun", "--sdk", sdk, infoitem]) + except GypError: + pass + + def _SdkRoot(self, configname): + if configname is None: + configname = self.configname + return self.GetPerConfigSetting("SDKROOT", configname, default="") + + def _XcodePlatformPath(self, configname=None): + sdk_root = self._SdkRoot(configname) + if sdk_root not in XcodeSettings._platform_path_cache: + platform_path = self._GetSdkVersionInfoItem( + sdk_root, "--show-sdk-platform-path" + ) + XcodeSettings._platform_path_cache[sdk_root] = platform_path + return XcodeSettings._platform_path_cache[sdk_root] + + def _SdkPath(self, configname=None): + sdk_root = self._SdkRoot(configname) + if sdk_root.startswith("/"): + return sdk_root + return self._XcodeSdkPath(sdk_root) + + def _XcodeSdkPath(self, sdk_root): + if sdk_root not in XcodeSettings._sdk_path_cache: + sdk_path = self._GetSdkVersionInfoItem(sdk_root, "--show-sdk-path") + XcodeSettings._sdk_path_cache[sdk_root] = sdk_path + if sdk_root: + XcodeSettings._sdk_root_cache[sdk_path] = sdk_root + return XcodeSettings._sdk_path_cache[sdk_root] + + def _AppendPlatformVersionMinFlags(self, lst): + self._Appendf(lst, "MACOSX_DEPLOYMENT_TARGET", "-mmacosx-version-min=%s") + if "IPHONEOS_DEPLOYMENT_TARGET" in self._Settings(): + # TODO: Implement this better? 
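+            # Illustrative (hypothetical) example: with
+            # IPHONEOS_DEPLOYMENT_TARGET = "9.0", an SDK path ending in
+            # "iPhoneSimulator9.3.sdk" yields "-mios-simulator-version-min=9.0",
+            # while one ending in "iPhoneOS9.3.sdk" yields
+            # "-miphoneos-version-min=9.0".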
+            sdk_path_basename = os.path.basename(self._SdkPath())
+            if sdk_path_basename.lower().startswith("iphonesimulator"):
+                self._Appendf(
+                    lst, "IPHONEOS_DEPLOYMENT_TARGET", "-mios-simulator-version-min=%s"
+                )
+            else:
+                self._Appendf(
+                    lst, "IPHONEOS_DEPLOYMENT_TARGET", "-miphoneos-version-min=%s"
+                )
+
+    def GetCflags(self, configname, arch=None):
+        """Returns flags that need to be added to .c, .cc, .m, and .mm
+        compilations."""
+        # These functions (and the similar ones below) do not offer complete
+        # emulation of all xcode_settings keys. They're implemented on demand.
+
+        self.configname = configname
+        cflags = []
+
+        sdk_root = self._SdkPath()
+        if "SDKROOT" in self._Settings() and sdk_root:
+            cflags.append("-isysroot %s" % sdk_root)
+
+        if self.header_map_path:
+            cflags.append("-I%s" % self.header_map_path)
+
+        if self._Test("CLANG_WARN_CONSTANT_CONVERSION", "YES", default="NO"):
+            cflags.append("-Wconstant-conversion")
+
+        if self._Test("GCC_CHAR_IS_UNSIGNED_CHAR", "YES", default="NO"):
+            cflags.append("-funsigned-char")
+
+        if self._Test("GCC_CW_ASM_SYNTAX", "YES", default="YES"):
+            cflags.append("-fasm-blocks")
+
+        if "GCC_DYNAMIC_NO_PIC" in self._Settings():
+            if self._Settings()["GCC_DYNAMIC_NO_PIC"] == "YES":
+                cflags.append("-mdynamic-no-pic")
+        else:
+            pass
+            # TODO: In this case it depends on the target. Xcode passes
+            # -mdynamic-no-pic by default for executables and possibly static
+            # libs, according to mento.
+
+        if self._Test("GCC_ENABLE_PASCAL_STRINGS", "YES", default="YES"):
+            cflags.append("-mpascal-strings")
+
+        self._Appendf(cflags, "GCC_OPTIMIZATION_LEVEL", "-O%s", default="s")
+
+        if self._Test("GCC_GENERATE_DEBUGGING_SYMBOLS", "YES", default="YES"):
+            dbg_format = self._Settings().get("DEBUG_INFORMATION_FORMAT", "dwarf")
+            if dbg_format == "dwarf":
+                cflags.append("-gdwarf-2")
+            elif dbg_format == "stabs":
+                raise NotImplementedError("stabs debug format is not supported yet.")
+            elif dbg_format == "dwarf-with-dsym":
+                cflags.append("-gdwarf-2")
+            else:
+                raise NotImplementedError("Unknown debug format %s" % dbg_format)
+
+        if self._Settings().get("GCC_STRICT_ALIASING") == "YES":
+            cflags.append("-fstrict-aliasing")
+        elif self._Settings().get("GCC_STRICT_ALIASING") == "NO":
+            cflags.append("-fno-strict-aliasing")
+
+        if self._Test("GCC_SYMBOLS_PRIVATE_EXTERN", "YES", default="NO"):
+            cflags.append("-fvisibility=hidden")
+
+        if self._Test("GCC_TREAT_WARNINGS_AS_ERRORS", "YES", default="NO"):
+            cflags.append("-Werror")
+
+        if self._Test("GCC_WARN_ABOUT_MISSING_NEWLINE", "YES", default="NO"):
+            cflags.append("-Wnewline-eof")
+
+        # In Xcode, this is only activated when GCC_COMPILER_VERSION is clang or
+        # llvm-gcc. It also requires a fairly recent libtool, and
+        # if the system clang isn't used, DYLD_LIBRARY_PATH needs to contain the
+        # path to the libLTO.dylib that matches the used clang.
+        if self._Test("LLVM_LTO", "YES", default="NO"):
+            cflags.append("-flto")
+
+        self._AppendPlatformVersionMinFlags(cflags)
+
+        # TODO:
+        if self._Test("COPY_PHASE_STRIP", "YES", default="NO"):
+            self._WarnUnimplemented("COPY_PHASE_STRIP")
+        self._WarnUnimplemented("GCC_DEBUGGING_SYMBOLS")
+        self._WarnUnimplemented("GCC_ENABLE_OBJC_EXCEPTIONS")
+
+        # TODO: This is exported correctly, but assigning to it is not supported.
+ self._WarnUnimplemented("MACH_O_TYPE") + self._WarnUnimplemented("PRODUCT_TYPE") + + # If GYP_CROSSCOMPILE (--cross-compiling), disable architecture-specific + # additions and assume these will be provided as required via CC_host, + # CXX_host, CC_target and CXX_target. + if not gyp.common.CrossCompileRequested(): + if arch is not None: + archs = [arch] + else: + assert self.configname + archs = self.GetActiveArchs(self.configname) + if len(archs) != 1: + # TODO: Supporting fat binaries will be annoying. + self._WarnUnimplemented("ARCHS") + archs = ["i386"] + cflags.append("-arch " + archs[0]) + + if archs[0] in ("i386", "x86_64"): + if self._Test("GCC_ENABLE_SSE3_EXTENSIONS", "YES", default="NO"): + cflags.append("-msse3") + if self._Test( + "GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS", "YES", default="NO" + ): + cflags.append("-mssse3") # Note 3rd 's'. + if self._Test("GCC_ENABLE_SSE41_EXTENSIONS", "YES", default="NO"): + cflags.append("-msse4.1") + if self._Test("GCC_ENABLE_SSE42_EXTENSIONS", "YES", default="NO"): + cflags.append("-msse4.2") + + cflags += self._Settings().get("WARNING_CFLAGS", []) + + if self._IsXCTest(): + platform_root = self._XcodePlatformPath(configname) + if platform_root: + cflags.append("-F" + platform_root + "/Developer/Library/Frameworks/") + + if sdk_root: + framework_root = sdk_root + else: + framework_root = "" + config = self.spec["configurations"][self.configname] + framework_dirs = config.get("mac_framework_dirs", []) + for directory in framework_dirs: + cflags.append("-F" + directory.replace("$(SDKROOT)", framework_root)) + + self.configname = None + return cflags + + def GetCflagsC(self, configname): + """Returns flags that need to be added to .c, and .m compilations.""" + self.configname = configname + cflags_c = [] + if self._Settings().get("GCC_C_LANGUAGE_STANDARD", "") == "ansi": + cflags_c.append("-ansi") + else: + self._Appendf(cflags_c, "GCC_C_LANGUAGE_STANDARD", "-std=%s") + cflags_c += self._Settings().get("OTHER_CFLAGS", []) + self.configname = None + return cflags_c + + def GetCflagsCC(self, configname): + """Returns flags that need to be added to .cc, and .mm compilations.""" + self.configname = configname + cflags_cc = [] + + clang_cxx_language_standard = self._Settings().get( + "CLANG_CXX_LANGUAGE_STANDARD" + ) + # Note: Don't make c++0x to c++11 so that c++0x can be used with older + # clangs that don't understand c++11 yet (like Xcode 4.2's). + if clang_cxx_language_standard: + cflags_cc.append("-std=%s" % clang_cxx_language_standard) + + self._Appendf(cflags_cc, "CLANG_CXX_LIBRARY", "-stdlib=%s") + + if self._Test("GCC_ENABLE_CPP_RTTI", "NO", default="YES"): + cflags_cc.append("-fno-rtti") + if self._Test("GCC_ENABLE_CPP_EXCEPTIONS", "NO", default="YES"): + cflags_cc.append("-fno-exceptions") + if self._Test("GCC_INLINES_ARE_PRIVATE_EXTERN", "YES", default="NO"): + cflags_cc.append("-fvisibility-inlines-hidden") + if self._Test("GCC_THREADSAFE_STATICS", "NO", default="YES"): + cflags_cc.append("-fno-threadsafe-statics") + # Note: This flag is a no-op for clang, it only has an effect for gcc. + if self._Test("GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO", "NO", default="YES"): + cflags_cc.append("-Wno-invalid-offsetof") + + other_ccflags = [] + + for flag in self._Settings().get("OTHER_CPLUSPLUSFLAGS", ["$(inherited)"]): + # TODO: More general variable expansion. Missing in many other places too. 
+ if flag in ("$inherited", "$(inherited)", "${inherited}"): + flag = "$OTHER_CFLAGS" + if flag in ("$OTHER_CFLAGS", "$(OTHER_CFLAGS)", "${OTHER_CFLAGS}"): + other_ccflags += self._Settings().get("OTHER_CFLAGS", []) + else: + other_ccflags.append(flag) + cflags_cc += other_ccflags + + self.configname = None + return cflags_cc + + def _AddObjectiveCGarbageCollectionFlags(self, flags): + gc_policy = self._Settings().get("GCC_ENABLE_OBJC_GC", "unsupported") + if gc_policy == "supported": + flags.append("-fobjc-gc") + elif gc_policy == "required": + flags.append("-fobjc-gc-only") + + def _AddObjectiveCARCFlags(self, flags): + if self._Test("CLANG_ENABLE_OBJC_ARC", "YES", default="NO"): + flags.append("-fobjc-arc") + + def _AddObjectiveCMissingPropertySynthesisFlags(self, flags): + if self._Test( + "CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS", "YES", default="NO" + ): + flags.append("-Wobjc-missing-property-synthesis") + + def GetCflagsObjC(self, configname): + """Returns flags that need to be added to .m compilations.""" + self.configname = configname + cflags_objc = [] + self._AddObjectiveCGarbageCollectionFlags(cflags_objc) + self._AddObjectiveCARCFlags(cflags_objc) + self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc) + self.configname = None + return cflags_objc + + def GetCflagsObjCC(self, configname): + """Returns flags that need to be added to .mm compilations.""" + self.configname = configname + cflags_objcc = [] + self._AddObjectiveCGarbageCollectionFlags(cflags_objcc) + self._AddObjectiveCARCFlags(cflags_objcc) + self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc) + if self._Test("GCC_OBJC_CALL_CXX_CDTORS", "YES", default="NO"): + cflags_objcc.append("-fobjc-call-cxx-cdtors") + self.configname = None + return cflags_objcc + + def GetInstallNameBase(self): + """Return DYLIB_INSTALL_NAME_BASE for this target.""" + # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. + if self.spec["type"] != "shared_library" and ( + self.spec["type"] != "loadable_module" or self._IsBundle() + ): + return None + install_base = self.GetPerTargetSetting( + "DYLIB_INSTALL_NAME_BASE", + default="/Library/Frameworks" if self._IsBundle() else "/usr/local/lib", + ) + return install_base + + def _StandardizePath(self, path): + """Do :standardizepath processing for path.""" + # I'm not quite sure what :standardizepath does. Just call normpath(), + # but don't let @executable_path/../foo collapse to foo. + if "/" in path: + prefix, rest = "", path + if path.startswith("@"): + prefix, rest = path.split("/", 1) + rest = os.path.normpath(rest) # :standardizepath + path = os.path.join(prefix, rest) + return path + + def GetInstallName(self): + """Return LD_DYLIB_INSTALL_NAME for this target.""" + # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. + if self.spec["type"] != "shared_library" and ( + self.spec["type"] != "loadable_module" or self._IsBundle() + ): + return None + + default_install_name = ( + "$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)" + ) + install_name = self.GetPerTargetSetting( + "LD_DYLIB_INSTALL_NAME", default=default_install_name + ) + + # Hardcode support for the variables used in chromium for now, to + # unblock people using the make build. 
+ if "$" in install_name: + assert install_name in ( + "$(DYLIB_INSTALL_NAME_BASE:standardizepath)/" + "$(WRAPPER_NAME)/$(PRODUCT_NAME)", + default_install_name, + ), ( + "Variables in LD_DYLIB_INSTALL_NAME are not generally supported " + "yet in target '%s' (got '%s')" + % (self.spec["target_name"], install_name) + ) + + install_name = install_name.replace( + "$(DYLIB_INSTALL_NAME_BASE:standardizepath)", + self._StandardizePath(self.GetInstallNameBase()), + ) + if self._IsBundle(): + # These are only valid for bundles, hence the |if|. + install_name = install_name.replace( + "$(WRAPPER_NAME)", self.GetWrapperName() + ) + install_name = install_name.replace( + "$(PRODUCT_NAME)", self.GetProductName() + ) + else: + assert "$(WRAPPER_NAME)" not in install_name + assert "$(PRODUCT_NAME)" not in install_name + + install_name = install_name.replace( + "$(EXECUTABLE_PATH)", self.GetExecutablePath() + ) + return install_name + + def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path): + """Checks if ldflag contains a filename and if so remaps it from + gyp-directory-relative to build-directory-relative.""" + # This list is expanded on demand. + # They get matched as: + # -exported_symbols_list file + # -Wl,exported_symbols_list file + # -Wl,exported_symbols_list,file + LINKER_FILE = r"(\S+)" + WORD = r"\S+" + linker_flags = [ + ["-exported_symbols_list", LINKER_FILE], # Needed for NaCl. + ["-unexported_symbols_list", LINKER_FILE], + ["-reexported_symbols_list", LINKER_FILE], + ["-sectcreate", WORD, WORD, LINKER_FILE], # Needed for remoting. + ] + for flag_pattern in linker_flags: + regex = re.compile("(?:-Wl,)?" + "[ ,]".join(flag_pattern)) + m = regex.match(ldflag) + if m: + ldflag = ( + ldflag[: m.start(1)] + + gyp_to_build_path(m.group(1)) + + ldflag[m.end(1) :] + ) + # Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS, + # TODO(thakis): Update ffmpeg.gyp): + if ldflag.startswith("-L"): + ldflag = "-L" + gyp_to_build_path(ldflag[len("-L") :]) + return ldflag + + def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None): + """Returns flags that need to be passed to the linker. + + Args: + configname: The name of the configuration to get ld flags for. + product_dir: The directory where products such static and dynamic + libraries are placed. This is added to the library search path. + gyp_to_build_path: A function that converts paths relative to the + current gyp file to paths relative to the build directory. + """ + self.configname = configname + ldflags = [] + + # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS + # can contain entries that depend on this. Explicitly absolutify these. 
+ for ldflag in self._Settings().get("OTHER_LDFLAGS", []): + ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path)) + + if self._Test("DEAD_CODE_STRIPPING", "YES", default="NO"): + ldflags.append("-Wl,-dead_strip") + + if self._Test("PREBINDING", "YES", default="NO"): + ldflags.append("-Wl,-prebind") + + self._Appendf( + ldflags, "DYLIB_COMPATIBILITY_VERSION", "-compatibility_version %s" + ) + self._Appendf(ldflags, "DYLIB_CURRENT_VERSION", "-current_version %s") + + self._AppendPlatformVersionMinFlags(ldflags) + + if "SDKROOT" in self._Settings() and self._SdkPath(): + ldflags.append("-isysroot " + self._SdkPath()) + + for library_path in self._Settings().get("LIBRARY_SEARCH_PATHS", []): + ldflags.append("-L" + gyp_to_build_path(library_path)) + + if "ORDER_FILE" in self._Settings(): + ldflags.append( + "-Wl,-order_file " + + "-Wl," + + gyp_to_build_path(self._Settings()["ORDER_FILE"]) + ) + + if not gyp.common.CrossCompileRequested(): + if arch is not None: + archs = [arch] + else: + assert self.configname + archs = self.GetActiveArchs(self.configname) + if len(archs) != 1: + # TODO: Supporting fat binaries will be annoying. + self._WarnUnimplemented("ARCHS") + archs = ["i386"] + ldflags.append("-arch " + archs[0]) + + # Xcode adds the product directory by default. + # Rewrite -L. to -L./ to work around http://www.openradar.me/25313838 + ldflags.append("-L" + (product_dir if product_dir != "." else "./")) + + install_name = self.GetInstallName() + if install_name and self.spec["type"] != "loadable_module": + ldflags.append("-install_name " + install_name.replace(" ", r"\ ")) + + for rpath in self._Settings().get("LD_RUNPATH_SEARCH_PATHS", []): + ldflags.append("-Wl,-rpath," + rpath) + + sdk_root = self._SdkPath() + if not sdk_root: + sdk_root = "" + config = self.spec["configurations"][self.configname] + framework_dirs = config.get("mac_framework_dirs", []) + for directory in framework_dirs: + ldflags.append("-F" + directory.replace("$(SDKROOT)", sdk_root)) + + if self._IsXCTest(): + platform_root = self._XcodePlatformPath(configname) + if sdk_root and platform_root: + ldflags.append("-F" + platform_root + "/Developer/Library/Frameworks/") + ldflags.append("-framework XCTest") + + is_extension = self._IsIosAppExtension() or self._IsIosWatchKitExtension() + if sdk_root and is_extension: + # Adds the link flags for extensions. These flags are common for all + # extensions and provide loader and main function. + # These flags reflect the compilation options used by xcode to compile + # extensions. + xcode_version, _ = XcodeVersion() + if xcode_version < "0900": + ldflags.append("-lpkstart") + ldflags.append( + sdk_root + + "/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit" + ) + else: + ldflags.append("-e _NSExtensionMain") + ldflags.append("-fapplication-extension") + + self._Appendf(ldflags, "CLANG_CXX_LIBRARY", "-stdlib=%s") + + self.configname = None + return ldflags + + def GetLibtoolflags(self, configname): + """Returns flags that need to be passed to the static linker. + + Args: + configname: The name of the configuration to get ld flags for. + """ + self.configname = configname + libtoolflags = [] + + for libtoolflag in self._Settings().get("OTHER_LDFLAGS", []): + libtoolflags.append(libtoolflag) + # TODO(thakis): ARCHS? + + self.configname = None + return libtoolflags + + def GetPerTargetSettings(self): + """Gets a list of all the per-target settings. 
This will only fetch keys
+        whose values are the same across all configurations."""
+        first_pass = True
+        result = {}
+        for configname in sorted(self.xcode_settings.keys()):
+            if first_pass:
+                result = dict(self.xcode_settings[configname])
+                first_pass = False
+            else:
+                for key, value in self.xcode_settings[configname].items():
+                    if key not in result:
+                        continue
+                    elif result[key] != value:
+                        del result[key]
+        return result
+
+    def GetPerConfigSetting(self, setting, configname, default=None):
+        if configname in self.xcode_settings:
+            return self.xcode_settings[configname].get(setting, default)
+        else:
+            return self.GetPerTargetSetting(setting, default)
+
+    def GetPerTargetSetting(self, setting, default=None):
+        """Tries to get xcode_settings.setting from spec. Assumes that the setting
+        has the same value in all configurations and throws otherwise."""
+        is_first_pass = True
+        result = None
+        for configname in sorted(self.xcode_settings.keys()):
+            if is_first_pass:
+                result = self.xcode_settings[configname].get(setting, None)
+                is_first_pass = False
+            else:
+                assert result == self.xcode_settings[configname].get(setting, None), (
+                    "Expected per-target setting for '%s', got per-config setting "
+                    "(target %s)" % (setting, self.spec["target_name"])
+                )
+        if result is None:
+            return default
+        return result
+
+    def _GetStripPostbuilds(self, configname, output_binary, quiet):
+        """Returns a list of shell commands necessary to strip this target's
+        binary. These should be run as postbuilds before the actual postbuilds
+        run."""
+        self.configname = configname
+
+        result = []
+        if self._Test("DEPLOYMENT_POSTPROCESSING", "YES", default="NO") and self._Test(
+            "STRIP_INSTALLED_PRODUCT", "YES", default="NO"
+        ):
+
+            default_strip_style = "debugging"
+            if (
+                self.spec["type"] == "loadable_module" or self._IsIosAppExtension()
+            ) and self._IsBundle():
+                default_strip_style = "non-global"
+            elif self.spec["type"] == "executable":
+                default_strip_style = "all"
+
+            strip_style = self._Settings().get("STRIP_STYLE", default_strip_style)
+            strip_flags = {"all": "", "non-global": "-x", "debugging": "-S"}[
+                strip_style
+            ]
+
+            explicit_strip_flags = self._Settings().get("STRIPFLAGS", "")
+            if explicit_strip_flags:
+                strip_flags += " " + _NormalizeEnvVarReferences(explicit_strip_flags)
+
+            if not quiet:
+                result.append("echo STRIP\\(%s\\)" % self.spec["target_name"])
+            result.append(f"strip {strip_flags} {output_binary}")
+
+        self.configname = None
+        return result
+
+    def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
+        """Returns a list of shell commands necessary to massage this target's
+        debug information. These should be run as postbuilds before the actual
+        postbuilds run."""
+        self.configname = configname
+
+        # For static libraries, no dSYMs are created.
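+        # When the conditions below hold, the emitted postbuild is a plain
+        # shell command, e.g. (hypothetical paths):
+        #   dsymutil out/Default/Foo -o out/Default/Foo.dSYM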
+ result = [] + if ( + self._Test("GCC_GENERATE_DEBUGGING_SYMBOLS", "YES", default="YES") + and self._Test( + "DEBUG_INFORMATION_FORMAT", "dwarf-with-dsym", default="dwarf" + ) + and self.spec["type"] != "static_library" + ): + if not quiet: + result.append("echo DSYMUTIL\\(%s\\)" % self.spec["target_name"]) + result.append("dsymutil {} -o {}".format(output_binary, output + ".dSYM")) + + self.configname = None + return result + + def _GetTargetPostbuilds(self, configname, output, output_binary, quiet=False): + """Returns a list of shell commands that contain the shell commands + to run as postbuilds for this target, before the actual postbuilds.""" + # dSYMs need to build before stripping happens. + return self._GetDebugInfoPostbuilds( + configname, output, output_binary, quiet + ) + self._GetStripPostbuilds(configname, output_binary, quiet) + + def _GetIOSPostbuilds(self, configname, output_binary): + """Return a shell command to codesign the iOS output binary so it can + be deployed to a device. This should be run as the very last step of the + build.""" + if not ( + self.isIOS + and (self.spec["type"] == "executable" or self._IsXCTest()) + or self.IsIosFramework() + ): + return [] + + postbuilds = [] + product_name = self.GetFullProductName() + settings = self.xcode_settings[configname] + + # Xcode expects XCTests to be copied into the TEST_HOST dir. + if self._IsXCTest(): + source = os.path.join("${BUILT_PRODUCTS_DIR}", product_name) + test_host = os.path.dirname(settings.get("TEST_HOST")) + xctest_destination = os.path.join(test_host, "PlugIns", product_name) + postbuilds.extend([f"ditto {source} {xctest_destination}"]) + + key = self._GetIOSCodeSignIdentityKey(settings) + if not key: + return postbuilds + + # Warn for any unimplemented signing xcode keys. + unimpl = ["OTHER_CODE_SIGN_FLAGS"] + unimpl = set(unimpl) & set(self.xcode_settings[configname].keys()) + if unimpl: + print( + "Warning: Some codesign keys not implemented, ignoring: %s" + % ", ".join(sorted(unimpl)) + ) + + if self._IsXCTest(): + # For device xctests, Xcode copies two extra frameworks into $TEST_HOST. 
+            test_host = os.path.dirname(settings.get("TEST_HOST"))
+            frameworks_dir = os.path.join(test_host, "Frameworks")
+            platform_root = self._XcodePlatformPath(configname)
+            frameworks = [
+                "Developer/Library/PrivateFrameworks/IDEBundleInjection.framework",
+                "Developer/Library/Frameworks/XCTest.framework",
+            ]
+            for framework in frameworks:
+                source = os.path.join(platform_root, framework)
+                destination = os.path.join(frameworks_dir, os.path.basename(framework))
+                postbuilds.extend([f"ditto {source} {destination}"])
+
+            # Then re-sign everything with 'preserve=True'
+            postbuilds.extend(
+                [
+                    '%s code-sign-bundle "%s" "%s" "%s" "%s" %s'
+                    % (
+                        os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"),
+                        key,
+                        settings.get("CODE_SIGN_ENTITLEMENTS", ""),
+                        settings.get("PROVISIONING_PROFILE", ""),
+                        destination,
+                        True,
+                    )
+                ]
+            )
+            plugin_dir = os.path.join(test_host, "PlugIns")
+            targets = [os.path.join(plugin_dir, product_name), test_host]
+            for target in targets:
+                postbuilds.extend(
+                    [
+                        '%s code-sign-bundle "%s" "%s" "%s" "%s" %s'
+                        % (
+                            os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"),
+                            key,
+                            settings.get("CODE_SIGN_ENTITLEMENTS", ""),
+                            settings.get("PROVISIONING_PROFILE", ""),
+                            target,
+                            True,
+                        )
+                    ]
+                )
+
+        postbuilds.extend(
+            [
+                '%s code-sign-bundle "%s" "%s" "%s" "%s" %s'
+                % (
+                    os.path.join("${TARGET_BUILD_DIR}", "gyp-mac-tool"),
+                    key,
+                    settings.get("CODE_SIGN_ENTITLEMENTS", ""),
+                    settings.get("PROVISIONING_PROFILE", ""),
+                    os.path.join("${BUILT_PRODUCTS_DIR}", product_name),
+                    False,
+                )
+            ]
+        )
+        return postbuilds
+
+    def _GetIOSCodeSignIdentityKey(self, settings):
+        identity = settings.get("CODE_SIGN_IDENTITY")
+        if not identity:
+            return None
+        if identity not in XcodeSettings._codesigning_key_cache:
+            output = subprocess.check_output(
+                ["security", "find-identity", "-p", "codesigning", "-v"]
+            ).decode("utf-8")
+            for line in output.splitlines():
+                if identity in line:
+                    fingerprint = line.split()[1]
+                    cache = XcodeSettings._codesigning_key_cache
+                    assert identity not in cache or fingerprint == cache[identity], (
+                        "Multiple codesigning fingerprints for identity: %s" % identity
+                    )
+                    XcodeSettings._codesigning_key_cache[identity] = fingerprint
+        return XcodeSettings._codesigning_key_cache.get(identity, "")
+
+    def AddImplicitPostbuilds(
+        self, configname, output, output_binary, postbuilds=[], quiet=False
+    ):
+        """Returns a list of shell commands that should run before and after
+        |postbuilds|."""
+        assert output_binary is not None
+        pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
+        post = self._GetIOSPostbuilds(configname, output_binary)
+        return pre + postbuilds + post
+
+    def _AdjustLibrary(self, library, config_name=None):
+        if library.endswith(".framework"):
+            l_flag = "-framework " + os.path.splitext(os.path.basename(library))[0]
+        else:
+            m = self.library_re.match(library)
+            if m:
+                l_flag = "-l" + m.group(1)
+            else:
+                l_flag = library
+
+        sdk_root = self._SdkPath(config_name)
+        if not sdk_root:
+            sdk_root = ""
+        # Xcode 7 started shipping with ".tbd" (text-based stub) files instead
+        # of ".dylib" files, without providing real support for them. What it
+        # does for "/usr/lib" libraries is emit "-L/usr/lib -lname", which is
+        # dependent on library order and causes collisions when building Chrome.
+        #
+        # Instead, substitute ".tbd" for ".dylib" in the generated project when
+        # the following conditions are both true:
+        # - the library is referenced in the gyp file as "$(SDKROOT)/**/*.dylib",
+        # - the ".dylib" file does not exist but a ".tbd" file does.
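+        # For example (hypothetical library): an entry of
+        # "$(SDKROOT)/usr/lib/libz.dylib" is rewritten to the corresponding
+        # "<sdk_root>/usr/lib/libz.tbd" when only the .tbd stub exists on disk.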
+ library = l_flag.replace("$(SDKROOT)", sdk_root) + if l_flag.startswith("$(SDKROOT)"): + basename, ext = os.path.splitext(library) + if ext == ".dylib" and not os.path.exists(library): + tbd_library = basename + ".tbd" + if os.path.exists(tbd_library): + library = tbd_library + return library + + def AdjustLibraries(self, libraries, config_name=None): + """Transforms entries like 'Cocoa.framework' in libraries into entries like + '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc. + """ + libraries = [self._AdjustLibrary(library, config_name) for library in libraries] + return libraries + + def _BuildMachineOSBuild(self): + return GetStdout(["sw_vers", "-buildVersion"]) + + def _XcodeIOSDeviceFamily(self, configname): + family = self.xcode_settings[configname].get("TARGETED_DEVICE_FAMILY", "1") + return [int(x) for x in family.split(",")] + + def GetExtraPlistItems(self, configname=None): + """Returns a dictionary with extra items to insert into Info.plist.""" + if configname not in XcodeSettings._plist_cache: + cache = {} + cache["BuildMachineOSBuild"] = self._BuildMachineOSBuild() + + xcode_version, xcode_build = XcodeVersion() + cache["DTXcode"] = xcode_version + cache["DTXcodeBuild"] = xcode_build + compiler = self.xcode_settings[configname].get("GCC_VERSION") + if compiler is not None: + cache["DTCompiler"] = compiler + + sdk_root = self._SdkRoot(configname) + if not sdk_root: + sdk_root = self._DefaultSdkRoot() + sdk_version = self._GetSdkVersionInfoItem(sdk_root, "--show-sdk-version") + cache["DTSDKName"] = sdk_root + (sdk_version or "") + if xcode_version >= "0720": + cache["DTSDKBuild"] = self._GetSdkVersionInfoItem( + sdk_root, "--show-sdk-build-version" + ) + elif xcode_version >= "0430": + cache["DTSDKBuild"] = sdk_version + else: + cache["DTSDKBuild"] = cache["BuildMachineOSBuild"] + + if self.isIOS: + cache["MinimumOSVersion"] = self.xcode_settings[configname].get( + "IPHONEOS_DEPLOYMENT_TARGET" + ) + cache["DTPlatformName"] = sdk_root + cache["DTPlatformVersion"] = sdk_version + + if configname.endswith("iphoneos"): + cache["CFBundleSupportedPlatforms"] = ["iPhoneOS"] + cache["DTPlatformBuild"] = cache["DTSDKBuild"] + else: + cache["CFBundleSupportedPlatforms"] = ["iPhoneSimulator"] + # This is weird, but Xcode sets DTPlatformBuild to an empty field + # for simulator builds. + cache["DTPlatformBuild"] = "" + XcodeSettings._plist_cache[configname] = cache + + # Include extra plist items that are per-target, not per global + # XcodeSettings. + items = dict(XcodeSettings._plist_cache[configname]) + if self.isIOS: + items["UIDeviceFamily"] = self._XcodeIOSDeviceFamily(configname) + return items + + def _DefaultSdkRoot(self): + """Returns the default SDKROOT to use. + + Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode + project, then the environment variable was empty. Starting with this + version, Xcode uses the name of the newest SDK installed. 
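+
+        For example (hypothetical installation): on a machine whose newest
+        installed SDK is macOS 10.12, this returns "macosx10.12".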
+ """ + xcode_version, _ = XcodeVersion() + if xcode_version < "0500": + return "" + default_sdk_path = self._XcodeSdkPath("") + default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path) + if default_sdk_root: + return default_sdk_root + try: + all_sdks = GetStdout(["xcodebuild", "-showsdks"]) + except GypError: + # If xcodebuild fails, there will be no valid SDKs + return "" + for line in all_sdks.splitlines(): + items = line.split() + if len(items) >= 3 and items[-2] == "-sdk": + sdk_root = items[-1] + sdk_path = self._XcodeSdkPath(sdk_root) + if sdk_path == default_sdk_path: + return sdk_root + return "" + + +class MacPrefixHeader: + """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature. + + This feature consists of several pieces: + * If GCC_PREFIX_HEADER is present, all compilations in that project get an + additional |-include path_to_prefix_header| cflag. + * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is + instead compiled, and all other compilations in the project get an + additional |-include path_to_compiled_header| instead. + + Compiled prefix headers have the extension gch. There is one gch file for + every language used in the project (c, cc, m, mm), since gch files for + different languages aren't compatible. + + gch files themselves are built with the target's normal cflags, but they + obviously don't get the |-include| flag. Instead, they need a -x flag that + describes their language. + + All o files in the target need to depend on the gch file, to make sure + it's built before any o file is built. + + This class helps with some of these tasks, but it needs help from the build + system for writing dependencies to the gch files, for writing build commands + for the gch files, and for figuring out the location of the gch files. + """ + + def __init__( + self, xcode_settings, gyp_path_to_build_path, gyp_path_to_build_output + ): + """If xcode_settings is None, all methods on this class are no-ops. + + Args: + gyp_path_to_build_path: A function that takes a gyp-relative path, + and returns a path relative to the build directory. + gyp_path_to_build_output: A function that takes a gyp-relative path and + a language code ('c', 'cc', 'm', or 'mm'), and that returns a path + to where the output of precompiling that path for that language + should be placed (without the trailing '.gch'). + """ + # This doesn't support per-configuration prefix headers. Good enough + # for now. + self.header = None + self.compile_headers = False + if xcode_settings: + self.header = xcode_settings.GetPerTargetSetting("GCC_PREFIX_HEADER") + self.compile_headers = ( + xcode_settings.GetPerTargetSetting( + "GCC_PRECOMPILE_PREFIX_HEADER", default="NO" + ) + != "NO" + ) + self.compiled_headers = {} + if self.header: + if self.compile_headers: + for lang in ["c", "cc", "m", "mm"]: + self.compiled_headers[lang] = gyp_path_to_build_output( + self.header, lang + ) + self.header = gyp_path_to_build_path(self.header) + + def _CompiledHeader(self, lang, arch): + assert self.compile_headers + h = self.compiled_headers[lang] + if arch: + h += "." 
+ arch
+        return h
+
+    def GetInclude(self, lang, arch=None):
+        """Gets the cflags to include the prefix header for language |lang|."""
+        if self.compile_headers and lang in self.compiled_headers:
+            return "-include %s" % self._CompiledHeader(lang, arch)
+        elif self.header:
+            return "-include %s" % self.header
+        else:
+            return ""
+
+    def _Gch(self, lang, arch):
+        """Returns the actual file name of the compiled prefix header for
+        language |lang|."""
+        assert self.compile_headers
+        return self._CompiledHeader(lang, arch) + ".gch"
+
+    def GetObjDependencies(self, sources, objs, arch=None):
+        """Given a list of source files and the corresponding object files, returns
+        a list of (source, object, gch) tuples, where |gch| is the build-directory
+        relative path to the gch file each object file depends on. |sources[i]|
+        has to be the source file belonging to |objs[i]|."""
+        if not self.header or not self.compile_headers:
+            return []
+
+        result = []
+        for source, obj in zip(sources, objs):
+            ext = os.path.splitext(source)[1]
+            lang = {
+                ".c": "c",
+                ".cpp": "cc",
+                ".cc": "cc",
+                ".cxx": "cc",
+                ".m": "m",
+                ".mm": "mm",
+            }.get(ext, None)
+            if lang:
+                result.append((source, obj, self._Gch(lang, arch)))
+        return result
+
+    def GetPchBuildCommands(self, arch=None):
+        """Returns [(path_to_gch, language_flag, language, header)].
+        |path_to_gch| and |header| are relative to the build directory.
+        """
+        if not self.header or not self.compile_headers:
+            return []
+        return [
+            (self._Gch("c", arch), "-x c-header", "c", self.header),
+            (self._Gch("cc", arch), "-x c++-header", "cc", self.header),
+            (self._Gch("m", arch), "-x objective-c-header", "m", self.header),
+            (self._Gch("mm", arch), "-x objective-c++-header", "mm", self.header),
+        ]
+
+
+def XcodeVersion():
+    """Returns a tuple of version and build version of installed Xcode."""
+    # `xcodebuild -version` output looks like
+    #    Xcode 4.6.3
+    #    Build version 4H1503
+    # or like
+    #    Xcode 3.2.6
+    #    Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
+    #    BuildVersion: 10M2518
+    # Convert that to ('0463', '4H1503') or ('0326', '10M2518').
+    global XCODE_VERSION_CACHE
+    if XCODE_VERSION_CACHE:
+        return XCODE_VERSION_CACHE
+    version = ""
+    build = ""
+    try:
+        version_list = GetStdoutQuiet(["xcodebuild", "-version"]).splitlines()
+        # In some circumstances xcodebuild exits 0 but doesn't return
+        # the right results; for example, a user on 10.7 or 10.8 with
+        # a bogus path set via xcode-select.
+        # In that case this may be a CLT-only install, so fall back to
+        # checking that version.
+        if len(version_list) < 2:
+            raise GypError("xcodebuild returned unexpected results")
+        version = version_list[0].split()[-1]  # Last word on first line
+        build = version_list[-1].split()[-1]  # Last word on last line
+    except GypError:  # Xcode not installed so look for Xcode Command Line Tools
+        version = CLTVersion()  # macOS Catalina returns 11.0.0.0.1.1567737322
+        if not version:
+            raise GypError("No Xcode or CLT version detected!")
+    # Be careful to convert "4.2.3" to "0423" and "11.0.0" to "1100":
+    version = version.split(".")[:3]  # Just major, minor, micro
+    version[0] = version[0].zfill(2)  # Add a leading zero if major is one digit
+    version = ("".join(version) + "00")[:4]  # Limit to exactly four characters
+    XCODE_VERSION_CACHE = (version, build)
+    return XCODE_VERSION_CACHE
+
+
+# This function ported from the logic in Homebrew's CLT version check
+def CLTVersion():
+    """Returns the version of command-line tools from pkgutil."""
+    # pkgutil output looks like
+    #   package-id: com.apple.pkg.CLTools_Executables
+    #   version: 5.0.1.0.1.1382131676
+    #   volume: /
+    #   location: /
+    #   install-time: 1382544035
+    #   groups: com.apple.FindSystemFiles.pkg-group
+    #           com.apple.DevToolsBoth.pkg-group
+    #           com.apple.DevToolsNonRelocatableShared.pkg-group
+    STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
+    FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
+    MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
+
+    regex = re.compile("version: (?P<version>.+)")
+    for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
+        try:
+            output = GetStdout(["/usr/sbin/pkgutil", "--pkg-info", key])
+            return re.search(regex, output).groupdict()["version"]
+        except GypError:
+            continue
+
+    regex = re.compile(r"Command Line Tools for Xcode\s+(?P<version>\S+)")
+    try:
+        output = GetStdout(["/usr/sbin/softwareupdate", "--history"])
+        return re.search(regex, output).groupdict()["version"]
+    except GypError:
+        return None
+
+
+def GetStdoutQuiet(cmdlist):
+    """Returns the content of standard output returned by invoking |cmdlist|.
+    Ignores stderr.
+    Raises |GypError| if the command returns with a non-zero exit code."""
+    job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    out = job.communicate()[0].decode("utf-8")
+    if job.returncode != 0:
+        raise GypError("Error %d running %s" % (job.returncode, cmdlist[0]))
+    return out.rstrip("\n")
+
+
+def GetStdout(cmdlist):
+    """Returns the content of standard output returned by invoking |cmdlist|.
+    Raises |GypError| if the command returns with a non-zero exit code."""
+    job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
+    out = job.communicate()[0].decode("utf-8")
+    if job.returncode != 0:
+        sys.stderr.write(out + "\n")
+        raise GypError("Error %d running %s" % (job.returncode, cmdlist[0]))
+    return out.rstrip("\n")
+
+
+def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
+    """Merges the global xcode_settings dictionary into each configuration of the
+    target represented by spec. For keys that are both in the global and the local
+    xcode_settings dict, the local key gets precedence.
+    """
+    # The xcode generator special-cases global xcode_settings and does something
+    # that amounts to merging in the global xcode_settings into each local
+    # xcode_settings dict.
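+    # Merge semantics sketch (hypothetical dicts): a global xcode_settings of
+    # {"A": "1", "B": "2"} merged into a config whose local xcode_settings is
+    # {"B": "3"} yields {"A": "1", "B": "3"} -- the local key wins.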
+    global_xcode_settings = global_dict.get("xcode_settings", {})
+    for config in spec["configurations"].values():
+        if "xcode_settings" in config:
+            new_settings = global_xcode_settings.copy()
+            new_settings.update(config["xcode_settings"])
+            config["xcode_settings"] = new_settings
+
+
+def IsMacBundle(flavor, spec):
+    """Returns whether |spec| should be treated as a bundle.
+
+    Bundles are directories with a certain subdirectory structure, instead of
+    just a single file. Bundle rules not only produce a binary but also package
+    resources into that directory."""
+    is_mac_bundle = (
+        int(spec.get("mac_xctest_bundle", 0)) != 0
+        or int(spec.get("mac_xcuitest_bundle", 0)) != 0
+        or (int(spec.get("mac_bundle", 0)) != 0 and flavor == "mac")
+    )
+
+    if is_mac_bundle:
+        assert spec["type"] != "none", (
+            'mac_bundle targets cannot have type none (target "%s")'
+            % spec["target_name"]
+        )
+    return is_mac_bundle
+
+
+def GetMacBundleResources(product_dir, xcode_settings, resources):
+    """Yields (output, resource) pairs for every resource in |resources|.
+    Only call this for mac bundle targets.
+
+    Args:
+        product_dir: Path to the directory containing the output bundle,
+            relative to the build directory.
+        xcode_settings: The XcodeSettings of the current target.
+        resources: A list of bundle resources, relative to the build directory.
+    """
+    dest = os.path.join(product_dir, xcode_settings.GetBundleResourceFolder())
+    for res in resources:
+        output = dest
+
+        # The make generator doesn't support it, so forbid it everywhere
+        # to keep the generators more interchangeable.
+        assert " " not in res, "Spaces in resource filenames not supported (%s)" % res
+
+        # Split into (path,file).
+        res_parts = os.path.split(res)
+
+        # Now split the path into (prefix,maybe.lproj).
+        lproj_parts = os.path.split(res_parts[0])
+        # If the resource lives in a .lproj bundle, add that to the destination.
+        if lproj_parts[1].endswith(".lproj"):
+            output = os.path.join(output, lproj_parts[1])
+
+        output = os.path.join(output, res_parts[1])
+        # Compiled XIB files are referred to by .nib.
+        if output.endswith(".xib"):
+            output = os.path.splitext(output)[0] + ".nib"
+        # Compiled storyboard files are referred to by .storyboardc.
+        if output.endswith(".storyboard"):
+            output = os.path.splitext(output)[0] + ".storyboardc"
+
+        yield output, res
+
+
+def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
+    """Returns (info_plist, dest_plist, defines, extra_env), where:
+    * |info_plist| is the source plist path, relative to the
+      build directory,
+    * |dest_plist| is the destination plist path, relative to the
+      build directory,
+    * |defines| is a list of preprocessor defines (empty if the plist
+      shouldn't be preprocessed),
+    * |extra_env| is a dict of env variables that should be exported when
+      invoking |mac_tool copy-info-plist|.
+
+    Only call this for mac bundle targets.
+
+    Args:
+        product_dir: Path to the directory containing the output bundle,
+            relative to the build directory.
+        xcode_settings: The XcodeSettings of the current target.
+        gyp_path_to_build_path: A function that converts paths relative to the
+            current gyp file to paths relative to the build directory.
+    """
+    info_plist = xcode_settings.GetPerTargetSetting("INFOPLIST_FILE")
+    if not info_plist:
+        return None, None, [], {}
+
+    # The make generator doesn't support it, so forbid it everywhere
+    # to keep the generators more interchangeable.
+ assert " " not in info_plist, ( + "Spaces in Info.plist filenames not supported (%s)" % info_plist + ) + + info_plist = gyp_path_to_build_path(info_plist) + + # If explicitly set to preprocess the plist, invoke the C preprocessor and + # specify any defines as -D flags. + if ( + xcode_settings.GetPerTargetSetting("INFOPLIST_PREPROCESS", default="NO") + == "YES" + ): + # Create an intermediate file based on the path. + defines = shlex.split( + xcode_settings.GetPerTargetSetting( + "INFOPLIST_PREPROCESSOR_DEFINITIONS", default="" + ) + ) + else: + defines = [] + + dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath()) + extra_env = xcode_settings.GetPerTargetSettings() + + return info_plist, dest_plist, defines, extra_env + + +def _GetXcodeEnv( + xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None +): + """Return the environment variables that Xcode would set. See + http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153 + for a full list. + + Args: + xcode_settings: An XcodeSettings object. If this is None, this function + returns an empty dict. + built_products_dir: Absolute path to the built products dir. + srcroot: Absolute path to the source root. + configuration: The build configuration name. + additional_settings: An optional dict with more values to add to the + result. + """ + + if not xcode_settings: + return {} + + # This function is considered a friend of XcodeSettings, so let it reach into + # its implementation details. + spec = xcode_settings.spec + + # These are filled in on an as-needed basis. + env = { + "BUILT_FRAMEWORKS_DIR": built_products_dir, + "BUILT_PRODUCTS_DIR": built_products_dir, + "CONFIGURATION": configuration, + "PRODUCT_NAME": xcode_settings.GetProductName(), + # For FULL_PRODUCT_NAME see: + # /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec # noqa: E501 + "SRCROOT": srcroot, + "SOURCE_ROOT": "${SRCROOT}", + # This is not true for static libraries, but currently the env is only + # written for bundles: + "TARGET_BUILD_DIR": built_products_dir, + "TEMP_DIR": "${TMPDIR}", + "XCODE_VERSION_ACTUAL": XcodeVersion()[0], + } + if xcode_settings.GetPerConfigSetting("SDKROOT", configuration): + env["SDKROOT"] = xcode_settings._SdkPath(configuration) + else: + env["SDKROOT"] = "" + + if xcode_settings.mac_toolchain_dir: + env["DEVELOPER_DIR"] = xcode_settings.mac_toolchain_dir + + if spec["type"] in ( + "executable", + "static_library", + "shared_library", + "loadable_module", + ): + env["EXECUTABLE_NAME"] = xcode_settings.GetExecutableName() + env["EXECUTABLE_PATH"] = xcode_settings.GetExecutablePath() + env["FULL_PRODUCT_NAME"] = xcode_settings.GetFullProductName() + mach_o_type = xcode_settings.GetMachOType() + if mach_o_type: + env["MACH_O_TYPE"] = mach_o_type + env["PRODUCT_TYPE"] = xcode_settings.GetProductType() + if xcode_settings._IsBundle(): + # xcodeproj_file.py sets the same Xcode subfolder value for this as for + # FRAMEWORKS_FOLDER_PATH so Xcode builds will actually use FFP's value. 
+ env["BUILT_FRAMEWORKS_DIR"] = os.path.join( + built_products_dir + os.sep + xcode_settings.GetBundleFrameworksFolderPath() + ) + env["CONTENTS_FOLDER_PATH"] = xcode_settings.GetBundleContentsFolderPath() + env["EXECUTABLE_FOLDER_PATH"] = xcode_settings.GetBundleExecutableFolderPath() + env[ + "UNLOCALIZED_RESOURCES_FOLDER_PATH" + ] = xcode_settings.GetBundleResourceFolder() + env["JAVA_FOLDER_PATH"] = xcode_settings.GetBundleJavaFolderPath() + env["FRAMEWORKS_FOLDER_PATH"] = xcode_settings.GetBundleFrameworksFolderPath() + env[ + "SHARED_FRAMEWORKS_FOLDER_PATH" + ] = xcode_settings.GetBundleSharedFrameworksFolderPath() + env[ + "SHARED_SUPPORT_FOLDER_PATH" + ] = xcode_settings.GetBundleSharedSupportFolderPath() + env["PLUGINS_FOLDER_PATH"] = xcode_settings.GetBundlePlugInsFolderPath() + env["XPCSERVICES_FOLDER_PATH"] = xcode_settings.GetBundleXPCServicesFolderPath() + env["INFOPLIST_PATH"] = xcode_settings.GetBundlePlistPath() + env["WRAPPER_NAME"] = xcode_settings.GetWrapperName() + + install_name = xcode_settings.GetInstallName() + if install_name: + env["LD_DYLIB_INSTALL_NAME"] = install_name + install_name_base = xcode_settings.GetInstallNameBase() + if install_name_base: + env["DYLIB_INSTALL_NAME_BASE"] = install_name_base + xcode_version, _ = XcodeVersion() + if xcode_version >= "0500" and not env.get("SDKROOT"): + sdk_root = xcode_settings._SdkRoot(configuration) + if not sdk_root: + sdk_root = xcode_settings._XcodeSdkPath("") + if sdk_root is None: + sdk_root = "" + env["SDKROOT"] = sdk_root + + if not additional_settings: + additional_settings = {} + else: + # Flatten lists to strings. + for k in additional_settings: + if not isinstance(additional_settings[k], str): + additional_settings[k] = " ".join(additional_settings[k]) + additional_settings.update(env) + + for k in additional_settings: + additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k]) + + return additional_settings + + +def _NormalizeEnvVarReferences(str): + """Takes a string containing variable references in the form ${FOO}, $(FOO), + or $FOO, and returns a string with all variable references in the form ${FOO}. + """ + # $FOO -> ${FOO} + str = re.sub(r"\$([a-zA-Z_][a-zA-Z0-9_]*)", r"${\1}", str) + + # $(FOO) -> ${FOO} + matches = re.findall(r"(\$\(([a-zA-Z0-9\-_]+)\))", str) + for match in matches: + to_replace, variable = match + assert "$(" not in match, "$($(FOO)) variables not supported: " + match + str = str.replace(to_replace, "${" + variable + "}") + + return str + + +def ExpandEnvVars(string, expansions): + """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the + expansions list. If the variable expands to something that references + another variable, this variable is expanded as well if it's in env -- + until no variables present in env are left.""" + for k, v in reversed(expansions): + string = string.replace("${" + k + "}", v) + string = string.replace("$(" + k + ")", v) + string = string.replace("$" + k, v) + return string + + +def _TopologicallySortedEnvVarKeys(env): + """Takes a dict |env| whose values are strings that can refer to other keys, + for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of + env such that key2 is after key1 in L if env[key2] refers to env[key1]. + + Throws an Exception in case of dependency cycles. + """ + # Since environment variables can refer to other variables, the evaluation + # order is important. Below is the logic to compute the dependency graph + # and sort it. 
+    regex = re.compile(r"\$\{([a-zA-Z0-9\-_]+)\}")
+
+    def GetEdges(node):
+        # Use a definition of edges such that user_of_variable -> used_variable.
+        # This happens to be easier in this case, since a variable's
+        # definition contains all variables it references in a single string.
+        # We can then reverse the result of the topological sort at the end.
+        # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
+        matches = {v for v in regex.findall(env[node]) if v in env}
+        for dependee in matches:
+            assert "${" not in dependee, "Nested variables not supported: " + dependee
+        return matches
+
+    try:
+        # Topologically sort, and then reverse, because we used an edge definition
+        # that's inverted from the expected result of this function (see comment
+        # above).
+        order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
+        order.reverse()
+        return order
+    except gyp.common.CycleError as e:
+        raise GypError(
+            "Xcode environment variables are cyclically dependent: " + str(e.nodes)
+        )
+
+
+def GetSortedXcodeEnv(
+    xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None
+):
+    env = _GetXcodeEnv(
+        xcode_settings, built_products_dir, srcroot, configuration, additional_settings
+    )
+    return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
+
+
+def GetSpecPostbuildCommands(spec, quiet=False):
+    """Returns the list of postbuilds explicitly defined on |spec|, in a form
+    executable by a shell."""
+    postbuilds = []
+    for postbuild in spec.get("postbuilds", []):
+        if not quiet:
+            postbuilds.append(
+                "echo POSTBUILD\\(%s\\) %s"
+                % (spec["target_name"], postbuild["postbuild_name"])
+            )
+        postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild["action"]))
+    return postbuilds
+
+
+def _HasIOSTarget(targets):
+    """Returns true if any target contains the iOS specific key
+    IPHONEOS_DEPLOYMENT_TARGET."""
+    for target_dict in targets.values():
+        for config in target_dict["configurations"].values():
+            if config.get("xcode_settings", {}).get("IPHONEOS_DEPLOYMENT_TARGET"):
+                return True
+    return False
+
+
+def _AddIOSDeviceConfigurations(targets):
+    """Clone all targets and append -iphoneos to the name. Configure these targets
+    to build for iOS devices and use correct architectures for those builds."""
+    for target_dict in targets.values():
+        toolset = target_dict["toolset"]
+        configs = target_dict["configurations"]
+        for config_name, simulator_config_dict in dict(configs).items():
+            iphoneos_config_dict = copy.deepcopy(simulator_config_dict)
+            configs[config_name + "-iphoneos"] = iphoneos_config_dict
+            configs[config_name + "-iphonesimulator"] = simulator_config_dict
+            if toolset == "target":
+                simulator_config_dict["xcode_settings"]["SDKROOT"] = "iphonesimulator"
+                iphoneos_config_dict["xcode_settings"]["SDKROOT"] = "iphoneos"
+    return targets
+
+
+def CloneConfigurationForDeviceAndEmulator(target_dicts):
+    """If |target_dicts| contains any iOS targets, automatically create -iphoneos
+    targets for iOS device builds."""
+    if _HasIOSTarget(target_dicts):
+        return _AddIOSDeviceConfigurations(target_dicts)
+    return target_dicts
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py b/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
new file mode 100644
index 0000000..bb74eac
--- /dev/null
+++ b/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
@@ -0,0 +1,302 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+ +"""Xcode-ninja wrapper project file generator. + +This updates the data structures passed to the Xcode gyp generator to build +with ninja instead. The Xcode project itself is transformed into a list of +executable targets, each with a build step to build with ninja, and a target +with every source and resource file. This appears to sidestep some of the +major performance headaches experienced using complex projects and large number +of targets within Xcode. +""" + +import errno +import gyp.generator.ninja +import os +import re +import xml.sax.saxutils + + +def _WriteWorkspace(main_gyp, sources_gyp, params): + """ Create a workspace to wrap main and sources gyp paths. """ + (build_file_root, build_file_ext) = os.path.splitext(main_gyp) + workspace_path = build_file_root + ".xcworkspace" + options = params["options"] + if options.generator_output: + workspace_path = os.path.join(options.generator_output, workspace_path) + try: + os.makedirs(workspace_path) + except OSError as e: + if e.errno != errno.EEXIST: + raise + output_string = ( + '\n' + '\n' + ) + for gyp_name in [main_gyp, sources_gyp]: + name = os.path.splitext(os.path.basename(gyp_name))[0] + ".xcodeproj" + name = xml.sax.saxutils.quoteattr("group:" + name) + output_string += " \n" % name + output_string += "\n" + + workspace_file = os.path.join(workspace_path, "contents.xcworkspacedata") + + try: + with open(workspace_file) as input_file: + input_string = input_file.read() + if input_string == output_string: + return + except OSError: + # Ignore errors if the file doesn't exist. + pass + + with open(workspace_file, "w") as output_file: + output_file.write(output_string) + + +def _TargetFromSpec(old_spec, params): + """ Create fake target for xcode-ninja wrapper. """ + # Determine ninja top level build dir (e.g. /path/to/out). + ninja_toplevel = None + jobs = 0 + if params: + options = params["options"] + ninja_toplevel = os.path.join( + options.toplevel_dir, gyp.generator.ninja.ComputeOutputDir(params) + ) + jobs = params.get("generator_flags", {}).get("xcode_ninja_jobs", 0) + + target_name = old_spec.get("target_name") + product_name = old_spec.get("product_name", target_name) + product_extension = old_spec.get("product_extension") + + ninja_target = {} + ninja_target["target_name"] = target_name + ninja_target["product_name"] = product_name + if product_extension: + ninja_target["product_extension"] = product_extension + ninja_target["toolset"] = old_spec.get("toolset") + ninja_target["default_configuration"] = old_spec.get("default_configuration") + ninja_target["configurations"] = {} + + # Tell Xcode to look in |ninja_toplevel| for build products. 
+ new_xcode_settings = {} + if ninja_toplevel: + new_xcode_settings["CONFIGURATION_BUILD_DIR"] = ( + "%s/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)" % ninja_toplevel + ) + + if "configurations" in old_spec: + for config in old_spec["configurations"]: + old_xcode_settings = old_spec["configurations"][config].get( + "xcode_settings", {} + ) + if "IPHONEOS_DEPLOYMENT_TARGET" in old_xcode_settings: + new_xcode_settings["CODE_SIGNING_REQUIRED"] = "NO" + new_xcode_settings["IPHONEOS_DEPLOYMENT_TARGET"] = old_xcode_settings[ + "IPHONEOS_DEPLOYMENT_TARGET" + ] + for key in ["BUNDLE_LOADER", "TEST_HOST"]: + if key in old_xcode_settings: + new_xcode_settings[key] = old_xcode_settings[key] + + ninja_target["configurations"][config] = {} + ninja_target["configurations"][config][ + "xcode_settings" + ] = new_xcode_settings + + ninja_target["mac_bundle"] = old_spec.get("mac_bundle", 0) + ninja_target["mac_xctest_bundle"] = old_spec.get("mac_xctest_bundle", 0) + ninja_target["ios_app_extension"] = old_spec.get("ios_app_extension", 0) + ninja_target["ios_watchkit_extension"] = old_spec.get("ios_watchkit_extension", 0) + ninja_target["ios_watchkit_app"] = old_spec.get("ios_watchkit_app", 0) + ninja_target["type"] = old_spec["type"] + if ninja_toplevel: + ninja_target["actions"] = [ + { + "action_name": "Compile and copy %s via ninja" % target_name, + "inputs": [], + "outputs": [], + "action": [ + "env", + "PATH=%s" % os.environ["PATH"], + "ninja", + "-C", + new_xcode_settings["CONFIGURATION_BUILD_DIR"], + target_name, + ], + "message": "Compile and copy %s via ninja" % target_name, + }, + ] + if jobs > 0: + ninja_target["actions"][0]["action"].extend(("-j", jobs)) + return ninja_target + + +def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec): + """Limit targets for Xcode wrapper. + + Xcode sometimes performs poorly with too many targets, so only include + proper executable targets, with filters to customize. + Arguments: + target_extras: Regular expression to always add, matching any target. + executable_target_pattern: Regular expression limiting executable targets. + spec: Specifications for target. + """ + target_name = spec.get("target_name") + # Always include targets matching target_extras. + if target_extras is not None and re.search(target_extras, target_name): + return True + + # Otherwise just show executable targets and xc_tests. + if int(spec.get("mac_xctest_bundle", 0)) != 0 or ( + spec.get("type", "") == "executable" + and spec.get("product_extension", "") != "bundle" + ): + + # If there is a filter and the target does not match, exclude the target. + if executable_target_pattern is not None: + if not re.search(executable_target_pattern, target_name): + return False + return True + return False + + +def CreateWrapper(target_list, target_dicts, data, params): + """Initialize targets for the ninja wrapper. + + This sets up the necessary variables in the targets to generate Xcode projects + that use ninja as an external builder. + Arguments: + target_list: List of target pairs: 'base/base.gyp:base'. + target_dicts: Dict of target properties keyed on target pair. + data: Dict of flattened build files keyed on gyp path. + params: Dict of global options for gyp. + """ + orig_gyp = params["build_files"][0] + for gyp_name, gyp_dict in data.items(): + if gyp_name == orig_gyp: + depth = gyp_dict["_DEPTH"] + + # Check for custom main gyp name, otherwise use the default CHROMIUM_GYP_FILE + # and prepend .ninja before the .gyp extension. 
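+    # For example, "foo/bar.gyp" becomes "foo/bar.ninja.gyp" unless the
+    # xcode_ninja_main_gyp generator flag supplies an explicit name.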
+    generator_flags = params.get("generator_flags", {})
+    main_gyp = generator_flags.get("xcode_ninja_main_gyp", None)
+    if main_gyp is None:
+        (build_file_root, build_file_ext) = os.path.splitext(orig_gyp)
+        main_gyp = build_file_root + ".ninja" + build_file_ext
+
+    # Create new |target_list|, |target_dicts| and |data| data structures.
+    new_target_list = []
+    new_target_dicts = {}
+    new_data = {}
+
+    # Set base keys needed for |data|.
+    new_data[main_gyp] = {}
+    new_data[main_gyp]["included_files"] = []
+    new_data[main_gyp]["targets"] = []
+    new_data[main_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {})
+
+    # Normally the xcode-ninja generator includes only valid executable targets.
+    # If |xcode_ninja_executable_target_pattern| is set, that list is reduced to
+    # executable targets that match the pattern. (Default all)
+    executable_target_pattern = generator_flags.get(
+        "xcode_ninja_executable_target_pattern", None
+    )
+
+    # For including other non-executable targets, add the matching target name
+    # to the |xcode_ninja_target_pattern| regular expression. (Default none)
+    target_extras = generator_flags.get("xcode_ninja_target_pattern", None)
+
+    for old_qualified_target in target_list:
+        spec = target_dicts[old_qualified_target]
+        if IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
+            # Add to new_target_list.
+            target_name = spec.get("target_name")
+            new_target_name = f"{main_gyp}:{target_name}#target"
+            new_target_list.append(new_target_name)
+
+            # Add to new_target_dicts.
+            new_target_dicts[new_target_name] = _TargetFromSpec(spec, params)
+
+            # Add to new_data.
+            for old_target in data[old_qualified_target.split(":")[0]]["targets"]:
+                if old_target["target_name"] == target_name:
+                    new_data_target = {}
+                    new_data_target["target_name"] = old_target["target_name"]
+                    new_data_target["toolset"] = old_target["toolset"]
+                    new_data[main_gyp]["targets"].append(new_data_target)
+
+    # Create sources target.
+    sources_target_name = "sources_for_indexing"
+    sources_target = _TargetFromSpec(
+        {
+            "target_name": sources_target_name,
+            "toolset": "target",
+            "default_configuration": "Default",
+            "mac_bundle": "0",
+            "type": "executable",
+        },
+        None,
+    )
+
+    # Tell Xcode to look everywhere for headers.
+    sources_target["configurations"] = {"Default": {"include_dirs": [depth]}}
+
+    # Put excluded files into the sources target so they can be opened in Xcode.
+    skip_excluded_files = not generator_flags.get(
+        "xcode_ninja_list_excluded_files", True
+    )
+
+    sources = []
+    for target, target_dict in target_dicts.items():
+        base = os.path.dirname(target)
+        files = target_dict.get("sources", []) + target_dict.get(
+            "mac_bundle_resources", []
+        )
+
+        if not skip_excluded_files:
+            files.extend(
+                target_dict.get("sources_excluded", [])
+                + target_dict.get("mac_bundle_resources_excluded", [])
+            )
+
+        for action in target_dict.get("actions", []):
+            files.extend(action.get("inputs", []))
+
+            if not skip_excluded_files:
+                files.extend(action.get("inputs_excluded", []))
+
+        # Remove files starting with $. These are mostly intermediate files for the
+        # build system.
+        files = [file for file in files if not file.startswith("$")]
+
+        # Make sources relative to root build file.
+        relative_path = os.path.dirname(main_gyp)
+        sources += [
+            os.path.relpath(os.path.join(base, file), relative_path) for file in files
+        ]
+
+    sources_target["sources"] = sorted(set(sources))
+
+    # Put sources_to_index in its own gyp.
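+    # For example (illustrative), with main_gyp "foo/bar.ninja.gyp" this
+    # produces "foo/sources_for_indexing.gyp".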
+ sources_gyp = os.path.join(os.path.dirname(main_gyp), sources_target_name + ".gyp") + fully_qualified_target_name = f"{sources_gyp}:{sources_target_name}#target" + + # Add to new_target_list, new_target_dicts and new_data. + new_target_list.append(fully_qualified_target_name) + new_target_dicts[fully_qualified_target_name] = sources_target + new_data_target = {} + new_data_target["target_name"] = sources_target["target_name"] + new_data_target["_DEPTH"] = depth + new_data_target["toolset"] = "target" + new_data[sources_gyp] = {} + new_data[sources_gyp]["targets"] = [] + new_data[sources_gyp]["included_files"] = [] + new_data[sources_gyp]["xcode_settings"] = data[orig_gyp].get("xcode_settings", {}) + new_data[sources_gyp]["targets"].append(new_data_target) + + # Write workspace to file. + _WriteWorkspace(main_gyp, sources_gyp, params) + return (new_target_list, new_target_dicts, new_data) diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py new file mode 100644 index 0000000..076eea3 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py @@ -0,0 +1,3197 @@ +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Xcode project file generator. + +This module is both an Xcode project file generator and a documentation of the +Xcode project file format. Knowledge of the project file format was gained +based on extensive experience with Xcode, and by making changes to projects in +Xcode.app and observing the resultant changes in the associated project files. + +XCODE PROJECT FILES + +The generator targets the file format as written by Xcode 3.2 (specifically, +3.2.6), but past experience has taught that the format has not changed +significantly in the past several years, and future versions of Xcode are able +to read older project files. + +Xcode project files are "bundled": the project "file" from an end-user's +perspective is actually a directory with an ".xcodeproj" extension. The +project file from this module's perspective is actually a file inside this +directory, always named "project.pbxproj". This file contains a complete +description of the project and is all that is needed to use the xcodeproj. +Other files contained in the xcodeproj directory are simply used to store +per-user settings, such as the state of various UI elements in the Xcode +application. + +The project.pbxproj file is a property list, stored in a format almost +identical to the NeXTstep property list format. The file is able to carry +Unicode data, and is encoded in UTF-8. The root element in the property list +is a dictionary that contains several properties of minimal interest, and two +properties of immense interest. The most important property is a dictionary +named "objects". The entire structure of the project is represented by the +children of this property. The objects dictionary is keyed by unique 96-bit +values represented by 24 uppercase hexadecimal characters. Each value in the +objects dictionary is itself a dictionary, describing an individual object. + +Each object in the dictionary is a member of a class, which is identified by +the "isa" property of each object. A variety of classes are represented in a +project file. Objects can refer to other objects by ID, using the 24-character +hexadecimal object key. A project's objects form a tree, with a root object +of class PBXProject at the root. 
As an example, the PBXProject object serves
+as parent to an XCConfigurationList object defining the build configurations
+used in the project, a PBXGroup object serving as a container for all files
+referenced in the project, and a list of target objects, each of which defines
+a target in the project. There are several different types of target object,
+such as PBXNativeTarget and PBXAggregateTarget. In this module, this
+relationship is expressed by having each target type derive from an abstract
+base named XCTarget.
+
+The project.pbxproj file's root dictionary also contains a property, sibling to
+the "objects" dictionary, named "rootObject". The value of rootObject is a
+24-character object key referring to the root PBXProject object in the
+objects dictionary.
+
+In Xcode, every file used as input to a target or produced as a final product
+of a target must appear somewhere in the hierarchy rooted at the PBXGroup
+object referenced by the PBXProject's mainGroup property. A PBXGroup is
+generally represented as a folder in the Xcode application. PBXGroups can
+contain other PBXGroups as well as PBXFileReferences, which are pointers to
+actual files.
+
+Each XCTarget contains a list of build phases, represented in this module by
+the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations
+are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the
+"Compile Sources" and "Link Binary With Libraries" phases displayed in the
+Xcode application. Files used as input to these phases (for example, source
+files in the former case and libraries and frameworks in the latter) are
+represented by PBXBuildFile objects, referenced by elements of "files" lists
+in XCBuildPhase objects. Each PBXBuildFile object refers to a PBXFileReference
+object as a "weak" reference: it does not "own" the PBXFileReference, which is
+owned by the root object's mainGroup or a descendant group. In most cases, the
+layer of indirection between an XCBuildPhase and a PBXFileReference via a
+PBXBuildFile appears extraneous, but there's actually one reason for this:
+file-specific compiler flags are added to the PBXBuildFile object so as to
+allow a single file to be a member of multiple targets while having distinct
+compiler flags for each. These flags can be modified in the Xcode application
+in the "Build" tab of a File Info window.
+
+When a project is open in the Xcode application, Xcode will rewrite it. As
+such, this module is careful to adhere to the formatting used by Xcode, to
+avoid insignificant changes appearing in the file when it is used in the
+Xcode application. This will keep version control repositories happy, and
+makes it possible to compare a project file used in Xcode to one generated by
+this module to determine if any significant changes were made in the
+application.
+
+Xcode has its own way of assigning 24-character identifiers to each object,
+which is not duplicated here. Because the identifier is only generated once,
+when an object is created, and is then left unchanged, there is no need to
+attempt to duplicate Xcode's behavior in this area. The generator is free
+to select any identifier, even at random, to refer to the objects it creates,
+and Xcode will retain those identifiers and use them when subsequently
+rewriting the project file.
However, the generator would choose new random +identifiers each time the project files are generated, leading to difficulties +comparing "used" project files to "pristine" ones produced by this module, +and causing the appearance of changes as every object identifier is changed +when updated projects are checked in to a version control repository. To +mitigate this problem, this module chooses identifiers in a more deterministic +way, by hashing a description of each object as well as its parent and ancestor +objects. This strategy should result in minimal "shift" in IDs as successive +generations of project files are produced. + +THIS MODULE + +This module introduces several classes, all derived from the XCObject class. +Nearly all of the "brains" are built into the XCObject class, which understands +how to create and modify objects, maintain the proper tree structure, compute +identifiers, and print objects. For the most part, classes derived from +XCObject need only provide a _schema class object, a dictionary that +expresses what properties objects of the class may contain. + +Given this structure, it's possible to build a minimal project file by creating +objects of the appropriate types and making the proper connections: + + config_list = XCConfigurationList() + group = PBXGroup() + project = PBXProject({'buildConfigurationList': config_list, + 'mainGroup': group}) + +With the project object set up, it can be added to an XCProjectFile object. +XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject +subclass that does not actually correspond to a class type found in a project +file. Rather, it is used to represent the project file's root dictionary. +Printing an XCProjectFile will print the entire project file, including the +full "objects" dictionary. + + project_file = XCProjectFile({'rootObject': project}) + project_file.ComputeIDs() + project_file.Print() + +Xcode project files are always encoded in UTF-8. This module will accept +strings of either the str class or the unicode class. Strings of class str +are assumed to already be encoded in UTF-8. Obviously, if you're just using +ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset. +Strings of class unicode are handled properly and encoded in UTF-8 when +a project file is output. +""" + +import gyp.common +from functools import cmp_to_key +import hashlib +from operator import attrgetter +import posixpath +import re +import struct +import sys + + +def cmp(x, y): + return (x > y) - (x < y) + + +# See XCObject._EncodeString. This pattern is used to determine when a string +# can be printed unquoted. Strings that match this pattern may be printed +# unquoted. Strings that do not match must be quoted and may be further +# transformed to be properly encoded. Note that this expression matches the +# characters listed with "+", for 1 or more occurrences: if a string is empty, +# it must not match this pattern, because it needs to be encoded as "". +_unquoted = re.compile("^[A-Za-z0-9$./_]+$") + +# Strings that match this pattern are quoted regardless of what _unquoted says. +# Oddly, Xcode will quote any string with a run of three or more underscores. +_quoted = re.compile("___") + +# This pattern should match any character that needs to be escaped by +# XCObject._EncodeString. See that function. 
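+# For example (illustrative): '"' and "\\" match and are backslash-escaped,
+# and a control character such as "\x07" (BEL) matches and is emitted as the
+# two-character sequence "\\a" via the class' _encode_transforms table.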
+_escaped = re.compile('[\\\\"]|[\x00-\x1f]') + + +# Used by SourceTreeAndPathFromPath +_path_leading_variable = re.compile(r"^\$\((.*?)\)(/(.*))?$") + + +def SourceTreeAndPathFromPath(input_path): + """Given input_path, returns a tuple with sourceTree and path values. + + Examples: + input_path (source_tree, output_path) + '$(VAR)/path' ('VAR', 'path') + '$(VAR)' ('VAR', None) + 'path' (None, 'path') + """ + + source_group_match = _path_leading_variable.match(input_path) + if source_group_match: + source_tree = source_group_match.group(1) + output_path = source_group_match.group(3) # This may be None. + else: + source_tree = None + output_path = input_path + + return (source_tree, output_path) + + +def ConvertVariablesToShellSyntax(input_string): + return re.sub(r"\$\((.*?)\)", "${\\1}", input_string) + + +class XCObject: + """The abstract base of all class types used in Xcode project files. + + Class variables: + _schema: A dictionary defining the properties of this class. The keys to + _schema are string property keys as used in project files. Values + are a list of four or five elements: + [ is_list, property_type, is_strong, is_required, default ] + is_list: True if the property described is a list, as opposed + to a single element. + property_type: The type to use as the value of the property, + or if is_list is True, the type to use for each + element of the value's list. property_type must + be an XCObject subclass, or one of the built-in + types str, int, or dict. + is_strong: If property_type is an XCObject subclass, is_strong + is True to assert that this class "owns," or serves + as parent, to the property value (or, if is_list is + True, values). is_strong must be False if + property_type is not an XCObject subclass. + is_required: True if the property is required for the class. + Note that is_required being True does not preclude + an empty string ("", in the case of property_type + str) or list ([], in the case of is_list True) from + being set for the property. + default: Optional. If is_required is True, default may be set + to provide a default value for objects that do not supply + their own value. If is_required is True and default + is not provided, users of the class must supply their own + value for the property. + Note that although the values of the array are expressed in + boolean terms, subclasses provide values as integers to conserve + horizontal space. + _should_print_single_line: False in XCObject. Subclasses whose objects + should be written to the project file in the + alternate single-line format, such as + PBXFileReference and PBXBuildFile, should + set this to True. + _encode_transforms: Used by _EncodeString to encode unprintable characters. + The index into this list is the ordinal of the + character to transform; each value is a string + used to represent the character in the output. XCObject + provides an _encode_transforms list suitable for most + XCObject subclasses. + _alternate_encode_transforms: Provided for subclasses that wish to use + the alternate encoding rules. Xcode seems + to use these rules when printing objects in + single-line format. Subclasses that desire + this behavior should set _encode_transforms + to _alternate_encode_transforms. + _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs + to construct this object's ID. 
Most classes that need custom + hashing behavior should do it by overriding Hashables, + but in some cases an object's parent may wish to push a + hashable value into its child, and it can do so by appending + to _hashables. + Attributes: + id: The object's identifier, a 24-character uppercase hexadecimal string. + Usually, objects being created should not set id until the entire + project file structure is built. At that point, UpdateIDs() should + be called on the root object to assign deterministic values for id to + each object in the tree. + parent: The object's parent. This is set by a parent XCObject when a child + object is added to it. + _properties: The object's property dictionary. An object's properties are + described by its class' _schema variable. + """ + + _schema = {} + _should_print_single_line = False + + # See _EncodeString. + _encode_transforms = [] + i = 0 + while i < ord(" "): + _encode_transforms.append("\\U%04x" % i) + i = i + 1 + _encode_transforms[7] = "\\a" + _encode_transforms[8] = "\\b" + _encode_transforms[9] = "\\t" + _encode_transforms[10] = "\\n" + _encode_transforms[11] = "\\v" + _encode_transforms[12] = "\\f" + _encode_transforms[13] = "\\n" + + _alternate_encode_transforms = list(_encode_transforms) + _alternate_encode_transforms[9] = chr(9) + _alternate_encode_transforms[10] = chr(10) + _alternate_encode_transforms[11] = chr(11) + + def __init__(self, properties=None, id=None, parent=None): + self.id = id + self.parent = parent + self._properties = {} + self._hashables = [] + self._SetDefaultsFromSchema() + self.UpdateProperties(properties) + + def __repr__(self): + try: + name = self.Name() + except NotImplementedError: + return f"<{self.__class__.__name__} at 0x{id(self):x}>" + return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>" + + def Copy(self): + """Make a copy of this object. + + The new object will have its own copy of lists and dicts. Any XCObject + objects owned by this object (marked "strong") will be copied in the + new object, even those found in lists. If this object has any weak + references to other XCObjects, the same references are added to the new + object without making a copy. + """ + + that = self.__class__(id=self.id, parent=self.parent) + for key, value in self._properties.items(): + is_strong = self._schema[key][2] + + if isinstance(value, XCObject): + if is_strong: + new_value = value.Copy() + new_value.parent = that + that._properties[key] = new_value + else: + that._properties[key] = value + elif isinstance(value, (str, int)): + that._properties[key] = value + elif isinstance(value, list): + if is_strong: + # If is_strong is True, each element is an XCObject, so it's safe to + # call Copy. + that._properties[key] = [] + for item in value: + new_item = item.Copy() + new_item.parent = that + that._properties[key].append(new_item) + else: + that._properties[key] = value[:] + elif isinstance(value, dict): + # dicts are never strong. + if is_strong: + raise TypeError( + "Strong dict for key " + key + " in " + self.__class__.__name__ + ) + else: + that._properties[key] = value.copy() + else: + raise TypeError( + "Unexpected type " + + value.__class__.__name__ + + " for key " + + key + + " in " + + self.__class__.__name__ + ) + + return that + + def Name(self): + """Return the name corresponding to an object. + + Not all objects necessarily need to be nameable, and not all that do have + a "name" property. Override as needed. 
+ """ + + # If the schema indicates that "name" is required, try to access the + # property even if it doesn't exist. This will result in a KeyError + # being raised for the property that should be present, which seems more + # appropriate than NotImplementedError in this case. + if "name" in self._properties or ( + "name" in self._schema and self._schema["name"][3] + ): + return self._properties["name"] + + raise NotImplementedError(self.__class__.__name__ + " must implement Name") + + def Comment(self): + """Return a comment string for the object. + + Most objects just use their name as the comment, but PBXProject uses + different values. + + The returned comment is not escaped and does not have any comment marker + strings applied to it. + """ + + return self.Name() + + def Hashables(self): + hashables = [self.__class__.__name__] + + name = self.Name() + if name is not None: + hashables.append(name) + + hashables.extend(self._hashables) + + return hashables + + def HashablesForChild(self): + return None + + def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None): + """Set "id" properties deterministically. + + An object's "id" property is set based on a hash of its class type and + name, as well as the class type and name of all ancestor objects. As + such, it is only advisable to call ComputeIDs once an entire project file + tree is built. + + If recursive is True, recurse into all descendant objects and update their + hashes. + + If overwrite is True, any existing value set in the "id" property will be + replaced. + """ + + def _HashUpdate(hash, data): + """Update hash with data's length and contents. + + If the hash were updated only with the value of data, it would be + possible for clowns to induce collisions by manipulating the names of + their objects. By adding the length, it's exceedingly less likely that + ID collisions will be encountered, intentionally or not. + """ + + hash.update(struct.pack(">i", len(data))) + if isinstance(data, str): + data = data.encode("utf-8") + hash.update(data) + + if seed_hash is None: + seed_hash = hashlib.sha1() + + hash = seed_hash.copy() + + hashables = self.Hashables() + assert len(hashables) > 0 + for hashable in hashables: + _HashUpdate(hash, hashable) + + if recursive: + hashables_for_child = self.HashablesForChild() + if hashables_for_child is None: + child_hash = hash + else: + assert len(hashables_for_child) > 0 + child_hash = seed_hash.copy() + for hashable in hashables_for_child: + _HashUpdate(child_hash, hashable) + + for child in self.Children(): + child.ComputeIDs(recursive, overwrite, child_hash) + + if overwrite or self.id is None: + # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is + # is 160 bits. Instead of throwing out 64 bits of the digest, xor them + # into the portion that gets used. + assert hash.digest_size % 4 == 0 + digest_int_count = hash.digest_size // 4 + digest_ints = struct.unpack(">" + "I" * digest_int_count, hash.digest()) + id_ints = [0, 0, 0] + for index in range(0, digest_int_count): + id_ints[index % 3] ^= digest_ints[index] + self.id = "%08X%08X%08X" % tuple(id_ints) + + def EnsureNoIDCollisions(self): + """Verifies that no two objects have the same ID. Checks all descendants. 
+ """ + + ids = {} + descendants = self.Descendants() + for descendant in descendants: + if descendant.id in ids: + other = ids[descendant.id] + raise KeyError( + 'Duplicate ID %s, objects "%s" and "%s" in "%s"' + % ( + descendant.id, + str(descendant._properties), + str(other._properties), + self._properties["rootObject"].Name(), + ) + ) + ids[descendant.id] = descendant + + def Children(self): + """Returns a list of all of this object's owned (strong) children.""" + + children = [] + for property, attributes in self._schema.items(): + (is_list, property_type, is_strong) = attributes[0:3] + if is_strong and property in self._properties: + if not is_list: + children.append(self._properties[property]) + else: + children.extend(self._properties[property]) + return children + + def Descendants(self): + """Returns a list of all of this object's descendants, including this + object. + """ + + children = self.Children() + descendants = [self] + for child in children: + descendants.extend(child.Descendants()) + return descendants + + def PBXProjectAncestor(self): + # The base case for recursion is defined at PBXProject.PBXProjectAncestor. + if self.parent: + return self.parent.PBXProjectAncestor() + return None + + def _EncodeComment(self, comment): + """Encodes a comment to be placed in the project file output, mimicking + Xcode behavior. + """ + + # This mimics Xcode behavior by wrapping the comment in "/*" and "*/". If + # the string already contains a "*/", it is turned into "(*)/". This keeps + # the file writer from outputting something that would be treated as the + # end of a comment in the middle of something intended to be entirely a + # comment. + + return "/* " + comment.replace("*/", "(*)/") + " */" + + def _EncodeTransform(self, match): + # This function works closely with _EncodeString. It will only be called + # by re.sub with match.group(0) containing a character matched by the + # the _escaped expression. + char = match.group(0) + + # Backslashes (\) and quotation marks (") are always replaced with a + # backslash-escaped version of the same. Everything else gets its + # replacement from the class' _encode_transforms array. + if char == "\\": + return "\\\\" + if char == '"': + return '\\"' + return self._encode_transforms[ord(char)] + + def _EncodeString(self, value): + """Encodes a string to be placed in the project file output, mimicking + Xcode behavior. + """ + + # Use quotation marks when any character outside of the range A-Z, a-z, 0-9, + # $ (dollar sign), . (period), and _ (underscore) is present. Also use + # quotation marks to represent empty strings. + # + # Escape " (double-quote) and \ (backslash) by preceding them with a + # backslash. + # + # Some characters below the printable ASCII range are encoded specially: + # 7 ^G BEL is encoded as "\a" + # 8 ^H BS is encoded as "\b" + # 11 ^K VT is encoded as "\v" + # 12 ^L NP is encoded as "\f" + # 127 ^? DEL is passed through as-is without escaping + # - In PBXFileReference and PBXBuildFile objects: + # 9 ^I HT is passed through as-is without escaping + # 10 ^J NL is passed through as-is without escaping + # 13 ^M CR is passed through as-is without escaping + # - In other objects: + # 9 ^I HT is encoded as "\t" + # 10 ^J NL is encoded as "\n" + # 13 ^M CR is encoded as "\n" rendering it indistinguishable from + # 10 ^J NL + # All other characters within the ASCII control character range (0 through + # 31 inclusive) are encoded as "\U001f" referring to the Unicode code point + # in hexadecimal. 
For example, character 14 (^N SO) is encoded as "\U000e".
+        # Characters above the ASCII range are passed through to the output encoded
+        # as UTF-8 without any escaping. These mappings are contained in the
+        # class' _encode_transforms list.
+
+        if _unquoted.search(value) and not _quoted.search(value):
+            return value
+
+        return '"' + _escaped.sub(self._EncodeTransform, value) + '"'
+
+    def _XCPrint(self, file, tabs, line):
+        file.write("\t" * tabs + line)
+
+    def _XCPrintableValue(self, tabs, value, flatten_list=False):
+        """Returns a representation of value that may be printed in a project file,
+        mimicking Xcode's behavior.
+
+        _XCPrintableValue can handle str and int values, XCObjects (which are
+        made printable by returning their id property), and list and dict objects
+        composed of any of the above types. When printing a list or dict, and
+        _should_print_single_line is False, the tabs parameter is used to determine
+        how much to indent the lines corresponding to the items in the list or
+        dict.
+
+        If flatten_list is True, single-element lists will be transformed into
+        strings.
+        """
+
+        printable = ""
+        comment = None
+
+        if self._should_print_single_line:
+            sep = " "
+            element_tabs = ""
+            end_tabs = ""
+        else:
+            sep = "\n"
+            element_tabs = "\t" * (tabs + 1)
+            end_tabs = "\t" * tabs
+
+        if isinstance(value, XCObject):
+            printable += value.id
+            comment = value.Comment()
+        elif isinstance(value, str):
+            printable += self._EncodeString(value)
+        elif isinstance(value, int):
+            printable += str(value)
+        elif isinstance(value, list):
+            if flatten_list and len(value) <= 1:
+                if len(value) == 0:
+                    printable += self._EncodeString("")
+                else:
+                    printable += self._EncodeString(value[0])
+            else:
+                printable = "(" + sep
+                for item in value:
+                    printable += (
+                        element_tabs
+                        + self._XCPrintableValue(tabs + 1, item, flatten_list)
+                        + ","
+                        + sep
+                    )
+                printable += end_tabs + ")"
+        elif isinstance(value, dict):
+            printable = "{" + sep
+            for item_key, item_value in sorted(value.items()):
+                printable += (
+                    element_tabs
+                    + self._XCPrintableValue(tabs + 1, item_key, flatten_list)
+                    + " = "
+                    + self._XCPrintableValue(tabs + 1, item_value, flatten_list)
+                    + ";"
+                    + sep
+                )
+            printable += end_tabs + "}"
+        else:
+            raise TypeError("Can't make " + value.__class__.__name__ + " printable")
+
+        if comment:
+            printable += " " + self._EncodeComment(comment)
+
+        return printable
+
+    def _XCKVPrint(self, file, tabs, key, value):
+        """Prints a key and value, members of an XCObject's _properties dictionary,
+        to file.
+
+        tabs is an int identifying the indentation level. If the class'
+        _should_print_single_line variable is True, tabs is ignored and the
+        key-value pair will be followed by a space instead of a newline.
+        """
+
+        if self._should_print_single_line:
+            printable = ""
+            after_kv = " "
+        else:
+            printable = "\t" * tabs
+            after_kv = "\n"
+
+        # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy
+        # objects without comments. Sometimes it prints them with comments, but
+        # the majority of the time, it doesn't. To avoid unnecessary changes to
+        # the project file after Xcode opens it, don't write comments for
+        # remoteGlobalIDString.
This is a sucky hack and it would certainly be + # cleaner to extend the schema to indicate whether or not a comment should + # be printed, but since this is the only case where the problem occurs and + # Xcode itself can't seem to make up its mind, the hack will suffice. + # + # Also see PBXContainerItemProxy._schema['remoteGlobalIDString']. + if key == "remoteGlobalIDString" and isinstance(self, PBXContainerItemProxy): + value_to_print = value.id + else: + value_to_print = value + + # PBXBuildFile's settings property is represented in the output as a dict, + # but a hack here has it represented as a string. Arrange to strip off the + # quotes so that it shows up in the output as expected. + if key == "settings" and isinstance(self, PBXBuildFile): + strip_value_quotes = True + else: + strip_value_quotes = False + + # In another one-off, let's set flatten_list on buildSettings properties + # of XCBuildConfiguration objects, because that's how Xcode treats them. + if key == "buildSettings" and isinstance(self, XCBuildConfiguration): + flatten_list = True + else: + flatten_list = False + + try: + printable_key = self._XCPrintableValue(tabs, key, flatten_list) + printable_value = self._XCPrintableValue(tabs, value_to_print, flatten_list) + if ( + strip_value_quotes + and len(printable_value) > 1 + and printable_value[0] == '"' + and printable_value[-1] == '"' + ): + printable_value = printable_value[1:-1] + printable += printable_key + " = " + printable_value + ";" + after_kv + except TypeError as e: + gyp.common.ExceptionAppend(e, 'while printing key "%s"' % key) + raise + + self._XCPrint(file, 0, printable) + + def Print(self, file=sys.stdout): + """Prints a reprentation of this object to file, adhering to Xcode output + formatting. + """ + + self.VerifyHasRequiredProperties() + + if self._should_print_single_line: + # When printing an object in a single line, Xcode doesn't put any space + # between the beginning of a dictionary (or presumably a list) and the + # first contained item, so you wind up with snippets like + # ...CDEF = {isa = PBXFileReference; fileRef = 0123... + # If it were me, I would have put a space in there after the opening + # curly, but I guess this is just another one of those inconsistencies + # between how Xcode prints PBXFileReference and PBXBuildFile objects as + # compared to other objects. Mimic Xcode's behavior here by using an + # empty string for sep. + sep = "" + end_tabs = 0 + else: + sep = "\n" + end_tabs = 2 + + # Start the object. For example, '\t\tPBXProject = {\n'. + self._XCPrint(file, 2, self._XCPrintableValue(2, self) + " = {" + sep) + + # "isa" isn't in the _properties dictionary, it's an intrinsic property + # of the class which the object belongs to. Xcode always outputs "isa" + # as the first element of an object dictionary. + self._XCKVPrint(file, 3, "isa", self.__class__.__name__) + + # The remaining elements of an object dictionary are sorted alphabetically. + for property, value in sorted(self._properties.items()): + self._XCKVPrint(file, 3, property, value) + + # End the object. + self._XCPrint(file, end_tabs, "};\n") + + def UpdateProperties(self, properties, do_copy=False): + """Merge the supplied properties into the _properties dictionary. + + The input properties must adhere to the class schema or a KeyError or + TypeError exception will be raised. If adding an object of an XCObject + subclass and the schema indicates a strong relationship, the object's + parent will be set to this object. 
+ + If do_copy is True, then lists, dicts, strong-owned XCObjects, and + strong-owned XCObjects in lists will be copied instead of having their + references added. + """ + + if properties is None: + return + + for property, value in properties.items(): + # Make sure the property is in the schema. + if property not in self._schema: + raise KeyError(property + " not in " + self.__class__.__name__) + + # Make sure the property conforms to the schema. + (is_list, property_type, is_strong) = self._schema[property][0:3] + if is_list: + if value.__class__ != list: + raise TypeError( + property + + " of " + + self.__class__.__name__ + + " must be list, not " + + value.__class__.__name__ + ) + for item in value: + if not isinstance(item, property_type) and not ( + isinstance(item, str) and property_type == str + ): + # Accept unicode where str is specified. str is treated as + # UTF-8-encoded. + raise TypeError( + "item of " + + property + + " of " + + self.__class__.__name__ + + " must be " + + property_type.__name__ + + ", not " + + item.__class__.__name__ + ) + elif not isinstance(value, property_type) and not ( + isinstance(value, str) and property_type == str + ): + # Accept unicode where str is specified. str is treated as + # UTF-8-encoded. + raise TypeError( + property + + " of " + + self.__class__.__name__ + + " must be " + + property_type.__name__ + + ", not " + + value.__class__.__name__ + ) + + # Checks passed, perform the assignment. + if do_copy: + if isinstance(value, XCObject): + if is_strong: + self._properties[property] = value.Copy() + else: + self._properties[property] = value + elif isinstance(value, (str, int)): + self._properties[property] = value + elif isinstance(value, list): + if is_strong: + # If is_strong is True, each element is an XCObject, + # so it's safe to call Copy. + self._properties[property] = [] + for item in value: + self._properties[property].append(item.Copy()) + else: + self._properties[property] = value[:] + elif isinstance(value, dict): + self._properties[property] = value.copy() + else: + raise TypeError( + "Don't know how to copy a " + + value.__class__.__name__ + + " object for " + + property + + " in " + + self.__class__.__name__ + ) + else: + self._properties[property] = value + + # Set up the child's back-reference to this object. Don't use |value| + # any more because it may not be right if do_copy is true. + if is_strong: + if not is_list: + self._properties[property].parent = self + else: + for item in self._properties[property]: + item.parent = self + + def HasProperty(self, key): + return key in self._properties + + def GetProperty(self, key): + return self._properties[key] + + def SetProperty(self, key, value): + self.UpdateProperties({key: value}) + + def DelProperty(self, key): + if key in self._properties: + del self._properties[key] + + def AppendProperty(self, key, value): + # TODO(mark): Support ExtendProperty too (and make this call that)? + + # Schema validation. + if key not in self._schema: + raise KeyError(key + " not in " + self.__class__.__name__) + + (is_list, property_type, is_strong) = self._schema[key][0:3] + if not is_list: + raise TypeError(key + " of " + self.__class__.__name__ + " must be list") + if not isinstance(value, property_type): + raise TypeError( + "item of " + + key + + " of " + + self.__class__.__name__ + + " must be " + + property_type.__name__ + + ", not " + + value.__class__.__name__ + ) + + # If the property doesn't exist yet, create a new empty list to receive the + # item. 
+        self._properties[key] = self._properties.get(key, [])
+
+        # Set up the ownership link.
+        if is_strong:
+            value.parent = self
+
+        # Store the item.
+        self._properties[key].append(value)
+
+    def VerifyHasRequiredProperties(self):
+        """Ensure that all properties identified as required by the schema are
+        set.
+        """
+
+        # TODO(mark): A stronger verification mechanism is needed. Some
+        # subclasses need to perform validation beyond what the schema can enforce.
+        for property, attributes in self._schema.items():
+            (is_list, property_type, is_strong, is_required) = attributes[0:4]
+            if is_required and property not in self._properties:
+                raise KeyError(self.__class__.__name__ + " requires " + property)
+
+    def _SetDefaultsFromSchema(self):
+        """Assign object default values according to the schema. This will not
+        overwrite properties that have already been set."""
+
+        defaults = {}
+        for property, attributes in self._schema.items():
+            (is_list, property_type, is_strong, is_required) = attributes[0:4]
+            if (
+                is_required
+                and len(attributes) >= 5
+                and property not in self._properties
+            ):
+                default = attributes[4]
+
+                defaults[property] = default
+
+        if len(defaults) > 0:
+            # Use do_copy=True so that each new object gets its own copy of strong
+            # objects, lists, and dicts.
+            self.UpdateProperties(defaults, do_copy=True)
+
+
+class XCHierarchicalElement(XCObject):
+    """Abstract base for PBXGroup and PBXFileReference. Not represented in a
+    project file."""
+
+    # TODO(mark): Do name and path belong here? Probably so.
+    # If path is set and name is not, name may have a default value. Name will
+    # be set to the basename of path, if the basename of path is different from
+    # the full value of path. If path is already just a leaf name, name will
+    # not be set.
+    _schema = XCObject._schema.copy()
+    _schema.update(
+        {
+            "comments": [0, str, 0, 0],
+            "fileEncoding": [0, str, 0, 0],
+            "includeInIndex": [0, int, 0, 0],
+            "indentWidth": [0, int, 0, 0],
+            "lineEnding": [0, int, 0, 0],
+            "sourceTree": [0, str, 0, 1, "<group>"],
+            "tabWidth": [0, int, 0, 0],
+            "usesTabs": [0, int, 0, 0],
+            "wrapsLines": [0, int, 0, 0],
+        }
+    )
+
+    def __init__(self, properties=None, id=None, parent=None):
+        # super
+        XCObject.__init__(self, properties, id, parent)
+        if "path" in self._properties and "name" not in self._properties:
+            path = self._properties["path"]
+            name = posixpath.basename(path)
+            if name != "" and path != name:
+                self.SetProperty("name", name)
+
+        if "path" in self._properties and (
+            "sourceTree" not in self._properties
+            or self._properties["sourceTree"] == "<group>"
+        ):
+            # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take
+            # the variable out and make the path be relative to that variable by
+            # assigning the variable name as the sourceTree.
+            (source_tree, path) = SourceTreeAndPathFromPath(self._properties["path"])
+            if source_tree is not None:
+                self._properties["sourceTree"] = source_tree
+            if path is not None:
+                self._properties["path"] = path
+            if (
+                source_tree is not None
+                and path is None
+                and "name" not in self._properties
+            ):
+                # The path was of the form "$(SDKROOT)" with no path following it.
+                # This object is now relative to that variable, so it has no path
+                # attribute of its own. It does, however, keep a name.
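+                # For example (illustrative), a path of "$(BUILT_PRODUCTS_DIR)"
+                # yields sourceTree "BUILT_PRODUCTS_DIR", no path, and the
+                # name "BUILT_PRODUCTS_DIR".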
+ del self._properties["path"] + self._properties["name"] = source_tree + + def Name(self): + if "name" in self._properties: + return self._properties["name"] + elif "path" in self._properties: + return self._properties["path"] + else: + # This happens in the case of the root PBXGroup. + return None + + def Hashables(self): + """Custom hashables for XCHierarchicalElements. + + XCHierarchicalElements are special. Generally, their hashes shouldn't + change if the paths don't change. The normal XCObject implementation of + Hashables adds a hashable for each object, which means that if + the hierarchical structure changes (possibly due to changes caused when + TakeOverOnlyChild runs and encounters slight changes in the hierarchy), + the hashes will change. For example, if a project file initially contains + a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent + a/b. If someone later adds a/f2 to the project file, a/b can no longer be + collapsed, and f1 winds up with parent b and grandparent a. That would + be sufficient to change f1's hash. + + To counteract this problem, hashables for all XCHierarchicalElements except + for the main group (which has neither a name nor a path) are taken to be + just the set of path components. Because hashables are inherited from + parents, this provides assurance that a/b/f1 has the same set of hashables + whether its parent is b or a/b. + + The main group is a special case. As it is permitted to have no name or + path, it is permitted to use the standard XCObject hash mechanism. This + is not considered a problem because there can be only one main group. + """ + + if self == self.PBXProjectAncestor()._properties["mainGroup"]: + # super + return XCObject.Hashables(self) + + hashables = [] + + # Put the name in first, ensuring that if TakeOverOnlyChild collapses + # children into a top-level group like "Source", the name always goes + # into the list of hashables without interfering with path components. + if "name" in self._properties: + # Make it less likely for people to manipulate hashes by following the + # pattern of always pushing an object type value onto the list first. + hashables.append(self.__class__.__name__ + ".name") + hashables.append(self._properties["name"]) + + # NOTE: This still has the problem that if an absolute path is encountered, + # including paths with a sourceTree, they'll still inherit their parents' + # hashables, even though the paths aren't relative to their parents. This + # is not expected to be much of a problem in practice. + path = self.PathFromSourceTreeAndPath() + if path is not None: + components = path.split(posixpath.sep) + for component in components: + hashables.append(self.__class__.__name__ + ".path") + hashables.append(component) + + hashables.extend(self._hashables) + + return hashables + + def Compare(self, other): + # Allow comparison of these types. PBXGroup has the highest sort rank; + # PBXVariantGroup is treated as equal to PBXFileReference. + valid_class_types = { + PBXFileReference: "file", + PBXGroup: "group", + PBXVariantGroup: "file", + } + self_type = valid_class_types[self.__class__] + other_type = valid_class_types[other.__class__] + + if self_type == other_type: + # If the two objects are of the same sort rank, compare their names. + return cmp(self.Name(), other.Name()) + + # Otherwise, sort groups before everything else. 
+ if self_type == "group": + return -1 + return 1 + + def CompareRootGroup(self, other): + # This function should be used only to compare direct children of the + # containing PBXProject's mainGroup. These groups should appear in the + # listed order. + # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the + # generator should have a way of influencing this list rather than having + # to hardcode for the generator here. + order = [ + "Source", + "Intermediates", + "Projects", + "Frameworks", + "Products", + "Build", + ] + + # If the groups aren't in the listed order, do a name comparison. + # Otherwise, groups in the listed order should come before those that + # aren't. + self_name = self.Name() + other_name = other.Name() + self_in = isinstance(self, PBXGroup) and self_name in order + other_in = isinstance(self, PBXGroup) and other_name in order + if not self_in and not other_in: + return self.Compare(other) + if self_name in order and other_name not in order: + return -1 + if other_name in order and self_name not in order: + return 1 + + # If both groups are in the listed order, go by the defined order. + self_index = order.index(self_name) + other_index = order.index(other_name) + if self_index < other_index: + return -1 + if self_index > other_index: + return 1 + return 0 + + def PathFromSourceTreeAndPath(self): + # Turn the object's sourceTree and path properties into a single flat + # string of a form comparable to the path parameter. If there's a + # sourceTree property other than "", wrap it in $(...) for the + # comparison. + components = [] + if self._properties["sourceTree"] != "": + components.append("$(" + self._properties["sourceTree"] + ")") + if "path" in self._properties: + components.append(self._properties["path"]) + + if len(components) > 0: + return posixpath.join(*components) + + return None + + def FullPath(self): + # Returns a full path to self relative to the project file, or relative + # to some other source tree. Start with self, and walk up the chain of + # parents prepending their paths, if any, until no more parents are + # available (project-relative path) or until a path relative to some + # source tree is found. + xche = self + path = None + while isinstance(xche, XCHierarchicalElement) and ( + path is None or (not path.startswith("/") and not path.startswith("$")) + ): + this_path = xche.PathFromSourceTreeAndPath() + if this_path is not None and path is not None: + path = posixpath.join(this_path, path) + elif this_path is not None: + path = this_path + xche = xche.parent + + return path + + +class PBXGroup(XCHierarchicalElement): + """ + Attributes: + _children_by_path: Maps pathnames of children of this PBXGroup to the + actual child XCHierarchicalElement objects. + _variant_children_by_name_and_path: Maps (name, path) tuples of + PBXVariantGroup children to the actual child PBXVariantGroup objects. 
+ """ + + _schema = XCHierarchicalElement._schema.copy() + _schema.update( + { + "children": [1, XCHierarchicalElement, 1, 1, []], + "name": [0, str, 0, 0], + "path": [0, str, 0, 0], + } + ) + + def __init__(self, properties=None, id=None, parent=None): + # super + XCHierarchicalElement.__init__(self, properties, id, parent) + self._children_by_path = {} + self._variant_children_by_name_and_path = {} + for child in self._properties.get("children", []): + self._AddChildToDicts(child) + + def Hashables(self): + # super + hashables = XCHierarchicalElement.Hashables(self) + + # It is not sufficient to just rely on name and parent to build a unique + # hashable : a node could have two child PBXGroup sharing a common name. + # To add entropy the hashable is enhanced with the names of all its + # children. + for child in self._properties.get("children", []): + child_name = child.Name() + if child_name is not None: + hashables.append(child_name) + + return hashables + + def HashablesForChild(self): + # To avoid a circular reference the hashables used to compute a child id do + # not include the child names. + return XCHierarchicalElement.Hashables(self) + + def _AddChildToDicts(self, child): + # Sets up this PBXGroup object's dicts to reference the child properly. + child_path = child.PathFromSourceTreeAndPath() + if child_path: + if child_path in self._children_by_path: + raise ValueError("Found multiple children with path " + child_path) + self._children_by_path[child_path] = child + + if isinstance(child, PBXVariantGroup): + child_name = child._properties.get("name", None) + key = (child_name, child_path) + if key in self._variant_children_by_name_and_path: + raise ValueError( + "Found multiple PBXVariantGroup children with " + + "name " + + str(child_name) + + " and path " + + str(child_path) + ) + self._variant_children_by_name_and_path[key] = child + + def AppendChild(self, child): + # Callers should use this instead of calling + # AppendProperty('children', child) directly because this function + # maintains the group's dicts. + self.AppendProperty("children", child) + self._AddChildToDicts(child) + + def GetChildByName(self, name): + # This is not currently optimized with a dict as GetChildByPath is because + # it has few callers. Most callers probably want GetChildByPath. This + # function is only useful to get children that have names but no paths, + # which is rare. The children of the main group ("Source", "Products", + # etc.) is pretty much the only case where this likely to come up. + # + # TODO(mark): Maybe this should raise an error if more than one child is + # present with the same name. + if "children" not in self._properties: + return None + + for child in self._properties["children"]: + if child.Name() == name: + return child + + return None + + def GetChildByPath(self, path): + if not path: + return None + + if path in self._children_by_path: + return self._children_by_path[path] + + return None + + def GetChildByRemoteObject(self, remote_object): + # This method is a little bit esoteric. Given a remote_object, which + # should be a PBXFileReference in another project file, this method will + # return this group's PBXReferenceProxy object serving as a local proxy + # for the remote PBXFileReference. + # + # This function might benefit from a dict optimization as GetChildByPath + # for some workloads, but profiling shows that it's not currently a + # problem. 
+ if "children" not in self._properties: + return None + + for child in self._properties["children"]: + if not isinstance(child, PBXReferenceProxy): + continue + + container_proxy = child._properties["remoteRef"] + if container_proxy._properties["remoteGlobalIDString"] == remote_object: + return child + + return None + + def AddOrGetFileByPath(self, path, hierarchical): + """Returns an existing or new file reference corresponding to path. + + If hierarchical is True, this method will create or use the necessary + hierarchical group structure corresponding to path. Otherwise, it will + look in and create an item in the current group only. + + If an existing matching reference is found, it is returned, otherwise, a + new one will be created, added to the correct group, and returned. + + If path identifies a directory by virtue of carrying a trailing slash, + this method returns a PBXFileReference of "folder" type. If path + identifies a variant, by virtue of it identifying a file inside a directory + with an ".lproj" extension, this method returns a PBXVariantGroup + containing the variant named by path, and possibly other variants. For + all other paths, a "normal" PBXFileReference will be returned. + """ + + # Adding or getting a directory? Directories end with a trailing slash. + is_dir = False + if path.endswith("/"): + is_dir = True + path = posixpath.normpath(path) + if is_dir: + path = path + "/" + + # Adding or getting a variant? Variants are files inside directories + # with an ".lproj" extension. Xcode uses variants for localization. For + # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named + # MainMenu.nib inside path/to, and give it a variant named Language. In + # this example, grandparent would be set to path/to and parent_root would + # be set to Language. + variant_name = None + parent = posixpath.dirname(path) + grandparent = posixpath.dirname(parent) + parent_basename = posixpath.basename(parent) + (parent_root, parent_ext) = posixpath.splitext(parent_basename) + if parent_ext == ".lproj": + variant_name = parent_root + if grandparent == "": + grandparent = None + + # Putting a directory inside a variant group is not currently supported. + assert not is_dir or variant_name is None + + path_split = path.split(posixpath.sep) + if ( + len(path_split) == 1 + or ((is_dir or variant_name is not None) and len(path_split) == 2) + or not hierarchical + ): + # The PBXFileReference or PBXVariantGroup will be added to or gotten from + # this PBXGroup, no recursion necessary. + if variant_name is None: + # Add or get a PBXFileReference. + file_ref = self.GetChildByPath(path) + if file_ref is not None: + assert file_ref.__class__ == PBXFileReference + else: + file_ref = PBXFileReference({"path": path}) + self.AppendChild(file_ref) + else: + # Add or get a PBXVariantGroup. The variant group name is the same + # as the basename (MainMenu.nib in the example above). grandparent + # specifies the path to the variant group itself, and path_split[-2:] + # is the path of the specific variant relative to its group. 
+ variant_group_name = posixpath.basename(path) + variant_group_ref = self.AddOrGetVariantGroupByNameAndPath( + variant_group_name, grandparent + ) + variant_path = posixpath.sep.join(path_split[-2:]) + variant_ref = variant_group_ref.GetChildByPath(variant_path) + if variant_ref is not None: + assert variant_ref.__class__ == PBXFileReference + else: + variant_ref = PBXFileReference( + {"name": variant_name, "path": variant_path} + ) + variant_group_ref.AppendChild(variant_ref) + # The caller is interested in the variant group, not the specific + # variant file. + file_ref = variant_group_ref + return file_ref + else: + # Hierarchical recursion. Add or get a PBXGroup corresponding to the + # outermost path component, and then recurse into it, chopping off that + # path component. + next_dir = path_split[0] + group_ref = self.GetChildByPath(next_dir) + if group_ref is not None: + assert group_ref.__class__ == PBXGroup + else: + group_ref = PBXGroup({"path": next_dir}) + self.AppendChild(group_ref) + return group_ref.AddOrGetFileByPath( + posixpath.sep.join(path_split[1:]), hierarchical + ) + + def AddOrGetVariantGroupByNameAndPath(self, name, path): + """Returns an existing or new PBXVariantGroup for name and path. + + If a PBXVariantGroup identified by the name and path arguments is already + present as a child of this object, it is returned. Otherwise, a new + PBXVariantGroup with the correct properties is created, added as a child, + and returned. + + This method will generally be called by AddOrGetFileByPath, which knows + when to create a variant group based on the structure of the pathnames + passed to it. + """ + + key = (name, path) + if key in self._variant_children_by_name_and_path: + variant_group_ref = self._variant_children_by_name_and_path[key] + assert variant_group_ref.__class__ == PBXVariantGroup + return variant_group_ref + + variant_group_properties = {"name": name} + if path is not None: + variant_group_properties["path"] = path + variant_group_ref = PBXVariantGroup(variant_group_properties) + self.AppendChild(variant_group_ref) + + return variant_group_ref + + def TakeOverOnlyChild(self, recurse=False): + """If this PBXGroup has only one child and it's also a PBXGroup, take + it over by making all of its children this object's children. + + This function will continue to take over only children when those children + are groups. If there are three PBXGroups representing a, b, and c, with + c inside b and b inside a, and a and b have no other children, this will + result in a taking over both b and c, forming a PBXGroup for a/b/c. + + If recurse is True, this function will recurse into children and ask them + to collapse themselves by taking over only children as well. Assuming + an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f + (d1, d2, and f are files, the rest are groups), recursion will result in + a group for a/b/c containing a group for d3/e. + """ + + # At this stage, check that child class types are PBXGroup exactly, + # instead of using isinstance. The only subclass of PBXGroup, + # PBXVariantGroup, should not participate in reparenting in the same way: + # reparenting by merging different object types would be wrong. + while ( + len(self._properties["children"]) == 1 + and self._properties["children"][0].__class__ == PBXGroup + ): + # Loop to take over the innermost only-child group possible. + + child = self._properties["children"][0] + + # Assume the child's properties, including its children. 
Save a copy
+            # of this object's old properties, because they'll still be needed.
+            # This object retains its existing id and parent attributes.
+            old_properties = self._properties
+            self._properties = child._properties
+            self._children_by_path = child._children_by_path
+
+            if (
+                "sourceTree" not in self._properties
+                or self._properties["sourceTree"] == "<group>"
+            ):
+                # The child was relative to its parent. Fix up the path. Note that
+                # children with a sourceTree other than "<group>" are not relative
+                # to their parents, so no path fix-up is needed in that case.
+                if "path" in old_properties:
+                    if "path" in self._properties:
+                        # Both the original parent and child have paths set.
+                        self._properties["path"] = posixpath.join(
+                            old_properties["path"], self._properties["path"]
+                        )
+                    else:
+                        # Only the original parent has a path, use it.
+                        self._properties["path"] = old_properties["path"]
+                if "sourceTree" in old_properties:
+                    # The original parent had a sourceTree set, use it.
+                    self._properties["sourceTree"] = old_properties["sourceTree"]
+
+            # If the original parent had a name set, keep using it. If the original
+            # parent didn't have a name but the child did, let the child's name
+            # live on. If the name attribute seems unnecessary now, get rid of it.
+            if "name" in old_properties and old_properties["name"] not in (
+                None,
+                self.Name(),
+            ):
+                self._properties["name"] = old_properties["name"]
+            if (
+                "name" in self._properties
+                and "path" in self._properties
+                and self._properties["name"] == self._properties["path"]
+            ):
+                del self._properties["name"]
+
+        # Notify all children of their new parent.
+        for child in self._properties["children"]:
+            child.parent = self
+
+        # If asked to recurse, recurse.
+        if recurse:
+            for child in self._properties["children"]:
+                if child.__class__ == PBXGroup:
+                    child.TakeOverOnlyChild(recurse)
+
+    def SortGroup(self):
+        self._properties["children"] = sorted(
+            self._properties["children"], key=cmp_to_key(lambda x, y: x.Compare(y))
+        )
+
+        # Recurse.
+        for child in self._properties["children"]:
+            if isinstance(child, PBXGroup):
+                child.SortGroup()
+
+
+class XCFileLikeElement(XCHierarchicalElement):
+    # Abstract base for objects that can be used as the fileRef property of
+    # PBXBuildFile.
+
+    def PathHashables(self):
+        # A PBXBuildFile that refers to this object will call this method to
+        # obtain additional hashables specific to this XCFileLikeElement. Don't
+        # just use this object's hashables, they're not specific and unique enough
+        # on their own (without access to the parent hashables.) Instead, provide
+        # hashables that identify this object by path by getting its hashables as
+        # well as the hashables of ancestor XCHierarchicalElement objects.
+
+        hashables = []
+        xche = self
+        while isinstance(xche, XCHierarchicalElement):
+            xche_hashables = xche.Hashables()
+            for index, xche_hashable in enumerate(xche_hashables):
+                hashables.insert(index, xche_hashable)
+            xche = xche.parent
+        return hashables
+
+
+class XCContainerPortal(XCObject):
+    # Abstract base for objects that can be used as the containerPortal property
+    # of PBXContainerItemProxy.
+    pass
+
+
+class XCRemoteObject(XCObject):
+    # Abstract base for objects that can be used as the remoteGlobalIDString
+    # property of PBXContainerItemProxy.
+ pass + + +class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject): + _schema = XCFileLikeElement._schema.copy() + _schema.update( + { + "explicitFileType": [0, str, 0, 0], + "lastKnownFileType": [0, str, 0, 0], + "name": [0, str, 0, 0], + "path": [0, str, 0, 1], + } + ) + + # Weird output rules for PBXFileReference. + _should_print_single_line = True + # super + _encode_transforms = XCFileLikeElement._alternate_encode_transforms + + def __init__(self, properties=None, id=None, parent=None): + # super + XCFileLikeElement.__init__(self, properties, id, parent) + if "path" in self._properties and self._properties["path"].endswith("/"): + self._properties["path"] = self._properties["path"][:-1] + is_dir = True + else: + is_dir = False + + if ( + "path" in self._properties + and "lastKnownFileType" not in self._properties + and "explicitFileType" not in self._properties + ): + # TODO(mark): This is the replacement for a replacement for a quick hack. + # It is no longer incredibly sucky, but this list needs to be extended. + extension_map = { + "a": "archive.ar", + "app": "wrapper.application", + "bdic": "file", + "bundle": "wrapper.cfbundle", + "c": "sourcecode.c.c", + "cc": "sourcecode.cpp.cpp", + "cpp": "sourcecode.cpp.cpp", + "css": "text.css", + "cxx": "sourcecode.cpp.cpp", + "dart": "sourcecode", + "dylib": "compiled.mach-o.dylib", + "framework": "wrapper.framework", + "gyp": "sourcecode", + "gypi": "sourcecode", + "h": "sourcecode.c.h", + "hxx": "sourcecode.cpp.h", + "icns": "image.icns", + "java": "sourcecode.java", + "js": "sourcecode.javascript", + "kext": "wrapper.kext", + "m": "sourcecode.c.objc", + "mm": "sourcecode.cpp.objcpp", + "nib": "wrapper.nib", + "o": "compiled.mach-o.objfile", + "pdf": "image.pdf", + "pl": "text.script.perl", + "plist": "text.plist.xml", + "pm": "text.script.perl", + "png": "image.png", + "py": "text.script.python", + "r": "sourcecode.rez", + "rez": "sourcecode.rez", + "s": "sourcecode.asm", + "storyboard": "file.storyboard", + "strings": "text.plist.strings", + "swift": "sourcecode.swift", + "ttf": "file", + "xcassets": "folder.assetcatalog", + "xcconfig": "text.xcconfig", + "xcdatamodel": "wrapper.xcdatamodel", + "xcdatamodeld": "wrapper.xcdatamodeld", + "xib": "file.xib", + "y": "sourcecode.yacc", + } + + prop_map = { + "dart": "explicitFileType", + "gyp": "explicitFileType", + "gypi": "explicitFileType", + } + + if is_dir: + file_type = "folder" + prop_name = "lastKnownFileType" + else: + basename = posixpath.basename(self._properties["path"]) + (root, ext) = posixpath.splitext(basename) + # Check the map using a lowercase extension. + # TODO(mark): Maybe it should try with the original case first and fall + # back to lowercase, in case there are any instances where case + # matters. There currently aren't. + if ext != "": + ext = ext[1:].lower() + + # TODO(mark): "text" is the default value, but "file" is appropriate + # for unrecognized files not containing text. Xcode seems to choose + # based on content. + file_type = extension_map.get(ext, "text") + prop_name = prop_map.get(ext, "lastKnownFileType") + + self._properties[prop_name] = file_type + + +class PBXVariantGroup(PBXGroup, XCFileLikeElement): + """PBXVariantGroup is used by Xcode to represent localizations.""" + + # No additions to the schema relative to PBXGroup. + pass + + +# PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined below +# because it uses PBXContainerItemProxy, defined below. 
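+
+# A small illustration of the extension mapping above (assumed usage, not part
+# of the original module):
+#   PBXFileReference({"path": "src/main.mm"}) infers lastKnownFileType
+#   "sourcecode.cpp.objcpp"; PBXFileReference({"path": "app.dart"}) sets
+#   explicitFileType "sourcecode" per prop_map; and a trailing slash, as in
+#   {"path": "Resources/"}, yields the "folder" type.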
+
+
+class XCBuildConfiguration(XCObject):
+    _schema = XCObject._schema.copy()
+    _schema.update(
+        {
+            "baseConfigurationReference": [0, PBXFileReference, 0, 0],
+            "buildSettings": [0, dict, 0, 1, {}],
+            "name": [0, str, 0, 1],
+        }
+    )
+
+    def HasBuildSetting(self, key):
+        return key in self._properties["buildSettings"]
+
+    def GetBuildSetting(self, key):
+        return self._properties["buildSettings"][key]
+
+    def SetBuildSetting(self, key, value):
+        # TODO(mark): If a list, copy?
+        self._properties["buildSettings"][key] = value
+
+    def AppendBuildSetting(self, key, value):
+        if key not in self._properties["buildSettings"]:
+            self._properties["buildSettings"][key] = []
+        self._properties["buildSettings"][key].append(value)
+
+    def DelBuildSetting(self, key):
+        if key in self._properties["buildSettings"]:
+            del self._properties["buildSettings"][key]
+
+    def SetBaseConfiguration(self, value):
+        self._properties["baseConfigurationReference"] = value
+
+
+class XCConfigurationList(XCObject):
+    # _configs is the default list of configurations.
+    _configs = [
+        XCBuildConfiguration({"name": "Debug"}),
+        XCBuildConfiguration({"name": "Release"}),
+    ]
+
+    _schema = XCObject._schema.copy()
+    _schema.update(
+        {
+            "buildConfigurations": [1, XCBuildConfiguration, 1, 1, _configs],
+            "defaultConfigurationIsVisible": [0, int, 0, 1, 1],
+            "defaultConfigurationName": [0, str, 0, 1, "Release"],
+        }
+    )
+
+    def Name(self):
+        return (
+            "Build configuration list for "
+            + self.parent.__class__.__name__
+            + ' "'
+            + self.parent.Name()
+            + '"'
+        )
+
+    def ConfigurationNamed(self, name):
+        """Convenience accessor to obtain an XCBuildConfiguration by name."""
+        for configuration in self._properties["buildConfigurations"]:
+            if configuration._properties["name"] == name:
+                return configuration
+
+        raise KeyError(name)
+
+    def DefaultConfiguration(self):
+        """Convenience accessor to obtain the default XCBuildConfiguration."""
+        return self.ConfigurationNamed(self._properties["defaultConfigurationName"])
+
+    def HasBuildSetting(self, key):
+        """Determines the state of a build setting in all XCBuildConfiguration
+        child objects.
+
+        If all child objects have key in their build settings, and the value is the
+        same in all child objects, returns 1.
+
+        If no child objects have the key in their build settings, returns 0.
+
+        If some, but not all, child objects have the key in their build settings,
+        or if any children have different values for the key, returns -1.
+        """
+
+        has = None
+        value = None
+        for configuration in self._properties["buildConfigurations"]:
+            configuration_has = configuration.HasBuildSetting(key)
+            if has is None:
+                has = configuration_has
+            elif has != configuration_has:
+                return -1
+
+            if configuration_has:
+                configuration_value = configuration.GetBuildSetting(key)
+                if value is None:
+                    value = configuration_value
+                elif value != configuration_value:
+                    return -1
+
+        if not has:
+            return 0
+
+        return 1
+
+    def GetBuildSetting(self, key):
+        """Gets the build setting for key.
+
+        All child XCBuildConfiguration objects must have the same value set for the
+        setting, or a ValueError will be raised.
+        """
+
+        # TODO(mark): This is wrong for build settings that are lists. The list
+        # contents should be compared (and a list copy returned?)
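+        #
+        # Contract sketch (hypothetical configurations): if Debug and Release
+        # both set GCC_OPTIMIZATION_LEVEL to "0", "0" is returned; if the two
+        # configurations disagree, the loop below raises ValueError.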
+
+        value = None
+        for configuration in self._properties["buildConfigurations"]:
+            configuration_value = configuration.GetBuildSetting(key)
+            if value is None:
+                value = configuration_value
+            else:
+                if value != configuration_value:
+                    raise ValueError("Variant values for " + key)
+
+        return value
+
+    def SetBuildSetting(self, key, value):
+        """Sets the build setting for key to value in all child
+        XCBuildConfiguration objects.
+        """
+
+        for configuration in self._properties["buildConfigurations"]:
+            configuration.SetBuildSetting(key, value)
+
+    def AppendBuildSetting(self, key, value):
+        """Appends value to the build setting for key, which is treated as a list,
+        in all child XCBuildConfiguration objects.
+        """
+
+        for configuration in self._properties["buildConfigurations"]:
+            configuration.AppendBuildSetting(key, value)
+
+    def DelBuildSetting(self, key):
+        """Deletes the build setting key from all child XCBuildConfiguration
+        objects.
+        """
+
+        for configuration in self._properties["buildConfigurations"]:
+            configuration.DelBuildSetting(key)
+
+    def SetBaseConfiguration(self, value):
+        """Sets the baseConfigurationReference in all child XCBuildConfiguration
+        objects.
+        """
+
+        for configuration in self._properties["buildConfigurations"]:
+            configuration.SetBaseConfiguration(value)
+
+
+class PBXBuildFile(XCObject):
+    _schema = XCObject._schema.copy()
+    _schema.update(
+        {
+            "fileRef": [0, XCFileLikeElement, 0, 1],
+            "settings": [0, str, 0, 0],  # hack, it's a dict
+        }
+    )
+
+    # Weird output rules for PBXBuildFile.
+    _should_print_single_line = True
+    _encode_transforms = XCObject._alternate_encode_transforms
+
+    def Name(self):
+        # Example: "main.cc in Sources"
+        return self._properties["fileRef"].Name() + " in " + self.parent.Name()
+
+    def Hashables(self):
+        # super
+        hashables = XCObject.Hashables(self)
+
+        # It is not sufficient to just rely on Name() to get the
+        # XCFileLikeElement's name, because that is not a complete pathname.
+        # PathHashables returns hashables unique enough that no two
+        # PBXBuildFiles should wind up with the same set of hashables, unless
+        # someone adds the same file multiple times to the same target. That
+        # would be considered invalid anyway.
+        hashables.extend(self._properties["fileRef"].PathHashables())
+
+        return hashables
+
+
+class XCBuildPhase(XCObject):
+    """Abstract base for build phase classes. Not represented in a project
+    file.
+
+    Attributes:
+      _files_by_path: A dict mapping the path of each child in the files list
+        (keys) to the corresponding PBXBuildFile children (values).
+      _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
+        to the corresponding PBXBuildFile children (values).
+    """
+
+    # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
+    # actually have a "files" list. XCBuildPhase should not have "files" but
+    # another abstract subclass of it should provide this, and concrete build
+    # phase types that do have "files" lists should be derived from that new
+    # abstract subclass. XCBuildPhase should only provide buildActionMask and
+    # runOnlyForDeploymentPostprocessing, and not files or the various
+    # file-related methods and attributes.
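+
+    # A usage sketch for the concrete subclasses below (names assumed for
+    # illustration): callers normally reach a phase through an accessor such as
+    #   target.SourcesPhase().AddFile("src/foo.cc")
+    # AddFile picks a destination PBXGroup via FileGroup(), reuses or creates
+    # the PBXFileReference, and wraps it in a PBXBuildFile on this phase.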
+ + _schema = XCObject._schema.copy() + _schema.update( + { + "buildActionMask": [0, int, 0, 1, 0x7FFFFFFF], + "files": [1, PBXBuildFile, 1, 1, []], + "runOnlyForDeploymentPostprocessing": [0, int, 0, 1, 0], + } + ) + + def __init__(self, properties=None, id=None, parent=None): + # super + XCObject.__init__(self, properties, id, parent) + + self._files_by_path = {} + self._files_by_xcfilelikeelement = {} + for pbxbuildfile in self._properties.get("files", []): + self._AddBuildFileToDicts(pbxbuildfile) + + def FileGroup(self, path): + # Subclasses must override this by returning a two-element tuple. The + # first item in the tuple should be the PBXGroup to which "path" should be + # added, either as a child or deeper descendant. The second item should + # be a boolean indicating whether files should be added into hierarchical + # groups or one single flat group. + raise NotImplementedError(self.__class__.__name__ + " must implement FileGroup") + + def _AddPathToDict(self, pbxbuildfile, path): + """Adds path to the dict tracking paths belonging to this build phase. + + If the path is already a member of this build phase, raises an exception. + """ + + if path in self._files_by_path: + raise ValueError("Found multiple build files with path " + path) + self._files_by_path[path] = pbxbuildfile + + def _AddBuildFileToDicts(self, pbxbuildfile, path=None): + """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts. + + If path is specified, then it is the path that is being added to the + phase, and pbxbuildfile must contain either a PBXFileReference directly + referencing that path, or it must contain a PBXVariantGroup that itself + contains a PBXFileReference referencing the path. + + If path is not specified, either the PBXFileReference's path or the paths + of all children of the PBXVariantGroup are taken as being added to the + phase. + + If the path is already present in the phase, raises an exception. + + If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile + are already present in the phase, referenced by a different PBXBuildFile + object, raises an exception. This does not raise an exception when + a PBXFileReference or PBXVariantGroup reappear and are referenced by the + same PBXBuildFile that has already introduced them, because in the case + of PBXVariantGroup objects, they may correspond to multiple paths that are + not all added simultaneously. When this situation occurs, the path needs + to be added to _files_by_path, but nothing needs to change in + _files_by_xcfilelikeelement, and the caller should have avoided adding + the PBXBuildFile if it is already present in the list of children. + """ + + xcfilelikeelement = pbxbuildfile._properties["fileRef"] + + paths = [] + if path is not None: + # It's best when the caller provides the path. + if isinstance(xcfilelikeelement, PBXVariantGroup): + paths.append(path) + else: + # If the caller didn't provide a path, there can be either multiple + # paths (PBXVariantGroup) or one. + if isinstance(xcfilelikeelement, PBXVariantGroup): + for variant in xcfilelikeelement._properties["children"]: + paths.append(variant.FullPath()) + else: + paths.append(xcfilelikeelement.FullPath()) + + # Add the paths first, because if something's going to raise, the + # messages provided by _AddPathToDict are more useful owing to its + # having access to a real pathname and not just an object's Name(). 
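+        #
+        # For instance (hypothetical state): adding one PBXVariantGroup twice,
+        # first with path "fr.lproj/App.nib" and then with "de.lproj/App.nib",
+        # records both paths in _files_by_path while
+        # _files_by_xcfilelikeelement still maps the group to a single
+        # PBXBuildFile.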
+ for a_path in paths: + self._AddPathToDict(pbxbuildfile, a_path) + + # If another PBXBuildFile references this XCFileLikeElement, there's a + # problem. + if ( + xcfilelikeelement in self._files_by_xcfilelikeelement + and self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile + ): + raise ValueError( + "Found multiple build files for " + xcfilelikeelement.Name() + ) + self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile + + def AppendBuildFile(self, pbxbuildfile, path=None): + # Callers should use this instead of calling + # AppendProperty('files', pbxbuildfile) directly because this function + # maintains the object's dicts. Better yet, callers can just call AddFile + # with a pathname and not worry about building their own PBXBuildFile + # objects. + self.AppendProperty("files", pbxbuildfile) + self._AddBuildFileToDicts(pbxbuildfile, path) + + def AddFile(self, path, settings=None): + (file_group, hierarchical) = self.FileGroup(path) + file_ref = file_group.AddOrGetFileByPath(path, hierarchical) + + if file_ref in self._files_by_xcfilelikeelement and isinstance( + file_ref, PBXVariantGroup + ): + # There's already a PBXBuildFile in this phase corresponding to the + # PBXVariantGroup. path just provides a new variant that belongs to + # the group. Add the path to the dict. + pbxbuildfile = self._files_by_xcfilelikeelement[file_ref] + self._AddBuildFileToDicts(pbxbuildfile, path) + else: + # Add a new PBXBuildFile to get file_ref into the phase. + if settings is None: + pbxbuildfile = PBXBuildFile({"fileRef": file_ref}) + else: + pbxbuildfile = PBXBuildFile({"fileRef": file_ref, "settings": settings}) + self.AppendBuildFile(pbxbuildfile, path) + + +class PBXHeadersBuildPhase(XCBuildPhase): + # No additions to the schema relative to XCBuildPhase. + + def Name(self): + return "Headers" + + def FileGroup(self, path): + return self.PBXProjectAncestor().RootGroupForPath(path) + + +class PBXResourcesBuildPhase(XCBuildPhase): + # No additions to the schema relative to XCBuildPhase. + + def Name(self): + return "Resources" + + def FileGroup(self, path): + return self.PBXProjectAncestor().RootGroupForPath(path) + + +class PBXSourcesBuildPhase(XCBuildPhase): + # No additions to the schema relative to XCBuildPhase. + + def Name(self): + return "Sources" + + def FileGroup(self, path): + return self.PBXProjectAncestor().RootGroupForPath(path) + + +class PBXFrameworksBuildPhase(XCBuildPhase): + # No additions to the schema relative to XCBuildPhase. + + def Name(self): + return "Frameworks" + + def FileGroup(self, path): + (root, ext) = posixpath.splitext(path) + if ext != "": + ext = ext[1:].lower() + if ext == "o": + # .o files are added to Xcode Frameworks phases, but conceptually aren't + # frameworks, they're more like sources or intermediates. Redirect them + # to show up in one of those other groups. 
+            return self.PBXProjectAncestor().RootGroupForPath(path)
+        else:
+            return (self.PBXProjectAncestor().FrameworksGroup(), False)
+
+
+class PBXShellScriptBuildPhase(XCBuildPhase):
+    _schema = XCBuildPhase._schema.copy()
+    _schema.update(
+        {
+            "inputPaths": [1, str, 0, 1, []],
+            "name": [0, str, 0, 0],
+            "outputPaths": [1, str, 0, 1, []],
+            "shellPath": [0, str, 0, 1, "/bin/sh"],
+            "shellScript": [0, str, 0, 1],
+            "showEnvVarsInLog": [0, int, 0, 0],
+        }
+    )
+
+    def Name(self):
+        if "name" in self._properties:
+            return self._properties["name"]
+
+        return "ShellScript"
+
+
+class PBXCopyFilesBuildPhase(XCBuildPhase):
+    _schema = XCBuildPhase._schema.copy()
+    _schema.update(
+        {
+            "dstPath": [0, str, 0, 1],
+            "dstSubfolderSpec": [0, int, 0, 1],
+            "name": [0, str, 0, 0],
+        }
+    )
+
+    # path_tree_re matches "$(DIR)/path", "$(DIR)/$(DIR2)/path" or just "$(DIR)".
+    # Match group 1 is "DIR", group 3 is "path" or "$(DIR2)" or "$(DIR2)/path"
+    # or None. If group 3 is "path", group 4 will be None; otherwise group 4 is
+    # "DIR2" and group 6 is "path".
+    path_tree_re = re.compile(r"^\$\((.*?)\)(/(\$\((.*?)\)(/(.*)|)|(.*)|)|)$")
+
+    # path_tree_{first,second}_to_subfolder map names of Xcode variables to the
+    # associated dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase
+    # object.
+    path_tree_first_to_subfolder = {
+        # Types that can be chosen via the Xcode UI.
+        "BUILT_PRODUCTS_DIR": 16,  # Products Directory
+        "BUILT_FRAMEWORKS_DIR": 10,  # Not an official Xcode macro.
+        # Existed before support for the
+        # names below was added. Maps to
+        # "Frameworks".
+    }
+
+    path_tree_second_to_subfolder = {
+        "WRAPPER_NAME": 1,  # Wrapper
+        # Although Xcode's friendly name is "Executables", the destination
+        # is demonstrably the value of the build setting
+        # EXECUTABLE_FOLDER_PATH not EXECUTABLES_FOLDER_PATH.
+        "EXECUTABLE_FOLDER_PATH": 6,  # Executables.
+        "UNLOCALIZED_RESOURCES_FOLDER_PATH": 7,  # Resources
+        "JAVA_FOLDER_PATH": 15,  # Java Resources
+        "FRAMEWORKS_FOLDER_PATH": 10,  # Frameworks
+        "SHARED_FRAMEWORKS_FOLDER_PATH": 11,  # Shared Frameworks
+        "SHARED_SUPPORT_FOLDER_PATH": 12,  # Shared Support
+        "PLUGINS_FOLDER_PATH": 13,  # PlugIns
+        # For XPC Services, Xcode sets both dstPath and dstSubfolderSpec.
+        # Note that it re-uses the BUILT_PRODUCTS_DIR value for
+        # dstSubfolderSpec. dstPath is set below.
+        "XPCSERVICES_FOLDER_PATH": 16,  # XPC Services.
+    }
+
+    def Name(self):
+        if "name" in self._properties:
+            return self._properties["name"]
+
+        return "CopyFiles"
+
+    def FileGroup(self, path):
+        return self.PBXProjectAncestor().RootGroupForPath(path)
+
+    def SetDestination(self, path):
+        """Set the dstSubfolderSpec and dstPath properties from path.
+
+        path may be specified in the same notation used for XCHierarchicalElements,
+        specifically, "$(DIR)/path".
+        """
+
+        path_tree_match = self.path_tree_re.search(path)
+        if path_tree_match:
+            path_tree = path_tree_match.group(1)
+            if path_tree in self.path_tree_first_to_subfolder:
+                subfolder = self.path_tree_first_to_subfolder[path_tree]
+                relative_path = path_tree_match.group(3)
+                if relative_path is None:
+                    relative_path = ""
+
+                if subfolder == 16 and path_tree_match.group(4) is not None:
+                    # BUILT_PRODUCTS_DIR (16) is the first element in a path whose
+                    # second element is possibly one of the variable names in
+                    # path_tree_second_to_subfolder. Xcode sets the values of all these
+                    # variables to relative paths so .gyp files must prefix them with
+                    # BUILT_PRODUCTS_DIR, e.g.
+                    # $(BUILT_PRODUCTS_DIR)/$(PLUGINS_FOLDER_PATH).
Then + # xcode_emulation.py can export these variables with the same values + # as Xcode yet make & ninja files can determine the absolute path + # to the target. Xcode uses the dstSubfolderSpec value set here + # to determine the full path. + # + # An alternative of xcode_emulation.py setting the values to + # absolute paths when exporting these variables has been + # ruled out because then the values would be different + # depending on the build tool. + # + # Another alternative is to invent new names for the variables used + # to match to the subfolder indices in the second table. .gyp files + # then will not need to prepend $(BUILT_PRODUCTS_DIR) because + # xcode_emulation.py can set the values of those variables to + # the absolute paths when exporting. This is possibly the thinking + # behind BUILT_FRAMEWORKS_DIR which is used in exactly this manner. + # + # Requiring prepending BUILT_PRODUCTS_DIR has been chosen because + # this same way could be used to specify destinations in .gyp files + # that pre-date this addition to GYP. However they would only work + # with the Xcode generator. + # The previous version of xcode_emulation.py + # does not export these variables. Such files will get the benefit + # of the Xcode UI showing the proper destination name simply by + # regenerating the projects with this version of GYP. + path_tree = path_tree_match.group(4) + relative_path = path_tree_match.group(6) + separator = "/" + + if path_tree in self.path_tree_second_to_subfolder: + subfolder = self.path_tree_second_to_subfolder[path_tree] + if relative_path is None: + relative_path = "" + separator = "" + if path_tree == "XPCSERVICES_FOLDER_PATH": + relative_path = ( + "$(CONTENTS_FOLDER_PATH)/XPCServices" + + separator + + relative_path + ) + else: + # subfolder = 16 from above + # The second element of the path is an unrecognized variable. + # Include it and any remaining elements in relative_path. + relative_path = path_tree_match.group(3) + + else: + # The path starts with an unrecognized Xcode variable + # name like $(SRCROOT). Xcode will still handle this + # as an "absolute path" that starts with the variable. + subfolder = 0 + relative_path = path + elif path.startswith("/"): + # Special case. Absolute paths are in dstSubfolderSpec 0. + subfolder = 0 + relative_path = path[1:] + else: + raise ValueError( + f"Can't use path {path} in a {self.__class__.__name__}" + ) + + self._properties["dstPath"] = relative_path + self._properties["dstSubfolderSpec"] = subfolder + + +class PBXBuildRule(XCObject): + _schema = XCObject._schema.copy() + _schema.update( + { + "compilerSpec": [0, str, 0, 1], + "filePatterns": [0, str, 0, 0], + "fileType": [0, str, 0, 1], + "isEditable": [0, int, 0, 1, 1], + "outputFiles": [1, str, 0, 1, []], + "script": [0, str, 0, 0], + } + ) + + def Name(self): + # Not very inspired, but it's what Xcode uses. + return self.__class__.__name__ + + def Hashables(self): + # super + hashables = XCObject.Hashables(self) + + # Use the hashables of the weak objects that this object refers to. + hashables.append(self._properties["fileType"]) + if "filePatterns" in self._properties: + hashables.append(self._properties["filePatterns"]) + return hashables + + +class PBXContainerItemProxy(XCObject): + # When referencing an item in this project file, containerPortal is the + # PBXProject root object of this project file. When referencing an item in + # another project file, containerPortal is a PBXFileReference identifying + # the other project file. 
+    #
+    # When serving as a proxy to an XCTarget (in this project file or another),
+    # proxyType is 1. When serving as a proxy to a PBXFileReference (in another
+    # project file), proxyType is 2. Type 2 is used for references to the
+    # products of the other project file's targets.
+    #
+    # Xcode is weird about remoteGlobalIDString. Usually, it's printed without
+    # a comment, indicating that it's tracked internally simply as a string, but
+    # sometimes it's printed with a comment (usually when the object is initially
+    # created), indicating that it's tracked as a project file object at least
+    # sometimes. This module always tracks it as an object, but contains a hack
+    # to prevent it from printing the comment in the project file output. See
+    # _XCKVPrint.
+    _schema = XCObject._schema.copy()
+    _schema.update(
+        {
+            "containerPortal": [0, XCContainerPortal, 0, 1],
+            "proxyType": [0, int, 0, 1],
+            "remoteGlobalIDString": [0, XCRemoteObject, 0, 1],
+            "remoteInfo": [0, str, 0, 1],
+        }
+    )
+
+    def __repr__(self):
+        props = self._properties
+        name = "{}.gyp:{}".format(props["containerPortal"].Name(), props["remoteInfo"])
+        return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>"
+
+    def Name(self):
+        # Admittedly not the best name, but it's what Xcode uses.
+        return self.__class__.__name__
+
+    def Hashables(self):
+        # super
+        hashables = XCObject.Hashables(self)
+
+        # Use the hashables of the weak objects that this object refers to.
+        hashables.extend(self._properties["containerPortal"].Hashables())
+        hashables.extend(self._properties["remoteGlobalIDString"].Hashables())
+        return hashables
+
+
+class PBXTargetDependency(XCObject):
+    # The "target" property accepts an XCTarget object, and obviously not
+    # NoneType. But XCTarget is defined below, so it can't be put into the
+    # schema yet. The definition of PBXTargetDependency can't be moved below
+    # XCTarget because XCTarget's own schema references PBXTargetDependency.
+    # Python doesn't deal well with this circular relationship, and doesn't have
+    # a real way to do forward declarations. To work around, the type of
+    # the "target" property is reset below, after XCTarget is defined.
+    #
+    # At least one of "name" and "target" is required.
+    _schema = XCObject._schema.copy()
+    _schema.update(
+        {
+            "name": [0, str, 0, 0],
+            "target": [0, None.__class__, 0, 0],
+            "targetProxy": [0, PBXContainerItemProxy, 1, 1],
+        }
+    )
+
+    def __repr__(self):
+        name = self._properties.get("name") or self._properties["target"].Name()
+        return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>"
+
+    def Name(self):
+        # Admittedly not the best name, but it's what Xcode uses.
+        return self.__class__.__name__
+
+    def Hashables(self):
+        # super
+        hashables = XCObject.Hashables(self)
+
+        # Use the hashables of the weak objects that this object refers to.
+        hashables.extend(self._properties["targetProxy"].Hashables())
+        return hashables
+
+
+class PBXReferenceProxy(XCFileLikeElement):
+    _schema = XCFileLikeElement._schema.copy()
+    _schema.update(
+        {
+            "fileType": [0, str, 0, 1],
+            "path": [0, str, 0, 1],
+            "remoteRef": [0, PBXContainerItemProxy, 1, 1],
+        }
+    )
+
+
+class XCTarget(XCRemoteObject):
+    # An XCTarget is really just an XCObject, the XCRemoteObject thing is just
+    # to allow an XCTarget to be used in the remoteGlobalIDString property of
+    # PBXContainerItemProxy.
+ # + # Setting a "name" property at instantiation may also affect "productName", + # which may in turn affect the "PRODUCT_NAME" build setting in children of + # "buildConfigurationList". See __init__ below. + _schema = XCRemoteObject._schema.copy() + _schema.update( + { + "buildConfigurationList": [ + 0, + XCConfigurationList, + 1, + 1, + XCConfigurationList(), + ], + "buildPhases": [1, XCBuildPhase, 1, 1, []], + "dependencies": [1, PBXTargetDependency, 1, 1, []], + "name": [0, str, 0, 1], + "productName": [0, str, 0, 1], + } + ) + + def __init__( + self, + properties=None, + id=None, + parent=None, + force_outdir=None, + force_prefix=None, + force_extension=None, + ): + # super + XCRemoteObject.__init__(self, properties, id, parent) + + # Set up additional defaults not expressed in the schema. If a "name" + # property was supplied, set "productName" if it is not present. Also set + # the "PRODUCT_NAME" build setting in each configuration, but only if + # the setting is not present in any build configuration. + if "name" in self._properties: + if "productName" not in self._properties: + self.SetProperty("productName", self._properties["name"]) + + if "productName" in self._properties: + if "buildConfigurationList" in self._properties: + configs = self._properties["buildConfigurationList"] + if configs.HasBuildSetting("PRODUCT_NAME") == 0: + configs.SetBuildSetting( + "PRODUCT_NAME", self._properties["productName"] + ) + + def AddDependency(self, other): + pbxproject = self.PBXProjectAncestor() + other_pbxproject = other.PBXProjectAncestor() + if pbxproject == other_pbxproject: + # Add a dependency to another target in the same project file. + container = PBXContainerItemProxy( + { + "containerPortal": pbxproject, + "proxyType": 1, + "remoteGlobalIDString": other, + "remoteInfo": other.Name(), + } + ) + dependency = PBXTargetDependency( + {"target": other, "targetProxy": container} + ) + self.AppendProperty("dependencies", dependency) + else: + # Add a dependency to a target in a different project file. + other_project_ref = pbxproject.AddOrGetProjectReference(other_pbxproject)[1] + container = PBXContainerItemProxy( + { + "containerPortal": other_project_ref, + "proxyType": 1, + "remoteGlobalIDString": other, + "remoteInfo": other.Name(), + } + ) + dependency = PBXTargetDependency( + {"name": other.Name(), "targetProxy": container} + ) + self.AppendProperty("dependencies", dependency) + + # Proxy all of these through to the build configuration list. + + def ConfigurationNamed(self, name): + return self._properties["buildConfigurationList"].ConfigurationNamed(name) + + def DefaultConfiguration(self): + return self._properties["buildConfigurationList"].DefaultConfiguration() + + def HasBuildSetting(self, key): + return self._properties["buildConfigurationList"].HasBuildSetting(key) + + def GetBuildSetting(self, key): + return self._properties["buildConfigurationList"].GetBuildSetting(key) + + def SetBuildSetting(self, key, value): + return self._properties["buildConfigurationList"].SetBuildSetting(key, value) + + def AppendBuildSetting(self, key, value): + return self._properties["buildConfigurationList"].AppendBuildSetting(key, value) + + def DelBuildSetting(self, key): + return self._properties["buildConfigurationList"].DelBuildSetting(key) + + +# Redefine the type of the "target" property. See PBXTargetDependency._schema +# above. 
+PBXTargetDependency._schema["target"][1] = XCTarget + + +class PBXNativeTarget(XCTarget): + # buildPhases is overridden in the schema to be able to set defaults. + # + # NOTE: Contrary to most objects, it is advisable to set parent when + # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject + # object. A parent reference is required for a PBXNativeTarget during + # construction to be able to set up the target defaults for productReference, + # because a PBXBuildFile object must be created for the target and it must + # be added to the PBXProject's mainGroup hierarchy. + _schema = XCTarget._schema.copy() + _schema.update( + { + "buildPhases": [ + 1, + XCBuildPhase, + 1, + 1, + [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()], + ], + "buildRules": [1, PBXBuildRule, 1, 1, []], + "productReference": [0, PBXFileReference, 0, 1], + "productType": [0, str, 0, 1], + } + ) + + # Mapping from Xcode product-types to settings. The settings are: + # filetype : used for explicitFileType in the project file + # prefix : the prefix for the file name + # suffix : the suffix for the file name + _product_filetypes = { + "com.apple.product-type.application": ["wrapper.application", "", ".app"], + "com.apple.product-type.application.watchapp": [ + "wrapper.application", + "", + ".app", + ], + "com.apple.product-type.watchkit-extension": [ + "wrapper.app-extension", + "", + ".appex", + ], + "com.apple.product-type.app-extension": ["wrapper.app-extension", "", ".appex"], + "com.apple.product-type.bundle": ["wrapper.cfbundle", "", ".bundle"], + "com.apple.product-type.framework": ["wrapper.framework", "", ".framework"], + "com.apple.product-type.library.dynamic": [ + "compiled.mach-o.dylib", + "lib", + ".dylib", + ], + "com.apple.product-type.library.static": ["archive.ar", "lib", ".a"], + "com.apple.product-type.tool": ["compiled.mach-o.executable", "", ""], + "com.apple.product-type.bundle.unit-test": ["wrapper.cfbundle", "", ".xctest"], + "com.apple.product-type.bundle.ui-testing": ["wrapper.cfbundle", "", ".xctest"], + "com.googlecode.gyp.xcode.bundle": ["compiled.mach-o.dylib", "", ".so"], + "com.apple.product-type.kernel-extension": ["wrapper.kext", "", ".kext"], + } + + def __init__( + self, + properties=None, + id=None, + parent=None, + force_outdir=None, + force_prefix=None, + force_extension=None, + ): + # super + XCTarget.__init__(self, properties, id, parent) + + if ( + "productName" in self._properties + and "productType" in self._properties + and "productReference" not in self._properties + and self._properties["productType"] in self._product_filetypes + ): + products_group = None + pbxproject = self.PBXProjectAncestor() + if pbxproject is not None: + products_group = pbxproject.ProductsGroup() + + if products_group is not None: + (filetype, prefix, suffix) = self._product_filetypes[ + self._properties["productType"] + ] + # Xcode does not have a distinct type for loadable modules that are + # pure BSD targets (not in a bundle wrapper). GYP allows such modules + # to be specified by setting a target type to loadable_module without + # having mac_bundle set. These are mapped to the pseudo-product type + # com.googlecode.gyp.xcode.bundle. + # + # By picking up this special type and converting it to a dynamic + # library (com.apple.product-type.library.dynamic) with fix-ups, + # single-file loadable modules can be produced. + # + # MACH_O_TYPE is changed to mh_bundle to produce the proper file type + # (as opposed to mh_dylib). 
In order for linking to succeed,
+                # DYLIB_CURRENT_VERSION and DYLIB_COMPATIBILITY_VERSION must be
+                # cleared. They are meaningless for type mh_bundle.
+                #
+                # Finally, the .so extension is forcibly applied over the default
+                # (.dylib), unless another forced extension is already selected.
+                # .dylib is plainly wrong, and .bundle is used by loadable_modules in
+                # bundle wrappers (com.apple.product-type.bundle). .so seems an odd
+                # choice because it's used as the extension on many other systems that
+                # don't distinguish between linkable shared libraries and non-linkable
+                # loadable modules, but there's precedent: Python loadable modules on
+                # Mac OS X use an .so extension.
+                if self._properties["productType"] == "com.googlecode.gyp.xcode.bundle":
+                    self._properties[
+                        "productType"
+                    ] = "com.apple.product-type.library.dynamic"
+                    self.SetBuildSetting("MACH_O_TYPE", "mh_bundle")
+                    self.SetBuildSetting("DYLIB_CURRENT_VERSION", "")
+                    self.SetBuildSetting("DYLIB_COMPATIBILITY_VERSION", "")
+                    if force_extension is None:
+                        force_extension = suffix[1:]
+
+                if (
+                    self._properties["productType"]
+                    == "com.apple.product-type.bundle.unit-test"
+                    or self._properties["productType"]
+                    == "com.apple.product-type.bundle.ui-testing"
+                ):
+                    if force_extension is None:
+                        force_extension = suffix[1:]
+
+                if force_extension is not None:
+                    # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
+                    # Extension override.
+                    suffix = "." + force_extension
+                    if filetype.startswith("wrapper."):
+                        self.SetBuildSetting("WRAPPER_EXTENSION", force_extension)
+                    else:
+                        self.SetBuildSetting("EXECUTABLE_EXTENSION", force_extension)
+
+                    if filetype.startswith("compiled.mach-o.executable"):
+                        product_name = self._properties["productName"]
+                        product_name += suffix
+                        suffix = ""
+                        self.SetProperty("productName", product_name)
+                        self.SetBuildSetting("PRODUCT_NAME", product_name)
+
+                # Xcode handles most prefixes based on the target type, however there
+                # are exceptions. If a "BSD Dynamic Library" target is added in the
+                # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that
+                # behavior.
+                if force_prefix is not None:
+                    prefix = force_prefix
+                if filetype.startswith("wrapper."):
+                    self.SetBuildSetting("WRAPPER_PREFIX", prefix)
+                else:
+                    self.SetBuildSetting("EXECUTABLE_PREFIX", prefix)
+
+                if force_outdir is not None:
+                    self.SetBuildSetting("TARGET_BUILD_DIR", force_outdir)
+
+                # TODO(tvl): Remove the below hack.
+                # http://code.google.com/p/gyp/issues/detail?id=122
+
+                # Some targets include the prefix in the target_name. These targets
+                # really should just add a product_name setting that doesn't include
+                # the prefix. For example:
+                #   target_name = 'libevent', product_name = 'event'
+                # This check cleans up for them.
+                product_name = self._properties["productName"]
+                prefix_len = len(prefix)
+                if prefix_len and (product_name[:prefix_len] == prefix):
+                    product_name = product_name[prefix_len:]
+                    self.SetProperty("productName", product_name)
+                    self.SetBuildSetting("PRODUCT_NAME", product_name)
+
+                ref_props = {
+                    "explicitFileType": filetype,
+                    "includeInIndex": 0,
+                    "path": prefix + product_name + suffix,
+                    "sourceTree": "BUILT_PRODUCTS_DIR",
+                }
+                file_ref = PBXFileReference(ref_props)
+                products_group.AppendChild(file_ref)
+                self.SetProperty("productReference", file_ref)
+
+    def GetBuildPhaseByType(self, type):
+        if "buildPhases" not in self._properties:
+            return None
+
+        the_phase = None
+        for phase in self._properties["buildPhases"]:
+            if isinstance(phase, type):
+                # Some phases may be present in multiples in a well-formed project
+                # file, but phases like PBXSourcesBuildPhase may only be present
+                # singly, and this function is intended as an aid to the singular
+                # phase accessors below (HeadersPhase, ResourcesPhase,
+                # SourcesPhase, and FrameworksPhase). Loop over the entire list
+                # of phases and assert if more than one of the desired type is
+                # found.
+                assert the_phase is None
+                the_phase = phase
+
+        return the_phase
+
+    def HeadersPhase(self):
+        headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase)
+        if headers_phase is None:
+            headers_phase = PBXHeadersBuildPhase()
+
+            # The headers phase should come before the resources, sources, and
+            # frameworks phases, if any.
+            insert_at = len(self._properties["buildPhases"])
+            for index, phase in enumerate(self._properties["buildPhases"]):
+                if (
+                    isinstance(phase, PBXResourcesBuildPhase)
+                    or isinstance(phase, PBXSourcesBuildPhase)
+                    or isinstance(phase, PBXFrameworksBuildPhase)
+                ):
+                    insert_at = index
+                    break
+
+            self._properties["buildPhases"].insert(insert_at, headers_phase)
+            headers_phase.parent = self
+
+        return headers_phase
+
+    def ResourcesPhase(self):
+        resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase)
+        if resources_phase is None:
+            resources_phase = PBXResourcesBuildPhase()
+
+            # The resources phase should come before the sources and frameworks
+            # phases, if any.
+ insert_at = len(self._properties["buildPhases"]) + for index, phase in enumerate(self._properties["buildPhases"]): + if isinstance(phase, PBXSourcesBuildPhase) or isinstance( + phase, PBXFrameworksBuildPhase + ): + insert_at = index + break + + self._properties["buildPhases"].insert(insert_at, resources_phase) + resources_phase.parent = self + + return resources_phase + + def SourcesPhase(self): + sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase) + if sources_phase is None: + sources_phase = PBXSourcesBuildPhase() + self.AppendProperty("buildPhases", sources_phase) + + return sources_phase + + def FrameworksPhase(self): + frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase) + if frameworks_phase is None: + frameworks_phase = PBXFrameworksBuildPhase() + self.AppendProperty("buildPhases", frameworks_phase) + + return frameworks_phase + + def AddDependency(self, other): + # super + XCTarget.AddDependency(self, other) + + static_library_type = "com.apple.product-type.library.static" + shared_library_type = "com.apple.product-type.library.dynamic" + framework_type = "com.apple.product-type.framework" + if ( + isinstance(other, PBXNativeTarget) + and "productType" in self._properties + and self._properties["productType"] != static_library_type + and "productType" in other._properties + and ( + other._properties["productType"] == static_library_type + or ( + ( + other._properties["productType"] == shared_library_type + or other._properties["productType"] == framework_type + ) + and ( + (not other.HasBuildSetting("MACH_O_TYPE")) + or other.GetBuildSetting("MACH_O_TYPE") != "mh_bundle" + ) + ) + ) + ): + + file_ref = other.GetProperty("productReference") + + pbxproject = self.PBXProjectAncestor() + other_pbxproject = other.PBXProjectAncestor() + if pbxproject != other_pbxproject: + other_project_product_group = pbxproject.AddOrGetProjectReference( + other_pbxproject + )[0] + file_ref = other_project_product_group.GetChildByRemoteObject(file_ref) + + self.FrameworksPhase().AppendProperty( + "files", PBXBuildFile({"fileRef": file_ref}) + ) + + +class PBXAggregateTarget(XCTarget): + pass + + +class PBXProject(XCContainerPortal): + # A PBXProject is really just an XCObject, the XCContainerPortal thing is + # just to allow PBXProject to be used in the containerPortal property of + # PBXContainerItemProxy. + """ + + Attributes: + path: "sample.xcodeproj". TODO(mark) Document me! + _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each + value is a reference to the dict in the + projectReferences list associated with the keyed + PBXProject. 
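+
+    Example (illustrative only; the names are not from the original sources):
+      project = PBXProject(path="sample.xcodeproj")
+      project.SourceGroup()    # creates or returns the "Source" PBXGroup
+      project.ProductsGroup()  # creates or returns the "Products" PBXGroup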
+ """ + + _schema = XCContainerPortal._schema.copy() + _schema.update( + { + "attributes": [0, dict, 0, 0], + "buildConfigurationList": [ + 0, + XCConfigurationList, + 1, + 1, + XCConfigurationList(), + ], + "compatibilityVersion": [0, str, 0, 1, "Xcode 3.2"], + "hasScannedForEncodings": [0, int, 0, 1, 1], + "mainGroup": [0, PBXGroup, 1, 1, PBXGroup()], + "projectDirPath": [0, str, 0, 1, ""], + "projectReferences": [1, dict, 0, 0], + "projectRoot": [0, str, 0, 1, ""], + "targets": [1, XCTarget, 1, 1, []], + } + ) + + def __init__(self, properties=None, id=None, parent=None, path=None): + self.path = path + self._other_pbxprojects = {} + # super + return XCContainerPortal.__init__(self, properties, id, parent) + + def Name(self): + name = self.path + if name[-10:] == ".xcodeproj": + name = name[:-10] + return posixpath.basename(name) + + def Path(self): + return self.path + + def Comment(self): + return "Project object" + + def Children(self): + # super + children = XCContainerPortal.Children(self) + + # Add children that the schema doesn't know about. Maybe there's a more + # elegant way around this, but this is the only case where we need to own + # objects in a dictionary (that is itself in a list), and three lines for + # a one-off isn't that big a deal. + if "projectReferences" in self._properties: + for reference in self._properties["projectReferences"]: + children.append(reference["ProductGroup"]) + + return children + + def PBXProjectAncestor(self): + return self + + def _GroupByName(self, name): + if "mainGroup" not in self._properties: + self.SetProperty("mainGroup", PBXGroup()) + + main_group = self._properties["mainGroup"] + group = main_group.GetChildByName(name) + if group is None: + group = PBXGroup({"name": name}) + main_group.AppendChild(group) + + return group + + # SourceGroup and ProductsGroup are created by default in Xcode's own + # templates. + def SourceGroup(self): + return self._GroupByName("Source") + + def ProductsGroup(self): + return self._GroupByName("Products") + + # IntermediatesGroup is used to collect source-like files that are generated + # by rules or script phases and are placed in intermediate directories such + # as DerivedSources. + def IntermediatesGroup(self): + return self._GroupByName("Intermediates") + + # FrameworksGroup and ProjectsGroup are top-level groups used to collect + # frameworks and projects. + def FrameworksGroup(self): + return self._GroupByName("Frameworks") + + def ProjectsGroup(self): + return self._GroupByName("Projects") + + def RootGroupForPath(self, path): + """Returns a PBXGroup child of this object to which path should be added. + + This method is intended to choose between SourceGroup and + IntermediatesGroup on the basis of whether path is present in a source + directory or an intermediates directory. For the purposes of this + determination, any path located within a derived file directory such as + PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates + directory. + + The returned value is a two-element tuple. The first element is the + PBXGroup, and the second element specifies whether that group should be + organized hierarchically (True) or as a single flat list (False). + """ + + # TODO(mark): make this a class variable and bind to self on call? + # Also, this list is nowhere near exhaustive. + # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by + # gyp.generator.xcode. There should probably be some way for that module + # to push the names in, rather than having to hard-code them here. 
+ source_tree_groups = { + "DERIVED_FILE_DIR": (self.IntermediatesGroup, True), + "INTERMEDIATE_DIR": (self.IntermediatesGroup, True), + "PROJECT_DERIVED_FILE_DIR": (self.IntermediatesGroup, True), + "SHARED_INTERMEDIATE_DIR": (self.IntermediatesGroup, True), + } + + (source_tree, path) = SourceTreeAndPathFromPath(path) + if source_tree is not None and source_tree in source_tree_groups: + (group_func, hierarchical) = source_tree_groups[source_tree] + group = group_func() + return (group, hierarchical) + + # TODO(mark): make additional choices based on file extension. + + return (self.SourceGroup(), True) + + def AddOrGetFileInRootGroup(self, path): + """Returns a PBXFileReference corresponding to path in the correct group + according to RootGroupForPath's heuristics. + + If an existing PBXFileReference for path exists, it will be returned. + Otherwise, one will be created and returned. + """ + + (group, hierarchical) = self.RootGroupForPath(path) + return group.AddOrGetFileByPath(path, hierarchical) + + def RootGroupsTakeOverOnlyChildren(self, recurse=False): + """Calls TakeOverOnlyChild for all groups in the main group.""" + + for group in self._properties["mainGroup"]._properties["children"]: + if isinstance(group, PBXGroup): + group.TakeOverOnlyChild(recurse) + + def SortGroups(self): + # Sort the children of the mainGroup (like "Source" and "Products") + # according to their defined order. + self._properties["mainGroup"]._properties["children"] = sorted( + self._properties["mainGroup"]._properties["children"], + key=cmp_to_key(lambda x, y: x.CompareRootGroup(y)), + ) + + # Sort everything else by putting group before files, and going + # alphabetically by name within sections of groups and files. SortGroup + # is recursive. + for group in self._properties["mainGroup"]._properties["children"]: + if not isinstance(group, PBXGroup): + continue + + if group.Name() == "Products": + # The Products group is a special case. Instead of sorting + # alphabetically, sort things in the order of the targets that + # produce the products. To do this, just build up a new list of + # products based on the targets. + products = [] + for target in self._properties["targets"]: + if not isinstance(target, PBXNativeTarget): + continue + product = target._properties["productReference"] + # Make sure that the product is already in the products group. + assert product in group._properties["children"] + products.append(product) + + # Make sure that this process doesn't miss anything that was already + # in the products group. + assert len(products) == len(group._properties["children"]) + group._properties["children"] = products + else: + group.SortGroup() + + def AddOrGetProjectReference(self, other_pbxproject): + """Add a reference to another project file (via PBXProject object) to this + one. + + Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in + this project file that contains a PBXReferenceProxy object for each + product of each PBXNativeTarget in the other project file. ProjectRef is + a PBXFileReference to the other project file. + + If this project file already references the other project file, the + existing ProductGroup and ProjectRef are returned. The ProductGroup will + still be updated if necessary. + """ + + if "projectReferences" not in self._properties: + self._properties["projectReferences"] = [] + + product_group = None + project_ref = None + + if other_pbxproject not in self._other_pbxprojects: + # This project file isn't yet linked to the other one. 
Establish the + # link. + product_group = PBXGroup({"name": "Products"}) + + # ProductGroup is strong. + product_group.parent = self + + # There's nothing unique about this PBXGroup, and if left alone, it will + # wind up with the same set of hashables as all other PBXGroup objects + # owned by the projectReferences list. Add the hashables of the + # remote PBXProject that it's related to. + product_group._hashables.extend(other_pbxproject.Hashables()) + + # The other project reports its path as relative to the same directory + # that this project's path is relative to. The other project's path + # is not necessarily already relative to this project. Figure out the + # pathname that this project needs to use to refer to the other one. + this_path = posixpath.dirname(self.Path()) + projectDirPath = self.GetProperty("projectDirPath") + if projectDirPath: + if posixpath.isabs(projectDirPath[0]): + this_path = projectDirPath + else: + this_path = posixpath.join(this_path, projectDirPath) + other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path) + + # ProjectRef is weak (it's owned by the mainGroup hierarchy). + project_ref = PBXFileReference( + { + "lastKnownFileType": "wrapper.pb-project", + "path": other_path, + "sourceTree": "SOURCE_ROOT", + } + ) + self.ProjectsGroup().AppendChild(project_ref) + + ref_dict = {"ProductGroup": product_group, "ProjectRef": project_ref} + self._other_pbxprojects[other_pbxproject] = ref_dict + self.AppendProperty("projectReferences", ref_dict) + + # Xcode seems to sort this list case-insensitively + self._properties["projectReferences"] = sorted( + self._properties["projectReferences"], + key=lambda x: x["ProjectRef"].Name().lower() + ) + else: + # The link already exists. Pull out the relevant data. + project_ref_dict = self._other_pbxprojects[other_pbxproject] + product_group = project_ref_dict["ProductGroup"] + project_ref = project_ref_dict["ProjectRef"] + + self._SetUpProductReferences(other_pbxproject, product_group, project_ref) + + inherit_unique_symroot = self._AllSymrootsUnique(other_pbxproject, False) + targets = other_pbxproject.GetProperty("targets") + if all(self._AllSymrootsUnique(t, inherit_unique_symroot) for t in targets): + dir_path = project_ref._properties["path"] + product_group._hashables.extend(dir_path) + + return [product_group, project_ref] + + def _AllSymrootsUnique(self, target, inherit_unique_symroot): + # Returns True if all configurations have a unique 'SYMROOT' attribute. + # The value of inherit_unique_symroot decides whether a configuration is + # assumed to inherit a unique 'SYMROOT' attribute from its parent when it + # doesn't define an explicit value for 'SYMROOT'. + symroots = self._DefinedSymroots(target) + for s in self._DefinedSymroots(target): + if ( + s is not None + and not self._IsUniqueSymrootForTarget(s) + or s is None + and not inherit_unique_symroot + ): + return False + return True if symroots else inherit_unique_symroot + + def _DefinedSymroots(self, target): + # Returns all values for the 'SYMROOT' attribute defined in all + # configurations for this target. If any configuration doesn't define the + # 'SYMROOT' attribute, None is added to the returned set. If no + # configuration defines the 'SYMROOT' attribute, an empty set is + # returned.
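+ # NOTE (added for illustration; not in the upstream source): two + # configurations whose buildSettings are {"SYMROOT": "$(SRCROOT)/out"} and {} + # yield {"$(SRCROOT)/out", None}; if every configuration omits 'SYMROOT', + # the set collapses to set() below.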
+ config_list = target.GetProperty("buildConfigurationList") + symroots = set() + for config in config_list.GetProperty("buildConfigurations"): + setting = config.GetProperty("buildSettings") + if "SYMROOT" in setting: + symroots.add(setting["SYMROOT"]) + else: + symroots.add(None) + if len(symroots) == 1 and None in symroots: + return set() + return symroots + + def _IsUniqueSymrootForTarget(self, symroot): + # Returns True if the given 'SYMROOT' value is unique for the target. A + # value is unique if the Xcode macro '$SRCROOT' appears in it in any form. + uniquifier = ["$SRCROOT", "$(SRCROOT)"] + if any(x in symroot for x in uniquifier): + return True + return False + + def _SetUpProductReferences(self, other_pbxproject, product_group, project_ref): + # TODO(mark): This only adds references to products in other_pbxproject + # when they don't exist in this pbxproject. Perhaps it should also + # remove references from this pbxproject that are no longer present in + # other_pbxproject. Perhaps it should update various properties if they + # change. + for target in other_pbxproject._properties["targets"]: + if not isinstance(target, PBXNativeTarget): + continue + + other_fileref = target._properties["productReference"] + if product_group.GetChildByRemoteObject(other_fileref) is None: + # Xcode sets remoteInfo to the name of the target and not the name + # of its product, despite this proxy being a reference to the product. + container_item = PBXContainerItemProxy( + { + "containerPortal": project_ref, + "proxyType": 2, + "remoteGlobalIDString": other_fileref, + "remoteInfo": target.Name(), + } + ) + # TODO(mark): Does sourceTree get copied straight over from the other + # project? Can the other project ever have lastKnownFileType here + # instead of explicitFileType? (Use it if so?) Can path ever be + # unset? (I don't think so.) Can other_fileref have name set, and + # does it impact the PBXReferenceProxy if so? These are the questions + # that perhaps will be answered one day. + reference_proxy = PBXReferenceProxy( + { + "fileType": other_fileref._properties["explicitFileType"], + "path": other_fileref._properties["path"], + "sourceTree": other_fileref._properties["sourceTree"], + "remoteRef": container_item, + } + ) + + product_group.AppendChild(reference_proxy) + + def SortRemoteProductReferences(self): + # For each remote project file, sort the associated ProductGroup in the + # same order that the targets are sorted in the remote project file. This + # is the sort order used by Xcode. + + def CompareProducts(x, y, remote_products): + # x and y are PBXReferenceProxy objects. Go through their associated + # PBXContainerItemProxy to get the remote PBXFileReference, which will be + # present in the remote_products list. + x_remote = x._properties["remoteRef"]._properties["remoteGlobalIDString"] + y_remote = y._properties["remoteRef"]._properties["remoteGlobalIDString"] + x_index = remote_products.index(x_remote) + y_index = remote_products.index(y_remote) + + # Use the order of each remote PBXFileReference in remote_products to + # determine the sort order. + return cmp(x_index, y_index) + + for other_pbxproject, ref_dict in self._other_pbxprojects.items(): + # Build up a list of products in the remote project file, ordered the + # same as the targets that produce them.
+ remote_products = [] + for target in other_pbxproject._properties["targets"]: + if not isinstance(target, PBXNativeTarget): + continue + remote_products.append(target._properties["productReference"]) + + # Sort the PBXReferenceProxy children according to the list of remote + # products. + product_group = ref_dict["ProductGroup"] + product_group._properties["children"] = sorted( + product_group._properties["children"], + key=cmp_to_key( + lambda x, y, rp=remote_products: CompareProducts(x, y, rp)), + ) + + +class XCProjectFile(XCObject): + _schema = XCObject._schema.copy() + _schema.update( + { + "archiveVersion": [0, int, 0, 1, 1], + "classes": [0, dict, 0, 1, {}], + "objectVersion": [0, int, 0, 1, 46], + "rootObject": [0, PBXProject, 1, 1], + } + ) + + def ComputeIDs(self, recursive=True, overwrite=True, hash=None): + # Although XCProjectFile is implemented here as an XCObject, it's not a + # proper object in the Xcode sense, and it certainly doesn't have its own + # ID. Pass through an attempt to update IDs to the real root object. + if recursive: + self._properties["rootObject"].ComputeIDs(recursive, overwrite, hash) + + def Print(self, file=sys.stdout): + self.VerifyHasRequiredProperties() + + # Add the special "objects" property, which will be caught and handled + # separately during printing. This structure allows a fairly standard + # loop to do the normal printing. + self._properties["objects"] = {} + self._XCPrint(file, 0, "// !$*UTF8*$!\n") + if self._should_print_single_line: + self._XCPrint(file, 0, "{ ") + else: + self._XCPrint(file, 0, "{\n") + for property, value in sorted( + self._properties.items() + ): + if property == "objects": + self._PrintObjects(file) + else: + self._XCKVPrint(file, 1, property, value) + self._XCPrint(file, 0, "}\n") + del self._properties["objects"] + + def _PrintObjects(self, file): + if self._should_print_single_line: + self._XCPrint(file, 0, "objects = {") + else: + self._XCPrint(file, 1, "objects = {\n") + + objects_by_class = {} + for object in self.Descendants(): + if object == self: + continue + class_name = object.__class__.__name__ + if class_name not in objects_by_class: + objects_by_class[class_name] = [] + objects_by_class[class_name].append(object) + + for class_name in sorted(objects_by_class): + self._XCPrint(file, 0, "\n") + self._XCPrint(file, 0, "/* Begin " + class_name + " section */\n") + for object in sorted( + objects_by_class[class_name], key=attrgetter("id") + ): + object.Print(file) + self._XCPrint(file, 0, "/* End " + class_name + " section */\n") + + if self._should_print_single_line: + self._XCPrint(file, 0, "}; ") + else: + self._XCPrint(file, 1, "};\n") diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py b/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py new file mode 100644 index 0000000..5301963 --- /dev/null +++ b/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py @@ -0,0 +1,65 @@ +# Copyright (c) 2011 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Applies a fix to CR LF TAB handling in xml.dom. + +Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293 +Working around this: http://bugs.python.org/issue5752 +TODO(bradnelson): Consider dropping this when we drop XP support.
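+ +Note (added for clarity; not in the upstream docstring): the patch is global. +XmlFix.__init__ swaps out xml.dom.minidom._write_data and +xml.dom.minidom.Element.writexml, and Cleanup() (also invoked from __del__) +restores the saved originals.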
+""" + + +import xml.dom.minidom + + +def _Replacement_write_data(writer, data, is_attrib=False): + """Writes datachars to writer.""" + data = data.replace("&", "&").replace("<", "<") + data = data.replace('"', """).replace(">", ">") + if is_attrib: + data = data.replace("\r", " ").replace("\n", " ").replace("\t", " ") + writer.write(data) + + +def _Replacement_writexml(self, writer, indent="", addindent="", newl=""): + # indent = current indentation + # addindent = indentation to add to higher levels + # newl = newline string + writer.write(indent + "<" + self.tagName) + + attrs = self._get_attributes() + a_names = sorted(attrs.keys()) + + for a_name in a_names: + writer.write(' %s="' % a_name) + _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True) + writer.write('"') + if self.childNodes: + writer.write(">%s" % newl) + for node in self.childNodes: + node.writexml(writer, indent + addindent, addindent, newl) + writer.write(f"{indent}{newl}") + else: + writer.write("/>%s" % newl) + + +class XmlFix: + """Object to manage temporary patching of xml.dom.minidom.""" + + def __init__(self): + # Preserve current xml.dom.minidom functions. + self.write_data = xml.dom.minidom._write_data + self.writexml = xml.dom.minidom.Element.writexml + # Inject replacement versions of a function and a method. + xml.dom.minidom._write_data = _Replacement_write_data + xml.dom.minidom.Element.writexml = _Replacement_writexml + + def Cleanup(self): + if self.write_data: + xml.dom.minidom._write_data = self.write_data + xml.dom.minidom.Element.writexml = self.writexml + self.write_data = None + + def __del__(self): + self.Cleanup() diff --git a/node_modules/node-gyp/gyp/requirements_dev.txt b/node_modules/node-gyp/gyp/requirements_dev.txt new file mode 100644 index 0000000..28ecaca --- /dev/null +++ b/node_modules/node-gyp/gyp/requirements_dev.txt @@ -0,0 +1,2 @@ +flake8 +pytest diff --git a/node_modules/node-gyp/gyp/setup.py b/node_modules/node-gyp/gyp/setup.py new file mode 100644 index 0000000..cf9d7d2 --- /dev/null +++ b/node_modules/node-gyp/gyp/setup.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 + +# Copyright (c) 2009 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. 
+ +from os import path + +from setuptools import setup + +here = path.abspath(path.dirname(__file__)) +# Get the long description from the README file +with open(path.join(here, "README.md")) as in_file: + long_description = in_file.read() + +setup( + name="gyp-next", + version="0.10.0", + description="A fork of the GYP build system for use in the Node.js projects", + long_description=long_description, + long_description_content_type="text/markdown", + author="Node.js contributors", + author_email="ryzokuken@disroot.org", + url="https://github.com/nodejs/gyp-next", + package_dir={"": "pylib"}, + packages=["gyp", "gyp.generator"], + entry_points={"console_scripts": ["gyp=gyp:script_main"]}, + python_requires=">=3.6", + classifiers=[ + "Development Status :: 3 - Alpha", + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Natural Language :: English", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + ], +) diff --git a/node_modules/node-gyp/gyp/test_gyp.py b/node_modules/node-gyp/gyp/test_gyp.py new file mode 100644 index 0000000..9ba2641 --- /dev/null +++ b/node_modules/node-gyp/gyp/test_gyp.py @@ -0,0 +1,260 @@ +#!/usr/bin/env python3 +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""gyptest.py -- test runner for GYP tests.""" + + +import argparse +import os +import platform +import subprocess +import sys +import time + + +def is_test_name(f): + return f.startswith("gyptest") and f.endswith(".py") + + +def find_all_gyptest_files(directory): + result = [] + for root, dirs, files in os.walk(directory): + result.extend([os.path.join(root, f) for f in files if is_test_name(f)]) + result.sort() + return result + + +def main(argv=None): + if argv is None: + argv = sys.argv + + parser = argparse.ArgumentParser() + parser.add_argument("-a", "--all", action="store_true", help="run all tests") + parser.add_argument("-C", "--chdir", action="store", help="change to directory") + parser.add_argument( + "-f", + "--format", + action="store", + default="", + help="run tests with the specified formats", + ) + parser.add_argument( + "-G", + "--gyp_option", + action="append", + default=[], + help="Add -G options to the gyp command line", + ) + parser.add_argument( + "-l", "--list", action="store_true", help="list available tests and exit" + ) + parser.add_argument( + "-n", + "--no-exec", + action="store_true", + help="no execute, just print the command line", + ) + parser.add_argument( + "--path", action="append", default=[], help="additional $PATH directory" + ) + parser.add_argument( + "-q", + "--quiet", + action="store_true", + help="quiet, don't print anything unless there are failures", + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + help="print configuration info and test results.", + ) + parser.add_argument("tests", nargs="*") + args = parser.parse_args(argv[1:]) + + if args.chdir: + os.chdir(args.chdir) + + if args.path: + extra_path = [os.path.abspath(p) for p in args.path] + extra_path = os.pathsep.join(extra_path) + os.environ["PATH"] = extra_path + os.pathsep + os.environ["PATH"] + + if not args.tests: + if not args.all: + sys.stderr.write("Specify -a to get all tests.\n") + return 1 + args.tests = 
["test"] + + tests = [] + for arg in args.tests: + if os.path.isdir(arg): + tests.extend(find_all_gyptest_files(os.path.normpath(arg))) + else: + if not is_test_name(os.path.basename(arg)): + print(arg, "is not a valid gyp test name.", file=sys.stderr) + sys.exit(1) + tests.append(arg) + + if args.list: + for test in tests: + print(test) + sys.exit(0) + + os.environ["PYTHONPATH"] = os.path.abspath("test/lib") + + if args.verbose: + print_configuration_info() + + if args.gyp_option and not args.quiet: + print("Extra Gyp options: %s\n" % args.gyp_option) + + if args.format: + format_list = args.format.split(",") + else: + format_list = { + "aix5": ["make"], + "freebsd7": ["make"], + "freebsd8": ["make"], + "openbsd5": ["make"], + "cygwin": ["msvs"], + "win32": ["msvs", "ninja"], + "linux": ["make", "ninja"], + "linux2": ["make", "ninja"], + "linux3": ["make", "ninja"], + # TODO: Re-enable xcode-ninja. + # https://bugs.chromium.org/p/gyp/issues/detail?id=530 + # 'darwin': ['make', 'ninja', 'xcode', 'xcode-ninja'], + "darwin": ["make", "ninja", "xcode"], + }[sys.platform] + + gyp_options = [] + for option in args.gyp_option: + gyp_options += ["-G", option] + + runner = Runner(format_list, tests, gyp_options, args.verbose) + runner.run() + + if not args.quiet: + runner.print_results() + + return 1 if runner.failures else 0 + + +def print_configuration_info(): + print("Test configuration:") + if sys.platform == "darwin": + sys.path.append(os.path.abspath("test/lib")) + import TestMac + + print(f" Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}") + print(f" Xcode {TestMac.Xcode.Version()}") + elif sys.platform == "win32": + sys.path.append(os.path.abspath("pylib")) + import gyp.MSVSVersion + + print(" Win %s %s\n" % platform.win32_ver()[0:2]) + print(" MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description()) + elif sys.platform in ("linux", "linux2"): + print(" Linux %s" % " ".join(platform.linux_distribution())) + print(f" Python {platform.python_version()}") + print(f" PYTHONPATH={os.environ['PYTHONPATH']}") + print() + + +class Runner: + def __init__(self, formats, tests, gyp_options, verbose): + self.formats = formats + self.tests = tests + self.verbose = verbose + self.gyp_options = gyp_options + self.failures = [] + self.num_tests = len(formats) * len(tests) + num_digits = len(str(self.num_tests)) + self.fmt_str = "[%%%dd/%%%dd] (%%s) %%s" % (num_digits, num_digits) + self.isatty = sys.stdout.isatty() and not self.verbose + self.env = os.environ.copy() + self.hpos = 0 + + def run(self): + run_start = time.time() + + i = 1 + for fmt in self.formats: + for test in self.tests: + self.run_test(test, fmt, i) + i += 1 + + if self.isatty: + self.erase_current_line() + + self.took = time.time() - run_start + + def run_test(self, test, fmt, i): + if self.isatty: + self.erase_current_line() + + msg = self.fmt_str % (i, self.num_tests, fmt, test) + self.print_(msg) + + start = time.time() + cmd = [sys.executable, test] + self.gyp_options + self.env["TESTGYP_FORMAT"] = fmt + proc = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=self.env + ) + proc.wait() + took = time.time() - start + + stdout = proc.stdout.read().decode("utf8") + if proc.returncode == 2: + res = "skipped" + elif proc.returncode: + res = "failed" + self.failures.append(f"({test}) {fmt}") + else: + res = "passed" + res_msg = f" {res} {took:.3f}s" + self.print_(res_msg) + + if stdout and not stdout.endswith(("PASSED\n", "NO RESULT\n")): + print() + print("\n".join(f" {line}" for line in 
stdout.splitlines())) + elif not self.isatty: + print() + + def print_(self, msg): + print(msg, end="") + index = msg.rfind("\n") + if index == -1: + self.hpos += len(msg) + else: + self.hpos = len(msg) - index + sys.stdout.flush() + + def erase_current_line(self): + print("\b" * self.hpos + " " * self.hpos + "\b" * self.hpos, end="") + sys.stdout.flush() + self.hpos = 0 + + def print_results(self): + num_failures = len(self.failures) + if num_failures: + print() + if num_failures == 1: + print("Failed the following test:") + else: + print("Failed the following %d tests:" % num_failures) + print("\t" + "\n\t".join(sorted(self.failures))) + print() + print( + "Ran %d tests in %.3fs, %d failed." + % (self.num_tests, self.took, num_failures) + ) + print() + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/node_modules/node-gyp/gyp/tools/README b/node_modules/node-gyp/gyp/tools/README new file mode 100644 index 0000000..84a73d1 --- /dev/null +++ b/node_modules/node-gyp/gyp/tools/README @@ -0,0 +1,15 @@ +pretty_vcproj: + Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2] + + The key/value pairs are used to resolve vsprops names. + + For example, if I want to diff the base.vcproj project: + + pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > original.txt + pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt + + And you can use your favorite diff tool to see the changes. + + Note: In the case of base.vcproj, the original vcproj is one level up from the generated one. + I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt + before you perform the diff. \ No newline at end of file diff --git a/node_modules/node-gyp/gyp/tools/Xcode/README b/node_modules/node-gyp/gyp/tools/Xcode/README new file mode 100644 index 0000000..2492a2c --- /dev/null +++ b/node_modules/node-gyp/gyp/tools/Xcode/README @@ -0,0 +1,5 @@ +Specifications contains syntax formatters for Xcode 3. These do not appear to be supported yet on Xcode 4. To use these with Xcode 3 please install both the gyp.pbfilespec and gyp.xclangspec files in + +~/Library/Application Support/Developer/Shared/Xcode/Specifications/ + +and restart Xcode. \ No newline at end of file diff --git a/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec b/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec new file mode 100644 index 0000000..85e2e26 --- /dev/null +++ b/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.pbfilespec @@ -0,0 +1,27 @@ +/* + gyp.pbfilespec + GYP source file spec for Xcode 3 + + There is not much documentation available regarding the format + of .pbfilespec files.
As a starting point, see for instance the + outdated documentation at: + http://maxao.free.fr/xcode-plugin-interface/specifications.html + and the files in: + /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/ + + Place this file in directory: + ~/Library/Application Support/Developer/Shared/Xcode/Specifications/ +*/ + +( + { + Identifier = sourcecode.gyp; + BasedOn = sourcecode; + Name = "GYP Files"; + Extensions = ("gyp", "gypi"); + MIMETypes = ("text/gyp"); + Language = "xcode.lang.gyp"; + IsTextFile = YES; + IsSourceFile = YES; + } +) diff --git a/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec b/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec new file mode 100644 index 0000000..3b3506d --- /dev/null +++ b/node_modules/node-gyp/gyp/tools/Xcode/Specifications/gyp.xclangspec @@ -0,0 +1,226 @@ +/* + Copyright (c) 2011 Google Inc. All rights reserved. + Use of this source code is governed by a BSD-style license that can be + found in the LICENSE file. + + gyp.xclangspec + GYP language specification for Xcode 3 + + There is not much documentation available regarding the format + of .xclangspec files. As a starting point, see for instance the + outdated documentation at: + http://maxao.free.fr/xcode-plugin-interface/specifications.html + and the files in: + /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/ + + Place this file in directory: + ~/Library/Application Support/Developer/Shared/Xcode/Specifications/ +*/ + +( + + { + Identifier = "xcode.lang.gyp.keyword"; + Syntax = { + Words = ( + "and", + "or", + " (caar gyp-parse-history) target-point) + (setq gyp-parse-history (cdr gyp-parse-history)))) + +(defun gyp-parse-point () + "The point of the last parse state added by gyp-parse-to." + (caar gyp-parse-history)) + +(defun gyp-parse-sections () + "A list of section symbols holding at the last parse state point." + (cdar gyp-parse-history)) + +(defun gyp-inside-dictionary-p () + "Predicate returning true if the parser is inside a dictionary." + (not (eq (cadar gyp-parse-history) 'list))) + +(defun gyp-add-parse-history (point sections) + "Add parse state SECTIONS to the parse history at POINT so that parsing can be + resumed instantly." + (while (>= (caar gyp-parse-history) point) + (setq gyp-parse-history (cdr gyp-parse-history))) + (setq gyp-parse-history (cons (cons point sections) gyp-parse-history))) + +(defun gyp-parse-to (target-point) + "Parses from (point) to TARGET-POINT adding the parse state information to + gyp-parse-state-history. Parsing stops if TARGET-POINT is reached or if a + string literal has been parsed. Returns nil if no further parsing can be + done, otherwise returns the position of the start of a parsed string, leaving + the point at the end of the string." + (let ((parsing t) + string-start) + (while parsing + (setq string-start nil) + ;; Parse up to a character that starts a sexp, or if the nesting + ;; level decreases. 
+ (let ((state (parse-partial-sexp (gyp-parse-point) + target-point + -1 + t)) + (sections (gyp-parse-sections))) + (if (= (nth 0 state) -1) + (setq sections (cdr sections)) ; pop out a level + (cond ((looking-at-p "['\"]") ; a string + (setq string-start (point)) + (goto-char (scan-sexps (point) 1)) + (if (gyp-inside-dictionary-p) + ;; Look for sections inside a dictionary + (let ((section (gyp-section-name + (buffer-substring-no-properties + (+ 1 string-start) + (- (point) 1))))) + (setq sections (cons section (cdr sections))))) + ;; Stop after the string so it can be fontified. + (setq target-point (point))) + ((looking-at-p "{") + ;; Inside a dictionary. Increase nesting. + (forward-char 1) + (setq sections (cons 'unknown sections))) + ((looking-at-p "\\[") + ;; Inside a list. Increase nesting + (forward-char 1) + (setq sections (cons 'list sections))) + ((not (eobp)) + ;; other + (forward-char 1)))) + (gyp-add-parse-history (point) sections) + (setq parsing (< (point) target-point)))) + string-start)) + +(defun gyp-section-at-point () + "Transform the last parse state, which is a list of nested sections and return + the section symbol that should be used to determine font-lock information for + the string. Can return nil indicating the string should not have any attached + section." + (let ((sections (gyp-parse-sections))) + (cond + ((eq (car sections) 'conditions) + ;; conditions can occur in a variables section, but we still want to + ;; highlight it as a keyword. + nil) + ((and (eq (car sections) 'list) + (eq (cadr sections) 'list)) + ;; conditions and sources can have items in [[ ]] + (caddr sections)) + (t (cadr sections))))) + +(defun gyp-section-match (limit) + "Parse from (point) to LIMIT returning by means of match data what was + matched. The group of the match indicates what style font-lock should apply. + See also `gyp-add-font-lock-keywords'." + (gyp-invalidate-parse-states-after (point)) + (let ((group nil) + (string-start t)) + (while (and (< (point) limit) + (not group) + string-start) + (setq string-start (gyp-parse-to limit)) + (if string-start + (setq group (cl-case (gyp-section-at-point) + ('dependencies 1) + ('variables 2) + ('conditions 2) + ('sources 3) + ('defines 4) + (nil nil))))) + (if group + (progn + ;; Set the match data to indicate to the font-lock mechanism the + ;; highlighting to be performed. + (set-match-data (append (list string-start (point)) + (make-list (* (1- group) 2) nil) + (list (1+ string-start) (1- (point))))) + t)))) + +;;; Please see http://code.google.com/p/gyp/wiki/GypLanguageSpecification for +;;; canonical list of keywords. +(defun gyp-add-font-lock-keywords () + "Add gyp-mode keywords to font-lock mechanism." + ;; TODO(jknotten): Move all the keyword highlighting into gyp-section-match + ;; so that we can do the font-locking in a single font-lock pass. 
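+;; NOTE (added for clarity; not in the upstream source): the match groups
+;; produced by gyp-section-match map onto the faces registered below:
+;; 1 dependencies, 2 variables/conditions, 3 sources, 4 defines (preprocessor).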
+ (font-lock-add-keywords + nil + (list + ;; Top-level keywords + (list (concat "['\"]\\(" + (regexp-opt (list "action" "action_name" "actions" "cflags" + "cflags_cc" "conditions" "configurations" + "copies" "defines" "dependencies" "destination" + "direct_dependent_settings" + "export_dependent_settings" "extension" "files" + "include_dirs" "includes" "inputs" "ldflags" "libraries" + "link_settings" "mac_bundle" "message" + "msvs_external_rule" "outputs" "product_name" + "process_outputs_as_sources" "rules" "rule_name" + "sources" "suppress_wildcard" + "target_conditions" "target_defaults" + "target_defines" "target_name" "toolsets" + "targets" "type" "variables" "xcode_settings")) + "[!/+=]?\\)") 1 'font-lock-keyword-face t) + ;; Type of target + (list (concat "['\"]\\(" + (regexp-opt (list "loadable_module" "static_library" + "shared_library" "executable" "none")) + "\\)") 1 'font-lock-type-face t) + (list "\\(?:target\\|action\\)_name['\"]\\s-*:\\s-*['\"]\\([^ '\"]*\\)" 1 + 'font-lock-function-name-face t) + (list 'gyp-section-match + (list 1 'font-lock-function-name-face t t) ; dependencies + (list 2 'font-lock-variable-name-face t t) ; variables, conditions + (list 3 'font-lock-constant-face t t) ; sources + (list 4 'font-lock-preprocessor-face t t)) ; preprocessor + ;; Variable expansion + (list "<@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t) + ;; Command expansion + (list " "{dst}"') + + print("}") + + +def main(): + if len(sys.argv) < 2: + print(__doc__, file=sys.stderr) + print(file=sys.stderr) + print("usage: %s target1 target2..." % (sys.argv[0]), file=sys.stderr) + return 1 + + edges = LoadEdges("dump.json", sys.argv[1:]) + + WriteGraph(edges) + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/node_modules/node-gyp/gyp/tools/pretty_gyp.py b/node_modules/node-gyp/gyp/tools/pretty_gyp.py new file mode 100644 index 0000000..4ffa444 --- /dev/null +++ b/node_modules/node-gyp/gyp/tools/pretty_gyp.py @@ -0,0 +1,156 @@ +#!/usr/bin/env python3 + +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Pretty-prints the contents of a GYP file.""" + + +import sys +import re + + +# Regex to remove comments when we're counting braces. +COMMENT_RE = re.compile(r"\s*#.*") + +# Regex to remove quoted strings when we're counting braces. +# It takes into account quoted quotes, and makes sure that the quotes match. +# NOTE: It does not handle quotes that span more than one line, or +# cases where an escaped quote is preceded by an escaped backslash. +QUOTE_RE_STR = r'(?P[\'"])(.*?)(? 0: + after = True + + # This catches the special case of a closing brace having something + # other than just whitespace ahead of it -- we don't want to + # unindent that until after this line is printed so it stays with + # the previous indentation level. + if cnt < 0 and closing_prefix_re.match(stripline): + after = True + return (cnt, after) + + +def prettyprint_input(lines): + """Does the main work of indenting the input based on the brace counts.""" + indent = 0 + basic_offset = 2 + for line in lines: + if COMMENT_RE.match(line): + print(line) + else: + line = line.strip("\r\n\t ") # Otherwise doesn't strip \r on Unix. 
+ if len(line) > 0: + (brace_diff, after) = count_braces(line) + if brace_diff != 0: + if after: + print(" " * (basic_offset * indent) + line) + indent += brace_diff + else: + indent += brace_diff + print(" " * (basic_offset * indent) + line) + else: + print(" " * (basic_offset * indent) + line) + else: + print("") + + +def main(): + if len(sys.argv) > 1: + data = open(sys.argv[1]).read().splitlines() + else: + data = sys.stdin.read().splitlines() + # Split up the double braces. + lines = split_double_braces(data) + + # Indent and print the output. + prettyprint_input(lines) + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/node_modules/node-gyp/gyp/tools/pretty_sln.py b/node_modules/node-gyp/gyp/tools/pretty_sln.py new file mode 100644 index 0000000..6ca0cd1 --- /dev/null +++ b/node_modules/node-gyp/gyp/tools/pretty_sln.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python3 + +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Prints the information in a sln file in a diffable way. + + It first outputs each project in alphabetical order with its + dependencies. + + Then it outputs a possible build order. +""" + + +import os +import re +import sys +import pretty_vcproj + +__author__ = "nsylvain (Nicolas Sylvain)" + + +def BuildProject(project, built, projects, deps): + # If all dependencies are done, we can build it; otherwise we try to build the + # dependency. + # This is not infinite-recursion proof. + for dep in deps[project]: + if dep not in built: + BuildProject(dep, built, projects, deps) + print(project) + built.append(project) + + +def ParseSolution(solution_file): + # All projects, their clsid and paths. + projects = dict() + + # A list of dependencies associated with a project. + dependencies = dict() + + # Regular expressions that match the SLN format. + # The first line of a project definition. + begin_project = re.compile( + r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942' + r'}"\) = "(.*)", "(.*)", "(.*)"$' + ) + # The last line of a project definition. + end_project = re.compile("^EndProject$") + # The first line of a dependency list. + begin_dep = re.compile(r"ProjectSection\(ProjectDependencies\) = postProject$") + # The last line of a dependency list. + end_dep = re.compile("EndProjectSection$") + # A line describing a dependency. + dep_line = re.compile(" *({.*}) = ({.*})$") + + in_deps = False + solution = open(solution_file) + for line in solution: + results = begin_project.search(line) + if results: + # Hack to remove icu because the diff is too different. + if results.group(1).find("icu") != -1: + continue + # We remove "_gyp" from the names because it helps to diff them. + current_project = results.group(1).replace("_gyp", "") + projects[current_project] = [ + results.group(2).replace("_gyp", ""), + results.group(3), + results.group(2), + ] + dependencies[current_project] = [] + continue + + results = end_project.search(line) + if results: + current_project = None + continue + + results = begin_dep.search(line) + if results: + in_deps = True + continue + + results = end_dep.search(line) + if results: + in_deps = False + continue + + results = dep_line.search(line) + if results and in_deps and current_project: + dependencies[current_project].append(results.group(1)) + continue + + # Change all dependencies from clsid to name instead.
+ for project in dependencies: + # For each dependency in this project + new_dep_array = [] + for dep in dependencies[project]: + # Look for the project name matching this clsid + for project_info in projects: + if projects[project_info][1] == dep: + new_dep_array.append(project_info) + dependencies[project] = sorted(new_dep_array) + + return (projects, dependencies) + + +def PrintDependencies(projects, deps): + print("---------------------------------------") + print("Dependencies for all projects") + print("---------------------------------------") + print("-- --") + + for (project, dep_list) in sorted(deps.items()): + print("Project : %s" % project) + print("Path : %s" % projects[project][0]) + if dep_list: + for dep in dep_list: + print(" - %s" % dep) + print("") + + print("-- --") + + +def PrintBuildOrder(projects, deps): + print("---------------------------------------") + print("Build order ") + print("---------------------------------------") + print("-- --") + + built = [] + for (project, _) in sorted(deps.items()): + if project not in built: + BuildProject(project, built, projects, deps) + + print("-- --") + + +def PrintVCProj(projects): + + for project in projects: + print("-------------------------------------") + print("-------------------------------------") + print(project) + print(project) + print(project) + print("-------------------------------------") + print("-------------------------------------") + + project_path = os.path.abspath( + os.path.join(os.path.dirname(sys.argv[1]), projects[project][2]) + ) + + pretty = pretty_vcproj + argv = [ + "", + project_path, + "$(SolutionDir)=%s\\" % os.path.dirname(sys.argv[1]), + ] + argv.extend(sys.argv[3:]) + pretty.main(argv) + + +def main(): + # check that we have at least 1 parameter. + if len(sys.argv) < 2: + print('Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]) + return 1 + + (projects, deps) = ParseSolution(sys.argv[1]) + PrintDependencies(projects, deps) + PrintBuildOrder(projects, deps) + + if "--recursive" in sys.argv: + PrintVCProj(projects) + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/node_modules/node-gyp/gyp/tools/pretty_vcproj.py b/node_modules/node-gyp/gyp/tools/pretty_vcproj.py new file mode 100644 index 0000000..00d32de --- /dev/null +++ b/node_modules/node-gyp/gyp/tools/pretty_vcproj.py @@ -0,0 +1,339 @@ +#!/usr/bin/env python3 + +# Copyright (c) 2012 Google Inc. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +"""Make the format of a vcproj really pretty. + + This script normalizes and sorts an xml. It also fetches all the properties + inside linked vsprops and includes them explicitly in the vcproj. + + It outputs the resulting xml to stdout. +""" + + +import os +import sys + +from xml.dom.minidom import parse +from xml.dom.minidom import Node + +__author__ = "nsylvain (Nicolas Sylvain)" +ARGUMENTS = None +REPLACEMENTS = dict() + + +def cmp(x, y): + return (x > y) - (x < y) + + +class CmpTuple: + """Compare function between 2 tuples.""" + + def __call__(self, x, y): + return cmp(x[0], y[0]) + + +class CmpNode: + """Compare function between 2 xml nodes.""" + + def __call__(self, x, y): + def get_string(node): + node_string = "node" + node_string += node.nodeName + if node.nodeValue: + node_string += node.nodeValue + + if node.attributes: + # We first sort by name, if present.
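+ # NOTE (added for clarity; not in the upstream source): the string built + # here is never displayed; it only acts as a deterministic sort key so + # sibling nodes get a stable order.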
+ node_string += node.getAttribute("Name") + + all_nodes = [] + for (name, value) in node.attributes.items(): + all_nodes.append((name, value)) + + all_nodes.sort(CmpTuple()) + for (name, value) in all_nodes: + node_string += name + node_string += value + + return node_string + + return cmp(get_string(x), get_string(y)) + + +def PrettyPrintNode(node, indent=0): + if node.nodeType == Node.TEXT_NODE: + if node.data.strip(): + print("{}{}".format(" " * indent, node.data.strip())) + return + + if node.childNodes: + node.normalize() + # Get the number of attributes + attr_count = 0 + if node.attributes: + attr_count = node.attributes.length + + # Print the main tag + if attr_count == 0: + print("{}<{}>".format(" " * indent, node.nodeName)) + else: + print("{}<{}".format(" " * indent, node.nodeName)) + + all_attributes = [] + for (name, value) in node.attributes.items(): + all_attributes.append((name, value)) + all_attributes.sort(CmpTuple()) + for (name, value) in all_attributes: + print('{} {}="{}"'.format(" " * indent, name, value)) + print("%s>" % (" " * indent)) + if node.nodeValue: + print("{} {}".format(" " * indent, node.nodeValue)) + + for sub_node in node.childNodes: + PrettyPrintNode(sub_node, indent=indent + 2) + print("{}</{}>".format(" " * indent, node.nodeName)) + + +def FlattenFilter(node): + """Returns a list of all the node and sub nodes.""" + node_list = [] + + if node.attributes and node.getAttribute("Name") == "_excluded_files": + # We don't add the "_excluded_files" filter. + return [] + + for current in node.childNodes: + if current.nodeName == "Filter": + node_list.extend(FlattenFilter(current)) + else: + node_list.append(current) + + return node_list + + +def FixFilenames(filenames, current_directory): + new_list = [] + for filename in filenames: + if filename: + for key in REPLACEMENTS: + filename = filename.replace(key, REPLACEMENTS[key]) + os.chdir(current_directory) + filename = filename.strip("\"' ") + if filename.startswith("$"): + new_list.append(filename) + else: + new_list.append(os.path.abspath(filename)) + return new_list + + +def AbsoluteNode(node): + """Makes all the properties we know about in this node absolute.""" + if node.attributes: + for (name, value) in node.attributes.items(): + if name in [ + "InheritedPropertySheets", + "RelativePath", + "AdditionalIncludeDirectories", + "IntermediateDirectory", + "OutputDirectory", + "AdditionalLibraryDirectories", + ]: + # We want to fix up these paths + path_list = value.split(";") + new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1])) + node.setAttribute(name, ";".join(new_list)) + if not value: + node.removeAttribute(name) + + +def CleanupVcproj(node): + """For each sub node, we call this function recursively.""" + for sub_node in node.childNodes: + AbsoluteNode(sub_node) + CleanupVcproj(sub_node) + + # Normalize the node, and remove all extraneous whitespaces. + for sub_node in node.childNodes: + if sub_node.nodeType == Node.TEXT_NODE: + sub_node.data = sub_node.data.replace("\r", "") + sub_node.data = sub_node.data.replace("\n", "") + sub_node.data = sub_node.data.rstrip() + + # Sort all the semicolon separated attributes, and also + # remove the dups.
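+ # NOTE (added for illustration; not in the upstream source): an attribute + # value of "B;A;C;A" becomes "A;B;C".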
+ if node.attributes: + for (name, value) in node.attributes.items(): + sorted_list = sorted(value.split(";")) + unique_list = [] + for i in sorted_list: + if not unique_list.count(i): + unique_list.append(i) + node.setAttribute(name, ";".join(unique_list)) + if not value: + node.removeAttribute(name) + + if node.childNodes: + node.normalize() + + # For each node, take a copy, and remove it from the list. + node_array = [] + while node.childNodes and node.childNodes[0]: + # Take a copy of the node and remove it from the list. + current = node.childNodes[0] + node.removeChild(current) + + # If the child is a filter, we want to append all its children + # to this same list. + if current.nodeName == "Filter": + node_array.extend(FlattenFilter(current)) + else: + node_array.append(current) + + # Sort the list. + node_array.sort(CmpNode()) + + # Insert the nodes in the correct order. + for new_node in node_array: + # But don't append an empty Tool node. + if new_node.nodeName == "Tool": + if new_node.attributes and new_node.attributes.length == 1: + # This one was empty. + continue + if new_node.nodeName == "UserMacro": + continue + node.appendChild(new_node) + + +def GetConfiguationNodes(vcproj): + # TODO(nsylvain): Find a better way to navigate the xml. + nodes = [] + for node in vcproj.childNodes: + if node.nodeName == "Configurations": + for sub_node in node.childNodes: + if sub_node.nodeName == "Configuration": + nodes.append(sub_node) + + return nodes + + +def GetChildrenVsprops(filename): + dom = parse(filename) + if dom.documentElement.attributes: + vsprops = dom.documentElement.getAttribute("InheritedPropertySheets") + return FixFilenames(vsprops.split(";"), os.path.dirname(filename)) + return [] + + +def SeekToNode(node1, child2): + # A text node does not have properties. + if child2.nodeType == Node.TEXT_NODE: + return None + + # Get the name of the current node. + current_name = child2.getAttribute("Name") + if not current_name: + # There is no name. We don't know how to merge. + return None + + # Look through all the nodes to find a match. + for sub_node in node1.childNodes: + if sub_node.nodeName == child2.nodeName: + name = sub_node.getAttribute("Name") + if name == current_name: + return sub_node + + # No match. We give up. + return None + + +def MergeAttributes(node1, node2): + # No attributes to merge? + if not node2.attributes: + return + + for (name, value2) in node2.attributes.items(): + # Don't merge the 'Name' attribute. + if name == "Name": + continue + value1 = node1.getAttribute(name) + if value1: + # The attribute exists in the main node. If it's equal, we leave it + # untouched, otherwise we concatenate it. + if value1 != value2: + node1.setAttribute(name, ";".join([value1, value2])) + else: + # The attribute does not exist in the main node. We append this one. + node1.setAttribute(name, value2) + + # If the attribute was a property sheet attribute, we remove it, since + # it is useless. + if name == "InheritedPropertySheets": + node1.removeAttribute(name) + + +def MergeProperties(node1, node2): + MergeAttributes(node1, node2) + for child2 in node2.childNodes: + child1 = SeekToNode(node1, child2) + if child1: + MergeProperties(child1, child2) + else: + node1.appendChild(child2.cloneNode(True)) + + +def main(argv): + """Main function of this vcproj prettifier.""" + global ARGUMENTS + ARGUMENTS = argv + + # check that we have at least 1 parameter.
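+ # NOTE (added for clarity; not in the upstream source): argv[0] is the + # script name, so the vcproj path must sit at argv[1]; everything from + # argv[2] onwards is parsed as key=value pairs.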
+ if len(argv) < 2: + print( + 'Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' + "[key2=value2]" % argv[0] + ) + return 1 + + # Parse the keys + for i in range(2, len(argv)): + (key, value) = argv[i].split("=") + REPLACEMENTS[key] = value + + # Open the vcproj and parse the xml. + dom = parse(argv[1]) + + # The first thing we need to do is find the Configuration Nodes and merge + # them with the vsprops they include. + for configuration_node in GetConfiguationNodes(dom.documentElement): + # Get the property sheets associated with this configuration. + vsprops = configuration_node.getAttribute("InheritedPropertySheets") + + # Fix the filenames to be absolute. + vsprops_list = FixFilenames( + vsprops.strip().split(";"), os.path.dirname(argv[1]) + ) + + # Extend the list of vsprops with all vsprops contained in the current + # vsprops. + for current_vsprops in vsprops_list: + vsprops_list.extend(GetChildrenVsprops(current_vsprops)) + + # Now that we have all the vsprops, we need to merge them. + for current_vsprops in vsprops_list: + MergeProperties(configuration_node, parse(current_vsprops).documentElement) + + # Now that everything is merged, we need to cleanup the xml. + CleanupVcproj(dom.documentElement) + + # Finally, we use the pretty xml function to print the vcproj back to the + # user. + # print dom.toprettyxml(newl="\n") + PrettyPrintNode(dom.documentElement) + return 0 + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/node_modules/node-gyp/lib/Find-VisualStudio.cs b/node_modules/node-gyp/lib/Find-VisualStudio.cs new file mode 100644 index 0000000..d2e45a7 --- /dev/null +++ b/node_modules/node-gyp/lib/Find-VisualStudio.cs @@ -0,0 +1,250 @@ +// Copyright 2017 - Refael Ackermann +// Distributed under MIT style license +// See accompanying file LICENSE at https://github.com/node4good/windows-autoconf + +// Usage: +// powershell -ExecutionPolicy Unrestricted -Command "Add-Type -Path Find-VisualStudio.cs; [VisualStudioConfiguration.Main]::PrintJson()" +// This script needs to be compatible with PowerShell v2 to run on Windows 2008R2 and Windows 7.
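+// NOTE (added for clarity; not in the upstream source): PrintJson() below
+// writes a JSON array to stdout with one object per detected instance,
+// carrying "path", "version" and "packages" keys.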
+ +using System; +using System.Text; +using System.Runtime.InteropServices; +using System.Collections.Generic; + +namespace VisualStudioConfiguration +{ + [Flags] + public enum InstanceState : uint + { + None = 0, + Local = 1, + Registered = 2, + NoRebootRequired = 4, + NoErrors = 8, + Complete = 4294967295, + } + + [Guid("6380BCFF-41D3-4B2E-8B2E-BF8A6810C848")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface IEnumSetupInstances + { + + void Next([MarshalAs(UnmanagedType.U4), In] int celt, + [MarshalAs(UnmanagedType.LPArray, ArraySubType = UnmanagedType.Interface), Out] ISetupInstance[] rgelt, + [MarshalAs(UnmanagedType.U4)] out int pceltFetched); + + void Skip([MarshalAs(UnmanagedType.U4), In] int celt); + + void Reset(); + + [return: MarshalAs(UnmanagedType.Interface)] + IEnumSetupInstances Clone(); + } + + [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface ISetupConfiguration + { + } + + [Guid("26AAB78C-4A60-49D6-AF3B-3C35BC93365D")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface ISetupConfiguration2 : ISetupConfiguration + { + + [return: MarshalAs(UnmanagedType.Interface)] + IEnumSetupInstances EnumInstances(); + + [return: MarshalAs(UnmanagedType.Interface)] + ISetupInstance GetInstanceForCurrentProcess(); + + [return: MarshalAs(UnmanagedType.Interface)] + ISetupInstance GetInstanceForPath([MarshalAs(UnmanagedType.LPWStr), In] string path); + + [return: MarshalAs(UnmanagedType.Interface)] + IEnumSetupInstances EnumAllInstances(); + } + + [Guid("B41463C3-8866-43B5-BC33-2B0676F7F42E")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface ISetupInstance + { + } + + [Guid("89143C9A-05AF-49B0-B717-72E218A2185C")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface ISetupInstance2 : ISetupInstance + { + [return: MarshalAs(UnmanagedType.BStr)] + string GetInstanceId(); + + [return: MarshalAs(UnmanagedType.Struct)] + System.Runtime.InteropServices.ComTypes.FILETIME GetInstallDate(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetInstallationName(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetInstallationPath(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetInstallationVersion(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetDisplayName([MarshalAs(UnmanagedType.U4), In] int lcid); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetDescription([MarshalAs(UnmanagedType.U4), In] int lcid); + + [return: MarshalAs(UnmanagedType.BStr)] + string ResolvePath([MarshalAs(UnmanagedType.LPWStr), In] string pwszRelativePath); + + [return: MarshalAs(UnmanagedType.U4)] + InstanceState GetState(); + + [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)] + ISetupPackageReference[] GetPackages(); + + ISetupPackageReference GetProduct(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetProductPath(); + + [return: MarshalAs(UnmanagedType.VariantBool)] + bool IsLaunchable(); + + [return: MarshalAs(UnmanagedType.VariantBool)] + bool IsComplete(); + + [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_UNKNOWN)] + ISetupPropertyStore GetProperties(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetEnginePath(); + } + + [Guid("DA8D8A16-B2B6-4487-A2F1-594CCCCD6BF5")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface 
ISetupPackageReference + { + + [return: MarshalAs(UnmanagedType.BStr)] + string GetId(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetVersion(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetChip(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetLanguage(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetBranch(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetType(); + + [return: MarshalAs(UnmanagedType.BStr)] + string GetUniqueId(); + + [return: MarshalAs(UnmanagedType.VariantBool)] + bool GetIsExtension(); + } + + [Guid("c601c175-a3be-44bc-91f6-4568d230fc83")] + [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)] + [ComImport] + public interface ISetupPropertyStore + { + + [return: MarshalAs(UnmanagedType.SafeArray, SafeArraySubType = VarEnum.VT_BSTR)] + string[] GetNames(); + + object GetValue([MarshalAs(UnmanagedType.LPWStr), In] string pwszName); + } + + [Guid("42843719-DB4C-46C2-8E7C-64F1816EFD5B")] + [CoClass(typeof(SetupConfigurationClass))] + [ComImport] + public interface SetupConfiguration : ISetupConfiguration2, ISetupConfiguration + { + } + + [Guid("177F0C4A-1CD3-4DE7-A32C-71DBBB9FA36D")] + [ClassInterface(ClassInterfaceType.None)] + [ComImport] + public class SetupConfigurationClass + { + } + + public static class Main + { + public static void PrintJson() + { + ISetupConfiguration query = new SetupConfiguration(); + ISetupConfiguration2 query2 = (ISetupConfiguration2)query; + IEnumSetupInstances e = query2.EnumAllInstances(); + + int pceltFetched; + ISetupInstance2[] rgelt = new ISetupInstance2[1]; + List<string> instances = new List<string>(); + while (true) + { + e.Next(1, rgelt, out pceltFetched); + if (pceltFetched <= 0) + { + Console.WriteLine(String.Format("[{0}]", string.Join(",", instances.ToArray()))); + return; + } + + try + { + instances.Add(InstanceJson(rgelt[0])); + } + catch (COMException) + { + // Ignore instances that can't be queried.
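+ // NOTE (added for clarity; not in the upstream source): the COMException is + // swallowed deliberately so that one broken instance does not prevent the + // remaining ones from being reported.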
+ } + } + } + + private static string JsonString(string s) + { + return "\"" + s.Replace("\\", "\\\\").Replace("\"", "\\\"") + "\""; + } + + private static string InstanceJson(ISetupInstance2 setupInstance2) + { + // Visual Studio component directory: + // https://docs.microsoft.com/en-us/visualstudio/install/workload-and-component-ids + + StringBuilder json = new StringBuilder(); + json.Append("{"); + + string path = JsonString(setupInstance2.GetInstallationPath()); + json.Append(String.Format("\"path\":{0},", path)); + + string version = JsonString(setupInstance2.GetInstallationVersion()); + json.Append(String.Format("\"version\":{0},", version)); + + List<string> packages = new List<string>(); + foreach (ISetupPackageReference package in setupInstance2.GetPackages()) + { + string id = JsonString(package.GetId()); + packages.Add(id); + } + json.Append(String.Format("\"packages\":[{0}]", string.Join(",", packages.ToArray()))); + + json.Append("}"); + return json.ToString(); + } + } +} diff --git a/node_modules/node-gyp/lib/build.js b/node_modules/node-gyp/lib/build.js new file mode 100644 index 0000000..c2388fb --- /dev/null +++ b/node_modules/node-gyp/lib/build.js @@ -0,0 +1,204 @@ +'use strict' + +const fs = require('graceful-fs') +const path = require('path') +const glob = require('glob') +const log = require('npmlog') +const which = require('which') +const win = process.platform === 'win32' + +function build (gyp, argv, callback) { + var platformMake = 'make' + if (process.platform === 'aix') { + platformMake = 'gmake' + } else if (process.platform.indexOf('bsd') !== -1) { + platformMake = 'gmake' + } else if (win && argv.length > 0) { + argv = argv.map(function (target) { + return '/t:' + target + }) + } + + var makeCommand = gyp.opts.make || process.env.MAKE || platformMake + var command = win ? 'msbuild' : makeCommand + var jobs = gyp.opts.jobs || process.env.JOBS + var buildType + var config + var arch + var nodeDir + var guessedSolution + + loadConfigGypi() + + /** + * Load the "config.gypi" file that was generated during "configure". + */ + + function loadConfigGypi () { + var configPath = path.resolve('build', 'config.gypi') + + fs.readFile(configPath, 'utf8', function (err, data) { + if (err) { + if (err.code === 'ENOENT') { + callback(new Error('You must run `node-gyp configure` first!')) + } else { + callback(err) + } + return + } + config = JSON.parse(data.replace(/#.+\n/, '')) + + // get the 'arch', 'buildType', and 'nodeDir' vars from the config + buildType = config.target_defaults.default_configuration + arch = config.variables.target_arch + nodeDir = config.variables.nodedir + + if ('debug' in gyp.opts) { + buildType = gyp.opts.debug ? 'Debug' : 'Release' + } + if (!buildType) { + buildType = 'Release' + } + + log.verbose('build type', buildType) + log.verbose('architecture', arch) + log.verbose('node dev dir', nodeDir) + + if (win) { + findSolutionFile() + } else { + doWhich() + } + }) + } + + /** + * On Windows, find the first build/*.sln file. + */ + + function findSolutionFile () { + glob('build/*.sln', function (err, files) { + if (err) { + return callback(err) + } + if (files.length === 0) { + return callback(new Error('Could not find *.sln file. Did you run "configure"?')) + } + guessedSolution = files[0] + log.verbose('found first Solution file', guessedSolution) + doWhich() + }) + } + + /** + * Uses node-which to locate the msbuild / make executable.
+ */ + + function doWhich () { + // On Windows use msbuild provided by node-gyp configure + if (win) { + if (!config.variables.msbuild_path) { + return callback(new Error( + 'MSBuild is not set, please run `node-gyp configure`.')) + } + command = config.variables.msbuild_path + log.verbose('using MSBuild:', command) + doBuild() + return + } + // First make sure we have the build command in the PATH + which(command, function (err, execPath) { + if (err) { + // Some other error or 'make' not found on Unix, report that to the user + callback(err) + return + } + log.verbose('`which` succeeded for `' + command + '`', execPath) + doBuild() + }) + } + + /** + * Actually spawn the process and compile the module. + */ + + function doBuild () { + // Enable Verbose build + var verbose = log.levels[log.level] <= log.levels.verbose + var j + + if (!win && verbose) { + argv.push('V=1') + } + + if (win && !verbose) { + argv.push('/clp:Verbosity=minimal') + } + + if (win) { + // Turn off the Microsoft logo on Windows + argv.push('/nologo') + } + + // Specify the build type, Release by default + if (win) { + // Convert .gypi config target_arch to MSBuild /Platform + // Since there are many ways to state '32-bit Intel', default to it. + // N.B. msbuild's Condition string equality tests are case-insensitive. + var archLower = arch.toLowerCase() + var p = archLower === 'x64' ? 'x64' + : (archLower === 'arm' ? 'ARM' + : (archLower === 'arm64' ? 'ARM64' : 'Win32')) + argv.push('/p:Configuration=' + buildType + ';Platform=' + p) + if (jobs) { + j = parseInt(jobs, 10) + if (!isNaN(j) && j > 0) { + argv.push('/m:' + j) + } else if (jobs.toUpperCase() === 'MAX') { + argv.push('/m:' + require('os').cpus().length) + } + } + } else { + argv.push('BUILDTYPE=' + buildType) + // Invoke the Makefile in the 'build' dir. + argv.push('-C') + argv.push('build') + if (jobs) { + j = parseInt(jobs, 10) + if (!isNaN(j) && j > 0) { + argv.push('--jobs') + argv.push(j) + } else if (jobs.toUpperCase() === 'MAX') { + argv.push('--jobs') + argv.push(require('os').cpus().length) + } + } + } + + if (win) { + // did the user specify their own .sln file? + var hasSln = argv.some(function (arg) { + return path.extname(arg) === '.sln' + }) + if (!hasSln) { + argv.unshift(gyp.opts.solution || guessedSolution) + } + } + + var proc = gyp.spawn(command, argv) + proc.on('exit', onExit) + } + + function onExit (code, signal) { + if (code !== 0) { + return callback(new Error('`' + command + '` failed with exit code: ' + code)) + } + if (signal) { + return callback(new Error('`' + command + '` got signal: ' + signal)) + } + callback() + } +} + +module.exports = build +module.exports.usage = 'Invokes `' + (win ? 
'msbuild' : 'make') + '` and builds the module' diff --git a/node_modules/node-gyp/lib/clean.js b/node_modules/node-gyp/lib/clean.js new file mode 100644 index 0000000..dbfa4db --- /dev/null +++ b/node_modules/node-gyp/lib/clean.js @@ -0,0 +1,15 @@ +'use strict' + +const rm = require('rimraf') +const log = require('npmlog') + +function clean (gyp, argv, callback) { + // Remove the 'build' dir + var buildDir = 'build' + + log.verbose('clean', 'removing "%s" directory', buildDir) + rm(buildDir, callback) +} + +module.exports = clean +module.exports.usage = 'Removes any generated build files and the "out" dir' diff --git a/node_modules/node-gyp/lib/configure.js b/node_modules/node-gyp/lib/configure.js new file mode 100644 index 0000000..d9b8fe3 --- /dev/null +++ b/node_modules/node-gyp/lib/configure.js @@ -0,0 +1,294 @@ +'use strict' + +const fs = require('graceful-fs') +const path = require('path') +const log = require('npmlog') +const os = require('os') +const processRelease = require('./process-release') +const win = process.platform === 'win32' +const findNodeDirectory = require('./find-node-directory') +const createConfigGypi = require('./create-config-gypi') +const msgFormat = require('util').format +var findPython = require('./find-python') +if (win) { + var findVisualStudio = require('./find-visualstudio') +} + +function configure (gyp, argv, callback) { + var python + var buildDir = path.resolve('build') + var configNames = ['config.gypi', 'common.gypi'] + var configs = [] + var nodeDir + var release = processRelease(argv, gyp, process.version, process.release) + + findPython(gyp.opts.python, function (err, found) { + if (err) { + callback(err) + } else { + python = found + getNodeDir() + } + }) + + function getNodeDir () { + // 'python' should be set by now + process.env.PYTHON = python + + if (gyp.opts.nodedir) { + // --nodedir was specified. use that for the dev files + nodeDir = gyp.opts.nodedir.replace(/^~/, os.homedir()) + + log.verbose('get node dir', 'compiling against specified --nodedir dev files: %s', nodeDir) + createBuildDir() + } else { + // if no --nodedir specified, ensure node dependencies are installed + if ('v' + release.version !== process.version) { + // if --target was given, then determine a target version to compile for + log.verbose('get node dir', 'compiling against --target node version: %s', release.version) + } else { + // if no --target was specified then use the current host node version + log.verbose('get node dir', 'no --target version specified, falling back to host node version: %s', release.version) + } + + if (!release.semver) { + // could not parse the version string with semver + return callback(new Error('Invalid version number: ' + release.version)) + } + + // If the tarball option is set, always remove and reinstall the headers + // into devdir. Otherwise only install if they're not already there. + gyp.opts.ensure = !gyp.opts.tarball + + gyp.commands.install([release.version], function (err) { + if (err) { + return callback(err) + } + log.verbose('get node dir', 'target node version installed:', release.versionDir) + nodeDir = path.resolve(gyp.devDir, release.versionDir) + createBuildDir() + }) + } + } + + function createBuildDir () { + log.verbose('build dir', 'attempting to create "build" dir: %s', buildDir) + fs.mkdir(buildDir, { recursive: true }, function (err, isNew) { + if (err) { + return callback(err) + } + log.verbose( + 'build dir', '"build" dir needed to be created?', isNew ? 
'Yes' : 'No' + ) + if (win) { + findVisualStudio(release.semver, gyp.opts.msvs_version, + createConfigFile) + } else { + createConfigFile() + } + }) + } + + function createConfigFile (err, vsInfo) { + if (err) { + return callback(err) + } + if (process.platform === 'win32') { + process.env.GYP_MSVS_VERSION = Math.min(vsInfo.versionYear, 2015) + process.env.GYP_MSVS_OVERRIDE_PATH = vsInfo.path + } + createConfigGypi({ gyp, buildDir, nodeDir, vsInfo }).then(configPath => { + configs.push(configPath) + findConfigs() + }).catch(err => { + callback(err) + }) + } + + function findConfigs () { + var name = configNames.shift() + if (!name) { + return runGyp() + } + var fullPath = path.resolve(name) + + log.verbose(name, 'checking for gypi file: %s', fullPath) + fs.stat(fullPath, function (err) { + if (err) { + if (err.code === 'ENOENT') { + findConfigs() // check next gypi filename + } else { + callback(err) + } + } else { + log.verbose(name, 'found gypi file') + configs.push(fullPath) + findConfigs() + } + }) + } + + function runGyp (err) { + if (err) { + return callback(err) + } + + if (!~argv.indexOf('-f') && !~argv.indexOf('--format')) { + if (win) { + log.verbose('gyp', 'gyp format was not specified; forcing "msvs"') + // force the 'msvs' format on Windows + argv.push('-f', 'msvs') + } else { + log.verbose('gyp', 'gyp format was not specified; forcing "make"') + // force the 'make' format for non-Windows + argv.push('-f', 'make') + } + } + + // include all the ".gypi" files that were found + configs.forEach(function (config) { + argv.push('-I', config) + }) + + // For AIX and z/OS we need to set up the path to the exports file + // which contains the symbols needed for linking. + var nodeExpFile + if (process.platform === 'aix' || process.platform === 'os390') { + var ext = process.platform === 'aix' ? 'exp' : 'x' + var nodeRootDir = findNodeDirectory() + var candidates + + if (process.platform === 'aix') { + candidates = [ + 'include/node/node', + 'out/Release/node', + 'out/Debug/node', + 'node' + ].map(function (file) { + return file + '.' + ext + }) + } else { + candidates = [ + 'out/Release/obj.target/libnode', + 'out/Debug/obj.target/libnode', + 'lib/libnode' + ].map(function (file) { + return file + '.' + ext + }) + } + + var logprefix = 'find exports file' + nodeExpFile = findAccessibleSync(logprefix, nodeRootDir, candidates) + if (nodeExpFile !== undefined) { + log.verbose(logprefix, 'Found exports file: %s', nodeExpFile) + } else { + var msg = msgFormat('Could not find node.%s file in %s', ext, nodeRootDir) + log.error(logprefix, 'Could not find exports file') + return callback(new Error(msg)) + } + } + + // this logic ported from the old `gyp_addon` python file + var gypScript = path.resolve(__dirname, '..', 'gyp', 'gyp_main.py') + var addonGypi = path.resolve(__dirname, '..', 'addon.gypi') + var commonGypi = path.resolve(nodeDir, 'include/node/common.gypi') + fs.stat(commonGypi, function (err) { + if (err) { + commonGypi = path.resolve(nodeDir, 'common.gypi') + } + + var outputDir = 'build' + if (win) { + // Windows expects an absolute path + outputDir = buildDir + } + var nodeGypDir = path.resolve(__dirname, '..') + + var nodeLibFile = path.join(nodeDir, + !gyp.opts.nodedir ?
'<(target_arch)' : '$(Configuration)', + release.name + '.lib') + + argv.push('-I', addonGypi) + argv.push('-I', commonGypi) + argv.push('-Dlibrary=shared_library') + argv.push('-Dvisibility=default') + argv.push('-Dnode_root_dir=' + nodeDir) + if (process.platform === 'aix' || process.platform === 'os390') { + argv.push('-Dnode_exp_file=' + nodeExpFile) + } + argv.push('-Dnode_gyp_dir=' + nodeGypDir) + + // Do this to keep Cygwin environments happy, else the unescaped '\' gets eaten up, + // resulting in bad paths, Ex c:parentFolderfolderanotherFolder instead of c:\parentFolder\folder\anotherFolder + if (win) { + nodeLibFile = nodeLibFile.replace(/\\/g, '\\\\') + } + argv.push('-Dnode_lib_file=' + nodeLibFile) + argv.push('-Dmodule_root_dir=' + process.cwd()) + argv.push('-Dnode_engine=' + + (gyp.opts.node_engine || process.jsEngine || 'v8')) + argv.push('--depth=.') + argv.push('--no-parallel') + + // tell gyp to write the Makefile/Solution files into output_dir + argv.push('--generator-output', outputDir) + + // tell make to write its output into the same dir + argv.push('-Goutput_dir=.') + + // enforce use of the "binding.gyp" file + argv.unshift('binding.gyp') + + // execute `gyp` from the current target nodedir + argv.unshift(gypScript) + + // make sure python uses files that came with this particular node package + var pypath = [path.join(__dirname, '..', 'gyp', 'pylib')] + if (process.env.PYTHONPATH) { + pypath.push(process.env.PYTHONPATH) + } + process.env.PYTHONPATH = pypath.join(win ? ';' : ':') + + var cp = gyp.spawn(python, argv) + cp.on('exit', onCpExit) + }) + } + + function onCpExit (code) { + if (code !== 0) { + callback(new Error('`gyp` failed with exit code: ' + code)) + } else { + // we're done + callback() + } + } +} + +/** + * Returns the first file or directory from an array of candidates that is + * readable by the current user, or undefined if none of the candidates are + * readable. + */ +function findAccessibleSync (logprefix, dir, candidates) { + for (var next = 0; next < candidates.length; next++) { + var candidate = path.resolve(dir, candidates[next]) + try { + var fd = fs.openSync(candidate, 'r') + } catch (e) { + // this candidate was not found or not readable, do nothing + log.silly(logprefix, 'Could not open %s: %s', candidate, e.message) + continue + } + fs.closeSync(fd) + log.silly(logprefix, 'Found readable %s', candidate) + return candidate + } + + return undefined +} + +module.exports = configure +module.exports.test = { + findAccessibleSync: findAccessibleSync +} +module.exports.usage = 'Generates ' + (win ? 'MSVC project files' : 'a Makefile') + ' for the current module' diff --git a/node_modules/node-gyp/lib/create-config-gypi.js b/node_modules/node-gyp/lib/create-config-gypi.js new file mode 100644 index 0000000..dbcb8b4 --- /dev/null +++ b/node_modules/node-gyp/lib/create-config-gypi.js @@ -0,0 +1,146 @@ +'use strict' + +const fs = require('graceful-fs') +const log = require('npmlog') +const path = require('path') + +function parseConfigGypi (config) { + // translated from tools/js2c.py of Node.js + // 1. string comments + config = config.replace(/#.*/g, '') + // 2. join multiline strings + config = config.replace(/'$\s+'/mg, '') + // 3. normalize string literals from ' into " + config = config.replace(/'/g, '"') + return JSON.parse(config) +} + +async function getBaseConfigGypi ({ gyp, nodeDir }) { + // try reading $nodeDir/include/node/config.gypi first when: + // 1. --dist-url or --nodedir is specified + // 2. 
and --force-process-config is not specified + const shouldReadConfigGypi = (gyp.opts.nodedir || gyp.opts['dist-url']) && !gyp.opts['force-process-config'] + if (shouldReadConfigGypi && nodeDir) { + try { + const baseConfigGypiPath = path.resolve(nodeDir, 'include/node/config.gypi') + const baseConfigGypi = await fs.promises.readFile(baseConfigGypiPath) + return parseConfigGypi(baseConfigGypi.toString()) + } catch (err) { + log.warn('read config.gypi', err.message) + } + } + + // otherwise fall back to a deep copy of process.config (also used when + // config.gypi could not be read) + return JSON.parse(JSON.stringify(process.config)) +} + +async function getCurrentConfigGypi ({ gyp, nodeDir, vsInfo }) { + const config = await getBaseConfigGypi({ gyp, nodeDir }) + if (!config.target_defaults) { + config.target_defaults = {} + } + if (!config.variables) { + config.variables = {} + } + + const defaults = config.target_defaults + const variables = config.variables + + // don't inherit the "defaults" from the base config.gypi. + // doing so could cause problems in cases where the `node` executable was + // compiled on a different machine (with different lib/include paths) than + // the machine where the addon is being built + defaults.cflags = [] + defaults.defines = [] + defaults.include_dirs = [] + defaults.libraries = [] + + // set the default_configuration prop + if ('debug' in gyp.opts) { + defaults.default_configuration = gyp.opts.debug ? 'Debug' : 'Release' + } + + if (!defaults.default_configuration) { + defaults.default_configuration = 'Release' + } + + // set the target_arch variable + variables.target_arch = gyp.opts.arch || process.arch || 'ia32' + if (variables.target_arch === 'arm64') { + defaults.msvs_configuration_platform = 'ARM64' + defaults.xcode_configuration_platform = 'arm64' + } + + // set the node development directory + variables.nodedir = nodeDir + + // disable -T "thin" static archives by default + variables.standalone_static_library = gyp.opts.thin ? 0 : 1 + + if (process.platform === 'win32') { + defaults.msbuild_toolset = vsInfo.toolset + if (vsInfo.sdk) { + defaults.msvs_windows_target_platform_version = vsInfo.sdk + } + if (variables.target_arch === 'arm64') { + if (vsInfo.versionMajor > 15 || + (vsInfo.versionMajor === 15 && vsInfo.versionMinor >= 9)) { + defaults.msvs_enable_marmasm = 1 + } else { + log.warn('Compiling ARM64 assembly is only available in\n' + + 'Visual Studio 2017 version 15.9 and above') + } + } + variables.msbuild_path = vsInfo.msBuild + } + + // loop through the rest of the opts and add the unknown ones as variables. + // this allows for module-specific configure flags like: + // + // $ node-gyp configure --shared-libxml2 + Object.keys(gyp.opts).forEach(function (opt) { + if (opt === 'argv') { + return + } + if (opt in gyp.configDefs) { + return + } + variables[opt.replace(/-/g, '_')] = gyp.opts[opt] + }) + + return config +} + +async function createConfigGypi ({ gyp, buildDir, nodeDir, vsInfo }) { + const configFilename = 'config.gypi' + const configPath = path.resolve(buildDir, configFilename) + + log.verbose('build/' + configFilename, 'creating config file') + + const config = await getCurrentConfigGypi({ gyp, nodeDir, vsInfo }) + + // ensures that any boolean values in config.gypi get stringified + function boolsToString (k, v) { + if (typeof v === 'boolean') { + return String(v) + } + return v + } + + log.silly('build/' + configFilename, config) + + // now write out the config.gypi file to the build/ dir + const prefix = '# Do not edit.
File was generated by node-gyp\'s "configure" step' + + const json = JSON.stringify(config, boolsToString, 2) + log.verbose('build/' + configFilename, 'writing out config file: %s', configPath) + await fs.promises.writeFile(configPath, [prefix, json, ''].join('\n')) + + return configPath +} + +module.exports = createConfigGypi +module.exports.test = { + parseConfigGypi: parseConfigGypi, + getCurrentConfigGypi: getCurrentConfigGypi +} diff --git a/node_modules/node-gyp/lib/find-node-directory.js b/node_modules/node-gyp/lib/find-node-directory.js new file mode 100644 index 0000000..0dd781a --- /dev/null +++ b/node_modules/node-gyp/lib/find-node-directory.js @@ -0,0 +1,63 @@ +'use strict' + +const path = require('path') +const log = require('npmlog') + +function findNodeDirectory (scriptLocation, processObj) { + // set dirname and process if not passed in + // this facilitates regression tests + if (scriptLocation === undefined) { + scriptLocation = __dirname + } + if (processObj === undefined) { + processObj = process + } + + // Have a look to see what is above us, to try and work out where we are + var npmParentDirectory = path.join(scriptLocation, '../../../..') + log.verbose('node-gyp root', 'npm_parent_directory is ' + + path.basename(npmParentDirectory)) + var nodeRootDir = '' + + log.verbose('node-gyp root', 'Finding node root directory') + if (path.basename(npmParentDirectory) === 'deps') { + // We are in a build directory where this script lives in + // deps/npm/node_modules/node-gyp/lib + nodeRootDir = path.join(npmParentDirectory, '..') + log.verbose('node-gyp root', 'in build directory, root = ' + + nodeRootDir) + } else if (path.basename(npmParentDirectory) === 'node_modules') { + // We are in a node install directory where this script lives in + // lib/node_modules/npm/node_modules/node-gyp/lib or + // node_modules/npm/node_modules/node-gyp/lib depending on the + // platform + if (processObj.platform === 'win32') { + nodeRootDir = path.join(npmParentDirectory, '..') + } else { + nodeRootDir = path.join(npmParentDirectory, '../..') + } + log.verbose('node-gyp root', 'in install directory, root = ' + + nodeRootDir) + } else { + // We don't know where we are, try working it out from the location + // of the node binary + var nodeDir = path.dirname(processObj.execPath) + var directoryUp = path.basename(nodeDir) + if (directoryUp === 'bin') { + nodeRootDir = path.join(nodeDir, '..') + } else if (directoryUp === 'Release' || directoryUp === 'Debug') { + // If we are a recently built node, and the directory structure + // is that of a repository. If we are on Windows then we only need + // to go one level up, everything else, two + if (processObj.platform === 'win32') { + nodeRootDir = path.join(nodeDir, '..') + } else { + nodeRootDir = path.join(nodeDir, '../..') + } + } + // Else return the default blank, "". 
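+    // (Illustrative: a node at /usr/local/bin/node resolves via the
+    // "bin" branch above to /usr/local, while a repo build running
+    // out/Release/node walks up to the repository root.)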
+ } + return nodeRootDir +} + +module.exports = findNodeDirectory diff --git a/node_modules/node-gyp/lib/find-python.js b/node_modules/node-gyp/lib/find-python.js new file mode 100644 index 0000000..a445e82 --- /dev/null +++ b/node_modules/node-gyp/lib/find-python.js @@ -0,0 +1,344 @@ +'use strict' + +const log = require('npmlog') +const semver = require('semver') +const cp = require('child_process') +const extend = require('util')._extend // eslint-disable-line +const win = process.platform === 'win32' +const logWithPrefix = require('./util').logWithPrefix + +const systemDrive = process.env.SystemDrive || 'C:' +const username = process.env.USERNAME || process.env.USER || getOsUserInfo() +const localAppData = process.env.LOCALAPPDATA || `${systemDrive}\\${username}\\AppData\\Local` +const foundLocalAppData = process.env.LOCALAPPDATA || username +const programFiles = process.env.ProgramW6432 || process.env.ProgramFiles || `${systemDrive}\\Program Files` +const programFilesX86 = process.env['ProgramFiles(x86)'] || `${programFiles} (x86)` + +const winDefaultLocationsArray = [] +for (const majorMinor of ['39', '38', '37', '36']) { + if (foundLocalAppData) { + winDefaultLocationsArray.push( + `${localAppData}\\Programs\\Python\\Python${majorMinor}\\python.exe`, + `${programFiles}\\Python${majorMinor}\\python.exe`, + `${localAppData}\\Programs\\Python\\Python${majorMinor}-32\\python.exe`, + `${programFiles}\\Python${majorMinor}-32\\python.exe`, + `${programFilesX86}\\Python${majorMinor}-32\\python.exe` + ) + } else { + winDefaultLocationsArray.push( + `${programFiles}\\Python${majorMinor}\\python.exe`, + `${programFiles}\\Python${majorMinor}-32\\python.exe`, + `${programFilesX86}\\Python${majorMinor}-32\\python.exe` + ) + } +} + +function getOsUserInfo () { + try { + return require('os').userInfo().username + } catch (e) {} +} + +function PythonFinder (configPython, callback) { + this.callback = callback + this.configPython = configPython + this.errorLog = [] +} + +PythonFinder.prototype = { + log: logWithPrefix(log, 'find Python'), + argsExecutable: ['-c', 'import sys; print(sys.executable);'], + argsVersion: ['-c', 'import sys; print("%s.%s.%s" % sys.version_info[:3]);'], + semverRange: '>=3.6.0', + + // These can be overridden for testing: + execFile: cp.execFile, + env: process.env, + win: win, + pyLauncher: 'py.exe', + winDefaultLocations: winDefaultLocationsArray, + + // Logs a message at verbose level, but also saves it to be displayed later + // at error level if an error occurs. This should help diagnose the problem. + addLog: function addLog (message) { + this.log.verbose(message) + this.errorLog.push(message) + }, + + // Find Python by trying a sequence of possibilities. + // Ignore errors, keep trying until Python is found. 
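  // (A sketch of the probe order: NODE_GYP_FORCE_PYTHON, which, if set,
  // is the only candidate tried; then --python / npm config; then
  // $PYTHON; then "python3"; then "python"; then, on Windows, the
  // default install locations and finally the "py.exe" launcher. The
  // first candidate whose reported version satisfies semverRange wins.)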
+ findPython: function findPython () { + const SKIP = 0; const FAIL = 1 + var toCheck = getChecks.apply(this) + + function getChecks () { + if (this.env.NODE_GYP_FORCE_PYTHON) { + return [{ + before: () => { + this.addLog( + 'checking Python explicitly set from NODE_GYP_FORCE_PYTHON') + this.addLog('- process.env.NODE_GYP_FORCE_PYTHON is ' + + `"${this.env.NODE_GYP_FORCE_PYTHON}"`) + }, + check: this.checkCommand, + arg: this.env.NODE_GYP_FORCE_PYTHON + }] + } + + var checks = [ + { + before: () => { + if (!this.configPython) { + this.addLog( + 'Python is not set from command line or npm configuration') + return SKIP + } + this.addLog('checking Python explicitly set from command line or ' + + 'npm configuration') + this.addLog('- "--python=" or "npm config get python" is ' + + `"${this.configPython}"`) + }, + check: this.checkCommand, + arg: this.configPython + }, + { + before: () => { + if (!this.env.PYTHON) { + this.addLog('Python is not set from environment variable ' + + 'PYTHON') + return SKIP + } + this.addLog('checking Python explicitly set from environment ' + + 'variable PYTHON') + this.addLog(`- process.env.PYTHON is "${this.env.PYTHON}"`) + }, + check: this.checkCommand, + arg: this.env.PYTHON + }, + { + before: () => { this.addLog('checking if "python3" can be used') }, + check: this.checkCommand, + arg: 'python3' + }, + { + before: () => { this.addLog('checking if "python" can be used') }, + check: this.checkCommand, + arg: 'python' + } + ] + + if (this.win) { + for (var i = 0; i < this.winDefaultLocations.length; ++i) { + const location = this.winDefaultLocations[i] + checks.push({ + before: () => { + this.addLog('checking if Python is ' + + `${location}`) + }, + check: this.checkExecPath, + arg: location + }) + } + checks.push({ + before: () => { + this.addLog( + 'checking if the py launcher can be used to find Python 3') + }, + check: this.checkPyLauncher + }) + } + + return checks + } + + function runChecks (err) { + this.log.silly('runChecks: err = %j', (err && err.stack) || err) + + const check = toCheck.shift() + if (!check) { + return this.fail() + } + + const before = check.before.apply(this) + if (before === SKIP) { + return runChecks.apply(this) + } + if (before === FAIL) { + return this.fail() + } + + const args = [runChecks.bind(this)] + if (check.arg) { + args.unshift(check.arg) + } + check.check.apply(this, args) + } + + runChecks.apply(this) + }, + + // Check if command is a valid Python to use. + // Will exit the Python finder on success. + // If on Windows, run in a CMD shell to support BAT/CMD launchers. + checkCommand: function checkCommand (command, errorCallback) { + var exec = command + var args = this.argsExecutable + var shell = false + if (this.win) { + // Arguments have to be manually quoted + exec = `"${exec}"` + args = args.map(a => `"${a}"`) + shell = true + } + + this.log.verbose(`- executing "${command}" to get executable path`) + this.run(exec, args, shell, function (err, execPath) { + // Possible outcomes: + // - Error: not in PATH, not executable or execution fails + // - Gibberish: the next command to check version will fail + // - Absolute path to executable + if (err) { + this.addLog(`- "${command}" is not in PATH or produced an error`) + return errorCallback(err) + } + this.addLog(`- executable path is "${execPath}"`) + this.checkExecPath(execPath, errorCallback) + }.bind(this)) + }, + + // Check if the py launcher can find a valid Python to use. + // Will exit the Python finder on success. 
+ // Distributions of Python on Windows by default install with the "py.exe" + // Python launcher which is more likely to exist than the Python executable + // being in the $PATH. + // Because the Python launcher supports Python 2 and Python 3, we should + // explicitly request a Python 3 version. This is done by supplying "-3" as + // the first command line argument. Since "py.exe -3" would be an invalid + // executable for "execFile", we have to use the launcher to figure out + // where the actual "python.exe" executable is located. + checkPyLauncher: function checkPyLauncher (errorCallback) { + this.log.verbose( + `- executing "${this.pyLauncher}" to get Python 3 executable path`) + this.run(this.pyLauncher, ['-3', ...this.argsExecutable], false, + function (err, execPath) { + // Possible outcomes: same as checkCommand + if (err) { + this.addLog( + `- "${this.pyLauncher}" is not in PATH or produced an error`) + return errorCallback(err) + } + this.addLog(`- executable path is "${execPath}"`) + this.checkExecPath(execPath, errorCallback) + }.bind(this)) + }, + + // Check if a Python executable is the correct version to use. + // Will exit the Python finder on success. + checkExecPath: function checkExecPath (execPath, errorCallback) { + this.log.verbose(`- executing "${execPath}" to get version`) + this.run(execPath, this.argsVersion, false, function (err, version) { + // Possible outcomes: + // - Error: executable can not be run (likely meaning the command wasn't + // a Python executable and the previous command produced gibberish) + // - Gibberish: somehow the last command produced an executable path, + // this will fail when verifying the version + // - Version of the Python executable + if (err) { + this.addLog(`- "${execPath}" could not be run`) + return errorCallback(err) + } + this.addLog(`- version is "${version}"`) + + const range = new semver.Range(this.semverRange) + var valid = false + try { + valid = range.test(version) + } catch (err) { + this.log.silly('range.test() threw:\n%s', err.stack) + this.addLog(`- "${execPath}" does not have a valid version`) + this.addLog('- is it a Python executable?') + return errorCallback(err) + } + + if (!valid) { + this.addLog(`- version is ${version} - should be ${this.semverRange}`) + this.addLog('- THIS VERSION OF PYTHON IS NOT SUPPORTED') + return errorCallback(new Error( + `Found unsupported Python version ${version}`)) + } + this.succeed(execPath, version) + }.bind(this)) + }, + + // Run an executable or shell command, trimming the output. 
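  // (Note: on Windows checkCommand pre-quotes the executable and its
  // arguments and passes shell = true so that BAT/CMD wrappers resolve;
  // TERM is forced to "dumb" below, which helps keep the child's output
  // free of terminal escape sequences.)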
+ run: function run (exec, args, shell, callback) { + var env = extend({}, this.env) + env.TERM = 'dumb' + const opts = { env: env, shell: shell } + + this.log.silly('execFile: exec = %j', exec) + this.log.silly('execFile: args = %j', args) + this.log.silly('execFile: opts = %j', opts) + try { + this.execFile(exec, args, opts, execFileCallback.bind(this)) + } catch (err) { + this.log.silly('execFile: threw:\n%s', err.stack) + return callback(err) + } + + function execFileCallback (err, stdout, stderr) { + this.log.silly('execFile result: err = %j', (err && err.stack) || err) + this.log.silly('execFile result: stdout = %j', stdout) + this.log.silly('execFile result: stderr = %j', stderr) + if (err) { + return callback(err) + } + const execPath = stdout.trim() + callback(null, execPath) + } + }, + + succeed: function succeed (execPath, version) { + this.log.info(`using Python version ${version} found at "${execPath}"`) + process.nextTick(this.callback.bind(null, null, execPath)) + }, + + fail: function fail () { + const errorLog = this.errorLog.join('\n') + + const pathExample = this.win ? 'C:\\Path\\To\\python.exe' + : '/path/to/pythonexecutable' + // For Windows 80 col console, use up to the column before the one marked + // with X (total 79 chars including logger prefix, 58 chars usable here): + // X + const info = [ + '**********************************************************', + 'You need to install the latest version of Python.', + 'Node-gyp should be able to find and use Python. If not,', + 'you can try one of the following options:', + `- Use the switch --python="${pathExample}"`, + ' (accepted by both node-gyp and npm)', + '- Set the environment variable PYTHON', + '- Set the npm configuration variable python:', + ` npm config set python "${pathExample}"`, + 'For more information consult the documentation at:', + 'https://github.com/nodejs/node-gyp#installation', + '**********************************************************' + ].join('\n') + + this.log.error(`\n${errorLog}\n\n${info}\n`) + process.nextTick(this.callback.bind(null, new Error( + 'Could not find any Python installation to use'))) + } +} + +function findPython (configPython, callback) { + var finder = new PythonFinder(configPython, callback) + finder.findPython() +} + +module.exports = findPython +module.exports.test = { + PythonFinder: PythonFinder, + findPython: findPython +} diff --git a/node_modules/node-gyp/lib/find-visualstudio.js b/node_modules/node-gyp/lib/find-visualstudio.js new file mode 100644 index 0000000..64af7be --- /dev/null +++ b/node_modules/node-gyp/lib/find-visualstudio.js @@ -0,0 +1,446 @@ +'use strict' + +const log = require('npmlog') +const execFile = require('child_process').execFile +const fs = require('fs') +const path = require('path').win32 +const logWithPrefix = require('./util').logWithPrefix +const regSearchKeys = require('./util').regSearchKeys + +function findVisualStudio (nodeSemver, configMsvsVersion, callback) { + const finder = new VisualStudioFinder(nodeSemver, configMsvsVersion, + callback) + finder.findVisualStudio() +} + +function VisualStudioFinder (nodeSemver, configMsvsVersion, callback) { + this.nodeSemver = nodeSemver + this.configMsvsVersion = configMsvsVersion + this.callback = callback + this.errorLog = [] + this.validVersions = [] +} + +VisualStudioFinder.prototype = { + log: logWithPrefix(log, 'find VS'), + + regSearchKeys: regSearchKeys, + + // Logs a message at verbose level, but also saves it to be displayed later + // at error level if an error occurs. 
This should help diagnose the problem. + addLog: function addLog (message) { + this.log.verbose(message) + this.errorLog.push(message) + }, + + findVisualStudio: function findVisualStudio () { + this.configVersionYear = null + this.configPath = null + if (this.configMsvsVersion) { + this.addLog('msvs_version was set from command line or npm config') + if (this.configMsvsVersion.match(/^\d{4}$/)) { + this.configVersionYear = parseInt(this.configMsvsVersion, 10) + this.addLog( + `- looking for Visual Studio version ${this.configVersionYear}`) + } else { + this.configPath = path.resolve(this.configMsvsVersion) + this.addLog( + `- looking for Visual Studio installed in "${this.configPath}"`) + } + } else { + this.addLog('msvs_version not set from command line or npm config') + } + + if (process.env.VCINSTALLDIR) { + this.envVcInstallDir = + path.resolve(process.env.VCINSTALLDIR, '..') + this.addLog('running in VS Command Prompt, installation path is:\n' + + `"${this.envVcInstallDir}"\n- will only use this version`) + } else { + this.addLog('VCINSTALLDIR not set, not running in VS Command Prompt') + } + + this.findVisualStudio2017OrNewer((info) => { + if (info) { + return this.succeed(info) + } + this.findVisualStudio2015((info) => { + if (info) { + return this.succeed(info) + } + this.findVisualStudio2013((info) => { + if (info) { + return this.succeed(info) + } + this.fail() + }) + }) + }) + }, + + succeed: function succeed (info) { + this.log.info(`using VS${info.versionYear} (${info.version}) found at:` + + `\n"${info.path}"` + + '\nrun with --verbose for detailed information') + process.nextTick(this.callback.bind(null, null, info)) + }, + + fail: function fail () { + if (this.configMsvsVersion && this.envVcInstallDir) { + this.errorLog.push( + 'msvs_version does not match this VS Command Prompt or the', + 'installation cannot be used.') + } else if (this.configMsvsVersion) { + // If msvs_version was specified but finding VS failed, print what would + // have been accepted + this.errorLog.push('') + if (this.validVersions) { + this.errorLog.push('valid versions for msvs_version:') + this.validVersions.forEach((version) => { + this.errorLog.push(`- "${version}"`) + }) + } else { + this.errorLog.push('no valid versions for msvs_version were found') + } + } + + const errorLog = this.errorLog.join('\n') + + // For Windows 80 col console, use up to the column before the one marked + // with X (total 79 chars including logger prefix, 62 chars usable here): + // X + const infoLog = [ + '**************************************************************', + 'You need to install the latest version of Visual Studio', + 'including the "Desktop development with C++" workload.', + 'For more information consult the documentation at:', + 'https://github.com/nodejs/node-gyp#on-windows', + '**************************************************************' + ].join('\n') + + this.log.error(`\n${errorLog}\n\n${infoLog}\n`) + process.nextTick(this.callback.bind(null, new Error( + 'Could not find any Visual Studio installation to use'))) + }, + + // Invoke the PowerShell script to get information about Visual Studio 2017 + // or newer installations + findVisualStudio2017OrNewer: function findVisualStudio2017OrNewer (cb) { + var ps = path.join(process.env.SystemRoot, 'System32', + 'WindowsPowerShell', 'v1.0', 'powershell.exe') + var csFile = path.join(__dirname, 'Find-VisualStudio.cs') + var psArgs = [ + '-ExecutionPolicy', + 'Unrestricted', + '-NoProfile', + '-Command', + '&{Add-Type -Path \'' + csFile + '\';' + 
'[VisualStudioConfiguration.Main]::PrintJson()}' + ] + + this.log.silly('Running', ps, psArgs) + var child = execFile(ps, psArgs, { encoding: 'utf8' }, + (err, stdout, stderr) => { + this.parseData(err, stdout, stderr, cb) + }) + child.stdin.end() + }, + + // Parse the output of the PowerShell script and look for an installation + // of Visual Studio 2017 or newer to use + parseData: function parseData (err, stdout, stderr, cb) { + this.log.silly('PS stderr = %j', stderr) + + const failPowershell = () => { + this.addLog( + 'could not use PowerShell to find Visual Studio 2017 or newer, try re-running with \'--loglevel silly\' for more details') + cb(null) + } + + if (err) { + this.log.silly('PS err = %j', err && (err.stack || err)) + return failPowershell() + } + + var vsInfo + try { + vsInfo = JSON.parse(stdout) + } catch (e) { + this.log.silly('PS stdout = %j', stdout) + this.log.silly(e) + return failPowershell() + } + + if (!Array.isArray(vsInfo)) { + this.log.silly('PS stdout = %j', stdout) + return failPowershell() + } + + vsInfo = vsInfo.map((info) => { + this.log.silly(`processing installation: "${info.path}"`) + info.path = path.resolve(info.path) + var ret = this.getVersionInfo(info) + ret.path = info.path + ret.msBuild = this.getMSBuild(info, ret.versionYear) + ret.toolset = this.getToolset(info, ret.versionYear) + ret.sdk = this.getSDK(info) + return ret + }) + this.log.silly('vsInfo:', vsInfo) + + // Remove future versions or errors parsing version number + vsInfo = vsInfo.filter((info) => { + if (info.versionYear) { + return true + } + this.addLog(`unknown version "${info.version}" found at "${info.path}"`) + return false + }) + + // Sort to place newer versions first + vsInfo.sort((a, b) => b.versionYear - a.versionYear) + + for (var i = 0; i < vsInfo.length; ++i) { + const info = vsInfo[i] + this.addLog(`checking VS${info.versionYear} (${info.version}) found ` + + `at:\n"${info.path}"`) + + if (info.msBuild) { + this.addLog('- found "Visual Studio C++ core features"') + } else { + this.addLog('- "Visual Studio C++ core features" missing') + continue + } + + if (info.toolset) { + this.addLog(`- found VC++ toolset: ${info.toolset}`) + } else { + this.addLog('- missing any VC++ toolset') + continue + } + + if (info.sdk) { + this.addLog(`- found Windows SDK: ${info.sdk}`) + } else { + this.addLog('- missing any Windows SDK') + continue + } + + if (!this.checkConfigVersion(info.versionYear, info.path)) { + continue + } + + return cb(info) + } + + this.addLog( + 'could not find a version of Visual Studio 2017 or newer to use') + cb(null) + }, + + // Helper - process version information + getVersionInfo: function getVersionInfo (info) { + const match = /^(\d+)\.(\d+)\..*/.exec(info.version) + if (!match) { + this.log.silly('- failed to parse version:', info.version) + return {} + } + this.log.silly('- version match = %j', match) + var ret = { + version: info.version, + versionMajor: parseInt(match[1], 10), + versionMinor: parseInt(match[2], 10) + } + if (ret.versionMajor === 15) { + ret.versionYear = 2017 + return ret + } + if (ret.versionMajor === 16) { + ret.versionYear = 2019 + return ret + } + if (ret.versionMajor === 17) { + ret.versionYear = 2022 + return ret + } + this.log.silly('- unsupported version:', ret.versionMajor) + return {} + }, + + // Helper - process MSBuild information + getMSBuild: function getMSBuild (info, versionYear) { + const pkg = 'Microsoft.VisualStudio.VC.MSBuild.Base' + const msbuildPath = path.join(info.path, 'MSBuild', 'Current', 'Bin', 
'MSBuild.exe') + if (info.packages.indexOf(pkg) !== -1) { + this.log.silly('- found VC.MSBuild.Base') + if (versionYear === 2017) { + return path.join(info.path, 'MSBuild', '15.0', 'Bin', 'MSBuild.exe') + } + if (versionYear === 2019) { + return msbuildPath + } + } + // Visual Studio 2022 doesn't ship the MSBuild package reference, so + // probe for MSBuild.exe on disk instead + if (fs.existsSync(msbuildPath)) { + return msbuildPath + } + return null + }, + + // Helper - process toolset information + getToolset: function getToolset (info, versionYear) { + const pkg = 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64' + const express = 'Microsoft.VisualStudio.WDExpress' + + if (info.packages.indexOf(pkg) !== -1) { + this.log.silly('- found VC.Tools.x86.x64') + } else if (info.packages.indexOf(express) !== -1) { + this.log.silly('- found Visual Studio Express (looking for toolset)') + } else { + return null + } + + if (versionYear === 2017) { + return 'v141' + } else if (versionYear === 2019) { + return 'v142' + } else if (versionYear === 2022) { + return 'v143' + } + this.log.silly('- invalid versionYear:', versionYear) + return null + }, + + // Helper - process Windows SDK information + getSDK: function getSDK (info) { + const win8SDK = 'Microsoft.VisualStudio.Component.Windows81SDK' + const win10SDKPrefix = 'Microsoft.VisualStudio.Component.Windows10SDK.' + + var Win10SDKVer = 0 + info.packages.forEach((pkg) => { + if (!pkg.startsWith(win10SDKPrefix)) { + return + } + const parts = pkg.split('.') + if (parts.length > 5 && parts[5] !== 'Desktop') { + this.log.silly('- ignoring non-Desktop Win10SDK:', pkg) + return + } + const foundSdkVer = parseInt(parts[4], 10) + if (isNaN(foundSdkVer)) { + // Microsoft.VisualStudio.Component.Windows10SDK.IpOverUsb + this.log.silly('- failed to parse Win10SDK number:', pkg) + return + } + this.log.silly('- found Win10SDK:', foundSdkVer) + Win10SDKVer = Math.max(Win10SDKVer, foundSdkVer) + }) + + if (Win10SDKVer !== 0) { + return `10.0.${Win10SDKVer}.0` + } else if (info.packages.indexOf(win8SDK) !== -1) { + this.log.silly('- found Win8SDK') + return '8.1' + } + return null + }, + + // Find an installation of Visual Studio 2015 to use + findVisualStudio2015: function findVisualStudio2015 (cb) { + return this.findOldVS({ + version: '14.0', + versionMajor: 14, + versionMinor: 0, + versionYear: 2015, + toolset: 'v140' + }, cb) + }, + + // Find an installation of Visual Studio 2013 to use + findVisualStudio2013: function findVisualStudio2013 (cb) { + if (this.nodeSemver.major >= 9) { + this.addLog( + 'not looking for VS2013 as it is only supported up to Node.js 8') + return cb(null) + } + return this.findOldVS({ + version: '12.0', + versionMajor: 12, + versionMinor: 0, + versionYear: 2013, + toolset: 'v120' + }, cb) + }, + + // Helper - common code for VS2013 and VS2015 + findOldVS: function findOldVS (info, cb) { + const regVC7 = ['HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7', + 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7'] + const regMSBuild = 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions' + + this.addLog(`looking for Visual Studio ${info.versionYear}`) + this.regSearchKeys(regVC7, info.version, [], (err, res) => { + if (err) { + this.addLog('- not found') + return cb(null) + } + + const vsPath = path.resolve(res, '..') + this.addLog(`- found in "${vsPath}"`) + + const msBuildRegOpts = process.arch === 'ia32' ?
[] : ['/reg:32'] + this.regSearchKeys([`${regMSBuild}\\${info.version}`], + 'MSBuildToolsPath', msBuildRegOpts, (err, res) => { + if (err) { + this.addLog( + '- could not find MSBuild in registry for this version') + return cb(null) + } + + const msBuild = path.join(res, 'MSBuild.exe') + this.addLog(`- MSBuild in "${msBuild}"`) + + if (!this.checkConfigVersion(info.versionYear, vsPath)) { + return cb(null) + } + + info.path = vsPath + info.msBuild = msBuild + info.sdk = null + cb(info) + }) + }) + }, + + // After finding a usable version of Visual Studio: + // - add it to validVersions to be displayed at the end if a specific + // version was requested and not found; + // - check if this is the version that was requested. + // - check if this matches the Visual Studio Command Prompt + checkConfigVersion: function checkConfigVersion (versionYear, vsPath) { + this.validVersions.push(versionYear) + this.validVersions.push(vsPath) + + if (this.configVersionYear && this.configVersionYear !== versionYear) { + this.addLog('- msvs_version does not match this version') + return false + } + if (this.configPath && + path.relative(this.configPath, vsPath) !== '') { + this.addLog('- msvs_version does not point to this installation') + return false + } + if (this.envVcInstallDir && + path.relative(this.envVcInstallDir, vsPath) !== '') { + this.addLog('- does not match this Visual Studio Command Prompt') + return false + } + + return true + } +} + +module.exports = findVisualStudio +module.exports.test = { + VisualStudioFinder: VisualStudioFinder, + findVisualStudio: findVisualStudio +} diff --git a/node_modules/node-gyp/lib/install.js b/node_modules/node-gyp/lib/install.js new file mode 100644 index 0000000..99f6d85 --- /dev/null +++ b/node_modules/node-gyp/lib/install.js @@ -0,0 +1,376 @@ +'use strict' + +const fs = require('graceful-fs') +const os = require('os') +const tar = require('tar') +const path = require('path') +const util = require('util') +const stream = require('stream') +const crypto = require('crypto') +const log = require('npmlog') +const semver = require('semver') +const fetch = require('make-fetch-happen') +const processRelease = require('./process-release') +const win = process.platform === 'win32' +const streamPipeline = util.promisify(stream.pipeline) + +/** + * @param {typeof import('graceful-fs')} fs + */ + +async function install (fs, gyp, argv) { + const release = processRelease(argv, gyp, process.version, process.release) + + // Determine which node dev files version we are installing + log.verbose('install', 'input version string %j', release.version) + + if (!release.semver) { + // could not parse the version string with semver + throw new Error('Invalid version number: ' + release.version) + } + + if (semver.lt(release.version, '0.8.0')) { + throw new Error('Minimum target version is `0.8.0` or greater. Got: ' + release.version) + } + + // 0.x.y-pre versions are not published yet and cannot be installed. Bail. 
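  // (Illustrative: a locally built "v16.0.0-pre" has no published
  // tarball on nodejs.org, so its headers can only come from a source
  // checkout supplied via --nodedir.)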
+ if (release.semver.prerelease[0] === 'pre') { + log.verbose('detected "pre" node version', release.version) + if (!gyp.opts.nodedir) { + throw new Error('"pre" versions of node cannot be installed, use the --nodedir flag instead') + } + log.verbose('--nodedir flag was passed; skipping install', gyp.opts.nodedir) + return + } + + // flatten version into String + log.verbose('install', 'installing version: %s', release.versionDir) + + // the directory where the dev files will be installed + const devDir = path.resolve(gyp.devDir, release.versionDir) + + // If '--ensure' was passed, then don't *always* install the version; + // check if it is already installed, and only install when needed + if (gyp.opts.ensure) { + log.verbose('install', '--ensure was passed, so won\'t reinstall if already installed') + try { + await fs.promises.stat(devDir) + } catch (err) { + if (err.code === 'ENOENT') { + log.verbose('install', 'version not already installed, continuing with install', release.version) + try { + return await go() + } catch (err) { + return rollback(err) + } + } else if (err.code === 'EACCES') { + return eaccesFallback(err) + } + throw err + } + log.verbose('install', 'version is already installed, need to check "installVersion"') + const installVersionFile = path.resolve(devDir, 'installVersion') + let installVersion = 0 + try { + const ver = await fs.promises.readFile(installVersionFile, 'ascii') + installVersion = parseInt(ver, 10) || 0 + } catch (err) { + if (err.code !== 'ENOENT') { + throw err + } + } + log.verbose('got "installVersion"', installVersion) + log.verbose('needs "installVersion"', gyp.package.installVersion) + if (installVersion < gyp.package.installVersion) { + log.verbose('install', 'version is no good; reinstalling') + try { + return await go() + } catch (err) { + return rollback(err) + } + } + log.verbose('install', 'version is good') + } else { + try { + return await go() + } catch (err) { + return rollback(err) + } + } + + async function go () { + log.verbose('ensuring nodedir is created', devDir) + + // first create the dir for the node dev files + try { + const created = await fs.promises.mkdir(devDir, { recursive: true }) + + if (created) { + log.verbose('created nodedir', created) + } + } catch (err) { + if (err.code === 'EACCES') { + return eaccesFallback(err) + } + + throw err + } + + // now download the node tarball + const tarPath = gyp.opts.tarball + let extractCount = 0 + const contentShasums = {} + const expectShasums = {} + + // checks if a file to be extracted from the tarball is valid. + // only .h header files and the gyp files get extracted + function isValid (path) { + const isValid = valid(path) + if (isValid) { + log.verbose('extracted file from tarball', path) + extractCount++ + } else { + // invalid + log.silly('ignoring from tarball', path) + } + return isValid + } + + // download the tarball and extract! 
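    // (Local --tarball files are extracted directly; downloads are piped
    // through the ShaSum transform below and straight into tar.extract,
    // so the checksum is computed in a single streaming pass.)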
+ + if (tarPath) { + await tar.extract({ + file: tarPath, + strip: 1, + filter: isValid, + cwd: devDir + }) + } else { + try { + const res = await download(gyp, release.tarballUrl) + + if (res.status !== 200) { + throw new Error(`${res.status} response downloading ${release.tarballUrl}`) + } + + await streamPipeline( + res.body, + // content checksum + new ShaSum((_, checksum) => { + const filename = path.basename(release.tarballUrl).trim() + contentShasums[filename] = checksum + log.verbose('content checksum', filename, checksum) + }), + tar.extract({ + strip: 1, + cwd: devDir, + filter: isValid + }) + ) + } catch (err) { + // something went wrong downloading the tarball? + if (err.code === 'ENOTFOUND') { + throw new Error('This is most likely not a problem with node-gyp or the package itself and\n' + + 'is related to network connectivity. In most cases you are behind a proxy or have bad \n' + + 'network settings.') + } + throw err + } + } + + // invoked after the tarball has finished being extracted + if (extractCount === 0) { + throw new Error('There was a fatal problem while downloading/extracting the tarball') + } + + log.verbose('tarball', 'done parsing tarball') + + const installVersionPath = path.resolve(devDir, 'installVersion') + await Promise.all([ + // need to download node.lib + ...(win ? downloadNodeLib() : []), + // write the "installVersion" file + fs.promises.writeFile(installVersionPath, gyp.package.installVersion + '\n'), + // Only download SHASUMS.txt if we downloaded something in need of SHA verification + ...(!tarPath || win ? [downloadShasums()] : []) + ]) + + log.verbose('download contents checksum', JSON.stringify(contentShasums)) + // check content shasums + for (const k in contentShasums) { + log.verbose('validating download checksum for ' + k, '(%s == %s)', contentShasums[k], expectShasums[k]) + if (contentShasums[k] !== expectShasums[k]) { + throw new Error(k + ' local checksum ' + contentShasums[k] + ' not match remote ' + expectShasums[k]) + } + } + + async function downloadShasums () { + log.verbose('check download content checksum, need to download `SHASUMS256.txt`...') + log.verbose('checksum url', release.shasumsUrl) + + const res = await download(gyp, release.shasumsUrl) + + if (res.status !== 200) { + throw new Error(`${res.status} status code downloading checksum`) + } + + for (const line of (await res.text()).trim().split('\n')) { + const items = line.trim().split(/\s+/) + if (items.length !== 2) { + return + } + + // 0035d18e2dcf9aad669b1c7c07319e17abfe3762 ./node-v0.11.4.tar.gz + const name = items[1].replace(/^\.\//, '') + expectShasums[name] = items[0] + } + + log.verbose('checksum data', JSON.stringify(expectShasums)) + } + + function downloadNodeLib () { + log.verbose('on Windows; need to download `' + release.name + '.lib`...') + const archs = ['ia32', 'x64', 'arm64'] + return archs.map(async (arch) => { + const dir = path.resolve(devDir, arch) + const targetLibPath = path.resolve(dir, release.name + '.lib') + const { libUrl, libPath } = release[arch] + const name = `${arch} ${release.name}.lib` + log.verbose(name, 'dir', dir) + log.verbose(name, 'url', libUrl) + + await fs.promises.mkdir(dir, { recursive: true }) + log.verbose('streaming', name, 'to:', targetLibPath) + + const res = await download(gyp, libUrl) + + if (res.status === 403 || res.status === 404) { + if (arch === 'arm64') { + // Arm64 is a newer platform on Windows and not all node distributions provide it. 
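          // (Hence a 403/404 below is only logged verbosely for arm64,
          // while the other arches get a real warning.)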
+ log.verbose(`${name} was not found in ${libUrl}`) + } else { + log.warn(`${name} was not found in ${libUrl}`) + } + return + } else if (res.status !== 200) { + throw new Error(`${res.status} status code downloading ${name}`) + } + + return streamPipeline( + res.body, + new ShaSum((_, checksum) => { + contentShasums[libPath] = checksum + log.verbose('content checksum', libPath, checksum) + }), + fs.createWriteStream(targetLibPath) + ) + }) + } // downloadNodeLib() + } // go() + + /** + * Checks if a given filename is "valid" for this installation. + */ + + function valid (file) { + // header files + const extname = path.extname(file) + return extname === '.h' || extname === '.gypi' + } + + async function rollback (err) { + log.warn('install', 'got an error, rolling back install') + // roll-back the install if anything went wrong + await util.promisify(gyp.commands.remove)([release.versionDir]) + throw err + } + + /** + * The EACCES fallback is a workaround for npm's `sudo` behavior, where + * it drops the permissions before invoking any child processes (like + * node-gyp). So what happens is the "nobody" user doesn't have + * permission to create the dev dir. As a fallback, make the tmpdir() be + * the dev dir for this installation. This is not ideal, but at least + * the compilation will succeed... + */ + + async function eaccesFallback (err) { + const noretry = '--node_gyp_internal_noretry' + if (argv.indexOf(noretry) !== -1) { + throw err + } + const tmpdir = os.tmpdir() + gyp.devDir = path.resolve(tmpdir, '.node-gyp') + let userString = '' + try { + // os.userInfo can fail on some systems, it's not critical here + userString = ` ("${os.userInfo().username}")` + } catch (e) {} + log.warn('EACCES', 'current user%s does not have permission to access the dev dir "%s"', userString, devDir) + log.warn('EACCES', 'attempting to reinstall using temporary dev dir "%s"', gyp.devDir) + if (process.cwd() === tmpdir) { + log.verbose('tmpdir == cwd', 'automatically will remove dev files after to save disk space') + gyp.todo.push({ name: 'remove', args: argv }) + } + return util.promisify(gyp.commands.install)([noretry].concat(argv)) + } +} + +class ShaSum extends stream.Transform { + constructor (callback) { + super() + this._callback = callback + this._digester = crypto.createHash('sha256') + } + + _transform (chunk, _, callback) { + this._digester.update(chunk) + callback(null, chunk) + } + + _flush (callback) { + this._callback(null, this._digester.digest('hex')) + callback() + } +} + +async function download (gyp, url) { + log.http('GET', url) + + const requestOpts = { + headers: { + 'User-Agent': `node-gyp v${gyp.version} (node ${process.version})`, + Connection: 'keep-alive' + }, + proxy: gyp.opts.proxy, + noProxy: gyp.opts.noproxy + } + + const cafile = gyp.opts.cafile + if (cafile) { + requestOpts.ca = await readCAFile(cafile) + } + + const res = await fetch(url, requestOpts) + log.http(res.status, res.url) + + return res +} + +async function readCAFile (filename) { + // The CA file can contain multiple certificates so split on certificate + // boundaries. [\S\s]*? is used to match everything including newlines. 
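  // (Sketch: a PEM bundle containing two BEGIN/END CERTIFICATE blocks
  // yields a two-element array here, which download() passes through to
  // make-fetch-happen as requestOpts.ca.)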
+ const ca = await fs.promises.readFile(filename, 'utf8') + const re = /(-----BEGIN CERTIFICATE-----[\S\s]*?-----END CERTIFICATE-----)/g + return ca.match(re) +} + +module.exports = function (gyp, argv, callback) { + install(fs, gyp, argv).then(callback.bind(undefined, null), callback) +} +module.exports.test = { + download, + install, + readCAFile +} +module.exports.usage = 'Install node development files for the specified node version.' diff --git a/node_modules/node-gyp/lib/list.js b/node_modules/node-gyp/lib/list.js new file mode 100644 index 0000000..405ebc0 --- /dev/null +++ b/node_modules/node-gyp/lib/list.js @@ -0,0 +1,27 @@ +'use strict' + +const fs = require('graceful-fs') +const log = require('npmlog') + +function list (gyp, args, callback) { + var devDir = gyp.devDir + log.verbose('list', 'using node-gyp dir:', devDir) + + fs.readdir(devDir, onreaddir) + + function onreaddir (err, versions) { + if (err && err.code !== 'ENOENT') { + return callback(err) + } + + if (Array.isArray(versions)) { + versions = versions.filter(function (v) { return v !== 'current' }) + } else { + versions = [] + } + callback(null, versions) + } +} + +module.exports = list +module.exports.usage = 'Prints a listing of the currently installed node development files' diff --git a/node_modules/node-gyp/lib/node-gyp.js b/node_modules/node-gyp/lib/node-gyp.js new file mode 100644 index 0000000..0f11185 --- /dev/null +++ b/node_modules/node-gyp/lib/node-gyp.js @@ -0,0 +1,211 @@ +'use strict' + +const path = require('path') +const nopt = require('nopt') +const log = require('npmlog') +const childProcess = require('child_process') +const EE = require('events').EventEmitter +const inherits = require('util').inherits +const commands = [ + // Module build commands + 'build', + 'clean', + 'configure', + 'rebuild', + // Development Header File management commands + 'install', + 'list', + 'remove' +] +const aliases = { + ls: 'list', + rm: 'remove' +} + +// differentiate node-gyp's logs from npm's +log.heading = 'gyp' + +function gyp () { + return new Gyp() +} + +function Gyp () { + var self = this + + this.devDir = '' + this.commands = {} + + commands.forEach(function (command) { + self.commands[command] = function (argv, callback) { + log.verbose('command', command, argv) + return require('./' + command)(self, argv, callback) + } + }) +} +inherits(Gyp, EE) +exports.Gyp = Gyp +var proto = Gyp.prototype + +/** + * Export the contents of the package.json. + */ + +proto.package = require('../package.json') + +/** + * nopt configuration definitions + */ + +proto.configDefs = { + help: Boolean, // everywhere + arch: String, // 'configure' + cafile: String, // 'install' + debug: Boolean, // 'build' + directory: String, // bin + make: String, // 'build' + msvs_version: String, // 'configure' + ensure: Boolean, // 'install' + solution: String, // 'build' (windows only) + proxy: String, // 'install' + noproxy: String, // 'install' + devdir: String, // everywhere + nodedir: String, // 'configure' + loglevel: String, // everywhere + python: String, // 'configure' + 'dist-url': String, // 'install' + tarball: String, // 'install' + jobs: String, // 'build' + thin: String, // 'configure' + 'force-process-config': Boolean // 'configure' +} + +/** + * nopt shorthands + */ + +proto.shorthands = { + release: '--no-debug', + C: '--directory', + debug: '--debug', + j: '--jobs', + silly: '--loglevel=silly', + verbose: '--loglevel=verbose', + silent: '--loglevel=silent' +} + +/** + * expose the command aliases for the bin file to use. 
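 * e.g. "node-gyp ls" is resolved to "list" and "rm" to "remove".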
+ */
+
+proto.aliases = aliases
+
+/**
+ * Parses the given argv array and sets the 'opts',
+ * 'argv' and 'command' properties.
+ */
+
+proto.parseArgv = function parseOpts (argv) {
+  this.opts = nopt(this.configDefs, this.shorthands, argv)
+  this.argv = this.opts.argv.remain.slice()
+
+  var commands = this.todo = []
+
+  // create a copy of the argv array with aliases mapped
+  argv = this.argv.map(function (arg) {
+    // is this an alias?
+    if (arg in this.aliases) {
+      arg = this.aliases[arg]
+    }
+    return arg
+  }, this)
+
+  // process the mapped args into "command" objects ("name" and "args" props)
+  argv.slice().forEach(function (arg) {
+    if (arg in this.commands) {
+      var args = argv.splice(0, argv.indexOf(arg))
+      argv.shift()
+      if (commands.length > 0) {
+        commands[commands.length - 1].args = args
+      }
+      commands.push({ name: arg, args: [] })
+    }
+  }, this)
+  if (commands.length > 0) {
+    commands[commands.length - 1].args = argv.splice(0)
+  }
+
+  // support for inheriting config env variables from npm
+  var npmConfigPrefix = 'npm_config_'
+  Object.keys(process.env).forEach(function (name) {
+    if (name.indexOf(npmConfigPrefix) !== 0) {
+      return
+    }
+    var val = process.env[name]
+    if (name === npmConfigPrefix + 'loglevel') {
+      log.level = val
+    } else {
+      // add the user-defined options to the config
+      name = name.substring(npmConfigPrefix.length)
+      // gyp@741b7f1 enters an infinite loop when it encounters
+      // zero-length options so ensure those don't get through.
+      if (name) {
+        this.opts[name] = val
+      }
+    }
+  }, this)
+
+  if (this.opts.loglevel) {
+    log.level = this.opts.loglevel
+  }
+  log.resume()
+}
+
+/**
+ * Spawns a child process and emits a 'spawn' event.
+ */
+
+proto.spawn = function spawn (command, args, opts) {
+  if (!opts) {
+    opts = {}
+  }
+  if (!opts.silent && !opts.stdio) {
+    opts.stdio = [0, 1, 2]
+  }
+  var cp = childProcess.spawn(command, args, opts)
+  log.info('spawn', command)
+  log.info('spawn args', args)
+  return cp
+}
+
+/**
+ * Returns the usage instructions for node-gyp.
+ */
+
+proto.usage = function usage () {
+  var str = [
+    '',
+    '  Usage: node-gyp <command> [options]',
+    '',
+    '  where <command> is one of:',
+    commands.map(function (c) {
+      return '    - ' + c + ' - ' + require('./' + c).usage
+    }).join('\n'),
+    '',
+    'node-gyp@' + this.version + '  ' + path.resolve(__dirname, '..'),
+    'node@' + process.versions.node
+  ].join('\n')
+  return str
+}
+
+/**
+ * Version number getter.
+ */
+
+Object.defineProperty(proto, 'version', {
+  get: function () {
+    return this.package.version
+  },
+  enumerable: true
+})
+
+module.exports = exports = gyp
diff --git a/node_modules/node-gyp/lib/process-release.js b/node_modules/node-gyp/lib/process-release.js
new file mode 100644
index 0000000..95b55e4
--- /dev/null
+++ b/node_modules/node-gyp/lib/process-release.js
@@ -0,0 +1,147 @@
+/* eslint-disable node/no-deprecated-api */
+
+'use strict'
+
+const semver = require('semver')
+const url = require('url')
+const path = require('path')
+const log = require('npmlog')
+
+// versions where -headers.tar.gz started shipping
+const headersTarballRange = '>= 3.0.0 || ~0.12.10 || ~0.10.42'
+const bitsre = /\/win-(x86|x64|arm64)\//
+const bitsreV3 = /\/win-(x86|ia32|x64)\// // io.js v3.x.x shipped with "ia32" but should
+// have been "x86"
+
+// Captures all the logic required to determine download URLs, local directory and
+// file names. Inputs come from command-line switches (--target, --dist-url),
+// `process.version` and `process.release` where it exists.
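+// Illustrative example: with `--target=14.17.0` and no overrides, this module
+// would produce tarballUrl https://nodejs.org/dist/v14.17.0/node-v14.17.0-headers.tar.gz
+// and versionDir "14.17.0" (names are only prefixed for io.js releases).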
+function processRelease (argv, gyp, defaultVersion, defaultRelease) { + var version = (semver.valid(argv[0]) && argv[0]) || gyp.opts.target || defaultVersion + var versionSemver = semver.parse(version) + var overrideDistUrl = gyp.opts['dist-url'] || gyp.opts.disturl + var isDefaultVersion + var isNamedForLegacyIojs + var name + var distBaseUrl + var baseUrl + var libUrl32 + var libUrl64 + var libUrlArm64 + var tarballUrl + var canGetHeaders + + if (!versionSemver) { + // not a valid semver string, nothing we can do + return { version: version } + } + // flatten version into String + version = versionSemver.version + + // defaultVersion should come from process.version so ought to be valid semver + isDefaultVersion = version === semver.parse(defaultVersion).version + + // can't use process.release if we're using --target=x.y.z + if (!isDefaultVersion) { + defaultRelease = null + } + + if (defaultRelease) { + // v3 onward, has process.release + name = defaultRelease.name.replace(/io\.js/, 'iojs') // remove the '.' for directory naming purposes + } else { + // old node or alternative --target= + // semver.satisfies() doesn't like prerelease tags so test major directly + isNamedForLegacyIojs = versionSemver.major >= 1 && versionSemver.major < 4 + // isNamedForLegacyIojs is required to support Electron < 4 (in particular Electron 3) + // as previously this logic was used to ensure "iojs" was used to download iojs releases + // and "node" for node releases. Unfortunately the logic was broad enough that electron@3 + // published release assets as "iojs" so that the node-gyp logic worked. Once Electron@3 has + // been EOL for a while (late 2019) we should remove this hack. + name = isNamedForLegacyIojs ? 'iojs' : 'node' + } + + // check for the nvm.sh standard mirror env variables + if (!overrideDistUrl && process.env.NODEJS_ORG_MIRROR) { + overrideDistUrl = process.env.NODEJS_ORG_MIRROR + } + + if (overrideDistUrl) { + log.verbose('download', 'using dist-url', overrideDistUrl) + } + + if (overrideDistUrl) { + distBaseUrl = overrideDistUrl.replace(/\/+$/, '') + } else { + distBaseUrl = 'https://nodejs.org/dist' + } + distBaseUrl += '/v' + version + '/' + + // new style, based on process.release so we have a lot of the data we need + if (defaultRelease && defaultRelease.headersUrl && !overrideDistUrl) { + baseUrl = url.resolve(defaultRelease.headersUrl, './') + libUrl32 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'x86', versionSemver.major) + libUrl64 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'x64', versionSemver.major) + libUrlArm64 = resolveLibUrl(name, defaultRelease.libUrl || baseUrl || distBaseUrl, 'arm64', versionSemver.major) + tarballUrl = defaultRelease.headersUrl + } else { + // older versions without process.release are captured here and we have to make + // a lot of assumptions, additionally if you --target=x.y.z then we can't use the + // current process.release + baseUrl = distBaseUrl + libUrl32 = resolveLibUrl(name, baseUrl, 'x86', versionSemver.major) + libUrl64 = resolveLibUrl(name, baseUrl, 'x64', versionSemver.major) + libUrlArm64 = resolveLibUrl(name, baseUrl, 'arm64', versionSemver.major) + + // making the bold assumption that anything with a version number >3.0.0 will + // have a *-headers.tar.gz file in its dist location, even some frankenstein + // custom version + canGetHeaders = semver.satisfies(versionSemver, headersTarballRange) + tarballUrl = url.resolve(baseUrl, name + '-v' + version + (canGetHeaders ? 
'-headers' : '') + '.tar.gz') + } + + return { + version: version, + semver: versionSemver, + name: name, + baseUrl: baseUrl, + tarballUrl: tarballUrl, + shasumsUrl: url.resolve(baseUrl, 'SHASUMS256.txt'), + versionDir: (name !== 'node' ? name + '-' : '') + version, + ia32: { + libUrl: libUrl32, + libPath: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl32).path)) + }, + x64: { + libUrl: libUrl64, + libPath: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrl64).path)) + }, + arm64: { + libUrl: libUrlArm64, + libPath: normalizePath(path.relative(url.parse(baseUrl).path, url.parse(libUrlArm64).path)) + } + } +} + +function normalizePath (p) { + return path.normalize(p).replace(/\\/g, '/') +} + +function resolveLibUrl (name, defaultUrl, arch, versionMajor) { + var base = url.resolve(defaultUrl, './') + var hasLibUrl = bitsre.test(defaultUrl) || (versionMajor === 3 && bitsreV3.test(defaultUrl)) + + if (!hasLibUrl) { + // let's assume it's a baseUrl then + if (versionMajor >= 1) { + return url.resolve(base, 'win-' + arch + '/' + name + '.lib') + } + // prior to io.js@1.0.0 32-bit node.lib lives in /, 64-bit lives in /x64/ + return url.resolve(base, (arch === 'x86' ? '' : arch + '/') + name + '.lib') + } + + // else we have a proper url to a .lib, just make sure it's the right arch + return defaultUrl.replace(versionMajor === 3 ? bitsreV3 : bitsre, '/win-' + arch + '/') +} + +module.exports = processRelease diff --git a/node_modules/node-gyp/lib/rebuild.js b/node_modules/node-gyp/lib/rebuild.js new file mode 100644 index 0000000..a1c5b27 --- /dev/null +++ b/node_modules/node-gyp/lib/rebuild.js @@ -0,0 +1,13 @@ +'use strict' + +function rebuild (gyp, argv, callback) { + gyp.todo.push( + { name: 'clean', args: [] } + , { name: 'configure', args: argv } + , { name: 'build', args: [] } + ) + process.nextTick(callback) +} + +module.exports = rebuild +module.exports.usage = 'Runs "clean", "configure" and "build" all at once' diff --git a/node_modules/node-gyp/lib/remove.js b/node_modules/node-gyp/lib/remove.js new file mode 100644 index 0000000..8c945e5 --- /dev/null +++ b/node_modules/node-gyp/lib/remove.js @@ -0,0 +1,46 @@ +'use strict' + +const fs = require('fs') +const rm = require('rimraf') +const path = require('path') +const log = require('npmlog') +const semver = require('semver') + +function remove (gyp, argv, callback) { + var devDir = gyp.devDir + log.verbose('remove', 'using node-gyp dir:', devDir) + + // get the user-specified version to remove + var version = argv[0] || gyp.opts.target + log.verbose('remove', 'removing target version:', version) + + if (!version) { + return callback(new Error('You must specify a version number to remove. 
Ex: "' + process.version + '"')) + } + + var versionSemver = semver.parse(version) + if (versionSemver) { + // flatten the version Array into a String + version = versionSemver.version + } + + var versionPath = path.resolve(gyp.devDir, version) + log.verbose('remove', 'removing development files for version:', version) + + // first check if its even installed + fs.stat(versionPath, function (err) { + if (err) { + if (err.code === 'ENOENT') { + callback(null, 'version was already uninstalled: ' + version) + } else { + callback(err) + } + return + } + // Go ahead and delete the dir + rm(versionPath, callback) + }) +} + +module.exports = exports = remove +module.exports.usage = 'Removes the node development files for the specified version' diff --git a/node_modules/node-gyp/lib/util.js b/node_modules/node-gyp/lib/util.js new file mode 100644 index 0000000..3e23c62 --- /dev/null +++ b/node_modules/node-gyp/lib/util.js @@ -0,0 +1,64 @@ +'use strict' + +const log = require('npmlog') +const execFile = require('child_process').execFile +const path = require('path') + +function logWithPrefix (log, prefix) { + function setPrefix (logFunction) { + return (...args) => logFunction.apply(null, [ prefix, ...args ]) // eslint-disable-line + } + return { + silly: setPrefix(log.silly), + verbose: setPrefix(log.verbose), + info: setPrefix(log.info), + warn: setPrefix(log.warn), + error: setPrefix(log.error) + } +} + +function regGetValue (key, value, addOpts, cb) { + const outReValue = value.replace(/\W/g, '.') + const outRe = new RegExp(`^\\s+${outReValue}\\s+REG_\\w+\\s+(\\S.*)$`, 'im') + const reg = path.join(process.env.SystemRoot, 'System32', 'reg.exe') + const regArgs = ['query', key, '/v', value].concat(addOpts) + + log.silly('reg', 'running', reg, regArgs) + const child = execFile(reg, regArgs, { encoding: 'utf8' }, + function (err, stdout, stderr) { + log.silly('reg', 'reg.exe stdout = %j', stdout) + if (err || stderr.trim() !== '') { + log.silly('reg', 'reg.exe err = %j', err && (err.stack || err)) + log.silly('reg', 'reg.exe stderr = %j', stderr) + return cb(err, stderr) + } + + const result = outRe.exec(stdout) + if (!result) { + log.silly('reg', 'error parsing stdout') + return cb(new Error('Could not parse output of reg.exe')) + } + log.silly('reg', 'found: %j', result[1]) + cb(null, result[1]) + }) + child.stdin.end() +} + +function regSearchKeys (keys, value, addOpts, cb) { + var i = 0 + const search = () => { + log.silly('reg-search', 'looking for %j in %j', value, keys[i]) + regGetValue(keys[i], value, addOpts, (err, res) => { + ++i + if (err && i < keys.length) { return search() } + cb(err, res) + }) + } + search() +} + +module.exports = { + logWithPrefix: logWithPrefix, + regGetValue: regGetValue, + regSearchKeys: regSearchKeys +} diff --git a/node_modules/node-gyp/macOS_Catalina.md b/node_modules/node-gyp/macOS_Catalina.md new file mode 100644 index 0000000..4fe0f29 --- /dev/null +++ b/node_modules/node-gyp/macOS_Catalina.md @@ -0,0 +1,104 @@ +# Installation notes for macOS Catalina (v10.15) + +_This document specifically refers to upgrades from previous versions of macOS to Catalina (10.15). It should be removed from the source repository when Catalina ceases to be the latest macOS version or when future Catalina versions no longer raise these issues._ + +**Both upgrading to macOS Catalina and running a Software Update in Catalina may cause normal `node-gyp` installations to fail. 
This might manifest as the following error during `npm install`:**
+
+```console
+gyp: No Xcode or CLT version detected!
+```
+
+## node-gyp v7
+
+The newest release of `node-gyp` should solve this problem. If you are using `node-gyp` directly then you should be able to install v7 and use it as-is.
+
+If you need to use `node-gyp` from within `npm` (e.g. through `npm install`), you will have to install `node-gyp` (either globally with `-g` or to a predictable location) and tell `npm` where the new version is. Either use:
+
+* `npm config set node_gyp <path to node-gyp>`; or
+* run `npm` with an environment variable prefix: `npm_config_node_gyp=<path to node-gyp> npm install`
+
+Where "path to node-gyp" is to the `node-gyp` executable which may be a symlink in your global bin directory (e.g. `/usr/local/bin/node-gyp`), or a path to the `node-gyp` installation directory and the `bin/node-gyp.js` file within it (e.g. `/usr/local/lib/node_modules/node-gyp/bin/node-gyp.js`).
+
+**If you use `npm config set` to change your global `node_gyp` you are responsible for keeping it up to date and can't rely on `npm` to give you a newer version when available.** Use `npm config delete node_gyp` to unset this configuration option.
+
+## Fixing Catalina for older versions of `node-gyp`
+
+### Is my Mac running macOS Catalina?
+Let's first make sure that your Mac is running Catalina:
+```
+% sw_vers
+    ProductName:    Mac OS X
+    ProductVersion: 10.15
+    BuildVersion:   19A602
+```
+If `ProductVersion` is less than `10.15` then this document is not for you. Normal install docs for `node-gyp` on macOS can be found at https://github.com/nodejs/node-gyp#on-macos
+
+
+### The acid test
+To see if `Xcode Command Line Tools` is installed in a way that will work with `node-gyp`, run:
+```
+curl -sL https://github.com/nodejs/node-gyp/raw/master/macOS_Catalina_acid_test.sh | bash
+```
+
+If the test succeeded, _you are done_! You should be ready to [install](https://github.com/nodejs/node-gyp#installation) `node-gyp`.
+
+If the test failed, there is a problem with your Xcode Command Line Tools installation. [Continue to Solutions](#Solutions).
+
+### Solutions
+There are three ways to install the Xcode libraries `node-gyp` needs on macOS. People running Catalina have had success with some but not others in a way that has been unpredictable.
+
+1. With the full Xcode (~7.6 GB download) from the `App Store` app.
+2. With the _much_ smaller Xcode Command Line Tools via `xcode-select --install`
+3. With the _much_ smaller Xcode Command Line Tools via manual download. **For people running the latest version of Catalina (10.15.2 at the time of this writing), this has worked when the other two solutions haven't.**
+
+### Installing `node-gyp` using the full Xcode
+1. `xcodebuild -version` should show `Xcode 11.1` or later.
+    * If not, then install/upgrade Xcode from the App Store app.
+2. Open the Xcode app and...
+    * Under __Preferences > Locations__ select the tools if their location is empty.
+    * Allow Xcode app to do an essential install of the most recent compiler tools.
+3. Once all installations are _complete_, quit out of Xcode.
+4. `sudo xcodebuild -license accept`  # If you agree with the licensing terms.
+5. `softwareupdate -l`  # No listing is a good sign.
+    * If Xcode or Tools upgrades are listed, use "Software Update" to install them.
+6. `xcode-select -version`  # Should return `xcode-select version 2370` or later.
+7. `xcode-select -print-path`  # Should return `/Applications/Xcode.app/Contents/Developer`
+8. Try the [_acid test_ steps above](#The-acid-test) to see if your Mac is ready.
+9. If the _acid test_ does _not_ pass then...
+10. `sudo xcode-select --reset`  # Enter root password. No output is normal.
+11. Repeat step 7 above. Is the path different this time? Repeat the _acid test_.
+
+### Installing `node-gyp` using the Xcode Command Line Tools via `xcode-select --install`
+1. If the _acid test_ has not succeeded, then try `xcode-select --install`
+2. If the installation command returns `xcode-select: error: command line tools are already installed, use "Software Update" to install updates`, continue to [remove and reinstall](#i-did-all-that-and-the-acid-test-still-does-not-pass--)
+3. Wait until the install process is _complete_.
+4. `softwareupdate -l`  # No listing is a good sign.
+    * If Xcode or Tools upgrades are listed, use "Software Update" to install them.
+5. `xcode-select -version`  # Should return `xcode-select version 2370` or later.
+6. `xcode-select -print-path`  # Should return `/Library/Developer/CommandLineTools`
+7. Try the [_acid test_ steps above](#The-acid-test) to see if your Mac is ready.
+8. If the _acid test_ does _not_ pass then...
+9. `sudo xcode-select --reset`  # Enter root password. No output is normal.
+10. Repeat step 5 above. Is the path different this time? Repeat the _acid test_.
+
+### Installing `node-gyp` using the Xcode Command Line Tools via manual download
+1. Download the appropriate version of the "Command Line Tools for Xcode" for your version of Catalina from <https://developer.apple.com/download/more/>. As of macOS 10.15.5, that's [Command_Line_Tools_for_Xcode_11.5.dmg](https://download.developer.apple.com/Developer_Tools/Command_Line_Tools_for_Xcode_11.5/Command_Line_Tools_for_Xcode_11.5.dmg)
+2. Install the package.
+3. Run the [_acid test_ steps above](#The-acid-test).
+
+### I did all that and the acid test still does not pass :-(
+1. `sudo rm -rf $(xcode-select -print-path)`  # Enter root password. No output is normal.
+2. `sudo rm -rf /Library/Developer/CommandLineTools`  # Enter root password.
+3. `sudo xcode-select --reset`
+4. `xcode-select --install`
+5. If the [_acid test_ steps above](#The-acid-test) still does _not_ pass then...
+6. `npm explore npm -g -- npm install node-gyp@latest`
+7. `npm explore npm -g -- npm explore npm-lifecycle -- npm install node-gyp@latest`
+8. If the _acid test_ still does _not_ pass then...
+9. Add a comment to https://github.com/nodejs/node-gyp/issues/1927 so we can improve.
+ +Lessons learned from: +* https://github.com/nodejs/node-gyp/issues/1779 +* https://github.com/nodejs/node-gyp/issues/1861 +* https://github.com/nodejs/node-gyp/issues/1927 and elsewhere +* Thanks to @rrrix for discovering Solution 3 diff --git a/node_modules/node-gyp/macOS_Catalina_acid_test.sh b/node_modules/node-gyp/macOS_Catalina_acid_test.sh new file mode 100644 index 0000000..e1e9894 --- /dev/null +++ b/node_modules/node-gyp/macOS_Catalina_acid_test.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +pkgs=( + "com.apple.pkg.DeveloperToolsCLILeo" # standalone + "com.apple.pkg.DeveloperToolsCLI" # from XCode + "com.apple.pkg.CLTools_Executables" # Mavericks +) + +for pkg in "${pkgs[@]}"; do + output=$(/usr/sbin/pkgutil --pkg-info "$pkg" 2>/dev/null) + if [ "$output" ]; then + version=$(echo "$output" | grep 'version' | cut -d' ' -f2) + break + fi +done + +if [ "$version" ]; then + echo "Command Line Tools version: $version" +else + echo >&2 'Command Line Tools not found' +fi diff --git a/node_modules/node-gyp/node_modules/.bin/node-which b/node_modules/node-gyp/node_modules/.bin/node-which new file mode 100644 index 0000000..325adf0 --- /dev/null +++ b/node_modules/node-gyp/node_modules/.bin/node-which @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../which/bin/node-which" "$@" + ret=$? +else + node "$basedir/../../../which/bin/node-which" "$@" + ret=$? +fi +exit $ret diff --git a/node_modules/node-gyp/node_modules/.bin/node-which.cmd b/node_modules/node-gyp/node_modules/.bin/node-which.cmd new file mode 100644 index 0000000..8b65dbc --- /dev/null +++ b/node_modules/node-gyp/node_modules/.bin/node-which.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\which\bin\node-which" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\which\bin\node-which" %* +) \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/.bin/nopt b/node_modules/node-gyp/node_modules/.bin/nopt new file mode 100644 index 0000000..ad2e5bb --- /dev/null +++ b/node_modules/node-gyp/node_modules/.bin/nopt @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../nopt/bin/nopt.js" "$@" + ret=$? +else + node "$basedir/../../../nopt/bin/nopt.js" "$@" + ret=$? +fi +exit $ret diff --git a/node_modules/node-gyp/node_modules/.bin/nopt.cmd b/node_modules/node-gyp/node_modules/.bin/nopt.cmd new file mode 100644 index 0000000..a196694 --- /dev/null +++ b/node_modules/node-gyp/node_modules/.bin/nopt.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\nopt\bin\nopt.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\nopt\bin\nopt.js" %* +) \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/.bin/rimraf b/node_modules/node-gyp/node_modules/.bin/rimraf new file mode 100644 index 0000000..96e5c25 --- /dev/null +++ b/node_modules/node-gyp/node_modules/.bin/rimraf @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../rimraf/bin.js" "$@" + ret=$? 
+else + node "$basedir/../../../rimraf/bin.js" "$@" + ret=$? +fi +exit $ret diff --git a/node_modules/node-gyp/node_modules/.bin/rimraf.cmd b/node_modules/node-gyp/node_modules/.bin/rimraf.cmd new file mode 100644 index 0000000..93b1691 --- /dev/null +++ b/node_modules/node-gyp/node_modules/.bin/rimraf.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\rimraf\bin.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\rimraf\bin.js" %* +) \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/.bin/semver b/node_modules/node-gyp/node_modules/.bin/semver new file mode 100644 index 0000000..62e2968 --- /dev/null +++ b/node_modules/node-gyp/node_modules/.bin/semver @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../semver/bin/semver.js" "$@" + ret=$? +else + node "$basedir/../../../semver/bin/semver.js" "$@" + ret=$? +fi +exit $ret diff --git a/node_modules/node-gyp/node_modules/.bin/semver.cmd b/node_modules/node-gyp/node_modules/.bin/semver.cmd new file mode 100644 index 0000000..9fdcaa0 --- /dev/null +++ b/node_modules/node-gyp/node_modules/.bin/semver.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\semver\bin\semver.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\semver\bin\semver.js" %* +) \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md b/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md new file mode 100644 index 0000000..5fc208f --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/README.md b/node_modules/node-gyp/node_modules/@npmcli/fs/README.md new file mode 100644 index 0000000..bc71a11 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/README.md @@ -0,0 +1,60 @@ +# @npmcli/fs + +polyfills, and extensions, of the core `fs` module. 
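+
+A minimal sketch of the promise-based API (the `./build` path here is only an
+illustration; `rm` and its options are documented below):
+
+```js
+const fs = require('@npmcli/fs')
+
+const main = async () => {
+  // recursive + force behave like `rm -rf`, even on node versions
+  // that predate the native fs.rm (a polyfill is used instead)
+  await fs.rm('./build', { recursive: true, force: true })
+}
+
+main()
+```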
+ +## Features + +- all exposed functions return promises +- `fs.rm` polyfill for node versions < 14.14.0 +- `fs.mkdir` polyfill adding support for the `recursive` and `force` options in node versions < 10.12.0 +- `fs.copyFile` extended to accept an `owner` option +- `fs.mkdir` extended to accept an `owner` option +- `fs.mkdtemp` extended to accept an `owner` option +- `fs.writeFile` extended to accept an `owner` option +- `fs.withTempDir` added +- `fs.cp` polyfill for node < 16.7.0 + +## The `owner` option + +The `copyFile`, `mkdir`, `mkdtemp`, `writeFile`, and `withTempDir` functions +all accept a new `owner` property in their options. It can be used in two ways: + +- `{ owner: { uid: 100, gid: 100 } }` - set the `uid` and `gid` explicitly +- `{ owner: 100 }` - use one value, will set both `uid` and `gid` the same + +The special string `'inherit'` may be passed instead of a number, which will +cause this module to automatically determine the correct `uid` and/or `gid` +from the nearest existing parent directory of the target. + +## `fs.withTempDir(root, fn, options) -> Promise` + +### Parameters + +- `root`: the directory in which to create the temporary directory +- `fn`: a function that will be called with the path to the temporary directory +- `options` + - `tmpPrefix`: a prefix to be used in the generated directory name + +### Usage + +The `withTempDir` function creates a temporary directory, runs the provided +function (`fn`), then removes the temporary directory and resolves or rejects +based on the result of `fn`. + +```js +const fs = require('@npmcli/fs') +const os = require('os') + +// this function will be called with the full path to the temporary directory +// it is called with `await` behind the scenes, so can be async if desired. +const myFunction = async (tempPath) => { + return 'done!' +} + +const main = async () => { + const result = await fs.withTempDir(os.tmpdir(), myFunction) + // result === 'done!' +} + +main() +``` diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js new file mode 100644 index 0000000..7755d1c --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/file-url-to-path/index.js @@ -0,0 +1,17 @@ +const url = require('url') + +const node = require('../node.js') +const polyfill = require('./polyfill.js') + +const useNative = node.satisfies('>=10.12.0') + +const fileURLToPath = (path) => { + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? 
url.fileURLToPath(path) + : polyfill(path) +} + +module.exports = fileURLToPath diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js new file mode 100644 index 0000000..6cc90f0 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/file-url-to-path/polyfill.js @@ -0,0 +1,121 @@ +const { URL, domainToUnicode } = require('url') + +const CHAR_LOWERCASE_A = 97 +const CHAR_LOWERCASE_Z = 122 + +const isWindows = process.platform === 'win32' + +class ERR_INVALID_FILE_URL_HOST extends TypeError { + constructor (platform) { + super(`File URL host must be "localhost" or empty on ${platform}`) + this.code = 'ERR_INVALID_FILE_URL_HOST' + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } +} + +class ERR_INVALID_FILE_URL_PATH extends TypeError { + constructor (msg) { + super(`File URL path ${msg}`) + this.code = 'ERR_INVALID_FILE_URL_PATH' + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } +} + +class ERR_INVALID_ARG_TYPE extends TypeError { + constructor (name, actual) { + super(`The "${name}" argument must be one of type string or an instance ` + + `of URL. Received type ${typeof actual} ${actual}`) + this.code = 'ERR_INVALID_ARG_TYPE' + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } +} + +class ERR_INVALID_URL_SCHEME extends TypeError { + constructor (expected) { + super(`The URL must be of scheme ${expected}`) + this.code = 'ERR_INVALID_URL_SCHEME' + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } +} + +const isURLInstance = (input) => { + return input != null && input.href && input.origin +} + +const getPathFromURLWin32 = (url) => { + const hostname = url.hostname + let pathname = url.pathname + for (let n = 0; n < pathname.length; n++) { + if (pathname[n] === '%') { + const third = pathname.codePointAt(n + 2) | 0x20 + if ((pathname[n + 1] === '2' && third === 102) || + (pathname[n + 1] === '5' && third === 99)) { + throw new ERR_INVALID_FILE_URL_PATH('must not include encoded \\ or / characters') + } + } + } + + pathname = pathname.replace(/\//g, '\\') + pathname = decodeURIComponent(pathname) + if (hostname !== '') { + return `\\\\${domainToUnicode(hostname)}${pathname}` + } + + const letter = pathname.codePointAt(1) | 0x20 + const sep = pathname[2] + if (letter < CHAR_LOWERCASE_A || letter > CHAR_LOWERCASE_Z || + (sep !== ':')) { + throw new ERR_INVALID_FILE_URL_PATH('must be absolute') + } + + return pathname.slice(1) +} + +const getPathFromURLPosix = (url) => { + if (url.hostname !== '') { + throw new ERR_INVALID_FILE_URL_HOST(process.platform) + } + + const pathname = url.pathname + + for (let n = 0; n < pathname.length; n++) { + if (pathname[n] === '%') { + const third = pathname.codePointAt(n + 2) | 0x20 + if (pathname[n + 1] === '2' && third === 102) { + throw new ERR_INVALID_FILE_URL_PATH('must not include encoded / characters') + } + } + } + + return decodeURIComponent(pathname) +} + +const fileURLToPath = (path) => { + if (typeof path === 'string') { + path = new URL(path) + } else if (!isURLInstance(path)) { + throw new ERR_INVALID_ARG_TYPE('path', ['string', 'URL'], path) + } + + if (path.protocol !== 'file:') { + throw new ERR_INVALID_URL_SCHEME('file') + } + + return isWindows + ? 
getPathFromURLWin32(path) + : getPathFromURLPosix(path) +} + +module.exports = fileURLToPath diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js new file mode 100644 index 0000000..cb5982f --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js @@ -0,0 +1,20 @@ +// given an input that may or may not be an object, return an object that has +// a copy of every defined property listed in 'copy'. if the input is not an +// object, assign it to the property named by 'wrap' +const getOptions = (input, { copy, wrap }) => { + const result = {} + + if (input && typeof input === 'object') { + for (const prop of copy) { + if (input[prop] !== undefined) { + result[prop] = input[prop] + } + } + } else { + result[wrap] = input + } + + return result +} + +module.exports = getOptions diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js new file mode 100644 index 0000000..4d13bc0 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js @@ -0,0 +1,9 @@ +const semver = require('semver') + +const satisfies = (range) => { + return semver.satisfies(process.version, range, { includePrerelease: true }) +} + +module.exports = { + satisfies, +} diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner.js new file mode 100644 index 0000000..e3468b0 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner.js @@ -0,0 +1,92 @@ +const { dirname, resolve } = require('path') + +const fileURLToPath = require('./file-url-to-path/index.js') +const fs = require('../fs.js') + +// given a path, find the owner of the nearest parent +const find = async (path) => { + // if we have no getuid, permissions are irrelevant on this platform + if (!process.getuid) { + return {} + } + + // fs methods accept URL objects with a scheme of file: so we need to unwrap + // those into an actual path string before we can resolve it + const resolved = path != null && path.href && path.origin + ? 
resolve(fileURLToPath(path)) + : resolve(path) + + let stat + + try { + stat = await fs.lstat(resolved) + } finally { + // if we got a stat, return its contents + if (stat) { + return { uid: stat.uid, gid: stat.gid } + } + + // try the parent directory + if (resolved !== dirname(resolved)) { + return find(dirname(resolved)) + } + + // no more parents, never got a stat, just return an empty object + return {} + } +} + +// given a path, uid, and gid update the ownership of the path if necessary +const update = async (path, uid, gid) => { + // nothing to update, just exit + if (uid === undefined && gid === undefined) { + return + } + + try { + // see if the permissions are already the same, if they are we don't + // need to do anything, so return early + const stat = await fs.stat(path) + if (uid === stat.uid && gid === stat.gid) { + return + } + } catch (err) {} + + try { + await fs.chown(path, uid, gid) + } catch (err) {} +} + +// accepts a `path` and the `owner` property of an options object and normalizes +// it into an object with numerical `uid` and `gid` +const validate = async (path, input) => { + let uid + let gid + + if (typeof input === 'string' || typeof input === 'number') { + uid = input + gid = input + } else if (input && typeof input === 'object') { + uid = input.uid + gid = input.gid + } + + if (uid === 'inherit' || gid === 'inherit') { + const owner = await find(path) + if (uid === 'inherit') { + uid = owner.uid + } + + if (gid === 'inherit') { + gid = owner.gid + } + } + + return { uid, gid } +} + +module.exports = { + find, + update, + validate, +} diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/copy-file.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/copy-file.js new file mode 100644 index 0000000..d9875ab --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/copy-file.js @@ -0,0 +1,22 @@ +const fs = require('./fs.js') +const getOptions = require('./common/get-options.js') +const owner = require('./common/owner.js') + +const copyFile = async (src, dest, opts) => { + const options = getOptions(opts, { + copy: ['mode', 'owner'], + wrap: 'mode', + }) + + const { uid, gid } = await owner.validate(dest, options.owner) + + // the node core method as of 16.5.0 does not support the mode being in an + // object, so we have to pass the mode value directly + const result = await fs.copyFile(src, dest, options.mode) + + await owner.update(dest, uid, gid) + + return result +} + +module.exports = copyFile diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE new file mode 100644 index 0000000..93546df --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE @@ -0,0 +1,15 @@ +(The MIT License) + +Copyright (c) 2011-2017 JP Richardson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files +(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, + merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js new file mode 100644 index 0000000..5da4739 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js @@ -0,0 +1,22 @@ +const fs = require('../fs.js') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const polyfill = require('./polyfill.js') + +// node 16.7.0 added fs.cp +const useNative = node.satisfies('>=16.7.0') + +const cp = async (src, dest, opts) => { + const options = getOptions(opts, { + copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], + }) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? fs.cp(src, dest, options) + : polyfill(src, dest, options) +} + +module.exports = cp diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js new file mode 100644 index 0000000..f83ccbf --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js @@ -0,0 +1,428 @@ +// this file is a modified version of the code in node 17.2.0 +// which is, in turn, a modified version of the fs-extra module on npm +// node core changes: +// - Use of the assert module has been replaced with core's error system. +// - All code related to the glob dependency has been removed. +// - Bring your own custom fs module is not currently supported. +// - Some basic code cleanup. 
+// changes here: +// - remove all callback related code +// - drop sync support +// - change assertions back to non-internal methods (see options.js) +// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows +'use strict' + +const { + ERR_FS_CP_DIR_TO_NON_DIR, + ERR_FS_CP_EEXIST, + ERR_FS_CP_EINVAL, + ERR_FS_CP_FIFO_PIPE, + ERR_FS_CP_NON_DIR_TO_DIR, + ERR_FS_CP_SOCKET, + ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, + ERR_FS_CP_UNKNOWN, + ERR_FS_EISDIR, + ERR_INVALID_ARG_TYPE, +} = require('../errors.js') +const { + constants: { + errno: { + EEXIST, + EISDIR, + EINVAL, + ENOTDIR, + }, + }, +} = require('os') +const { + chmod, + copyFile, + lstat, + mkdir, + readdir, + readlink, + stat, + symlink, + unlink, + utimes, +} = require('../fs.js') +const { + dirname, + isAbsolute, + join, + parse, + resolve, + sep, + toNamespacedPath, +} = require('path') +const { fileURLToPath } = require('url') + +const defaultOptions = { + dereference: false, + errorOnExist: false, + filter: undefined, + force: true, + preserveTimestamps: false, + recursive: false, +} + +async function cp (src, dest, opts) { + if (opts != null && typeof opts !== 'object') { + throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) + } + return cpFn( + toNamespacedPath(getValidatedPath(src)), + toNamespacedPath(getValidatedPath(dest)), + { ...defaultOptions, ...opts }) +} + +function getValidatedPath (fileURLOrPath) { + const path = fileURLOrPath != null && fileURLOrPath.href + && fileURLOrPath.origin + ? fileURLToPath(fileURLOrPath) + : fileURLOrPath + return path +} + +async function cpFn (src, dest, opts) { + // Warn about using preserveTimestamps on 32-bit node + // istanbul ignore next + if (opts.preserveTimestamps && process.arch === 'ia32') { + const warning = 'Using the preserveTimestamps option in 32-bit ' + + 'node is not recommended' + process.emitWarning(warning, 'TimestampPrecisionWarning') + } + const stats = await checkPaths(src, dest, opts) + const { srcStat, destStat } = stats + await checkParentPaths(src, srcStat, dest) + if (opts.filter) { + return handleFilter(checkParentDir, destStat, src, dest, opts) + } + return checkParentDir(destStat, src, dest, opts) +} + +async function checkPaths (src, dest, opts) { + const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) + if (destStat) { + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: 'src and dest cannot be the same', + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + if (srcStat.isDirectory() && !destStat.isDirectory()) { + throw new ERR_FS_CP_DIR_TO_NON_DIR({ + message: `cannot overwrite directory ${src} ` + + `with non-directory ${dest}`, + path: dest, + syscall: 'cp', + errno: EISDIR, + }) + } + if (!srcStat.isDirectory() && destStat.isDirectory()) { + throw new ERR_FS_CP_NON_DIR_TO_DIR({ + message: `cannot overwrite non-directory ${src} ` + + `with directory ${dest}`, + path: dest, + syscall: 'cp', + errno: ENOTDIR, + }) + } + } + + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return { srcStat, destStat } +} + +function areIdentical (srcStat, destStat) { + return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && + destStat.dev === srcStat.dev +} + +function getStats (src, dest, opts) { + const statFunc = opts.dereference ? 
+ (file) => stat(file, { bigint: true }) : + (file) => lstat(file, { bigint: true }) + return Promise.all([ + statFunc(src), + statFunc(dest).catch((err) => { + // istanbul ignore next: unsure how to cover. + if (err.code === 'ENOENT') { + return null + } + // istanbul ignore next: unsure how to cover. + throw err + }), + ]) +} + +async function checkParentDir (destStat, src, dest, opts) { + const destParent = dirname(dest) + const dirExists = await pathExists(destParent) + if (dirExists) { + return getStatsForCopy(destStat, src, dest, opts) + } + await mkdir(destParent, { recursive: true }) + return getStatsForCopy(destStat, src, dest, opts) +} + +function pathExists (dest) { + return stat(dest).then( + () => true, + // istanbul ignore next: not sure when this would occur + (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) +} + +// Recursively check if dest parent is a subdirectory of src. +// It works for all file types including symlinks since it +// checks the src and dest inodes. It starts from the deepest +// parent and stops once it reaches the src parent or the root path. +async function checkParentPaths (src, srcStat, dest) { + const srcParent = resolve(dirname(src)) + const destParent = resolve(dirname(dest)) + if (destParent === srcParent || destParent === parse(destParent).root) { + return + } + let destStat + try { + destStat = await stat(destParent, { bigint: true }) + } catch (err) { + // istanbul ignore else: not sure when this would occur + if (err.code === 'ENOENT') { + return + } + // istanbul ignore next: not sure when this would occur + throw err + } + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return checkParentPaths(src, srcStat, destParent) +} + +const normalizePathToArray = (path) => + resolve(path).split(sep).filter(Boolean) + +// Return true if dest is a subdir of src, otherwise false. +// It only checks the path strings. +function isSrcSubdir (src, dest) { + const srcArr = normalizePathToArray(src) + const destArr = normalizePathToArray(dest) + return srcArr.every((cur, i) => destArr[i] === cur) +} + +async function handleFilter (onInclude, destStat, src, dest, opts, cb) { + const include = await opts.filter(src, dest) + if (include) { + return onInclude(destStat, src, dest, opts, cb) + } +} + +function startCopy (destStat, src, dest, opts) { + if (opts.filter) { + return handleFilter(getStatsForCopy, destStat, src, dest, opts) + } + return getStatsForCopy(destStat, src, dest, opts) +} + +async function getStatsForCopy (destStat, src, dest, opts) { + const statFn = opts.dereference ? 
stat : lstat + const srcStat = await statFn(src) + // istanbul ignore else: can't portably test FIFO + if (srcStat.isDirectory() && opts.recursive) { + return onDir(srcStat, destStat, src, dest, opts) + } else if (srcStat.isDirectory()) { + throw new ERR_FS_EISDIR({ + message: `${src} is a directory (not copied)`, + path: src, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) { + return onFile(srcStat, destStat, src, dest, opts) + } else if (srcStat.isSymbolicLink()) { + return onLink(destStat, src, dest) + } else if (srcStat.isSocket()) { + throw new ERR_FS_CP_SOCKET({ + message: `cannot copy a socket file: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFIFO()) { + throw new ERR_FS_CP_FIFO_PIPE({ + message: `cannot copy a FIFO pipe: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // istanbul ignore next: should be unreachable + throw new ERR_FS_CP_UNKNOWN({ + message: `cannot copy an unknown file type: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) +} + +function onFile (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return _copyFile(srcStat, src, dest, opts) + } + return mayCopyFile(srcStat, src, dest, opts) +} + +async function mayCopyFile (srcStat, src, dest, opts) { + if (opts.force) { + await unlink(dest) + return _copyFile(srcStat, src, dest, opts) + } else if (opts.errorOnExist) { + throw new ERR_FS_CP_EEXIST({ + message: `${dest} already exists`, + path: dest, + syscall: 'cp', + errno: EEXIST, + }) + } +} + +async function _copyFile (srcStat, src, dest, opts) { + await copyFile(src, dest) + if (opts.preserveTimestamps) { + return handleTimestampsAndMode(srcStat.mode, src, dest) + } + return setDestMode(dest, srcStat.mode) +} + +async function handleTimestampsAndMode (srcMode, src, dest) { + // Make sure the file is writable before setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) { + await makeFileWritable(dest, srcMode) + return setDestTimestampsAndMode(srcMode, src, dest) + } + return setDestTimestampsAndMode(srcMode, src, dest) +} + +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 +} + +function makeFileWritable (dest, srcMode) { + return setDestMode(dest, srcMode | 0o200) +} + +async function setDestTimestampsAndMode (srcMode, src, dest) { + await setDestTimestamps(src, dest) + return setDestMode(dest, srcMode) +} + +function setDestMode (dest, srcMode) { + return chmod(dest, srcMode) +} + +async function setDestTimestamps (src, dest) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See https://nodejs.org/api/fs.html#fs_stat_time_values) + const updatedSrcStat = await stat(src) + return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) +} + +function onDir (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return mkDirAndCopy(srcStat.mode, src, dest, opts) + } + return copyDir(src, dest, opts) +} + +async function mkDirAndCopy (srcMode, src, dest, opts) { + await mkdir(dest) + await copyDir(src, dest, opts) + return setDestMode(dest, srcMode) +} + +async function copyDir (src, dest, opts) { + const dir = await readdir(src) + for (let i = 0; i < dir.length; i++) { + const item = dir[i] + const srcItem = join(src, item) + const destItem = join(dest, item) + const { destStat } = await checkPaths(srcItem, destItem, opts) 
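+    // startCopy re-applies the user filter (if any) and then dispatches on
+    // the entry type, so nested directories recurse back through copyDir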
+ await startCopy(destStat, srcItem, destItem, opts) + } +} + +async function onLink (destStat, src, dest) { + let resolvedSrc = await readlink(src) + if (!isAbsolute(resolvedSrc)) { + resolvedSrc = resolve(dirname(src), resolvedSrc) + } + if (!destStat) { + return symlink(resolvedSrc, dest) + } + let resolvedDest + try { + resolvedDest = await readlink(dest) + } catch (err) { + // Dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + // istanbul ignore next: can only test on windows + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { + return symlink(resolvedSrc, dest) + } + // istanbul ignore next: should not be possible + throw err + } + if (!isAbsolute(resolvedDest)) { + resolvedDest = resolve(dirname(dest), resolvedDest) + } + if (isSrcSubdir(resolvedSrc, resolvedDest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + + `${resolvedDest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // Do not copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. + const srcStat = await stat(src) + if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { + throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ + message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return copyLink(resolvedSrc, dest) +} + +async function copyLink (resolvedSrc, dest) { + await unlink(dest) + return symlink(resolvedSrc, dest) +} + +module.exports = cp diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/errors.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/errors.js new file mode 100644 index 0000000..1cd1e05 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/errors.js @@ -0,0 +1,129 @@ +'use strict' +const { inspect } = require('util') + +// adapted from node's internal/errors +// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js + +// close copy of node's internal SystemError class. +class SystemError { + constructor (code, prefix, context) { + // XXX context.code is undefined in all constructors used in cp/polyfill + // that may be a bug copied from node, maybe the constructor should use + // `code` not `errno`? 
nodejs/node#41104 + let message = `${prefix}: ${context.syscall} returned ` + + `${context.code} (${context.message})` + + if (context.path !== undefined) { + message += ` ${context.path}` + } + if (context.dest !== undefined) { + message += ` => ${context.dest}` + } + + this.code = code + Object.defineProperties(this, { + name: { + value: 'SystemError', + enumerable: false, + writable: true, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + info: { + value: context, + enumerable: true, + configurable: true, + writable: false, + }, + errno: { + get () { + return context.errno + }, + set (value) { + context.errno = value + }, + enumerable: true, + configurable: true, + }, + syscall: { + get () { + return context.syscall + }, + set (value) { + context.syscall = value + }, + enumerable: true, + configurable: true, + }, + }) + + if (context.path !== undefined) { + Object.defineProperty(this, 'path', { + get () { + return context.path + }, + set (value) { + context.path = value + }, + enumerable: true, + configurable: true, + }) + } + + if (context.dest !== undefined) { + Object.defineProperty(this, 'dest', { + get () { + return context.dest + }, + set (value) { + context.dest = value + }, + enumerable: true, + configurable: true, + }) + } + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } + + [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { + return inspect(this, { + ...ctx, + getters: true, + customInspect: false, + }) + } +} + +function E (code, message) { + module.exports[code] = class NodeError extends SystemError { + constructor (ctx) { + super(code, message, ctx) + } + } +} + +E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') +E('ERR_FS_CP_EEXIST', 'Target already exists') +E('ERR_FS_CP_EINVAL', 'Invalid src or dest') +E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') +E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') +E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') +E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') +E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') +E('ERR_FS_EISDIR', 'Path is a directory') + +module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { + constructor (name, expected, actual) { + super() + this.code = 'ERR_INVALID_ARG_TYPE' + this.message = `The ${name} argument must be ${expected}. Received ${typeof actual}` + } +} diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/fs.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/fs.js new file mode 100644 index 0000000..29e5fb5 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/fs.js @@ -0,0 +1,8 @@ +const fs = require('fs') +const promisify = require('@gar/promisify') + +// this module returns the core fs module wrapped in a proxy that promisifies +// method calls within the getter. 
we keep it in a separate module so that the +// overridden methods have a consistent way to get to promisified fs methods +// without creating a circular dependency +module.exports = promisify(fs) diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js new file mode 100644 index 0000000..e40d748 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js @@ -0,0 +1,10 @@ +module.exports = { + ...require('./fs.js'), + copyFile: require('./copy-file.js'), + cp: require('./cp/index.js'), + mkdir: require('./mkdir/index.js'), + mkdtemp: require('./mkdtemp.js'), + rm: require('./rm/index.js'), + withTempDir: require('./with-temp-dir.js'), + writeFile: require('./write-file.js'), +} diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir/index.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir/index.js new file mode 100644 index 0000000..04ff447 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir/index.js @@ -0,0 +1,32 @@ +const fs = require('../fs.js') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const owner = require('../common/owner.js') + +const polyfill = require('./polyfill.js') + +// node 10.12.0 added the options parameter, which allows recursive and mode +// properties to be passed +const useNative = node.satisfies('>=10.12.0') + +// extends mkdir with the ability to specify an owner of the new dir +const mkdir = async (path, opts) => { + const options = getOptions(opts, { + copy: ['mode', 'recursive', 'owner'], + wrap: 'mode', + }) + const { uid, gid } = await owner.validate(path, options.owner) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + const result = useNative + ? await fs.mkdir(path, options) + : await polyfill(path, options) + + await owner.update(path, uid, gid) + + return result +} + +module.exports = mkdir diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir/polyfill.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir/polyfill.js new file mode 100644 index 0000000..4f8e6f0 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir/polyfill.js @@ -0,0 +1,81 @@ +const { dirname } = require('path') + +const fileURLToPath = require('../common/file-url-to-path/index.js') +const fs = require('../fs.js') + +const defaultOptions = { + mode: 0o777, + recursive: false, +} + +const mkdir = async (path, opts) => { + const options = { ...defaultOptions, ...opts } + + // if we're not in recursive mode, just call the real mkdir with the path and + // the mode option only + if (!options.recursive) { + return fs.mkdir(path, options.mode) + } + + const makeDirectory = async (dir, mode) => { + // we can't use dirname directly since these functions support URL + // objects with the file: protocol as the path input, so first we get a + // string path, then we can call dirname on that + const parent = dir != null && dir.href && dir.origin + ? dirname(fileURLToPath(dir)) + : dirname(dir) + + // if the parent is the dir itself, try to create it. 
anything but EISDIR + // should be rethrown + if (parent === dir) { + try { + await fs.mkdir(dir, opts) + } catch (err) { + if (err.code !== 'EISDIR') { + throw err + } + } + return undefined + } + + try { + await fs.mkdir(dir, mode) + return dir + } catch (err) { + // ENOENT means the parent wasn't there, so create that + if (err.code === 'ENOENT') { + const made = await makeDirectory(parent, mode) + await makeDirectory(dir, mode) + // return the shallowest path we created, i.e. the result of creating + // the parent + return made + } + + // an EEXIST means there's already something there + // an EROFS means we have a read-only filesystem and can't create a dir + // any other error is fatal and we should give up now + if (err.code !== 'EEXIST' && err.code !== 'EROFS') { + throw err + } + + // stat the directory, if the result is a directory, then we successfully + // created this one so return its path. otherwise, we reject with the + // original error by ignoring the error in the catch + try { + const stat = await fs.stat(dir) + if (stat.isDirectory()) { + // if it already existed, we didn't create anything so return + // undefined + return undefined + } + } catch (_) {} + + // if the thing that's there isn't a directory, then just re-throw + throw err + } + } + + return makeDirectory(path, options.mode) +} + +module.exports = mkdir diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdtemp.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdtemp.js new file mode 100644 index 0000000..b7f0780 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdtemp.js @@ -0,0 +1,28 @@ +const { dirname, sep } = require('path') + +const fs = require('./fs.js') +const getOptions = require('./common/get-options.js') +const owner = require('./common/owner.js') + +const mkdtemp = async (prefix, opts) => { + const options = getOptions(opts, { + copy: ['encoding', 'owner'], + wrap: 'encoding', + }) + + // mkdtemp relies on the trailing path separator to indicate if it should + // create a directory inside of the prefix. if that's the case then the root + // we infer ownership from is the prefix itself, otherwise it's the dirname + // /tmp -> /tmpABCDEF, infers from / + // /tmp/ -> /tmp/ABCDEF, infers from /tmp + const root = prefix.endsWith(sep) ? prefix : dirname(prefix) + const { uid, gid } = await owner.validate(root, options.owner) + + const result = await fs.mkdtemp(prefix, options) + + await owner.update(result, uid, gid) + + return result +} + +module.exports = mkdtemp diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/index.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/index.js new file mode 100644 index 0000000..cb81fbd --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/index.js @@ -0,0 +1,22 @@ +const fs = require('../fs.js') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const polyfill = require('./polyfill.js') + +// node 14.14.0 added fs.rm, which allows both the force and recursive options +const useNative = node.satisfies('>=14.14.0') + +const rm = async (path, opts) => { + const options = getOptions(opts, { + copy: ['retryDelay', 'maxRetries', 'recursive', 'force'], + }) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? 
fs.rm(path, options) + : polyfill(path, options) +} + +module.exports = rm diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/polyfill.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/polyfill.js new file mode 100644 index 0000000..a25c174 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/polyfill.js @@ -0,0 +1,239 @@ +// this file is a modified version of the code in node core >=14.14.0 +// which is, in turn, a modified version of the rimraf module on npm +// node core changes: +// - Use of the assert module has been replaced with core's error system. +// - All code related to the glob dependency has been removed. +// - Bring your own custom fs module is not currently supported. +// - Some basic code cleanup. +// changes here: +// - remove all callback related code +// - drop sync support +// - change assertions back to non-internal methods (see options.js) +// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows +const errnos = require('os').constants.errno +const { join } = require('path') +const fs = require('../fs.js') + +// error codes that mean we need to remove contents +const notEmptyCodes = new Set([ + 'ENOTEMPTY', + 'EEXIST', + 'EPERM', +]) + +// error codes we can retry later +const retryCodes = new Set([ + 'EBUSY', + 'EMFILE', + 'ENFILE', + 'ENOTEMPTY', + 'EPERM', +]) + +const isWindows = process.platform === 'win32' + +const defaultOptions = { + retryDelay: 100, + maxRetries: 0, + recursive: false, + force: false, +} + +// this is drastically simplified, but should be roughly equivalent to what +// node core throws +class ERR_FS_EISDIR extends Error { + constructor (path) { + super() + this.info = { + code: 'EISDIR', + message: 'is a directory', + path, + syscall: 'rm', + errno: errnos.EISDIR, + } + this.name = 'SystemError' + this.code = 'ERR_FS_EISDIR' + this.errno = errnos.EISDIR + this.syscall = 'rm' + this.path = path + this.message = `Path is a directory: ${this.syscall} returned ` + + `${this.info.code} (is a directory) ${path}` + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } +} + +class ENOTDIR extends Error { + constructor (path) { + super() + this.name = 'Error' + this.code = 'ENOTDIR' + this.errno = errnos.ENOTDIR + this.syscall = 'rmdir' + this.path = path + this.message = `not a directory, ${this.syscall} '${this.path}'` + } + + toString () { + return `${this.name}: ${this.code}: ${this.message}` + } +} + +// force is passed separately here because we respect it for the first entry +// into rimraf only, any further calls that are spawned as a result (i.e. to +// delete content within the target) will ignore ENOENT errors +const rimraf = async (path, options, isTop = false) => { + const force = isTop ? options.force : true + const stat = await fs.lstat(path) + .catch((err) => { + // we only ignore ENOENT if we're forcing this call + if (err.code === 'ENOENT' && force) { + return + } + + if (isWindows && err.code === 'EPERM') { + return fixEPERM(path, options, err, isTop) + } + + throw err + }) + + // no stat object here means either lstat threw an ENOENT, or lstat threw + // an EPERM and the fixEPERM function took care of things.
either way, we're + // already done, so return early + if (!stat) { + return + } + + if (stat.isDirectory()) { + return rmdir(path, options, null, isTop) + } + + return fs.unlink(path) + .catch((err) => { + if (err.code === 'ENOENT' && force) { + return + } + + if (err.code === 'EISDIR') { + return rmdir(path, options, err, isTop) + } + + if (err.code === 'EPERM') { + // in windows, we handle this through fixEPERM which will also try to + // delete things again. everywhere else since deleting the target as a + // file didn't work we go ahead and try to delete it as a directory + return isWindows + ? fixEPERM(path, options, err, isTop) + : rmdir(path, options, err, isTop) + } + + throw err + }) +} + +const fixEPERM = async (path, options, originalErr, isTop) => { + const force = isTop ? options.force : true + const targetMissing = await fs.chmod(path, 0o666) + .catch((err) => { + if (err.code === 'ENOENT' && force) { + return true + } + + throw originalErr + }) + + // got an ENOENT above, return now. no file = no problem + if (targetMissing) { + return + } + + // this function does its own lstat rather than calling rimraf again to avoid + // infinite recursion for a repeating EPERM + const stat = await fs.lstat(path) + .catch((err) => { + if (err.code === 'ENOENT' && force) { + return + } + + throw originalErr + }) + + if (!stat) { + return + } + + if (stat.isDirectory()) { + return rmdir(path, options, originalErr, isTop) + } + + return fs.unlink(path) +} + +const rmdir = async (path, options, originalErr, isTop) => { + if (!options.recursive && isTop) { + throw originalErr || new ERR_FS_EISDIR(path) + } + const force = isTop ? options.force : true + + return fs.rmdir(path) + .catch(async (err) => { + // in Windows, calling rmdir on a file path will fail with ENOENT rather + // than ENOTDIR. to determine if that's what happened, we have to do + // another lstat on the path. 
if the path isn't actually gone, we throw + // away the ENOENT and replace it with our own ENOTDIR + if (isWindows && err.code === 'ENOENT') { + const stillExists = await fs.lstat(path).then(() => true, () => false) + if (stillExists) { + err = new ENOTDIR(path) + } + } + + // not there, not a problem + if (err.code === 'ENOENT' && force) { + return + } + + // we may not have originalErr if lstat tells us our target is a + // directory but that changes before we actually remove it, so + // only throw it here if it's set + if (originalErr && err.code === 'ENOTDIR') { + throw originalErr + } + + // the directory isn't empty, remove the contents and try again + if (notEmptyCodes.has(err.code)) { + const files = await fs.readdir(path) + await Promise.all(files.map((file) => { + const target = join(path, file) + return rimraf(target, options) + })) + return fs.rmdir(path) + } + + throw err + }) +} + +const rm = async (path, opts) => { + const options = { ...defaultOptions, ...opts } + let retries = 0 + + const errHandler = async (err) => { + if (retryCodes.has(err.code) && ++retries < options.maxRetries) { + const delay = retries * options.retryDelay + await promiseTimeout(delay) + return rimraf(path, options, true).catch(errHandler) + } + + throw err + } + + return rimraf(path, options, true).catch(errHandler) +} + +const promiseTimeout = (ms) => new Promise((r) => setTimeout(r, ms)) + +module.exports = rm diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js new file mode 100644 index 0000000..353d555 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js @@ -0,0 +1,39 @@ +const { join, sep } = require('path') + +const getOptions = require('./common/get-options.js') +const mkdir = require('./mkdir/index.js') +const mkdtemp = require('./mkdtemp.js') +const rm = require('./rm/index.js') + +// create a temp directory, ensure its permissions match its parent, then call +// the supplied function passing it the path to the directory. 
clean up after +// the function finishes, whether it throws or not +const withTempDir = async (root, fn, opts) => { + const options = getOptions(opts, { + copy: ['tmpPrefix'], + }) + // create the directory, and fix its ownership + await mkdir(root, { recursive: true, owner: 'inherit' }) + + const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || ''), { owner: 'inherit' }) + let err + let result + + try { + result = await fn(target) + } catch (_err) { + err = _err + } + + try { + await rm(target, { force: true, recursive: true }) + } catch (err) {} + + if (err) { + throw err + } + + return result +} + +module.exports = withTempDir diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/lib/write-file.js b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/write-file.js new file mode 100644 index 0000000..01de531 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/lib/write-file.js @@ -0,0 +1,19 @@ +const fs = require('./fs.js') +const getOptions = require('./common/get-options.js') +const owner = require('./common/owner.js') + +const writeFile = async (file, data, opts) => { + const options = getOptions(opts, { + copy: ['encoding', 'mode', 'flag', 'signal', 'owner'], + wrap: 'encoding', + }) + const { uid, gid } = await owner.validate(file, options.owner) + + const result = await fs.writeFile(file, data, options) + + await owner.update(file, uid, gid) + + return result +} + +module.exports = writeFile diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/node_modules/.bin/semver b/node_modules/node-gyp/node_modules/@npmcli/fs/node_modules/.bin/semver new file mode 100644 index 0000000..2e70248 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/node_modules/.bin/semver @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../../../../semver/bin/semver.js" "$@" + ret=$? +else + node "$basedir/../../../../../../semver/bin/semver.js" "$@" + ret=$? 
+fi +exit $ret diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/node_modules/.bin/semver.cmd b/node_modules/node-gyp/node_modules/@npmcli/fs/node_modules/.bin/semver.cmd new file mode 100644 index 0000000..c1f3649 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/node_modules/.bin/semver.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\..\..\..\semver\bin\semver.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\..\..\..\semver\bin\semver.js" %* +) \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/@npmcli/fs/package.json b/node_modules/node-gyp/node_modules/@npmcli/fs/package.json new file mode 100644 index 0000000..0296aa7 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/fs/package.json @@ -0,0 +1,38 @@ +{ + "name": "@npmcli/fs", + "version": "1.1.1", + "description": "filesystem utilities for the npm cli", + "main": "lib/index.js", + "files": [ + "bin", + "lib" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "snap": "tap", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint '**/*.js'", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "npm-template-check" + }, + "keywords": [ + "npm", + "oss" + ], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/template-oss": "^2.3.1", + "tap": "^15.0.9" + }, + "dependencies": { + "@gar/promisify": "^1.0.1", + "semver": "^7.3.5" + }, + "templateVersion": "2.3.1" +} diff --git a/node_modules/node-gyp/node_modules/@npmcli/move-file/LICENSE.md b/node_modules/node-gyp/node_modules/@npmcli/move-file/LICENSE.md new file mode 100644 index 0000000..072bf20 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/move-file/LICENSE.md @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) +Copyright (c) npm, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/@npmcli/move-file/README.md b/node_modules/node-gyp/node_modules/@npmcli/move-file/README.md new file mode 100644 index 0000000..8a5a57f --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/move-file/README.md @@ -0,0 +1,69 @@ +# @npmcli/move-file + +A fork of [move-file](https://github.com/sindresorhus/move-file) with +compatibility with all node 10.x versions. 
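Beyond version compatibility, the fork also differs in behavior: when `fs.rename()` fails with `EXDEV` (cross-device) or `EPERM`, it falls back to copying directory contents entry-by-entry and recreating any symlinks at the destination (see `index.js` below). A minimal sketch of the intended usage, with hypothetical paths:

```javascript
const moveFile = require('@npmcli/move-file')

// move a directory, possibly across devices/partitions; destination
// directories are created as needed, and `overwrite: false` makes the
// call fail instead of clobbering an existing destination
moveFile('./build', '/mnt/backup/build', { overwrite: false })
  .then(() => console.log('moved'))
  .catch((err) => console.error('move failed:', err))
```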
+ +> Move a file (or directory) + +The built-in +[`fs.rename()`](https://nodejs.org/api/fs.html#fs_fs_rename_oldpath_newpath_callback) +is just a JavaScript wrapper for the C `rename(2)` function, which doesn't +support moving files across partitions or devices. This module is what you +would have expected `fs.rename()` to be. + +## Highlights + +- Promise API. +- Supports moving a file across partitions and devices. +- Optionally prevent overwriting an existing file. +- Creates non-existent destination directories for you. +- Support for Node versions that lack built-in recursive `fs.mkdir()` +- Automatically recurses when source is a directory. + +## Install + +``` +$ npm install @npmcli/move-file +``` + +## Usage + +```js +const moveFile = require('@npmcli/move-file'); + +(async () => { + await moveFile('source/unicorn.png', 'destination/unicorn.png'); + console.log('The file has been moved'); +})(); +``` + +## API + +### moveFile(source, destination, options?) + +Returns a `Promise` that resolves when the file has been moved. + +### moveFile.sync(source, destination, options?) + +#### source + +Type: `string` + +File, or directory, you want to move. + +#### destination + +Type: `string` + +Where you want the file or directory moved. + +#### options + +Type: `object` + +##### overwrite + +Type: `boolean`\ +Default: `true` + +Overwrite existing destination file(s). diff --git a/node_modules/node-gyp/node_modules/@npmcli/move-file/index.js b/node_modules/node-gyp/node_modules/@npmcli/move-file/index.js new file mode 100644 index 0000000..95d1888 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/move-file/index.js @@ -0,0 +1,162 @@ +const { dirname, join, resolve, relative, isAbsolute } = require('path') +const rimraf_ = require('rimraf') +const { promisify } = require('util') +const { + access: access_, + accessSync, + copyFile: copyFile_, + copyFileSync, + unlink: unlink_, + unlinkSync, + readdir: readdir_, + readdirSync, + rename: rename_, + renameSync, + stat: stat_, + statSync, + lstat: lstat_, + lstatSync, + symlink: symlink_, + symlinkSync, + readlink: readlink_, + readlinkSync +} = require('fs') + +const access = promisify(access_) +const copyFile = promisify(copyFile_) +const unlink = promisify(unlink_) +const readdir = promisify(readdir_) +const rename = promisify(rename_) +const stat = promisify(stat_) +const lstat = promisify(lstat_) +const symlink = promisify(symlink_) +const readlink = promisify(readlink_) +const rimraf = promisify(rimraf_) +const rimrafSync = rimraf_.sync + +const mkdirp = require('mkdirp') + +const pathExists = async path => { + try { + await access(path) + return true + } catch (er) { + return er.code !== 'ENOENT' + } +} + +const pathExistsSync = path => { + try { + accessSync(path) + return true + } catch (er) { + return er.code !== 'ENOENT' + } +} + +const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { + if (!source || !destination) { + throw new TypeError('`source` and `destination` file required') + } + + options = { + overwrite: true, + ...options + } + + if (!options.overwrite && await pathExists(destination)) { + throw new Error(`The destination file exists: ${destination}`) + } + + await mkdirp(dirname(destination)) + + try { + await rename(source, destination) + } catch (error) { + if (error.code === 'EXDEV' || error.code === 'EPERM') { + const sourceStat = await lstat(source) + if (sourceStat.isDirectory()) { + const files = await readdir(source) + await Promise.all(files.map((file) => 
moveFile(join(source, file), join(destination, file), options, false, symlinks))) + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + await copyFile(source, destination) + } + } else { + throw error + } + } + + if (root) { + await Promise.all(symlinks.map(async ({ source, destination }) => { + let target = await readlink(source) + // junction symlinks in windows will be absolute paths, so we need to make sure they point to the destination + if (isAbsolute(target)) + target = resolve(destination, relative(source, target)) + // try to determine what the actual file is so we can create the correct type of symlink in windows + let targetStat + try { + targetStat = await stat(resolve(dirname(source), target)) + } catch (err) {} + await symlink(target, destination, targetStat && targetStat.isDirectory() ? 'junction' : 'file') + })) + await rimraf(source) + } +} + +const moveFileSync = (source, destination, options = {}, root = true, symlinks = []) => { + if (!source || !destination) { + throw new TypeError('`source` and `destination` file required') + } + + options = { + overwrite: true, + ...options + } + + if (!options.overwrite && pathExistsSync(destination)) { + throw new Error(`The destination file exists: ${destination}`) + } + + mkdirp.sync(dirname(destination)) + + try { + renameSync(source, destination) + } catch (error) { + if (error.code === 'EXDEV' || error.code === 'EPERM') { + const sourceStat = lstatSync(source) + if (sourceStat.isDirectory()) { + const files = readdirSync(source) + for (const file of files) { + moveFileSync(join(source, file), join(destination, file), options, false, symlinks) + } + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + copyFileSync(source, destination) + } + } else { + throw error + } + } + + if (root) { + for (const { source, destination } of symlinks) { + let target = readlinkSync(source) + // junction symlinks in windows will be absolute paths, so we need to make sure they point to the destination + if (isAbsolute(target)) + target = resolve(destination, relative(source, target)) + // try to determine what the actual file is so we can create the correct type of symlink in windows + let targetStat + try { + targetStat = statSync(resolve(dirname(source), target)) + } catch (err) {} + symlinkSync(target, destination, targetStat && targetStat.isDirectory() ? 'junction' : 'file') + } + rimrafSync(source) + } +} + +module.exports = moveFile +module.exports.sync = moveFileSync diff --git a/node_modules/node-gyp/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp b/node_modules/node-gyp/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp new file mode 100644 index 0000000..f404b57 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../../../../mkdirp/bin/cmd.js" "$@" + ret=$? +else + node "$basedir/../../../../../../mkdirp/bin/cmd.js" "$@" + ret=$? 
+fi +exit $ret diff --git a/node_modules/node-gyp/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp.cmd b/node_modules/node-gyp/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp.cmd new file mode 100644 index 0000000..0936b1a --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/move-file/node_modules/.bin/mkdirp.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\..\..\..\mkdirp\bin\cmd.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\..\..\..\mkdirp\bin\cmd.js" %* +) \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/@npmcli/move-file/node_modules/.bin/rimraf b/node_modules/node-gyp/node_modules/@npmcli/move-file/node_modules/.bin/rimraf new file mode 100644 index 0000000..a10633f --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/move-file/node_modules/.bin/rimraf @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../../../../rimraf/bin.js" "$@" + ret=$? +else + node "$basedir/../../../../../../rimraf/bin.js" "$@" + ret=$? +fi +exit $ret diff --git a/node_modules/node-gyp/node_modules/@npmcli/move-file/node_modules/.bin/rimraf.cmd b/node_modules/node-gyp/node_modules/@npmcli/move-file/node_modules/.bin/rimraf.cmd new file mode 100644 index 0000000..0cc5171 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/move-file/node_modules/.bin/rimraf.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\..\..\..\rimraf\bin.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\..\..\..\rimraf\bin.js" %* +) \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/@npmcli/move-file/package.json b/node_modules/node-gyp/node_modules/@npmcli/move-file/package.json new file mode 100644 index 0000000..0c066db --- /dev/null +++ b/node_modules/node-gyp/node_modules/@npmcli/move-file/package.json @@ -0,0 +1,34 @@ +{ + "name": "@npmcli/move-file", + "version": "1.1.2", + "files": [ + "index.js" + ], + "description": "move a file (fork of move-file)", + "dependencies": { + "mkdirp": "^1.0.4", + "rimraf": "^3.0.2" + }, + "devDependencies": { + "require-inject": "^1.4.4", + "tap": "^14.10.7" + }, + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/move-file" + }, + "tap": { + "check-coverage": true + }, + "license": "MIT", + "engines": { + "node": ">=10" + } +} diff --git a/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.d.ts b/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.d.ts new file mode 100644 index 0000000..a7efe94 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.d.ts @@ -0,0 +1,14 @@ +/// <reference types="node" /> +import { EventEmitter } from 'events'; +declare function once<T = any>(emitter: EventEmitter, name: string): once.CancelablePromise<T>; +declare namespace once { + interface CancelFunction { + (): void; + } + interface CancelablePromise<T> extends Promise<T> { + cancel: CancelFunction; + } + type CancellablePromise<T> = CancelablePromise<T>; + function spread<T extends any[]>(emitter: EventEmitter, name: string): once.CancelablePromise<T>; +} +export = once; diff --git
a/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js b/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js new file mode 100644 index 0000000..bfd0dc8 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js @@ -0,0 +1,39 @@ +"use strict"; +function noop() { } +function once(emitter, name) { + const o = once.spread(emitter, name); + const r = o.then((args) => args[0]); + r.cancel = o.cancel; + return r; +} +(function (once) { + function spread(emitter, name) { + let c = null; + const p = new Promise((resolve, reject) => { + function cancel() { + emitter.removeListener(name, onEvent); + emitter.removeListener('error', onError); + p.cancel = noop; + } + function onEvent(...args) { + cancel(); + resolve(args); + } + function onError(err) { + cancel(); + reject(err); + } + c = cancel; + emitter.on(name, onEvent); + emitter.on('error', onError); + }); + if (!c) { + throw new TypeError('Could not get `cancel()` function'); + } + p.cancel = c; + return p; + } + once.spread = spread; +})(once || (once = {})); +module.exports = once; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js.map b/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js.map new file mode 100644 index 0000000..30d2049 --- /dev/null +++ b/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAEA,SAAS,IAAI,KAAI,CAAC;AAElB,SAAS,IAAI,CACZ,OAAqB,EACrB,IAAY;IAEZ,MAAM,CAAC,GAAG,IAAI,CAAC,MAAM,CAAM,OAAO,EAAE,IAAI,CAAC,CAAC;IAC1C,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,IAAS,EAAE,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAA8B,CAAC;IACtE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC;IACpB,OAAO,CAAC,CAAC;AACV,CAAC;AAED,WAAU,IAAI;IAWb,SAAgB,MAAM,CACrB,OAAqB,EACrB,IAAY;QAEZ,IAAI,CAAC,GAA+B,IAAI,CAAC;QACzC,MAAM,CAAC,GAAG,IAAI,OAAO,CAAI,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC5C,SAAS,MAAM;gBACd,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;gBACtC,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;gBACzC,CAAC,CAAC,MAAM,GAAG,IAAI,CAAC;YACjB,CAAC;YACD,SAAS,OAAO,CAAC,GAAG,IAAW;gBAC9B,MAAM,EAAE,CAAC;gBACT,OAAO,CAAC,IAAS,CAAC,CAAC;YACpB,CAAC;YACD,SAAS,OAAO,CAAC,GAAU;gBAC1B,MAAM,EAAE,CAAC;gBACT,MAAM,CAAC,GAAG,CAAC,CAAC;YACb,CAAC;YACD,CAAC,GAAG,MAAM,CAAC;YACX,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YAC1B,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC9B,CAAC,CAA8B,CAAC;QAChC,IAAI,CAAC,CAAC,EAAE;YACP,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC,CAAC;SACzD;QACD,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;QACb,OAAO,CAAC,CAAC;IACV,CAAC;IA5Be,WAAM,SA4BrB,CAAA;AACF,CAAC,EAxCS,IAAI,KAAJ,IAAI,QAwCb;AAED,iBAAS,IAAI,CAAC"} \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/@tootallnate/once/package.json b/node_modules/node-gyp/node_modules/@tootallnate/once/package.json new file mode 100644 index 0000000..8343f9f --- /dev/null +++ b/node_modules/node-gyp/node_modules/@tootallnate/once/package.json @@ -0,0 +1,45 @@ +{ + "name": "@tootallnate/once", + "version": "1.1.2", + "description": "Creates a Promise that waits for a single event", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "mocha --reporter spec", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/once.git" + }, + 
"keywords": [], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/once/issues" + }, + "devDependencies": { + "@types/node": "^12.12.11", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.2", + "rimraf": "^3.0.0", + "typescript": "^3.7.3" + }, + "engines": { + "node": ">= 6" + } +} diff --git a/node_modules/node-gyp/node_modules/cacache/LICENSE.md b/node_modules/node-gyp/node_modules/cacache/LICENSE.md new file mode 100644 index 0000000..8d28acf --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/cacache/README.md b/node_modules/node-gyp/node_modules/cacache/README.md new file mode 100644 index 0000000..6dc11ba --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/README.md @@ -0,0 +1,703 @@ +# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/npm/cacache.svg)](https://travis-ci.org/npm/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/cacache?svg=true)](https://ci.appveyor.com/project/npm/cacache) [![Coverage Status](https://coveralls.io/repos/github/npm/cacache/badge.svg?branch=latest)](https://coveralls.io/github/npm/cacache?branch=latest) + +[`cacache`](https://github.com/npm/cacache) is a Node.js library for managing +local key and content address caches. It's really fast, really good at +concurrency, and it will never give you corrupted data, even if cache files +get corrupted or manipulated. + +On systems that support user and group settings on files, cacache will +match the `uid` and `gid` values to the folder where the cache lives, even +when running as `root`. + +It was written to be used as [npm](https://npm.im)'s local cache, but can +just as easily be used on its own. 
+ +## Install + +`$ npm install --save cacache` + +## Table of Contents + +* [Example](#example) +* [Features](#features) +* [Contributing](#contributing) +* [API](#api) + * [Using localized APIs](#localized-api) + * Reading + * [`ls`](#ls) + * [`ls.stream`](#ls-stream) + * [`get`](#get-data) + * [`get.stream`](#get-stream) + * [`get.info`](#get-info) + * [`get.hasContent`](#get-hasContent) + * Writing + * [`put`](#put-data) + * [`put.stream`](#put-stream) + * [`rm.all`](#rm-all) + * [`rm.entry`](#rm-entry) + * [`rm.content`](#rm-content) + * [`index.compact`](#index-compact) + * [`index.insert`](#index-insert) + * Utilities + * [`clearMemoized`](#clear-memoized) + * [`tmp.mkdir`](#tmp-mkdir) + * [`tmp.withTmp`](#with-tmp) + * Integrity + * [Subresource Integrity](#integrity) + * [`verify`](#verify) + * [`verify.lastRun`](#verify-last-run) + +### Example + +```javascript +const cacache = require('cacache') +const fs = require('fs') + +const tarball = '/path/to/mytar.tgz' +const cachePath = '/tmp/my-toy-cache' +const key = 'my-unique-key-1234' + +// Cache it! Use `cachePath` as the root of the content cache +cacache.put(cachePath, key, '10293801983029384').then(integrity => { + console.log(`Saved content to ${cachePath}.`) +}) + +const destination = '/tmp/mytar.tgz' + +// Copy the contents out of the cache and into their destination! +// But this time, use stream instead! +cacache.get.stream( + cachePath, key +).pipe( + fs.createWriteStream(destination) +).on('finish', () => { + console.log('done extracting!') +}) + +// The same thing, but skip the key index. +cacache.get.byDigest(cachePath, integrityHash).then(data => { + fs.writeFile(destination, data, err => { + console.log('tarball data fetched based on its sha512sum and written out!') + }) +}) +``` + +### Features + +* Extraction by key or by content address (shasum, etc) +* [Subresource Integrity](#integrity) web standard support +* Multi-hash support - safely host sha1, sha512, etc, in a single cache +* Automatic content deduplication +* Fault tolerance (immune to corruption, partial writes, process races, etc) +* Consistency guarantees on read and write (full data verification) +* Lockless, high-concurrency cache access +* Streaming support +* Promise support +* Fast -- sub-millisecond reads and writes including verification +* Arbitrary metadata storage +* Garbage collection and additional offline verification +* Thorough test coverage +* There's probably a bloom filter in there somewhere. Those are cool, right? 🤔 + +### Contributing + +The cacache team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear. + +All participants and maintainers in this project are expected to follow [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other. + +Please refer to the [Changelog](CHANGELOG.md) for project history details, too. + +Happy hacking! + +### API + +#### `> cacache.ls(cache) -> Promise` + +Lists info for all entries currently in the cache as a single large object. Each +entry in the object will be keyed by the unique index key, with corresponding +[`get.info`](#get-info) objects as the values. 
+ +##### Example + +```javascript +cacache.ls(cachePath).then(console.log) +// Output +{ + 'my-thing': { + key: 'my-thing', + integrity: 'sha512-BaSe64/EnCoDED+HAsh==', + path: '.testcache/content/deadbeef', // joined with `cachePath` + time: 12345698490, + size: 4023948, + metadata: { + name: 'blah', + version: '1.2.3', + description: 'this was once a package but now it is my-thing' + } + }, + 'other-thing': { + key: 'other-thing', + integrity: 'sha1-ANothER+hasH=', + path: '.testcache/content/bada55', + time: 11992309289, + size: 111112 + } +} +``` + +#### `> cacache.ls.stream(cache) -> Readable` + +Streams info for each entry currently in the cache, one object at a time. + +This works just like [`ls`](#ls), except [`get.info`](#get-info) entries are +returned as `'data'` events on the returned stream. + +##### Example + +```javascript +cacache.ls.stream(cachePath).on('data', console.log) +// Output +{ + key: 'my-thing', + integrity: 'sha512-BaSe64HaSh', + path: '.testcache/content/deadbeef', // joined with `cachePath` + time: 12345698490, + size: 13423, + metadata: { + name: 'blah', + version: '1.2.3', + description: 'this was once a package but now it is my-thing' + } +} + +{ + key: 'other-thing', + integrity: 'whirlpool-WoWSoMuchSupport', + path: '.testcache/content/bada55', + time: 11992309289, + size: 498023984029 +} + +{ + ... +} +``` + +#### `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, integrity})` + +Returns an object with the cached data, digest, and metadata identified by +`key`. The `data` property of this object will be a `Buffer` instance that +presumably holds some data that means something to you. I'm sure you know what +to do with it! cacache just won't care. + +`integrity` is a [Subresource +Integrity](#integrity) +string. That is, a string that can be used to verify `data`, which looks like +`<hashAlgorithm>-<base64Digest>`. + +If there is no content identified by `key`, or if the locally-stored data does +not pass the validity checksum, the promise will be rejected. + +A sub-function, `get.byDigest`, may be used for identical behavior, except lookup +will happen by integrity hash, bypassing the index entirely. This version of the +function *only* returns `data` itself, without any wrapper. + +See: [options](#get-options) + +##### Note + +This function loads the entire cache entry into memory before returning it. If +you're dealing with Very Large data, consider using [`get.stream`](#get-stream) +instead. + +##### Example + +```javascript +// Look up by key +cache.get(cachePath, 'my-thing').then(console.log) +// Output: +{ + metadata: { + thingName: 'my' + }, + integrity: 'sha512-BaSe64HaSh', + data: Buffer#<...>, + size: 9320 +} + +// Look up by digest +cache.get.byDigest(cachePath, 'sha512-BaSe64HaSh').then(console.log) +// Output: +Buffer#<...> +``` + +#### `> cacache.get.stream(cache, key, [opts]) -> Readable` + +Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams) of the cached data identified by `key`. + +If there is no content identified by `key`, or if the locally-stored data does +not pass the validity checksum, an error will be emitted. + +`metadata` and `integrity` events will be emitted before the stream closes, if +you need to collect that extra data about the cached entry. + +A sub-function, `get.stream.byDigest`, may be used for identical behavior, +except lookup will happen by integrity hash, bypassing the index entirely. This +version does not emit the `metadata` and `integrity` events at all.
+ +See: [options](#get-options) + +##### Example + +```javascript +// Look up by key +cache.get.stream( + cachePath, 'my-thing' +).on('metadata', metadata => { + console.log('metadata:', metadata) +}).on('integrity', integrity => { + console.log('integrity:', integrity) +}).pipe( + fs.createWriteStream('./x.tgz') +) +// Outputs: +metadata: { ... } +integrity: 'sha512-SoMeDIGest+64==' + +// Look up by digest +cache.get.stream.byDigest( + cachePath, 'sha512-SoMeDIGest+64==' +).pipe( + fs.createWriteStream('./x.tgz') +) +``` + +#### `> cacache.get.info(cache, key) -> Promise` + +Looks up `key` in the cache index, returning information about the entry if +one exists. + +##### Fields + +* `key` - Key the entry was looked up under. Matches the `key` argument. +* `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to. +* `path` - Filesystem path where content is stored, joined with `cache` argument. +* `time` - Timestamp the entry was first added on. +* `metadata` - User-assigned metadata associated with the entry/content. + +##### Example + +```javascript +cacache.get.info(cachePath, 'my-thing').then(console.log) + +// Output +{ + key: 'my-thing', + integrity: 'sha256-MUSTVERIFY+ALL/THINGS==' + path: '.testcache/content/deadbeef', + time: 12345698490, + size: 849234, + metadata: { + name: 'blah', + version: '1.2.3', + description: 'this was once a package but now it is my-thing' + } +} +``` + +#### `> cacache.get.hasContent(cache, integrity) -> Promise` + +Looks up a [Subresource Integrity hash](#integrity) in the cache. If content +exists for this `integrity`, it will return an object, with the specific single integrity hash +that was found in `sri` key, and the size of the found content as `size`. If no content exists for this integrity, it will return `false`. + +##### Example + +```javascript +cacache.get.hasContent(cachePath, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log) + +// Output +{ + sri: { + source: 'sha256-MUSTVERIFY+ALL/THINGS==', + algorithm: 'sha256', + digest: 'MUSTVERIFY+ALL/THINGS==', + options: [] + }, + size: 9001 +} + +cacache.get.hasContent(cachePath, 'sha521-NOT+IN/CACHE==').then(console.log) + +// Output +false +``` + +##### Options + +##### `opts.integrity` +If present, the pre-calculated digest for the inserted content. If this option +is provided and does not match the post-insertion digest, insertion will fail +with an `EINTEGRITY` error. + +##### `opts.memoize` + +Default: null + +If explicitly truthy, cacache will read from memory and memoize data on bulk read. If `false`, cacache will read from disk data. Reader functions by default read from in-memory cache. + +##### `opts.size` +If provided, the data stream will be verified to check that enough data was +passed through. If there's more or less data than expected, insertion will fail +with an `EBADSIZE` error. + + +#### `> cacache.put(cache, key, data, [opts]) -> Promise` + +Inserts data passed to it into the cache. The returned Promise resolves with a +digest (generated according to [`opts.algorithms`](#optsalgorithms)) after the +cache entry has been successfully written. 
+ +See: [options](#put-options) + +##### Example + +```javascript +fetch( + 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz' +).then(data => { + return cacache.put(cachePath, 'registry.npmjs.org|cacache@1.0.0', data) +}).then(integrity => { + console.log('integrity hash is', integrity) +}) +``` + +#### `> cacache.put.stream(cache, key, [opts]) -> Writable` + +Returns a [Writable +Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts +data written to it into the cache. Emits an `integrity` event with the digest of +written contents when it succeeds. + +See: [options](#put-options) + +##### Example + +```javascript +request.get( + 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz' +).pipe( + cacache.put.stream( + cachePath, 'registry.npmjs.org|cacache@1.0.0' + ).on('integrity', d => console.log(`integrity digest is ${d}`)) +) +``` + +##### Options + +##### `opts.metadata` + +Arbitrary metadata to be attached to the inserted key. + +##### `opts.size` + +If provided, the data stream will be verified to check that enough data was +passed through. If there's more or less data than expected, insertion will fail +with an `EBADSIZE` error. + +##### `opts.integrity` + +If present, the pre-calculated digest for the inserted content. If this option +is provided and does not match the post-insertion digest, insertion will fail +with an `EINTEGRITY` error. + +`algorithms` has no effect if this option is present. + +##### `opts.algorithms` + +Default: ['sha512'] + +Hashing algorithms to use when calculating the [subresource integrity +digest](#integrity) +for inserted data. Can use any algorithm listed in `crypto.getHashes()` or +`'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You +may also use any anagram of `'modnar'` to use this feature. + +Currently only supports one algorithm at a time (i.e., an array length of +exactly `1`). Has no effect if `opts.integrity` is present. + +##### `opts.memoize` + +Default: null + +If provided, cacache will memoize the given cache insertion in memory, bypassing +any filesystem checks for that key or digest in future cache fetches. Nothing +will be written to the in-memory cache unless this option is explicitly truthy. + +If `opts.memoize` is an object or a `Map`-like (that is, an object with `get` +and `set` methods), it will be written to instead of the global memoization +cache. + +Reading from disk data can be forced by explicitly passing `memoize: false` to +the reader functions, but their default will be to read from memory. + +##### `opts.tmpPrefix` +Default: null + +Prefix to append on the temporary directory name inside the cache's tmp dir. + +#### `> cacache.rm.all(cache) -> Promise` + +Clears the entire cache. Mainly by blowing away the cache directory itself. + +##### Example + +```javascript +cacache.rm.all(cachePath).then(() => { + console.log('THE APOCALYPSE IS UPON US 😱') +}) +``` + +#### `> cacache.rm.entry(cache, key, [opts]) -> Promise` + +Alias: `cacache.rm` + +Removes the index entry for `key`. Content will still be accessible if +requested directly by content address ([`get.stream.byDigest`](#get-stream)). + +By default, this appends a new entry to the index with an integrity of `null`. +If `opts.removeFully` is set to `true` then the index file itself will be +physically deleted rather than appending a `null`. + +To remove the content itself (which might still be used by other entries), use +[`rm.content`](#rm-content). 
Or, to safely vacuum any unused content, use +[`verify`](#verify). + +##### Example + +```javascript +cacache.rm.entry(cachePath, 'my-thing').then(() => { + console.log('I did not like it anyway') +}) +``` + +#### `> cacache.rm.content(cache, integrity) -> Promise` + +Removes the content identified by `integrity`. Any index entries referring to it +will not be usable again until the content is re-added to the cache with an +identical digest. + +##### Example + +```javascript +cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => { + console.log('data for my-thing is gone!') +}) +``` + +#### `> cacache.index.compact(cache, key, matchFn, [opts]) -> Promise` + +Uses `matchFn`, which must be a synchronous function that accepts two entries +and returns a boolean indicating whether or not the two entries match, to +deduplicate all entries in the cache for the given `key`. + +If `opts.validateEntry` is provided, it will be called as a function with the +only parameter being a single index entry. The function must return a Boolean: +if it returns `true`, the entry is considered valid and will be kept in the index; +if it returns `false`, the entry will be removed from the index. + +If `opts.validateEntry` is not provided, however, every entry in the index will +be deduplicated and kept until the first `null` integrity is reached, removing +all entries that were written before the `null`. + +The deduplicated list of entries is both written to the index, replacing the +existing content, and returned in the Promise. + +#### `> cacache.index.insert(cache, key, integrity, opts) -> Promise` + +Writes an index entry to the cache for the given `key` without writing content. + +It is assumed if you are using this method, you have already stored the content +some other way and you only wish to add a new index to that content. The `metadata` +and `size` properties are read from `opts` and used as part of the index entry. + +Returns a Promise resolving to the newly added entry. + +#### `> cacache.clearMemoized()` + +Completely resets the in-memory entry cache. + +#### `> tmp.mkdir(cache, opts) -> Promise` + +Returns a unique temporary directory inside the cache's `tmp` dir. This +directory will use the same safe user assignment that the rest of the cache uses. + +Once the directory is made, it's the user's responsibility that all files +within are given the appropriate `gid`/`uid` ownership settings to match +the rest of the cache. If not, you can ask cacache to do it for you by +calling [`tmp.fix()`](#tmp-fix), which will fix all tmp directory +permissions. + +If you want automatic cleanup of this directory, use +[`tmp.withTmp()`](#with-tmp). + +See: [options](#tmp-options) + +##### Example + +```javascript +cacache.tmp.mkdir(cache).then(dir => { + fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...) +}) +``` + +#### `> tmp.fix(cache) -> Promise` + +Sets the `uid` and `gid` properties on all files and folders within the tmp +folder to match the rest of the cache. + +Use this after manually writing files into [`tmp.mkdir`](#tmp-mkdir) or +[`tmp.withTmp`](#with-tmp). + +##### Example + +```javascript +cacache.tmp.mkdir(cache).then(dir => { + writeFile(path.join(dir, 'file'), someData).then(() => { + // make sure we didn't just put a root-owned file in the cache + cacache.tmp.fix().then(() => { + // all uids and gids match now + }) + }) +}) +``` + +#### `> tmp.withTmp(cache, opts, cb) -> Promise` + +Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb` +with it.
The created temporary directory will be removed automatically once the +promise returned by `cb()` completes, whether it resolves or rejects. + +The same caveats apply when it comes to managing permissions for the tmp dir's +contents. + +See: [options](#tmp-options) + +##### Example + +```javascript +cacache.tmp.withTmp(cache, dir => { + return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...) +}).then(() => { + // `dir` no longer exists +}) +``` + +##### Options + +##### `opts.tmpPrefix` +Default: null + +Prefix to append on the temporary directory name inside the cache's tmp dir. + +#### Subresource Integrity Digests + +For content verification and addressing, cacache uses strings following the +[Subresource +Integrity spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity). +That is, any time cacache expects an `integrity` argument or option, it +should be in the format `<hashAlgorithm>-<base64Digest>`. + +One deviation from the current spec is that cacache will support any hash +algorithms supported by the underlying Node.js process. You can use +`crypto.getHashes()` to see which ones you can use. + +##### Generating Digests Yourself + +If you have an existing content shasum, it is generally formatted as a +hexadecimal string (that is, a sha1 would look like: +`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`). In order to be compatible with +cacache, you'll need to convert this to an equivalent subresource integrity +string. For this example, the corresponding hash would be: +`sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`. + +If you want to generate an integrity string yourself for existing data, you can +use something like this: + +```javascript +const crypto = require('crypto') +const hashAlgorithm = 'sha512' +const data = 'foobarbaz' + +const integrity = ( + hashAlgorithm + + '-' + + crypto.createHash(hashAlgorithm).update(data).digest('base64') +) +``` + +You can also use [`ssri`](https://npm.im/ssri) to have a richer set of functionality +around SRI strings, including generation, parsing, and translating from existing +hex-formatted strings. + +#### `> cacache.verify(cache, opts) -> Promise` + +Checks out and fixes up your cache: + +* Cleans up corrupted or invalid index entries. +* Custom entry filtering options. +* Garbage collects any content entries not referenced by the index. +* Checks integrity for all content entries and removes invalid content. +* Fixes cache ownership. +* Removes the `tmp` directory in the cache and all its contents. + +When it's done, it'll return an object with various stats about the verification +process, including amount of storage reclaimed, number of valid entries, number +of entries removed, etc. + +##### Options + +##### `opts.concurrency` + +Default: 20 + +Number of files to read concurrently from the filesystem while cleaning up. + +##### `opts.filter` +Receives a formatted entry. Return false to remove it. +Note: might be called more than once on the same entry. + +##### `opts.log` +Custom logger function: +``` + log: { silly () {} } + log.silly('verify', 'verifying cache at', cache) +``` + +##### Example + +```sh +echo somegarbage >> $CACHEPATH/content/deadbeef +``` + +```javascript +cacache.verify(cachePath).then(stats => { + // deadbeef collected, because of invalid checksum. + console.log('cache is much nicer now! stats:', stats) +}) +``` + +#### `> cacache.verify.lastRun(cache) -> Promise` + +Returns a `Date` representing the last time `cacache.verify` was run on `cache`.
+ +##### Example + +```javascript +cacache.verify(cachePath).then(() => { + cacache.verify.lastRun(cachePath).then(lastTime => { + console.log('cacache.verify was last called on' + lastTime) + }) +}) +``` diff --git a/node_modules/node-gyp/node_modules/cacache/get.js b/node_modules/node-gyp/node_modules/cacache/get.js new file mode 100644 index 0000000..4e905e7 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/get.js @@ -0,0 +1,237 @@ +'use strict' + +const Collect = require('minipass-collect') +const Minipass = require('minipass') +const Pipeline = require('minipass-pipeline') +const fs = require('fs') +const util = require('util') + +const index = require('./lib/entry-index') +const memo = require('./lib/memoization') +const read = require('./lib/content/read') + +const writeFile = util.promisify(fs.writeFile) + +function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve({ + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + }) + } + + return index.find(cache, key, opts).then((entry) => { + if (!entry) + throw new index.NotFoundError(cache, key) + + return read(cache, entry.integrity, { integrity, size }).then((data) => { + if (memoize) + memo.put(cache, entry, data, opts) + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } + }) + }) +} +module.exports = getData + +function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) + return Promise.resolve(memoized) + + return read(cache, key, { integrity, size }).then((res) => { + if (memoize) + memo.put.byDigest(cache, key, res, opts) + return res + }) +} +module.exports.byDigest = getDataByDigest + +function getDataSync (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + const entry = index.find.sync(cache, key, opts) + if (!entry) + throw new index.NotFoundError(cache, key) + const data = read.sync(cache, entry.integrity, { + integrity: integrity, + size: size, + }) + const res = { + metadata: entry.metadata, + data: data, + size: entry.size, + integrity: entry.integrity, + } + if (memoize) + memo.put(cache, entry, res.data, opts) + + return res +} + +module.exports.sync = getDataSync + +function getDataByDigestSync (cache, digest, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, digest, opts) + + if (memoized && memoize !== false) + return memoized + + const res = read.sync(cache, digest, { + integrity: integrity, + size: size, + }) + if (memoize) + memo.put.byDigest(cache, digest, res, opts) + + return res +} +module.exports.sync.byDigest = getDataByDigestSync + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) { + const { 
memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) + return getMemoizedStream(memoized) + + const stream = new Pipeline() + index + .find(cache, key) + .then((entry) => { + if (!entry) + throw new index.NotFoundError(cache, key) + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + }) + .catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) + return stream + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) + return Promise.resolve(memoized.entry) + else + return index.find(cache, key) +} +module.exports.info = info + +function copy (cache, key, dest, opts = {}) { + if (read.copy) { + return index.find(cache, key, opts).then((entry) => { + if (!entry) + throw new index.NotFoundError(cache, key) + return read.copy(cache, entry.integrity, dest, opts) + .then(() => { + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } + }) + }) + } + + return getData(cache, key, opts).then((res) => { + return writeFile(dest, res.data).then(() => { + return { + metadata: res.metadata, + size: res.size, + integrity: res.integrity, + } + }) + }) +} +module.exports.copy = copy + +function copyByDigest (cache, key, dest, opts = {}) { + if (read.copy) + return read.copy(cache, key, dest, opts).then(() => key) + + return getDataByDigest(cache, key, opts).then((res) => { + return writeFile(dest, res).then(() => key) + }) +} +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/node_modules/node-gyp/node_modules/cacache/index.js b/node_modules/node-gyp/node_modules/cacache/index.js new file mode 100644 index 0000000..c8c52b0 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/index.js @@ -0,0 +1,46 @@ +'use strict' + +const ls = require('./ls.js') +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./lib/memoization.js') +const tmp = require('./lib/util/tmp.js') +const index = require('./lib/entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = ls 
+module.exports.ls.stream = ls.stream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.sync = get.sync +module.exports.get.sync.byDigest = get.sync.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent +module.exports.get.hasContent.sync = get.hasContent.sync + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/path.js b/node_modules/node-gyp/node_modules/cacache/lib/content/path.js new file mode 100644 index 0000000..ad5a76a --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/read.js b/node_modules/node-gyp/node_modules/cacache/lib/content/read.js new file mode 100644 index 0000000..034e8ee --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/content/read.js @@ -0,0 +1,244 @@ +'use strict' + +const util = require('util') + +const fs = require('fs') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +const lstat = util.promisify(fs.lstat) +const readFile = util.promisify(fs.readFile) + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +function read (cache, integrity, opts = {}) { + const { size } = opts + return withContentSri(cache, integrity, (cpath, sri) => { + // get size + return lstat(cpath).then(stat => ({ stat, cpath, sri })) + }).then(({ stat, cpath, sri }) => { + if (typeof size === 'number' && stat.size !== size) + throw sizeError(size, stat.size) + + if (stat.size > MAX_SINGLE_READ_SIZE) + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + + return readFile(cpath, null).then((data) => { + if (!ssri.checkData(data, sri)) + throw integrityError(sri, cpath) + + return data + }) + }) +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.sync = readSync + 
+function readSync (cache, integrity, opts = {}) { + const { size } = opts + return withContentSriSync(cache, integrity, (cpath, sri) => { + const data = fs.readFileSync(cpath) + if (typeof size === 'number' && size !== data.length) + throw sizeError(size, data.length) + + if (ssri.checkData(data, sri)) + return data + + throw integrityError(sri, cpath) + }) +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + withContentSri(cache, integrity, (cpath, sri) => { + // just lstat to ensure it exists + return lstat(cpath).then((stat) => ({ stat, cpath, sri })) + }).then(({ stat, cpath, sri }) => { + if (typeof size === 'number' && size !== stat.size) + return stream.emit('error', sizeError(size, stat.size)) + + readPipeline(cpath, stat.size, sri, stream) + }, er => stream.emit('error', er)) + + return stream +} + +let copyFile +if (fs.copyFile) { + module.exports.copy = copy + module.exports.copy.sync = copySync + copyFile = util.promisify(fs.copyFile) +} + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath, sri) => { + return copyFile(cpath, dest) + }) +} + +function copySync (cache, integrity, dest) { + return withContentSriSync(cache, integrity, (cpath, sri) => { + return fs.copyFileSync(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +function hasContent (cache, integrity) { + if (!integrity) + return Promise.resolve(false) + + return withContentSri(cache, integrity, (cpath, sri) => { + return lstat(cpath).then((stat) => ({ size: stat.size, sri, stat })) + }).catch((err) => { + if (err.code === 'ENOENT') + return false + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') + throw err + else + return false + } + }) +} + +module.exports.hasContent.sync = hasContentSync + +function hasContentSync (cache, integrity) { + if (!integrity) + return false + + return withContentSriSync(cache, integrity, (cpath, sri) => { + try { + const stat = fs.lstatSync(cpath) + return { size: stat.size, sri, stat } + } catch (err) { + if (err.code === 'ENOENT') + return false + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') + throw err + else + return false + } + } + }) +} + +function withContentSri (cache, integrity, fn) { + const tryFn = () => { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. 
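+    // e.g. `sha512-xxx... sha1-yyy...` parses into one digest list per
+    // algorithm; pickAlgorithm() returns the strongest algorithm present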
+ const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + return Promise + .all(digests.map((meta) => { + return withContentSri(cache, meta, fn) + .catch((err) => { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + }) + })) + .then((results) => { + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) + return result + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) + throw enoentError + + // Throw generic error + throw results.find((r) => r instanceof Error) + }) + } + } + + return new Promise((resolve, reject) => { + try { + tryFn() + .then(resolve) + .catch(reject) + } catch (err) { + reject(err) + } + }) +} + +function withContentSriSync (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + let lastErr = null + for (const meta of digests) { + try { + return withContentSriSync(cache, meta, fn) + } catch (err) { + lastErr = err + } + } + throw lastErr + } +} + +function sizeError (expected, found) { + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js b/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js new file mode 100644 index 0000000..6a3d1a3 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,19 @@ +'use strict' + +const util = require('util') + +const contentPath = require('./path') +const { hasContent } = require('./read') +const rimraf = util.promisify(require('rimraf')) + +module.exports = rm + +function rm (cache, integrity) { + return hasContent(cache, integrity).then((content) => { + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) + return rimraf(contentPath(cache, content.sri)).then(() => true) + else + return false + }) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/write.js b/node_modules/node-gyp/node_modules/cacache/lib/content/write.js new file mode 100644 index 0000000..dde1bd1 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/content/write.js @@ -0,0 +1,189 @@ +'use strict' + +const util = require('util') + +const contentPath = require('./path') +const fixOwner = require('../util/fix-owner') +const fs = require('fs') +const moveFile = require('../util/move-file') +const Minipass = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const rimraf = 
util.promisify(require('rimraf')) +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const { disposer } = require('./../util/disposer') +const fsm = require('fs-minipass') + +const writeFile = util.promisify(fs.writeFile) + +module.exports = write + +function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + if (algorithms && algorithms.length > 1) + throw new Error('opts.algorithms only supports a single algorithm for now') + + if (typeof size === 'number' && data.length !== size) + return Promise.reject(sizeError(size, data.length)) + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) + return Promise.reject(checksumError(integrity, sri)) + + return disposer(makeTmp(cache, opts), makeTmpDisposer, + (tmp) => { + return writeFile(tmp.target, data, { flag: 'wx' }) + .then(() => moveToDestination(tmp, cache, sri, opts)) + }) + .then(() => ({ integrity: sri, size: data.length })) +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. + return Promise.reject(e).catch(cb) + } + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +function handleContent (inputStream, cache, opts) { + return disposer(makeTmp(cache, opts), makeTmpDisposer, (tmp) => { + return pipeToTmp(inputStream, cache, tmp.target, opts) + .then((res) => { + return moveToDestination( + tmp, + cache, + res.integrity, + opts + ).then(() => res) + }) + }) +} + +function pipeToTmp (inputStream, cache, tmpTarget, opts) { + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + // NB: this can throw if the hashStream has a problem with + // it, and the data is fully written. but pipeToTmp is only + // called in promisory contexts where that is handled. 
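+  // data flows inputStream -> hashStream -> outStream, so content is hashed
+  // and written to the tmp file in a single pass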
+ const pipeline = new Pipeline( + inputStream, + hashStream, + outStream + ) + + return pipeline.promise() + .then(() => ({ integrity, size })) + .catch(er => rimraf(tmpTarget).then(() => { + throw er + })) +} + +function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + return fixOwner.mkdirfix(cache, path.dirname(tmpTarget)).then(() => ({ + target: tmpTarget, + moved: false, + })) +} + +function makeTmpDisposer (tmp) { + if (tmp.moved) + return Promise.resolve() + + return rimraf(tmp.target) +} + +function moveToDestination (tmp, cache, sri, opts) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + + return fixOwner + .mkdirfix(cache, destDir) + .then(() => { + return moveFile(tmp.target, destination) + }) + .then(() => { + tmp.moved = true + return fixOwner.chownr(cache, destination) + }) +} + +function sizeError (expected, found) { + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js b/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js new file mode 100644 index 0000000..71aac5e --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,394 @@ +'use strict' + +const util = require('util') +const crypto = require('crypto') +const fs = require('fs') +const Minipass = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const { disposer } = require('./util/disposer') +const contentPath = require('./content/path') +const fixOwner = require('./util/fix-owner') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const moveFile = require('@npmcli/move-file') +const _rimraf = require('rimraf') +const rimraf = util.promisify(_rimraf) +rimraf.sync = _rimraf.sync + +const appendFile = util.promisify(fs.appendFile) +const readFile = util.promisify(fs.readFile) +const readdir = util.promisify(fs.readdir) +const writeFile = util.promisify(fs.writeFile) + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. 
+ // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) + break + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) + newEntries.unshift(entry) + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fixOwner.mkdirfix(cache, path.dirname(target)) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) + return rimraf(tmp.target) + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await fixOwner.mkdirfix(cache, path.dirname(bucket)) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + try { + await fixOwner.chownr(cache, bucket) + } catch (err) { + if (err.code !== 'ENOENT') + throw err + } + } + + // write the file atomically + await disposer(setup(), teardown, write) + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +function insert (cache, key, integrity, opts = {}) { + const { metadata, size } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: Date.now(), + size, + metadata, + } + return fixOwner + .mkdirfix(cache, path.dirname(bucket)) + .then(() => { + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + }) + .then(() => fixOwner.chownr(cache, bucket)) + .catch((err) => { + if (err.code === 'ENOENT') + return undefined + + throw err + // There's a class of race conditions that happen when things get deleted + // during fixOwner, or between the two mkdirfix/chownr calls. + // + // It's perfectly fine to just not bother in those cases and lie + // that the index entry was written. Because it's a cache. 
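+      // (the .then below still resolves with the formatted entry)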
+ }) + .then(() => { + return formatEntry(cache, entry) + }) +} + +module.exports.insert.sync = insertSync + +function insertSync (cache, key, integrity, opts = {}) { + const { metadata, size } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: Date.now(), + size, + metadata, + } + fixOwner.mkdirfix.sync(cache, path.dirname(bucket)) + const stringified = JSON.stringify(entry) + fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + try { + fixOwner.chownr.sync(cache, bucket) + } catch (err) { + if (err.code !== 'ENOENT') + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +function find (cache, key) { + const bucket = bucketPath(cache, key) + return bucketEntries(bucket) + .then((entries) => { + return entries.reduce((latest, next) => { + if (next && next.key === key) + return formatEntry(cache, next) + else + return latest + }, null) + }) + .catch((err) => { + if (err.code === 'ENOENT') + return null + else + throw err + }) +} + +module.exports.find.sync = findSync + +function findSync (cache, key) { + const bucket = bucketPath(cache, key) + try { + return bucketEntriesSync(bucket).reduce((latest, next) => { + if (next && next.key === key) + return formatEntry(cache, next) + else + return latest + }, null) + } catch (err) { + if (err.code === 'ENOENT') + return null + else + throw err + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) + return insert(cache, key, null, opts) + + const bucket = bucketPath(cache, key) + return rimraf(bucket) +} + +module.exports.delete.sync = delSync + +function delSync (cache, key, opts = {}) { + if (!opts.removeFully) + return insertSync(cache, key, null, opts) + + const bucket = bucketPath(cache, key) + return rimraf.sync(bucket) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + readdirOrEmpty(indexDir).then(buckets => Promise.all( + buckets.map(bucket => { + const bucketPath = path.join(indexDir, bucket) + return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all( + subbuckets.map(subbucket => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + return readdirOrEmpty(subbucketPath).then(entries => Promise.all( + entries.map(entry => { + const entryPath = path.join(subbucketPath, entry) + return bucketEntries(entryPath).then(entries => + // using a Map here prevents duplicate keys from + // showing up twice, I guess? 
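+                  // later appends shadow earlier ones, so keeping only the
+                  // last entry seen per key gives the current view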
+ entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + ).then(reduced => { + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) + stream.write(formatted) + } + }).catch(err => { + if (err.code === 'ENOENT') + return undefined + throw err + }) + }) + )) + }) + )) + }) + )) + .then( + () => stream.end(), + err => stream.emit('error', err) + ) + + return stream +} + +module.exports.ls = ls + +function ls (cache) { + return lsStream(cache).collect().then(entries => + entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) + ) +} + +module.exports.bucketEntries = bucketEntries + +function bucketEntries (bucket, filter) { + return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter)) +} + +module.exports.bucketEntries.sync = bucketEntriesSync + +function bucketEntriesSync (bucket, filter) { + const data = fs.readFileSync(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) + return + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! + // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (e) { + // Entry is corrupted! + return + } + if (obj) + entries.push(obj) + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) + return null + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') + return [] + + throw err + }) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/memoization.js b/node_modules/node-gyp/node_modules/cacache/lib/memoization.js new file mode 100644 index 0000000..d5465f3 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/memoization.js @@ -0,0 +1,73 @@ +'use strict' + +const LRU = require('lru-cache') + +const MAX_SIZE = 50 * 1024 * 1024 // 50MB +const MAX_AGE = 3 * 60 * 1000 + +const MEMOIZED = new LRU({ + max: MAX_SIZE, + maxAge: MAX_AGE, + length: (entry, key) => key.startsWith('key:') ? 
entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.reset() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) + return MEMOIZED + else if (opts.memoize.get && opts.memoize.set) + return opts.memoize + else if (typeof opts.memoize === 'object') + return new ObjProxy(opts.memoize) + else + return MEMOIZED +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/disposer.js b/node_modules/node-gyp/node_modules/cacache/lib/util/disposer.js new file mode 100644 index 0000000..aa8aed5 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/util/disposer.js @@ -0,0 +1,30 @@ +'use strict' + +module.exports.disposer = disposer + +function disposer (creatorFn, disposerFn, fn) { + const runDisposer = (resource, result, shouldThrow = false) => { + return disposerFn(resource) + .then( + // disposer resolved, do something with original fn's promise + () => { + if (shouldThrow) + throw result + + return result + }, + // Disposer fn failed, crash process + (err) => { + throw err + // Or process.exit? + }) + } + + return creatorFn + .then((resource) => { + // fn(resource) can throw, so wrap in a promise here + return Promise.resolve().then(() => fn(resource)) + .then((result) => runDisposer(resource, result)) + .catch((err) => runDisposer(resource, err, true)) + }) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/fix-owner.js b/node_modules/node-gyp/node_modules/cacache/lib/util/fix-owner.js new file mode 100644 index 0000000..90ffece --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/util/fix-owner.js @@ -0,0 +1,142 @@ +'use strict' + +const util = require('util') + +const chownr = util.promisify(require('chownr')) +const mkdirp = require('mkdirp') +const inflight = require('promise-inflight') +const inferOwner = require('infer-owner') + +// Memoize getuid()/getgid() calls. 
+// patch process.setuid/setgid to invalidate cached value on change +const self = { uid: null, gid: null } +const getSelf = () => { + if (typeof self.uid !== 'number') { + self.uid = process.getuid() + const setuid = process.setuid + process.setuid = (uid) => { + self.uid = null + process.setuid = setuid + return process.setuid(uid) + } + } + if (typeof self.gid !== 'number') { + self.gid = process.getgid() + const setgid = process.setgid + process.setgid = (gid) => { + self.gid = null + process.setgid = setgid + return process.setgid(gid) + } + } +} + +module.exports.chownr = fixOwner + +function fixOwner (cache, filepath) { + if (!process.getuid) { + // This platform doesn't need ownership fixing + return Promise.resolve() + } + + getSelf() + if (self.uid !== 0) { + // almost certainly can't chown anyway + return Promise.resolve() + } + + return Promise.resolve(inferOwner(cache)).then((owner) => { + const { uid, gid } = owner + + // No need to override if it's already what we used. + if (self.uid === uid && self.gid === gid) + return + + return inflight('fixOwner: fixing ownership on ' + filepath, () => + chownr( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ).catch((err) => { + if (err.code === 'ENOENT') + return null + + throw err + }) + ) + }) +} + +module.exports.chownr.sync = fixOwnerSync + +function fixOwnerSync (cache, filepath) { + if (!process.getuid) { + // This platform doesn't need ownership fixing + return + } + const { uid, gid } = inferOwner.sync(cache) + getSelf() + if (self.uid !== 0) { + // almost certainly can't chown anyway + return + } + + if (self.uid === uid && self.gid === gid) { + // No need to override if it's already what we used. + return + } + try { + chownr.sync( + filepath, + typeof uid === 'number' ? uid : self.uid, + typeof gid === 'number' ? gid : self.gid + ) + } catch (err) { + // only catch ENOENT, any other error is a problem. + if (err.code === 'ENOENT') + return null + + throw err + } +} + +module.exports.mkdirfix = mkdirfix + +function mkdirfix (cache, p, cb) { + // we have to infer the owner _before_ making the directory, even though + // we aren't going to use the results, since the cache itself might not + // exist yet. If we mkdirp it, then our current uid/gid will be assumed + // to be correct if it creates the cache folder in the process. 
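+  // (infer-owner memoizes per path, so the chownr calls that follow will
+  // see this same pre-mkdir owner)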
+ return Promise.resolve(inferOwner(cache)).then(() => { + return mkdirp(p) + .then((made) => { + if (made) + return fixOwner(cache, made).then(() => made) + }) + .catch((err) => { + if (err.code === 'EEXIST') + return fixOwner(cache, p).then(() => null) + + throw err + }) + }) +} + +module.exports.mkdirfix.sync = mkdirfixSync + +function mkdirfixSync (cache, p) { + try { + inferOwner.sync(cache) + const made = mkdirp.sync(p) + if (made) { + fixOwnerSync(cache, made) + return made + } + } catch (err) { + if (err.code === 'EEXIST') { + fixOwnerSync(cache, p) + return null + } else + throw err + } +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 0000000..445599b --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/move-file.js b/node_modules/node-gyp/node_modules/cacache/lib/util/move-file.js new file mode 100644 index 0000000..c3f9e35 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/util/move-file.js @@ -0,0 +1,67 @@ +'use strict' + +const fs = require('fs') +const util = require('util') +const chmod = util.promisify(fs.chmod) +const unlink = util.promisify(fs.unlink) +const stat = util.promisify(fs.stat) +const move = require('@npmcli/move-file') +const pinflight = require('promise-inflight') + +module.exports = moveFile + +function moveFile (src, dest) { + const isWindows = global.__CACACHE_TEST_FAKE_WINDOWS__ || + process.platform === 'win32' + + // This isn't quite an fs.rename -- the assumption is that + // if `dest` already exists, and we get certain errors while + // trying to move it, we should just not bother. + // + // In the case of cache corruption, users will receive an + // EINTEGRITY error elsewhere, and can remove the offending + // content their own way. + // + // Note that, as the name suggests, this strictly only supports file moves. + return new Promise((resolve, reject) => { + fs.link(src, dest, (err) => { + if (err) { + if (isWindows && err.code === 'EPERM') { + // XXX This is a really weird way to handle this situation, as it + // results in the src file being deleted even though the dest + // might not exist. Since we pretty much always write files to + // deterministic locations based on content hash, this is likely + // ok (or at worst, just ends in a future cache miss). But it would + // be worth investigating at some time in the future if this is + // really what we want to do here. + return resolve() + } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { + // file already exists, so whatever + return resolve() + } else + return reject(err) + } else + return resolve() + }) + }) + .then(() => { + // content should never change for any reason, so make it read-only + return Promise.all([ + unlink(src), + !isWindows && chmod(dest, '0444'), + ]) + }) + .catch(() => { + return pinflight('cacache-move-file:' + dest, () => { + return stat(dest).catch((err) => { + if (err.code !== 'ENOENT') { + // Something else is wrong here. Bail bail bail + throw err + } + // file doesn't already exist! 
let's try a rename -> copy fallback + // only delete if it successfully copies + return move(src, dest) + }) + }) + }) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js b/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 0000000..0a5a50e --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,35 @@ +'use strict' + +const fs = require('@npmcli/fs') + +const fixOwner = require('./fix-owner') +const path = require('path') + +module.exports.mkdir = mktmpdir + +function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + return fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + .then(() => { + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) + }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return fs.withTempDir(path.join(cache, 'tmp'), cb, opts) +} + +module.exports.fix = fixtmpdir + +function fixtmpdir (cache) { + return fixOwner(cache, path.join(cache, 'tmp')) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/verify.js b/node_modules/node-gyp/node_modules/cacache/lib/verify.js new file mode 100644 index 0000000..e9d679e --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/verify.js @@ -0,0 +1,287 @@ +'use strict' + +const util = require('util') + +const pMap = require('p-map') +const contentPath = require('./content/path') +const fixOwner = require('./util/fix-owner') +const fs = require('fs') +const fsm = require('fs-minipass') +const glob = util.promisify(require('glob')) +const index = require('./entry-index') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const stat = util.promisify(fs.stat) +const truncate = util.promisify(fs.truncate) +const writeFile = util.promisify(fs.writeFile) +const readFile = util.promisify(fs.readFile) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + return steps + .reduce((promise, step, i) => { + const label = step.name + const start = new Date() + return promise.then((stats) => { + return step(cache, opts).then((s) => { + s && + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + const end = new Date() + if (!stats.runTime) + stats.runTime = {} + + stats.runTime[label] = end - start + return Promise.resolve(stats) + }) + }) + }, Promise.resolve({})) + .then((stats) => { + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats + }) +} + +function markStartTime (cache, opts) { + return Promise.resolve({ startTime: new Date() }) +} + +function markEndTime (cache, opts) { + return Promise.resolve({ endTime: new Date() }) +} + +function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + return fixOwner + .mkdirfix(cache, cache) + .then(() => { + // TODO - 
fix file permissions too + return fixOwner.chownr(cache, cache) + }) + .then(() => null) +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rimraf it. +// +function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) + return + + liveContent.add(entry.integrity.toString()) + }) + return new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }).then(() => { + const contentDir = contentPath.contentDir(cache) + return glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }).then((files) => { + return Promise.resolve({ + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + }).then((stats) => + pMap( + files, + (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + return verifyContent(f, integrity).then((info) => { + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + return stats + }) + } else { + // No entries refer to this content. We can delete. 
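+              // stat first, so the reclaimed byte count is known before rimraf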
+ stats.reclaimedCount++ + return stat(f).then((s) => { + return rimraf(f).then(() => { + stats.reclaimedSize += s.size + return stats + }) + }) + } + }, + { concurrency: opts.concurrency } + ).then(() => stats) + ) + }) + }) +} + +function verifyContent (filepath, sri) { + return stat(filepath) + .then((s) => { + const contentInfo = { + size: s.size, + valid: true, + } + return ssri + .checkStream(new fsm.ReadStream(filepath), sri) + .catch((err) => { + if (err.code !== 'EINTEGRITY') + throw err + + return rimraf(filepath).then(() => { + contentInfo.valid = false + }) + }) + .then(() => contentInfo) + }) + .catch((err) => { + if (err.code === 'ENOENT') + return { size: 0, valid: false } + + throw err + }) +} + +function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + return index.ls(cache).then((entries) => { + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) + buckets[hashed].push(entry) + else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + return pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ).then(() => stats) + }) +} + +function rebuildBucket (cache, bucket, stats, opts) { + return truncate(bucket._path).then(() => { + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
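+  // reducing over a promise chain re-inserts the entries one at a time, in order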
+ return bucket.reduce((promise, entry) => { + return promise.then(() => { + const content = contentPath(cache, entry.integrity) + return stat(content) + .then(() => { + return index + .insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + }) + .then(() => { + stats.totalEntries++ + }) + }) + .catch((err) => { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + return + } + throw err + }) + }) + }, Promise.resolve()) + }) +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rimraf(path.join(cache, 'tmp')) +} + +function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + try { + return writeFile(verifile, '' + +new Date()) + } finally { + fixOwner.chownr.sync(cache, verifile) + } +} + +module.exports.lastRun = lastRun + +function lastRun (cache) { + return readFile(path.join(cache, '_lastverified'), 'utf8').then( + (data) => new Date(+data) + ) +} diff --git a/node_modules/node-gyp/node_modules/cacache/ls.js b/node_modules/node-gyp/node_modules/cacache/ls.js new file mode 100644 index 0000000..6006c99 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/ls.js @@ -0,0 +1,6 @@ +'use strict' + +const index = require('./lib/entry-index') + +module.exports = index.ls +module.exports.stream = index.lsStream diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/.bin/mkdirp b/node_modules/node-gyp/node_modules/cacache/node_modules/.bin/mkdirp new file mode 100644 index 0000000..41e1f95 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/.bin/mkdirp @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../../../mkdirp/bin/cmd.js" "$@" + ret=$? +else + node "$basedir/../../../../../mkdirp/bin/cmd.js" "$@" + ret=$? +fi +exit $ret diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/.bin/mkdirp.cmd b/node_modules/node-gyp/node_modules/cacache/node_modules/.bin/mkdirp.cmd new file mode 100644 index 0000000..98fc07f --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/.bin/mkdirp.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\..\..\mkdirp\bin\cmd.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\..\..\mkdirp\bin\cmd.js" %* +) \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/.bin/rimraf b/node_modules/node-gyp/node_modules/cacache/node_modules/.bin/rimraf new file mode 100644 index 0000000..b25e096 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/.bin/rimraf @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../../../rimraf/bin.js" "$@" + ret=$? +else + node "$basedir/../../../../../rimraf/bin.js" "$@" + ret=$? 
+fi +exit $ret diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/.bin/rimraf.cmd b/node_modules/node-gyp/node_modules/cacache/node_modules/.bin/rimraf.cmd new file mode 100644 index 0000000..f658eff --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/.bin/rimraf.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\..\..\rimraf\bin.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\..\..\rimraf\bin.js" %* +) \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/package.json b/node_modules/node-gyp/node_modules/cacache/package.json new file mode 100644 index 0000000..6cb4140 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/package.json @@ -0,0 +1,80 @@ +{ + "name": "cacache", + "version": "15.3.0", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "index.js", + "files": [ + "*.js", + "lib" + ], + "scripts": { + "benchmarks": "node test/benchmarks", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "npm run npmclilint -- \"*.*js\" \"lib/**/*.*js\" \"test/**/*.*js\"", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --" + }, + "repository": "https://github.com/npm/cacache", + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^1.0.0", + "@npmcli/move-file": "^1.0.1", + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "glob": "^7.1.4", + "infer-owner": "^1.0.4", + "lru-cache": "^6.0.0", + "minipass": "^3.1.1", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.2", + "mkdirp": "^1.0.3", + "p-map": "^4.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.0.2", + "unique-filename": "^1.1.1" + }, + "devDependencies": { + "@npmcli/lint": "^1.0.1", + "benchmark": "^2.1.4", + "chalk": "^4.0.0", + "require-inject": "^1.4.4", + "tacks": "^1.3.0", + "tap": "^15.0.9" + }, + "tap": { + "100": true, + "test-regex": "test/[^/]*.js" + }, + "engines": { + "node": ">= 10" + } +} diff --git a/node_modules/node-gyp/node_modules/cacache/put.js b/node_modules/node-gyp/node_modules/cacache/put.js new file mode 100644 index 0000000..84e9562 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/put.js @@ -0,0 +1,83 @@ +'use strict' + +const index = require('./lib/entry-index') +const memo = require('./lib/memoization') +const write = require('./lib/content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + return write(cache, data, opts).then((res) => { + return index + .insert(cache, key, res.integrity, { ...opts, size: res.size }) + .then((entry) => { + if 
(memoize) + memo.put(cache, entry, data, opts) + + return res.integrity + }) + }) +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + flush () { + return index + .insert(cache, key, integrity, { ...opts, size }) + .then((entry) => { + if (memoize && memoData) + memo.put(cache, entry, memoData, opts) + + if (integrity) + pipeline.emit('integrity', integrity) + + if (size) + pipeline.emit('size', size) + }) + }, + })) + + return pipeline +} diff --git a/node_modules/node-gyp/node_modules/cacache/rm.js b/node_modules/node-gyp/node_modules/cacache/rm.js new file mode 100644 index 0000000..f2ef6b1 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const util = require('util') + +const index = require('./lib/entry-index') +const memo = require('./lib/memoization') +const path = require('path') +const rimraf = util.promisify(require('rimraf')) +const rmContent = require('./lib/content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +function all (cache) { + memo.clearMemoized() + return rimraf(path.join(cache, '*(content-*|index-*)')) +} diff --git a/node_modules/node-gyp/node_modules/cacache/verify.js b/node_modules/node-gyp/node_modules/cacache/verify.js new file mode 100644 index 0000000..db7763d --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/verify.js @@ -0,0 +1,3 @@ +'use strict' + +module.exports = require('./lib/verify') diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/README.md b/node_modules/node-gyp/node_modules/http-proxy-agent/README.md new file mode 100644 index 0000000..d60e206 --- /dev/null +++ b/node_modules/node-gyp/node_modules/http-proxy-agent/README.md @@ -0,0 +1,74 @@ +http-proxy-agent +================ +### An HTTP(s) proxy `http.Agent` implementation for HTTP +[![Build Status](https://github.com/TooTallNate/node-http-proxy-agent/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-http-proxy-agent/actions?workflow=Node+CI) + +This module provides an `http.Agent` implementation that connects to a specified +HTTP or HTTPS proxy server, and can be used with the built-in `http` module. + +__Note:__ For HTTP proxy usage with the `https` module, check out +[`node-https-proxy-agent`](https://github.com/TooTallNate/node-https-proxy-agent). 
+ +Installation +------------ + +Install with `npm`: + +``` bash +$ npm install http-proxy-agent +``` + + +Example +------- + +``` js +var url = require('url'); +var http = require('http'); +var HttpProxyAgent = require('http-proxy-agent'); + +// HTTP/HTTPS proxy to connect to +var proxy = process.env.http_proxy || 'http://168.63.76.32:3128'; +console.log('using proxy server %j', proxy); + +// HTTP endpoint for the proxy to connect to +var endpoint = process.argv[2] || 'http://nodejs.org/api/'; +console.log('attempting to GET %j', endpoint); +var opts = url.parse(endpoint); + +// create an instance of the `HttpProxyAgent` class with the proxy server information +var agent = new HttpProxyAgent(proxy); +opts.agent = agent; + +http.get(opts, function (res) { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + + +License +------- + +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.d.ts b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.d.ts new file mode 100644 index 0000000..3f043f7 --- /dev/null +++ b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.d.ts @@ -0,0 +1,32 @@ +/// +import net from 'net'; +import { Agent, ClientRequest, RequestOptions } from 'agent-base'; +import { HttpProxyAgentOptions } from '.'; +interface HttpProxyAgentClientRequest extends ClientRequest { + path: string; + output?: string[]; + outputData?: { + data: string; + }[]; + _header?: string | null; + _implicitHeader(): void; +} +/** + * The `HttpProxyAgent` implements an HTTP Agent subclass that connects + * to the specified "HTTP proxy server" in order to proxy HTTP requests. + * + * @api public + */ +export default class HttpProxyAgent extends Agent { + private secureProxy; + private proxy; + constructor(_opts: string | HttpProxyAgentOptions); + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. 
+     *
+     * @api protected
+     */
+    callback(req: HttpProxyAgentClientRequest, opts: RequestOptions): Promise<net.Socket>;
+}
+export {};
diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js
new file mode 100644
index 0000000..0252850
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js
@@ -0,0 +1,145 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const net_1 = __importDefault(require("net"));
+const tls_1 = __importDefault(require("tls"));
+const url_1 = __importDefault(require("url"));
+const debug_1 = __importDefault(require("debug"));
+const once_1 = __importDefault(require("@tootallnate/once"));
+const agent_base_1 = require("agent-base");
+const debug = debug_1.default('http-proxy-agent');
+function isHTTPS(protocol) {
+    return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false;
+}
+/**
+ * The `HttpProxyAgent` implements an HTTP Agent subclass that connects
+ * to the specified "HTTP proxy server" in order to proxy HTTP requests.
+ *
+ * @api public
+ */
+class HttpProxyAgent extends agent_base_1.Agent {
+    constructor(_opts) {
+        let opts;
+        if (typeof _opts === 'string') {
+            opts = url_1.default.parse(_opts);
+        }
+        else {
+            opts = _opts;
+        }
+        if (!opts) {
+            throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');
+        }
+        debug('Creating new HttpProxyAgent instance: %o', opts);
+        super(opts);
+        const proxy = Object.assign({}, opts);
+        // If `true`, then connect to the proxy server over TLS.
+        // Defaults to `false`.
+        this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol);
+        // Prefer `hostname` over `host`, and set the `port` if needed.
+        proxy.host = proxy.hostname || proxy.host;
+        if (typeof proxy.port === 'string') {
+            proxy.port = parseInt(proxy.port, 10);
+        }
+        if (!proxy.port && proxy.host) {
+            proxy.port = this.secureProxy ? 443 : 80;
+        }
+        if (proxy.host && proxy.path) {
+            // If both a `host` and `path` are specified then it's most likely
+            // the result of a `url.parse()` call... we need to remove the
+            // `path` portion so that `net.connect()` doesn't attempt to open
+            // that as a Unix socket file.
+            delete proxy.path;
+            delete proxy.pathname;
+        }
+        this.proxy = proxy;
+    }
+    /**
+     * Called when the node-core HTTP client library is creating a
+     * new HTTP request.
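+     * Resolves with the `net.Socket` (or `tls.Socket` for a secure proxy)
+     * that the request should be written to.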
+ * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + const parsed = url_1.default.parse(req.path); + if (!parsed.protocol) { + parsed.protocol = 'http:'; + } + if (!parsed.hostname) { + parsed.hostname = opts.hostname || opts.host || null; + } + if (parsed.port == null && typeof opts.port) { + parsed.port = String(opts.port); + } + if (parsed.port === '80') { + // if port is 80, then we can remove the port so that the + // ":80" portion is not on the produced URL + delete parsed.port; + } + // Change the `http.ClientRequest` instance's "path" field + // to the absolute path of the URL that will be requested. + req.path = url_1.default.format(parsed); + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); + } + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + // At this point, the http ClientRequest's internal `_header` field + // might have already been set. If this is the case then we'll need + // to re-generate the string since we just changed the `req.path`. + if (req._header) { + let first; + let endOfHeaders; + debug('Regenerating stored HTTP header string for request'); + req._header = null; + req._implicitHeader(); + if (req.output && req.output.length > 0) { + // Node < 12 + debug('Patching connection write() output buffer with updated header'); + first = req.output[0]; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.output[0] = req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.output); + } + else if (req.outputData && req.outputData.length > 0) { + // Node >= 12 + debug('Patching connection write() output buffer with updated header'); + first = req.outputData[0].data; + endOfHeaders = first.indexOf('\r\n\r\n') + 4; + req.outputData[0].data = + req._header + first.substring(endOfHeaders); + debug('Output buffer: %o', req.outputData[0].data); + } + } + // Wait for the socket's `connect` event, so that this `callback()` + // function throws instead of the `http` request machinery. This is + // important for i.e. `PacProxyAgent` which determines a failed proxy + // connection via the `callback()` function throwing. 
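+            // (`once()` from @tootallnate/once also rejects if the socket
+            // emits 'error' before 'connect', so a refused or unreachable
+            // proxy becomes a promise rejection rather than a stray error.)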
+ yield once_1.default(socket, 'connect'); + return socket; + }); + } +} +exports.default = HttpProxyAgent; +//# sourceMappingURL=agent.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js.map b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js.map new file mode 100644 index 0000000..7a40762 --- /dev/null +++ b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,kDAAgC;AAChC,6DAAqC;AACrC,2CAAkE;AAGlE,MAAM,KAAK,GAAG,eAAW,CAAC,kBAAkB,CAAC,CAAC;AAY9C,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED;;;;;GAKG;AACH,MAAqB,cAAe,SAAQ,kBAAK;IAIhD,YAAY,KAAqC;QAChD,IAAI,IAA2B,CAAC;QAChC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,0CAA0C,EAAE,IAAI,CAAC,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAA+B,IAAI,CAAE,CAAC;QAEjD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAgC,EAChC,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YACpC,MAAM,MAAM,GAAG,aAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;YAEnC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC;aAC1B;YAED,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC;aACrD;YAED,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,EAAE;gBAC5C,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAChC;YAED,IAAI,MAAM,CAAC,IAAI,KAAK,IAAI,EAAE;gBACzB,yDAAyD;gBACzD,2CAA2C;gBAC3C,OAAO,MAAM,CAAC,IAAI,CAAC;aACnB;YAED,0DAA0D;YAC1D,0DAA0D;YAC1D,GAAG,CAAC,IAAI,GAAG,aAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAE9B,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,GAAG,CAAC,SAAS,CACZ,qBAAqB,EACrB,SAAS,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CACrD,CAAC;aACF;YAED,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBACN,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,kEAAkE;YAClE,IAAI,GAAG,CAAC,OAAO,EAAE;gBAChB,IAAI,KAAa,CAAC;gBAClB,IAAI,YAAoB,CAAC;gBACzB,KAAK,CAAC,oDAAoD,CAAC,CAAC;gBAC5D,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC;gBACnB,GAAG,CAAC,eAAe,EAAE,CAAC;gBACtB,IAAI,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;oBACxC,YAAY;oBACZ,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;oBACtB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,OAAO,GAAG,KA
AK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC5D,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;iBACvC;qBAAM,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;oBACvD,aAAa;oBACb,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;oBAC/B,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI;wBACrB,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC7C,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;iBACnD;aACD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,qEAAqE;YACrE,qDAAqD;YACrD,MAAM,cAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;YAE9B,OAAO,MAAM,CAAC;QACf,CAAC;KAAA;CACD;AA1ID,iCA0IC"} \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.d.ts b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.d.ts new file mode 100644 index 0000000..24bdb52 --- /dev/null +++ b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.d.ts @@ -0,0 +1,21 @@ +/// +import net from 'net'; +import tls from 'tls'; +import { Url } from 'url'; +import { AgentOptions } from 'agent-base'; +import _HttpProxyAgent from './agent'; +declare function createHttpProxyAgent(opts: string | createHttpProxyAgent.HttpProxyAgentOptions): _HttpProxyAgent; +declare namespace createHttpProxyAgent { + interface BaseHttpProxyAgentOptions { + secureProxy?: boolean; + host?: string | null; + path?: string | null; + port?: string | number | null; + } + export interface HttpProxyAgentOptions extends AgentOptions, BaseHttpProxyAgentOptions, Partial> { + } + export type HttpProxyAgent = _HttpProxyAgent; + export const HttpProxyAgent: typeof _HttpProxyAgent; + export {}; +} +export = createHttpProxyAgent; diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js new file mode 100644 index 0000000..0a71180 --- /dev/null +++ b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js @@ -0,0 +1,14 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(require("./agent")); +function createHttpProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpProxyAgent) { + createHttpProxyAgent.HttpProxyAgent = agent_1.default; + createHttpProxyAgent.prototype = agent_1.default.prototype; +})(createHttpProxyAgent || (createHttpProxyAgent = {})); +module.exports = createHttpProxyAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js.map b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js.map new file mode 100644 index 0000000..e07dae5 --- /dev/null +++ b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAIA,oDAAsC;AAEtC,SAAS,oBAAoB,CAC5B,IAAyD;IAEzD,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,CAAC;AAClC,CAAC;AAED,WAAU,oBAAoB;IAmBhB,mCAAc,GAAG,eAAe,CAAC;IAE9C,oBAAoB,CAAC,SAAS,GAAG,eAAe,CAAC,SAAS,CAAC;AAC5D,CAAC,EAtBS,oBAAoB,KAApB,oBAAoB,QAsB7B;AAED,iBAAS,oBAAoB,CAAC"} \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/package.json b/node_modules/node-gyp/node_modules/http-proxy-agent/package.json new file mode 100644 index 0000000..870dd5d --- /dev/null +++ b/node_modules/node-gyp/node_modules/http-proxy-agent/package.json @@ -0,0 +1,57 @@ +{ + "name": "http-proxy-agent", + "version": "4.0.1", + "description": "An HTTP(s) proxy `http.Agent` implementation for HTTP", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "mocha", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-http-proxy-agent.git" + }, + "keywords": [ + "http", + "proxy", + "endpoint", + "agent" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/node-http-proxy-agent/issues" + }, + "dependencies": { + "@tootallnate/once": "1", + "agent-base": "6", + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/node": "^12.12.11", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.2", + "proxy": "1", + "rimraf": "^3.0.0", + "typescript": "^3.5.3" + }, + "engines": { + "node": ">= 6" + } +} diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE b/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 0000000..8d28acf --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/README.md b/node_modules/node-gyp/node_modules/make-fetch-happen/README.md new file mode 100644 index 0000000..87659c9 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/README.md @@ -0,0 +1,395 @@ +# make-fetch-happen +[![npm version](https://img.shields.io/npm/v/make-fetch-happen.svg)](https://npm.im/make-fetch-happen) [![license](https://img.shields.io/npm/l/make-fetch-happen.svg)](https://npm.im/make-fetch-happen) [![Travis](https://img.shields.io/travis/npm/make-fetch-happen.svg)](https://travis-ci.org/npm/make-fetch-happen) [![Coverage Status](https://coveralls.io/repos/github/npm/make-fetch-happen/badge.svg?branch=latest)](https://coveralls.io/github/npm/make-fetch-happen?branch=latest) + +[`make-fetch-happen`](https://github.com/npm/make-fetch-happen) is a Node.js +library that wraps [`minipass-fetch`](https://github.com/npm/minipass-fetch) with additional +features [`minipass-fetch`](https://github.com/npm/minipass-fetch) doesn't intend to include, including HTTP Cache support, request +pooling, proxies, retries, [and more](#features)! + +## Install + +`$ npm install --save make-fetch-happen` + +## Table of Contents + +* [Example](#example) +* [Features](#features) +* [Contributing](#contributing) +* [API](#api) + * [`fetch`](#fetch) + * [`fetch.defaults`](#fetch-defaults) + * [`minipass-fetch` options](#minipass-fetch-options) + * [`make-fetch-happen` options](#extra-options) + * [`opts.cachePath`](#opts-cache-path) + * [`opts.cache`](#opts-cache) + * [`opts.proxy`](#opts-proxy) + * [`opts.noProxy`](#opts-no-proxy) + * [`opts.ca, opts.cert, opts.key`](#https-opts) + * [`opts.maxSockets`](#opts-max-sockets) + * [`opts.retry`](#opts-retry) + * [`opts.onRetry`](#opts-onretry) + * [`opts.integrity`](#opts-integrity) +* [Message From Our Sponsors](#wow) + +### Example + +```javascript +const fetch = require('make-fetch-happen').defaults({ + cachePath: './my-cache' // path where cache will be written (and read) +}) + +fetch('https://registry.npmjs.org/make-fetch-happen').then(res => { + return res.json() // download the body as JSON +}).then(body => { + console.log(`got ${body.name} from web`) + return fetch('https://registry.npmjs.org/make-fetch-happen', { + cache: 'no-cache' // forces a conditional request + }) +}).then(res => { + console.log(res.status) // 304! cache validated! + return res.json().then(body => { + console.log(`got ${body.name} from cache`) + }) +}) +``` + +### Features + +* Builds around [`minipass-fetch`](https://npm.im/minipass-fetch) for the core [`fetch` API](https://fetch.spec.whatwg.org) implementation +* Request pooling out of the box +* Quite fast, really +* Automatic HTTP-semantics-aware request retries +* Cache-fallback automatic "offline mode" +* Proxy support (http, https, socks, socks4, socks5) +* Built-in request caching following full HTTP caching rules (`Cache-Control`, `ETag`, `304`s, cache fallback on error, etc). +* Customize cache storage with any [Cache API](https://developer.mozilla.org/en-US/docs/Web/API/Cache)-compliant `Cache` instance. Cache to Redis! 
+* Node.js Stream support +* Transparent gzip and deflate support +* [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) support +* Literally punches nazis +* (PENDING) Range request caching and resuming + +### Contributing + +The make-fetch-happen team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](https://github.com/npm/cli/blob/latest/CONTRIBUTING.md) outlines the process for community interaction and contribution. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear. + +All participants and maintainers in this project are expected to follow the [npm Code of Conduct](https://www.npmjs.com/policies/conduct), and just generally be excellent to each other. + +Please refer to the [Changelog](CHANGELOG.md) for project history details, too. + +Happy hacking! + +### API + +#### `> fetch(uriOrRequest, [opts]) -> Promise` + +This function implements most of the [`fetch` API](https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch): given a `uri` string or a `Request` instance, it will fire off an http request and return a Promise containing the relevant response. + +If `opts` is provided, the [`minipass-fetch`-specific options](#minipass-fetch-options) will be passed to that library. There are also [additional options](#extra-options) specific to make-fetch-happen that add various features, such as HTTP caching, integrity verification, proxy support, and more. + +##### Example + +```javascript +fetch('https://google.com').then(res => res.buffer()) +``` + +#### `> fetch.defaults([defaultUrl], [defaultOpts])` + +Returns a new `fetch` function that will call `make-fetch-happen` using `defaultUrl` and `defaultOpts` as default values to any calls. + +A defaulted `fetch` will also have a `.defaults()` method, so they can be chained. + +##### Example + +```javascript +const fetch = require('make-fetch-happen').defaults({ + cachePath: './my-local-cache' +}) + +fetch('https://registry.npmjs.org/make-fetch-happen') // will always use the cache +``` + +#### `> minipass-fetch options` + +The following options for `minipass-fetch` are used as-is: + +* method +* body +* redirect +* follow +* timeout +* compress +* size + +These other options are modified or augmented by make-fetch-happen: + +* headers - Default `User-Agent` set to make-fetch happen. `Connection` is set to `keep-alive` or `close` automatically depending on `opts.agent`. +* agent + * If agent is null, an http or https Agent will be automatically used. By default, these will be `http.globalAgent` and `https.globalAgent`. + * If [`opts.proxy`](#opts-proxy) is provided and `opts.agent` is null, the agent will be set to an appropriate proxy-handling agent. + * If `opts.agent` is an object, it will be used as the request-pooling agent argument for this request. + * If `opts.agent` is `false`, it will be passed as-is to the underlying request library. This causes a new Agent to be spawned for every request. + +For more details, see [the documentation for `minipass-fetch` itself](https://github.com/npm/minipass-fetch#options). + +#### `> make-fetch-happen options` + +make-fetch-happen augments the `minipass-fetch` API with additional features available through extra options. 
The following extra options are available: + +* [`opts.cachePath`](#opts-cache-path) - Cache target to read/write +* [`opts.cache`](#opts-cache) - `fetch` cache mode. Controls cache *behavior*. +* [`opts.proxy`](#opts-proxy) - Proxy agent +* [`opts.noProxy`](#opts-no-proxy) - Domain segments to disable proxying for. +* [`opts.ca, opts.cert, opts.key, opts.strictSSL`](#https-opts) +* [`opts.localAddress`](#opts-local-address) +* [`opts.maxSockets`](#opts-max-sockets) +* [`opts.retry`](#opts-retry) - Request retry settings +* [`opts.onRetry`](#opts-onretry) - a function called whenever a retry is attempted +* [`opts.integrity`](#opts-integrity) - [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) metadata. + +#### `> opts.cachePath` + +A string `Path` to be used as the cache root for [`cacache`](https://npm.im/cacache). + +**NOTE**: Requests will not be cached unless their response bodies are consumed. You will need to use one of the `res.json()`, `res.buffer()`, etc methods on the response, or drain the `res.body` stream, in order for it to be written. + +The default cache manager also adds the following headers to cached responses: + +* `X-Local-Cache`: Path to the cache the content was found in +* `X-Local-Cache-Key`: Unique cache entry key for this response +* `X-Local-Cache-Mode`: Either `stream` or `buffer` to indicate how the response was read from cacache +* `X-Local-Cache-Hash`: Specific integrity hash for the cached entry +* `X-Local-Cache-Status`: One of `miss`, `hit`, `stale`, `revalidated`, `updated`, or `skip` to signal how the response was created +* `X-Local-Cache-Time`: UTCString of the cache insertion time for the entry + +Using [`cacache`](https://npm.im/cacache), a call like this may be used to +manually fetch the cached entry: + +```javascript +const h = response.headers +cacache.get(h.get('x-local-cache'), h.get('x-local-cache-key')) + +// grab content only, directly: +cacache.get.byDigest(h.get('x-local-cache'), h.get('x-local-cache-hash')) +``` + +##### Example + +```javascript +fetch('https://registry.npmjs.org/make-fetch-happen', { + cachePath: './my-local-cache' +}) // -> 200-level response will be written to disk +``` + +A possible (minimal) implementation for `MyCustomRedisCache`: + +```javascript +const bluebird = require('bluebird') +const redis = require("redis") +bluebird.promisifyAll(redis.RedisClient.prototype) +class MyCustomRedisCache { + constructor (opts) { + this.redis = redis.createClient(opts) + } + match (req) { + return this.redis.getAsync(req.url).then(res => { + if (res) { + const parsed = JSON.parse(res) + return new fetch.Response(parsed.body, { + url: req.url, + headers: parsed.headers, + status: 200 + }) + } + }) + } + put (req, res) { + return res.buffer().then(body => { + return this.redis.setAsync(req.url, JSON.stringify({ + body: body, + headers: res.headers.raw() + })) + }).then(() => { + // return the response itself + return res + }) + } + 'delete' (req) { + return this.redis.unlinkAsync(req.url) + } +} +``` + +#### `> opts.cache` + +This option follows the standard `fetch` API cache option. This option will do nothing if [`opts.cachePath`](#opts-cache-path) is null. The following values are accepted (as strings): + +* `default` - Fetch will inspect the HTTP cache on the way to the network. If there is a fresh response it will be used. If there is a stale response a conditional request will be created, and a normal request otherwise. It then updates the HTTP cache with the response. 
If the revalidation request fails (for example, on a 500 or if you're offline), the stale response will be returned. +* `no-store` - Fetch behaves as if there is no HTTP cache at all. +* `reload` - Fetch behaves as if there is no HTTP cache on the way to the network. Ergo, it creates a normal request and updates the HTTP cache with the response. +* `no-cache` - Fetch creates a conditional request if there is a response in the HTTP cache and a normal request otherwise. It then updates the HTTP cache with the response. +* `force-cache` - Fetch uses any response in the HTTP cache matching the request, not paying attention to staleness. If there was no response, it creates a normal request and updates the HTTP cache with the response. +* `only-if-cached` - Fetch uses any response in the HTTP cache matching the request, not paying attention to staleness. If there was no response, it returns a network error. (Can only be used when request’s mode is "same-origin". Any cached redirects will be followed assuming request’s redirect mode is "follow" and the redirects do not violate request’s mode.) + +(Note: option descriptions are taken from https://fetch.spec.whatwg.org/#http-network-or-cache-fetch) + +##### Example + +```javascript +const fetch = require('make-fetch-happen').defaults({ + cachePath: './my-cache' +}) + +// Will error with ENOTCACHED if we haven't already cached this url +fetch('https://registry.npmjs.org/make-fetch-happen', { + cache: 'only-if-cached' +}) + +// Will refresh any local content and cache the new response +fetch('https://registry.npmjs.org/make-fetch-happen', { + cache: 'reload' +}) + +// Will use any local data, even if stale. Otherwise, will hit network. +fetch('https://registry.npmjs.org/make-fetch-happen', { + cache: 'force-cache' +}) +``` + +#### `> opts.proxy` + +A string or `new url.URL()`-d URI to proxy through. Different Proxy handlers will be +used depending on the proxy's protocol. + +Additionally, `process.env.HTTP_PROXY`, `process.env.HTTPS_PROXY`, and +`process.env.PROXY` are used if present and no `opts.proxy` value is provided. + +(Pending) `process.env.NO_PROXY` may also be configured to skip proxying requests for all, or specific domains. + +##### Example + +```javascript +fetch('https://registry.npmjs.org/make-fetch-happen', { + proxy: 'https://corporate.yourcompany.proxy:4445' +}) + +fetch('https://registry.npmjs.org/make-fetch-happen', { + proxy: { + protocol: 'https:', + hostname: 'corporate.yourcompany.proxy', + port: 4445 + } +}) +``` + +#### `> opts.noProxy` + +If present, should be a comma-separated string or an array of domain extensions +that a proxy should _not_ be used for. + +This option may also be provided through `process.env.NO_PROXY`. + +#### `> opts.ca, opts.cert, opts.key, opts.strictSSL` + +These values are passed in directly to the HTTPS agent and will be used for both +proxied and unproxied outgoing HTTPS requests. They mostly correspond to the +same options the `https` module accepts, which will be themselves passed to +`tls.connect()`. `opts.strictSSL` corresponds to `rejectUnauthorized`. + +#### `> opts.localAddress` + +Passed directly to `http` and `https` request calls. Determines the local +address to bind to. + +#### `> opts.maxSockets` + +Default: 15 + +Maximum number of active concurrent sockets to use for the underlying +Http/Https/Proxy agents. This setting applies once per spawned agent. + +15 is probably a _pretty good value_ for most use-cases, and balances speed +with, uh, not knocking out people's routers. 
🤓 + +#### `> opts.retry` + +An object that can be used to tune request retry settings. Retries will only be attempted on the following conditions: + +* Request method is NOT `POST` AND +* Request status is one of: `408`, `420`, `429`, or any status in the 500-range. OR +* Request errored with `ECONNRESET`, `ECONNREFUSED`, `EADDRINUSE`, `ETIMEDOUT`, or the `fetch` error `request-timeout`. + +The following are worth noting as explicitly not retried: + +* `getaddrinfo ENOTFOUND` and will be assumed to be either an unreachable domain or the user will be assumed offline. If a response is cached, it will be returned immediately. + +If `opts.retry` is `false`, it is equivalent to `{retries: 0}` + +If `opts.retry` is a number, it is equivalent to `{retries: num}` + +The following retry options are available if you want more control over it: + +* retries +* factor +* minTimeout +* maxTimeout +* randomize + +For details on what each of these do, refer to the [`retry`](https://npm.im/retry) documentation. + +##### Example + +```javascript +fetch('https://flaky.site.com', { + retry: { + retries: 10, + randomize: true + } +}) + +fetch('http://reliable.site.com', { + retry: false +}) + +fetch('http://one-more.site.com', { + retry: 3 +}) +``` + +#### `> opts.onRetry` + +A function called whenever a retry is attempted. + +##### Example + +```javascript +fetch('https://flaky.site.com', { + onRetry() { + console.log('we will retry!') + } +}) +``` + +#### `> opts.integrity` + +Matches the response body against the given [Subresource Integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity) metadata. If verification fails, the request will fail with an `EINTEGRITY` error. + +`integrity` may either be a string or an [`ssri`](https://npm.im/ssri) `Integrity`-like. + +##### Example + +```javascript +fetch('https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-1.0.0.tgz', { + integrity: 'sha1-o47j7zAYnedYFn1dF/fR9OV3z8Q=' +}) // -> ok + +fetch('https://malicious-registry.org/make-fetch-happen/-/make-fetch-happen-1.0.0.tgz', { + integrity: 'sha1-o47j7zAYnedYFn1dF/fR9OV3z8Q=' +}) // Error: EINTEGRITY +``` + +### Message From Our Sponsors + +![](stop.gif) + +![](happening.gif) diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js new file mode 100644 index 0000000..3675dd8 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js @@ -0,0 +1,194 @@ +'use strict' +const LRU = require('lru-cache') +const url = require('url') +const isLambda = require('is-lambda') + +const AGENT_CACHE = new LRU({ max: 50 }) +const HttpAgent = require('agentkeepalive') +const HttpsAgent = HttpAgent.HttpsAgent + +module.exports = getAgent + +const getAgentTimeout = timeout => + typeof timeout !== 'number' || !timeout ? 0 : timeout + 1 + +const getMaxSockets = maxSockets => maxSockets || 15 + +function getAgent (uri, opts) { + const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url) + const isHttps = parsedUri.protocol === 'https:' + const pxuri = getProxyUri(parsedUri.href, opts) + + // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout + // of zero disables the timeout behavior (OS limits still apply). Else, if + // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that + // the node-fetch-npm timeout will always fire first, giving us more + // consistent errors. 
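+  // e.g. getAgentTimeout(0) === 0 (timeout disabled), while
+  // getAgentTimeout(10000) === 10001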
+ const agentTimeout = getAgentTimeout(opts.timeout) + const agentMaxSockets = getMaxSockets(opts.maxSockets) + + const key = [ + `https:${isHttps}`, + pxuri + ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}` + : '>no-proxy<', + `local-address:${opts.localAddress || '>no-local-address<'}`, + `strict-ssl:${isHttps ? opts.rejectUnauthorized : '>no-strict-ssl<'}`, + `ca:${(isHttps && opts.ca) || '>no-ca<'}`, + `cert:${(isHttps && opts.cert) || '>no-cert<'}`, + `key:${(isHttps && opts.key) || '>no-key<'}`, + `timeout:${agentTimeout}`, + `maxSockets:${agentMaxSockets}`, + ].join(':') + + if (opts.agent != null) { // `agent: false` has special behavior! + return opts.agent + } + + // keep alive in AWS lambda makes no sense + const lambdaAgent = !isLambda ? null + : isHttps ? require('https').globalAgent + : require('http').globalAgent + + if (isLambda && !pxuri) + return lambdaAgent + + if (AGENT_CACHE.peek(key)) + return AGENT_CACHE.get(key) + + if (pxuri) { + const pxopts = isLambda ? { + ...opts, + agent: lambdaAgent, + } : opts + const proxy = getProxy(pxuri, pxopts, isHttps) + AGENT_CACHE.set(key, proxy) + return proxy + } + + const agent = isHttps ? new HttpsAgent({ + maxSockets: agentMaxSockets, + ca: opts.ca, + cert: opts.cert, + key: opts.key, + localAddress: opts.localAddress, + rejectUnauthorized: opts.rejectUnauthorized, + timeout: agentTimeout, + }) : new HttpAgent({ + maxSockets: agentMaxSockets, + localAddress: opts.localAddress, + timeout: agentTimeout, + }) + AGENT_CACHE.set(key, agent) + return agent +} + +function checkNoProxy (uri, opts) { + const host = new url.URL(uri).hostname.split('.').reverse() + let noproxy = (opts.noProxy || getProcessEnv('no_proxy')) + if (typeof noproxy === 'string') + noproxy = noproxy.split(/\s*,\s*/g) + + return noproxy && noproxy.some(no => { + const noParts = no.split('.').filter(x => x).reverse() + if (!noParts.length) + return false + for (let i = 0; i < noParts.length; i++) { + if (host[i] !== noParts[i]) + return false + } + return true + }) +} + +module.exports.getProcessEnv = getProcessEnv + +function getProcessEnv (env) { + if (!env) + return + + let value + + if (Array.isArray(env)) { + for (const e of env) { + value = process.env[e] || + process.env[e.toUpperCase()] || + process.env[e.toLowerCase()] + if (typeof value !== 'undefined') + break + } + } + + if (typeof env === 'string') { + value = process.env[env] || + process.env[env.toUpperCase()] || + process.env[env.toLowerCase()] + } + + return value +} + +module.exports.getProxyUri = getProxyUri +function getProxyUri (uri, opts) { + const protocol = new url.URL(uri).protocol + + const proxy = opts.proxy || + ( + protocol === 'https:' && + getProcessEnv('https_proxy') + ) || + ( + protocol === 'http:' && + getProcessEnv(['https_proxy', 'http_proxy', 'proxy']) + ) + if (!proxy) + return null + + const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy + + return !checkNoProxy(uri, opts) && parsedProxy +} + +const getAuth = u => + u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`) + : u.username ? 
decodeURIComponent(u.username) + : null + +const getPath = u => u.pathname + u.search + u.hash + +const HttpProxyAgent = require('http-proxy-agent') +const HttpsProxyAgent = require('https-proxy-agent') +const SocksProxyAgent = require('socks-proxy-agent') +module.exports.getProxy = getProxy +function getProxy (proxyUrl, opts, isHttps) { + const popts = { + host: proxyUrl.hostname, + port: proxyUrl.port, + protocol: proxyUrl.protocol, + path: getPath(proxyUrl), + auth: getAuth(proxyUrl), + ca: opts.ca, + cert: opts.cert, + key: opts.key, + timeout: getAgentTimeout(opts.timeout), + localAddress: opts.localAddress, + maxSockets: getMaxSockets(opts.maxSockets), + rejectUnauthorized: opts.rejectUnauthorized, + } + + if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { + if (!isHttps) + return new HttpProxyAgent(popts) + else + return new HttpsProxyAgent(popts) + } else if (proxyUrl.protocol.startsWith('socks')) + return new SocksProxyAgent(popts) + else { + throw Object.assign( + new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`), + { + url: proxyUrl.href, + } + ) + } +} diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 0000000..a2acea1 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,460 @@ +const { Request, Response } = require('minipass-fetch') +const Minipass = require('minipass') +const MinipassCollect = require('minipass-collect') +const MinipassFlush = require('minipass-flush') +const MinipassPipeline = require('minipass-pipeline') +const cacache = require('cacache') +const url = require('url') + +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// maximum amount of data we will buffer into memory +// if we'll exceed this, we switch to streaming +const MAX_MEM_SIZE = 5 * 1024 * 1024 // 5MB + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) + metadata.status = response.status + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) + metadata.reqHeaders[name] = request.headers.get(name) + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = 
request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) + metadata.reqHeaders.host = host + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. + // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + // explicitly ignore accept-encoding here + if (name !== 'accept-encoding' && request.headers.has(name)) + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) + metadata.resHeaders[name] = response.headers.get(name) + } + + // we only store accept-encoding and content-encoding if the user + // has disabled automatic compression and decompression in minipass-fetch + // since if it's enabled (the default) then the content will have + // already been decompressed making the header a lie + if (options.compress === false) { + metadata.reqHeaders['accept-encoding'] = request.headers.get('accept-encoding') + metadata.resHeaders['content-encoding'] = response.headers.get('content-encoding') + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else + this.key = cacheKey(request) + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) + return !!(entry.metadata && entry.metadata.status) + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. 
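+    // ('reload' only disables reading from the cache here; the caller in
+    // lib/cache/index.js still writes the fresh response back via store().)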
+ if (options.cache === 'reload') + return + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if (this.request.method !== 'GET' || ![200, 301, 308].includes(this.response.status) || !this.policy.storable()) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = this.response.headers.get('content-length') + const fitsInMemory = !!size && Number(size) < MAX_MEM_SIZE + const shouldBuffer = this.options.memoize !== false && fitsInMemory + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + memoize: fitsInMemory && this.options.memoize, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }) + + body = new MinipassPipeline(new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + + let abortStream, onResume + if (shouldBuffer) { + // if the result fits in memory, use a collect stream to gather + // the response and write it to cacache while also passing it through + // to the user + onResume = () => { + const collector = new MinipassCollect.PassThrough() + abortStream = collector + collector.on('collect', (data) => { + // TODO if the cache write fails, log a warning but return the response anyway + cacache.put(this.options.cachePath, this.key, data, cacheOpts).then(cacheWriteResolve, cacheWriteReject) + }) + body.unshift(collector) + body.unshift(this.response.body) + } + } else { + // if it does not fit in memory, create a tee stream and use + // that to pipe to both the cache and the user simultaneously + onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + 
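+          // keep a reference to the write stream so the response 'error'
+          // handler below can destroy the in-flight cache write as well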
abortStream = cacheStream + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + this.response.body.on('error', (err) => { + // the abortStream will either be a MinipassCollect if we buffer + // or a cacache write stream, either way be sure to listen for + // errors from the actual response and avoid writing data that we + // know to be invalid to the cache + abortStream.destroy(err) + }) + } else + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', shouldBuffer ? 'buffer' : 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + const size = Number(this.response.headers.get('content-length')) + const fitsInMemory = !!size && size < MAX_MEM_SIZE + const shouldBuffer = this.options.memoize !== false && fitsInMemory + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const removeOnResume = () => body.removeListener('resume', onResume) + let onResume + if (shouldBuffer) { + onResume = async () => { + removeOnResume() + try { + const content = await cacache.get.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + body.end(content) + } catch (err) { + if (err.code === 'EINTEGRITY') + await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') + await CacheEntry.invalidate(this.request, this.options) + body.emit('error', err) + } + } + } else { + onResume = () => { + const cacheStream = cacache.get.stream.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if (err.code === 'EINTEGRITY') + await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }) + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') + await CacheEntry.invalidate(this.request, this.options) + body.emit('error', err) + cacheStream.resume() + }) + cacheStream.pipe(body) + } + } + + 
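+      // wiring is deferred until the consumer starts reading ('resume'), so a
+      // response body that is never consumed is also never written to the cache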
body.once('resume', onResume) + body.once('end', removeOnResume) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers: { + ...this.policy.responseHeaders(), + }, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', shouldBuffer ? 'buffer' : 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) + return this.respond(request.method, options, 'stale') + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if (!hasOwnProperty(metadata.resHeaders, name) && hasOwnProperty(this.entry.metadata.resHeaders, name)) + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 0000000..31e97c4 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,10 @@ +class NotCachedError extends Error { + constructor (url) { + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git 
a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 0000000..cca93d9 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,45 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') + throw new NotCachedError(request.url) + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const entry = new CacheEntry({ request, response, options }) + return entry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') + return entry.revalidate(request, options) + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) + return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit') + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) + return + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 0000000..f7684d5 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 0000000..e0959f6 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// HACK: negotiator lazy loads several of its own modules +// as a micro optimization. 
we need to be sure that they're +// in memory as soon as possible at startup so that we do +// not try to lazy load them after the directory has been +// retired during a self update of the npm CLI, we do this +// by calling all of the methods that trigger a lazy load +// on a fake instance. +const preloadNegotiator = new Negotiator({ headers: {} }) +preloadNegotiator.charsets() +preloadNegotiator.encodings() +preloadNegotiator.languages() +preloadNegotiator.mediaTypes() + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) + return false + + // user explicitly asked not to cache + if (options.cache === 'no-store') + return false + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) + return false + + // otherwise, let http-cache-semantics make the decision + // based on the request's headers + const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) + return false + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) + return false + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) + return false + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) + return false + + if (this.options.integrity) + return ssri.parse(this.options.integrity).match(this.entry.integrity) + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () 
{ + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 0000000..dfded79 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,100 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) + return false + + if (options.redirect === 'manual') + return false + + if (options.redirect === 'error') + throw new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect', { code: 'ENOREDIRECT' }) + + if (!response.headers.has('location')) + throw new FetchError(`redirect location header missing for: ${request.url}`, 'no-location', { code: 'EINVALIDREDIRECT' }) + + if (request.counter >= request.follow) + throw new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect', { code: 'EMAXREDIRECT' }) + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + // Copyright (c) 2010-2012 Mikeal Rogers + // Licensed under the Apache License, Version 2.0 (the "License"); + // you may not use this file except in compliance with the License. + // You may obtain a copy of the License at + // http://www.apache.org/licenses/LICENSE-2.0 + // Unless required by applicable law or agreed to in writing, + // software distributed under the License is distributed on an "AS + // IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + // express or implied. 
See the License for the specific language + // governing permissions and limitations under the License. + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) + request.headers.delete('authorization') + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if (response.status === 303 || (request.method === 'POST' && [301, 302].includes(response.status))) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) + await cache.invalidate(request, options) + + if (!canFollowRedirect(request, response, options)) + return response + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 0000000..6028bc0 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,40 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return makeFetchHappen(finalUrl, finalOptions) + } + + defaultedFetch.defaults = makeFetchHappen.defaults + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 0000000..f6138e6 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,44 @@ +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const {strictSSL, ...options} = { ...opts } + options.method = 
options.method ? options.method.toUpperCase() : 'GET' + options.rejectUnauthorized = strictSSL !== false + + if (!options.retry) + options.retry = { retries: 0 } + else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) + options.retry = { retries } + else + options.retry = { retries: 0 } + } else if (typeof options.retry === 'number') + options.retry = { retries: options.retry } + else + options.retry = { retries: 0, ...options.retry } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) + options.cache = 'no-store' + } + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) + options.cachePath = options.cacheManager + + return options +} + +module.exports = configureOptions diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 0000000..7e4ed24 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,102 @@ +const Minipass = require('minipass') +const MinipassPipeline = require('minipass-pipeline') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') + +const getAgent = require('./agent.js') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) + request.headers.set('connection', agent ? 
'keep-alive' : 'close') + + if (!request.headers.has('user-agent')) + request.headers.set('user-agent', USER_AGENT) + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ integrity: _opts.integrity }) + res = new fetch.Response(new MinipassPipeline(res.body, integrityStream), res) + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') + options.onRetry(res) + + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) + throw err + + if (typeof options.onRetry === 'function') + options.onRetry(err) + + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') + return err + + throw err + }) +} + +module.exports = remoteFetch diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/package.json b/node_modules/node-gyp/node_modules/make-fetch-happen/package.json new file mode 100644 index 0000000..dae7b37 --- /dev/null +++ b/node_modules/node-gyp/node_modules/make-fetch-happen/package.json @@ -0,0 +1,76 @@ +{ + "name": "make-fetch-happen", + "version": "9.1.0", + "description": "Opinionated, caching, retrying fetch client", + "main": "lib/index.js", + "files": [ + "lib" + ], + "scripts": { + "preversion": "npm t", + "postversion": "npm publish", + "prepublishOnly": "git push --follow-tags", + "test": "tap", + "posttest": "npm run lint", + "eslint": "eslint", + "lint": "npm run eslint -- lib test", + "lintfix": "npm run lint -- --fix" + }, + "repository": "https://github.com/npm/make-fetch-happen", + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "author": { + "name": "Kat Marchán", + "email": "kzm@zkat.tech", + "twitter": "maybekatz" + }, + "license": "ISC", + "dependencies": { + "agentkeepalive": "^4.1.3", + "cacache": "^15.2.0", + "http-cache-semantics": "^4.1.0", + "http-proxy-agent": "^4.0.1", + "https-proxy-agent": "^5.0.0", + "is-lambda": "^1.0.1", + "lru-cache": "^6.0.0", + "minipass": "^3.1.3", + "minipass-collect": "^1.0.2", + "minipass-fetch": "^1.3.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.2", + "promise-retry": "^2.0.1", + "socks-proxy-agent": "^6.0.0", + "ssri": "^8.0.0" 
+ }, + "devDependencies": { + "eslint": "^7.26.0", + "eslint-plugin-import": "^2.23.2", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.0", + "eslint-plugin-standard": "^5.0.0", + "mkdirp": "^1.0.4", + "nock": "^13.0.11", + "npmlog": "^5.0.0", + "require-inject": "^1.4.2", + "rimraf": "^3.0.2", + "safe-buffer": "^5.2.1", + "standard-version": "^9.3.0", + "tap": "^15.0.9" + }, + "engines": { + "node": ">= 10" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true + } +} diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE b/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE new file mode 100644 index 0000000..3c3410c --- /dev/null +++ b/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE @@ -0,0 +1,28 @@ +The MIT License (MIT) + +Copyright (c) Isaac Z. Schlueter and Contributors +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +--- + +Note: This is a derivative work based on "node-fetch" by David Frank, +modified and distributed under the terms of the MIT license above. +https://github.com/bitinn/node-fetch diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/README.md b/node_modules/node-gyp/node_modules/minipass-fetch/README.md new file mode 100644 index 0000000..925e6be --- /dev/null +++ b/node_modules/node-gyp/node_modules/minipass-fetch/README.md @@ -0,0 +1,29 @@ +# minipass-fetch + +An implementation of window.fetch in Node.js using Minipass streams + +This is a fork (or more precisely, a reimplementation) of +[node-fetch](http://npm.im/node-fetch). All streams have been replaced +with [minipass streams](http://npm.im/minipass). + +The goal of this module is to stay in sync with the API presented by +`node-fetch`, with the exception of the streaming interface provided. + +## Why + +Minipass streams are faster and more deterministic in their timing contract +than node-core streams, making them a better fit for many server-side use +cases. + +## API + +See [node-fetch](http://npm.im/node-fetch) + +Differences from `node-fetch` (and, by extension, from the WhatWG Fetch +specification): + +- Returns [minipass](http://npm.im/minipass) streams instead of node-core + streams. +- Supports the full set of [TLS Options that may be provided to + `https.request()`](https://nodejs.org/api/https.html#https_https_request_options_callback) + when making `https` requests. 
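+
+As a quick orientation, here is a minimal usage sketch (an editor's
+illustration, not part of the vendored README; the registry URL is only an
+example):
+
+```js
+// minipass-fetch is consumed like node-fetch
+const fetch = require('minipass-fetch')
+
+fetch('https://registry.npmjs.org/minipass-fetch')
+  .then(res => res.json())
+  .then(meta => console.log(meta['dist-tags']))
+  .catch(er => console.error(er.type, er.message))
+
+// res.body is a minipass stream, so it can also be piped directly:
+// fetch(url).then(res => res.body.pipe(process.stdout))
+```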
diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/index.js b/node_modules/node-gyp/node_modules/minipass-fetch/index.js new file mode 100644 index 0000000..07efc17 --- /dev/null +++ b/node_modules/node-gyp/node_modules/minipass-fetch/index.js @@ -0,0 +1 @@ +module.exports = require('./lib/index.js') diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js new file mode 100644 index 0000000..b18f643 --- /dev/null +++ b/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js @@ -0,0 +1,17 @@ +'use strict' +class AbortError extends Error { + constructor (message) { + super(message) + this.code = 'FETCH_ABORTED' + this.type = 'aborted' + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'AbortError' + } + + // don't allow name to be overridden, but don't throw either + set name (s) {} +} +module.exports = AbortError diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js new file mode 100644 index 0000000..9225db6 --- /dev/null +++ b/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js @@ -0,0 +1,97 @@ +'use strict' +const Minipass = require('minipass') +const TYPE = Symbol('type') +const BUFFER = Symbol('buffer') + +class Blob { + constructor (blobParts, options) { + this[TYPE] = '' + + const buffers = [] + let size = 0 + + if (blobParts) { + const a = blobParts + const length = Number(a.length) + for (let i = 0; i < length; i++) { + const element = a[i] + const buffer = element instanceof Buffer ? element + : ArrayBuffer.isView(element) + ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) + : element instanceof ArrayBuffer ? Buffer.from(element) + : element instanceof Blob ? element[BUFFER] + : typeof element === 'string' ? Buffer.from(element) + : Buffer.from(String(element)) + size += buffer.length + buffers.push(buffer) + } + } + + this[BUFFER] = Buffer.concat(buffers, size) + + const type = options && options.type !== undefined + && String(options.type).toLowerCase() + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type + } + } + + get size () { + return this[BUFFER].length + } + + get type () { + return this[TYPE] + } + + text () { + return Promise.resolve(this[BUFFER].toString()) + } + + arrayBuffer () { + const buf = this[BUFFER] + const off = buf.byteOffset + const len = buf.byteLength + const ab = buf.buffer.slice(off, off + len) + return Promise.resolve(ab) + } + + stream () { + return new Minipass().end(this[BUFFER]) + } + + slice (start, end, type) { + const size = this.size + const relativeStart = start === undefined ? 0 + : start < 0 ? Math.max(size + start, 0) + : Math.min(start, size) + const relativeEnd = end === undefined ? size + : end < 0 ? 
Math.max(size + end, 0) + : Math.min(end, size) + const span = Math.max(relativeEnd - relativeStart, 0) + + const buffer = this[BUFFER] + const slicedBuffer = buffer.slice( + relativeStart, + relativeStart + span + ) + const blob = new Blob([], { type }) + blob[BUFFER] = slicedBuffer + return blob + } + + get [Symbol.toStringTag] () { + return 'Blob' + } + + static get BUFFER () { + return BUFFER + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, +}) + +module.exports = Blob diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js new file mode 100644 index 0000000..fb7ffc3 --- /dev/null +++ b/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js @@ -0,0 +1,334 @@ +'use strict' +const Minipass = require('minipass') +const MinipassSized = require('minipass-sized') + +const Blob = require('./blob.js') +const {BUFFER} = Blob +const FetchError = require('./fetch-error.js') + +// optional dependency on 'encoding' +let convert +try { + convert = require('encoding').convert +} catch (e) {} + +const INTERNALS = Symbol('Body internals') +const CONSUME_BODY = Symbol('consumeBody') + +class Body { + constructor (bodyArg, options = {}) { + const { size = 0, timeout = 0 } = options + const body = bodyArg === undefined || bodyArg === null ? null + : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) + : isBlob(bodyArg) ? bodyArg + : Buffer.isBuffer(bodyArg) ? bodyArg + : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' + ? Buffer.from(bodyArg) + : ArrayBuffer.isView(bodyArg) + ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) + : Minipass.isStream(bodyArg) ? bodyArg + : Buffer.from(String(bodyArg)) + + this[INTERNALS] = { + body, + disturbed: false, + error: null, + } + + this.size = size + this.timeout = timeout + + if (Minipass.isStream(body)) { + body.on('error', er => { + const error = er.name === 'AbortError' ? 
er + : new FetchError(`Invalid response while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + this[INTERNALS].error = error + }) + } + } + + get body () { + return this[INTERNALS].body + } + + get bodyUsed () { + return this[INTERNALS].disturbed + } + + arrayBuffer () { + return this[CONSUME_BODY]().then(buf => + buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)) + } + + blob () { + const ct = this.headers && this.headers.get('content-type') || '' + return this[CONSUME_BODY]().then(buf => Object.assign( + new Blob([], { type: ct.toLowerCase() }), + { [BUFFER]: buf } + )) + } + + json () { + return this[CONSUME_BODY]().then(buf => { + try { + return JSON.parse(buf.toString()) + } catch (er) { + return Promise.reject(new FetchError( + `invalid json response body at ${ + this.url} reason: ${er.message}`, 'invalid-json')) + } + }) + } + + text () { + return this[CONSUME_BODY]().then(buf => buf.toString()) + } + + buffer () { + return this[CONSUME_BODY]() + } + + textConverted () { + return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers)) + } + + [CONSUME_BODY] () { + if (this[INTERNALS].disturbed) + return Promise.reject(new TypeError(`body used already for: ${ + this.url}`)) + + this[INTERNALS].disturbed = true + + if (this[INTERNALS].error) + return Promise.reject(this[INTERNALS].error) + + // body is null + if (this.body === null) { + return Promise.resolve(Buffer.alloc(0)) + } + + if (Buffer.isBuffer(this.body)) + return Promise.resolve(this.body) + + const upstream = isBlob(this.body) ? this.body.stream() : this.body + + /* istanbul ignore if: should never happen */ + if (!Minipass.isStream(upstream)) + return Promise.resolve(Buffer.alloc(0)) + + const stream = this.size && upstream instanceof MinipassSized ? upstream + : !this.size && upstream instanceof Minipass && + !(upstream instanceof MinipassSized) ? upstream + : this.size ? new MinipassSized({ size: this.size }) + : new Minipass() + + // allow timeout on slow response body + const resTimeout = this.timeout ? setTimeout(() => { + stream.emit('error', new FetchError( + `Response timeout while trying to fetch ${ + this.url} (over ${this.timeout}ms)`, 'body-timeout')) + }, this.timeout) : null + + // do not keep the process open just for this timeout, even + // though we expect it'll get cleared eventually. + if (resTimeout) { + resTimeout.unref() + } + + // do the pipe in the promise, because the pipe() can send too much + // data through right away and upset the MP Sized object + return new Promise((resolve, reject) => { + // if the stream is some other kind of stream, then pipe through a MP + // so we can collect it more easily. 
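+      // editorial note (not upstream text): Minipass pipe() does not forward
+      // 'error' events, so the upstream error is re-emitted on the collecting
+      // stream by hand; otherwise a failing body would never settle the
+      // concat() promise below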
+ if (stream !== upstream) { + upstream.on('error', er => stream.emit('error', er)) + upstream.pipe(stream) + } + resolve() + }).then(() => stream.concat()).then(buf => { + clearTimeout(resTimeout) + return buf + }).catch(er => { + clearTimeout(resTimeout) + // request was aborted, reject with this Error + if (er.name === 'AbortError' || er.name === 'FetchError') + throw er + else if (er.name === 'RangeError') + throw new FetchError(`Could not create Buffer from response body for ${ + this.url}: ${er.message}`, 'system', er) + else + // other errors, such as incorrect content-encoding or content-length + throw new FetchError(`Invalid response body while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + }) + } + + static clone (instance) { + if (instance.bodyUsed) + throw new Error('cannot clone body after it is used') + + const body = instance.body + + // check that body is a stream and not form-data object + // NB: can't clone the form-data object without having it as a dependency + if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { + // create a dedicated tee stream so that we don't lose data + // potentially sitting in the body stream's buffer by writing it + // immediately to p1 and not having it for p2. + const tee = new Minipass() + const p1 = new Minipass() + const p2 = new Minipass() + tee.on('error', er => { + p1.emit('error', er) + p2.emit('error', er) + }) + body.on('error', er => tee.emit('error', er)) + tee.pipe(p1) + tee.pipe(p2) + body.pipe(tee) + // set instance body to one fork, return the other + instance[INTERNALS].body = p1 + return p2 + } else + return instance.body + } + + static extractContentType (body) { + return body === null || body === undefined ? null + : typeof body === 'string' ? 'text/plain;charset=UTF-8' + : isURLSearchParams(body) + ? 'application/x-www-form-urlencoded;charset=UTF-8' + : isBlob(body) ? body.type || null + : Buffer.isBuffer(body) ? null + : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null + : ArrayBuffer.isView(body) ? null + : typeof body.getBoundary === 'function' + ? `multipart/form-data;boundary=${body.getBoundary()}` + : Minipass.isStream(body) ? null + : 'text/plain;charset=UTF-8' + } + + static getTotalBytes (instance) { + const {body} = instance + return (body === null || body === undefined) ? 0 + : isBlob(body) ? body.size + : Buffer.isBuffer(body) ? body.length + : body && typeof body.getLengthSync === 'function' && ( + // detect form data input from form-data module + body._lengthRetrievers && + /* istanbul ignore next */ body._lengthRetrievers.length == 0 || // 1.x + body.hasKnownLength && body.hasKnownLength()) // 2.x + ? body.getLengthSync() + : null + } + + static writeToStream (dest, instance) { + const {body} = instance + + if (body === null || body === undefined) + dest.end() + else if (Buffer.isBuffer(body) || typeof body === 'string') + dest.end(body) + else { + // body is stream or blob + const stream = isBlob(body) ? body.stream() : body + stream.on('error', er => dest.emit('error', er)).pipe(dest) + } + + return dest + } +} + +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}) + + +const isURLSearchParams = obj => + // Duck-typing as a necessary condition. 
+  (typeof obj !== 'object' ||
+    typeof obj.append !== 'function' ||
+    typeof obj.delete !== 'function' ||
+    typeof obj.get !== 'function' ||
+    typeof obj.getAll !== 'function' ||
+    typeof obj.has !== 'function' ||
+    typeof obj.set !== 'function') ? false
+  // Brand-checking and more duck-typing as optional condition.
+  : obj.constructor.name === 'URLSearchParams' ||
+    Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
+    typeof obj.sort === 'function'
+
+const isBlob = obj =>
+  typeof obj === 'object' &&
+  typeof obj.arrayBuffer === 'function' &&
+  typeof obj.type === 'string' &&
+  typeof obj.stream === 'function' &&
+  typeof obj.constructor === 'function' &&
+  typeof obj.constructor.name === 'string' &&
+  /^(Blob|File)$/.test(obj.constructor.name) &&
+  /^(Blob|File)$/.test(obj[Symbol.toStringTag])
+
+
+const convertBody = (buffer, headers) => {
+  /* istanbul ignore if */
+  if (typeof convert !== 'function')
+    throw new Error('The package `encoding` must be installed to use the textConverted() function')
+
+  const ct = headers && headers.get('content-type')
+  let charset = 'utf-8'
+  let res, str
+
+  // header
+  if (ct)
+    res = /charset=([^;]*)/i.exec(ct)
+
+  // no charset in content type, peek at response body for at most 1024 bytes
+  str = buffer.slice(0, 1024).toString()
+
+  // html5
+  if (!res && str)
+    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str)
+
+  // html4
+  if (!res && str) {
+    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str)
+
+    if (res)
+      res = /charset=(.*)/i.exec(res.pop())
+  }
+
+  // xml
+  if (!res && str)
+    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str)
+
+  // found charset
+  if (res) {
+    charset = res.pop()
+
+    // prevent decode issues when sites use incorrect encoding
+    // ref: https://hsivonen.fi/encoding-menu/
+    if (charset === 'gb2312' || charset === 'gbk')
+      charset = 'gb18030'
+  }
+
+  // turn raw buffers into a single utf-8 buffer
+  return convert(
+    buffer,
+    'UTF-8',
+    charset
+  ).toString()
+}
+
+module.exports = Body
diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js
new file mode 100644
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js
@@ -0,0 +1,31 @@
+'use strict'
+class FetchError extends Error {
+  constructor (message, type, systemError) {
+    super(message)
+    this.code = 'FETCH_ERROR'
+
+    // pick up code, expected, path, ...
+    if (systemError)
+      Object.assign(this, systemError)
+
+    this.errno = this.code
+
+    // override anything the system error might've clobbered
+    this.type = this.code === 'EBADSIZE' && this.found > this.expect
+      ? 'max-size' : type
+    this.message = message
+    Error.captureStackTrace(this, this.constructor)
+  }
+
+  get name () {
+    return 'FetchError'
+  }
+
+  // don't allow name to be overwritten
+  set name (n) {}
+
+  get [Symbol.toStringTag] () {
+    return 'FetchError'
+  }
+}
+module.exports = FetchError
diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js
new file mode 100644
index 0000000..233675f
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js
@@ -0,0 +1,250 @@
+'use strict'
+const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/
+
+const validateName = name => {
+  name = `${name}`
+  if (invalidTokenRegex.test(name) || name === '')
+    throw new TypeError(`${name} is not a legal HTTP header name`)
+}
+
+const validateValue = value => {
+  value = `${value}`
+  if (invalidHeaderCharRegex.test(value))
+    throw new TypeError(`${value} is not a legal HTTP header value`)
+}
+
+const find = (map, name) => {
+  name = name.toLowerCase()
+  for (const key in map) {
+    if (key.toLowerCase() === name)
+      return key
+  }
+  return undefined
+}
+
+const MAP = Symbol('map')
+class Headers {
+  constructor (init = undefined) {
+    this[MAP] = Object.create(null)
+    if (init instanceof Headers) {
+      const rawHeaders = init.raw()
+      const headerNames = Object.keys(rawHeaders)
+      for (const headerName of headerNames) {
+        for (const value of rawHeaders[headerName]) {
+          this.append(headerName, value)
+        }
+      }
+      return
+    }
+
+    // no-op
+    if (init === undefined || init === null)
+      return
+
+    if (typeof init === 'object') {
+      const method = init[Symbol.iterator]
+      if (method !== null && method !== undefined) {
+        if (typeof method !== 'function')
+          throw new TypeError('Header pairs must be iterable')
+
+        // sequence<sequence<ByteString>>
+        // Note: per spec we have to first exhaust the lists then process them
+        const pairs = []
+        for (const pair of init) {
+          if (typeof pair !== 'object' ||
+              typeof pair[Symbol.iterator] !== 'function')
+            throw new TypeError('Each header pair must be iterable')
+          const arrPair = Array.from(pair)
+          if (arrPair.length !== 2)
+            throw new TypeError('Each header pair must be a name/value tuple')
+          pairs.push(arrPair)
+        }
+
+        for (const pair of pairs) {
+          this.append(pair[0], pair[1])
+        }
+      } else {
+        // record<ByteString, ByteString>
+        for (const key of Object.keys(init)) {
+          this.append(key, init[key])
+        }
+      }
+    } else
+      throw new TypeError('Provided initializer must be an object')
+  }
+
+  get (name) {
+    name = `${name}`
+    validateName(name)
+    const key = find(this[MAP], name)
+    if (key === undefined)
+      return null
+
+    return this[MAP][key].join(', ')
+  }
+
+  forEach (callback, thisArg = undefined) {
+    let pairs = getHeaders(this)
+    for (let i = 0; i < pairs.length; i++) {
+      const [name, value] = pairs[i]
+      callback.call(thisArg, value, name, this)
+      // refresh in case the callback added more headers
+      pairs = getHeaders(this)
+    }
+  }
+
+  set (name, value) {
+    name = `${name}`
+    value = `${value}`
+    validateName(name)
+    validateValue(value)
+    const key = find(this[MAP], name)
+    this[MAP][key !== undefined ? key : name] = [value]
+  }
+
+  append (name, value) {
+    name = `${name}`
+    value = `${value}`
+    validateName(name)
+    validateValue(value)
+    const key = find(this[MAP], name)
+    if (key !== undefined)
+      this[MAP][key].push(value)
+    else
+      this[MAP][name] = [value]
+  }
+
+  has (name) {
+    name = `${name}`
+    validateName(name)
+    return find(this[MAP], name) !== undefined
+  }
+
+  delete (name) {
+    name = `${name}`
+    validateName(name)
+    const key = find(this[MAP], name)
+    if (key !== undefined)
+      delete this[MAP][key]
+  }
+
+  raw () {
+    return this[MAP]
+  }
+
+  keys () {
+    return new HeadersIterator(this, 'key')
+  }
+
+  values () {
+    return new HeadersIterator(this, 'value')
+  }
+
+  [Symbol.iterator]() {
+    return new HeadersIterator(this, 'key+value')
+  }
+
+  entries () {
+    return new HeadersIterator(this, 'key+value')
+  }
+
+  get [Symbol.toStringTag] () {
+    return 'Headers'
+  }
+
+  static exportNodeCompatibleHeaders (headers) {
+    const obj = Object.assign(Object.create(null), headers[MAP])
+
+    // http.request() only supports string as Host header. This hack makes
+    // specifying custom Host header possible.
+    const hostHeaderKey = find(headers[MAP], 'Host')
+    if (hostHeaderKey !== undefined)
+      obj[hostHeaderKey] = obj[hostHeaderKey][0]
+
+    return obj
+  }
+
+  static createHeadersLenient (obj) {
+    const headers = new Headers()
+    for (const name of Object.keys(obj)) {
+      if (invalidTokenRegex.test(name))
+        continue
+
+      if (Array.isArray(obj[name])) {
+        for (const val of obj[name]) {
+          if (invalidHeaderCharRegex.test(val))
+            continue
+
+          if (headers[MAP][name] === undefined)
+            headers[MAP][name] = [val]
+          else
+            headers[MAP][name].push(val)
+        }
+      } else if (!invalidHeaderCharRegex.test(obj[name]))
+        headers[MAP][name] = [obj[name]]
+    }
+    return headers
+  }
+}
+
+Object.defineProperties(Headers.prototype, {
+  get: { enumerable: true },
+  forEach: { enumerable: true },
+  set: { enumerable: true },
+  append: { enumerable: true },
+  has: { enumerable: true },
+  delete: { enumerable: true },
+  keys: { enumerable: true },
+  values: { enumerable: true },
+  entries: { enumerable: true },
+})
+
+const getHeaders = (headers, kind = 'key+value') =>
+  Object.keys(headers[MAP]).sort().map(
+    kind === 'key' ? k => k.toLowerCase()
+    : kind === 'value' ?
k => headers[MAP][k].join(', ') + : k => [k.toLowerCase(), headers[MAP][k].join(', ')] + ) + +const INTERNAL = Symbol('internal') + +class HeadersIterator { + constructor (target, kind) { + this[INTERNAL] = { + target, + kind, + index: 0, + } + } + + get [Symbol.toStringTag] () { + return 'HeadersIterator' + } + + next () { + /* istanbul ignore if: should be impossible */ + if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) + throw new TypeError('Value of `this` is not a HeadersIterator') + + const { target, kind, index } = this[INTERNAL] + const values = getHeaders(target, kind) + const len = values.length + if (index >= len) { + return { + value: undefined, + done: true, + } + } + + this[INTERNAL].index++ + + return { value: values[index], done: false } + } +} + +// manually extend because 'extends' requires a ctor +Object.setPrototypeOf(HeadersIterator.prototype, + Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) + +module.exports = Headers diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js new file mode 100644 index 0000000..2ffcba8 --- /dev/null +++ b/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js @@ -0,0 +1,341 @@ +'use strict' +const Url = require('url') +const http = require('http') +const https = require('https') +const zlib = require('minizlib') +const Minipass = require('minipass') + +const Body = require('./body.js') +const { writeToStream, getTotalBytes } = Body +const Response = require('./response.js') +const Headers = require('./headers.js') +const { createHeadersLenient } = Headers +const Request = require('./request.js') +const { getNodeRequestOptions } = Request +const FetchError = require('./fetch-error.js') +const AbortError = require('./abort-error.js') + +const resolveUrl = Url.resolve + +const fetch = (url, opts) => { + if (/^data:/.test(url)) { + const request = new Request(url, opts) + try { + const split = url.split(',') + const data = Buffer.from(split[1], 'base64') + const type = split[0].match(/^data:(.*);base64$/)[1] + return Promise.resolve(new Response(data, { + headers: { + 'Content-Type': type, + 'Content-Length': data.length, + } + })) + } catch (er) { + return Promise.reject(new FetchError(`[${request.method}] ${ + request.url} invalid URL, ${er.message}`, 'system', er)) + } + } + + return new Promise((resolve, reject) => { + // build request object + const request = new Request(url, opts) + let options + try { + options = getNodeRequestOptions(request) + } catch (er) { + return reject(er) + } + + const send = (options.protocol === 'https:' ? 
https : http).request + const { signal } = request + let response = null + const abort = () => { + const error = new AbortError('The user aborted a request.') + reject(error) + if (Minipass.isStream(request.body) && + typeof request.body.destroy === 'function') { + request.body.destroy(error) + } + if (response && response.body) { + response.body.emit('error', error) + } + } + + if (signal && signal.aborted) + return abort() + + const abortAndFinalize = () => { + abort() + finalize() + } + + const finalize = () => { + req.abort() + if (signal) + signal.removeEventListener('abort', abortAndFinalize) + clearTimeout(reqTimeout) + } + + // send request + const req = send(options) + + if (signal) + signal.addEventListener('abort', abortAndFinalize) + + let reqTimeout = null + if (request.timeout) { + req.once('socket', socket => { + reqTimeout = setTimeout(() => { + reject(new FetchError(`network timeout at: ${ + request.url}`, 'request-timeout')) + finalize() + }, request.timeout) + }) + } + + req.on('error', er => { + // if a 'response' event is emitted before the 'error' event, then by the + // time this handler is run it's too late to reject the Promise for the + // response. instead, we forward the error event to the response stream + // so that the error will surface to the user when they try to consume + // the body. this is done as a side effect of aborting the request except + // for in windows, where we must forward the event manually, otherwise + // there is no longer a ref'd socket attached to the request and the + // stream never ends so the event loop runs out of work and the process + // exits without warning. + // coverage skipped here due to the difficulty in testing + // istanbul ignore next + if (req.res) + req.res.emit('error', er) + reject(new FetchError(`request to ${request.url} failed, reason: ${ + er.message}`, 'system', er)) + finalize() + }) + + req.on('response', res => { + clearTimeout(reqTimeout) + + const headers = createHeadersLenient(res.headers) + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location') + + // HTTP fetch step 5.3 + const locationURL = location === null ? null + : resolveUrl(request.url, location) + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${ + request.url}`, 'no-redirect')) + finalize() + return + + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to + // use by setting the Location header value to the resolved URL. 
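+            // editorial note (not upstream text): in 'manual' mode the 3xx
+            // response is handed back to the caller as-is; rewriting Location
+            // to an absolute URL spares callers from resolving it against
+            // request.url themselves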
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL) + } catch (err) { + /* istanbul ignore next: nodejs server prevent invalid + response headers, we can't test this through normal + request */ + reject(err) + } + } + break + + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${ + request.url}`, 'max-redirect')) + finalize() + return + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && + request.body && + getTotalBytes(request) === null) { + reject(new FetchError( + 'Cannot follow redirect with body being a readable stream', + 'unsupported-redirect' + )) + finalize() + return + } + + // Update host due to redirection + request.headers.set('host', Url.parse(locationURL).host) + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || ( + (res.statusCode === 301 || res.statusCode === 302) && + request.method === 'POST' + )) { + requestOpts.method = 'GET' + requestOpts.body = undefined + requestOpts.headers.delete('content-length') + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))) + finalize() + return + } + } // end if(isRedirect) + + + // prepare response + res.once('end', () => + signal && signal.removeEventListener('abort', abortAndFinalize)) + + const body = new Minipass() + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) + res.on('data', (chunk) => body.write(chunk)) + res.on('end', () => body.end()) + + const responseOptions = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter, + trailer: new Promise(resolve => + res.on('end', () => resolve(createHeadersLenient(res.trailers)))) + } + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding') + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || + request.method === 'HEAD' || + codings === null || + res.statusCode === 204 || + res.statusCode === 304) { + response = new Response(body, responseOptions) + resolve(response) + return + } + + + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
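+      // editorial note (not upstream text): Z_SYNC_FLUSH as finishFlush makes
+      // zlib emit whatever it has decoded when input ends, instead of
+      // throwing on a truncated or slightly malformed compressed body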
+ const zlibOptions = { + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH, + } + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + const unzip = new zlib.Gunzip(zlibOptions) + response = new Response( + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), + responseOptions + ) + resolve(response) + return + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new Minipass()) + raw.once('data', chunk => { + // see http://stackoverflow.com/questions/37519828 + const decoder = (chunk[0] & 0x0F) === 0x08 + ? new zlib.Inflate() + : new zlib.InflateRaw() + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + }) + return + } + + + // for br + if (codings == 'br') { + // ignoring coverage so tests don't have to fake support (or lack of) for brotli + // istanbul ignore next + try { + var decoder = new zlib.BrotliDecompress() + } catch (err) { + reject(err) + finalize() + return + } + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + return + } + + // otherwise, use response as-is + response = new Response(body, responseOptions) + resolve(response) + }) + + writeToStream(req, request) + }) +} + +module.exports = fetch + +fetch.isRedirect = code => + code === 301 || + code === 302 || + code === 303 || + code === 307 || + code === 308 + +fetch.Headers = Headers +fetch.Request = Request +fetch.Response = Response +fetch.FetchError = FetchError diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js new file mode 100644 index 0000000..278b278 --- /dev/null +++ b/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js @@ -0,0 +1,263 @@ +'use strict' +const Url = require('url') +const Minipass = require('minipass') +const Headers = require('./headers.js') +const { exportNodeCompatibleHeaders } = Headers +const Body = require('./body.js') +const { clone, extractContentType, getTotalBytes } = Body + +const version = require('../package.json').version +const defaultUserAgent = + `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` + +const INTERNALS = Symbol('Request internals') + +const { parse: parseUrl, format: formatUrl } = Url + +const isRequest = input => + typeof input === 'object' && typeof input[INTERNALS] === 'object' + +const isAbortSignal = signal => { + const proto = ( + signal + && typeof signal === 'object' + && Object.getPrototypeOf(signal) + ) + return !!(proto && proto.constructor.name === 'AbortSignal') +} + +class Request extends Body { + constructor (input, init = {}) { + const parsedURL = isRequest(input) ? Url.parse(input.url) + : input && input.href ? 
Url.parse(input.href) + : Url.parse(`${input}`) + + if (isRequest(input)) + init = { ...input[INTERNALS], ...init } + else if (!input || typeof input === 'string') + input = {} + + const method = (init.method || input.method || 'GET').toUpperCase() + const isGETHEAD = method === 'GET' || method === 'HEAD' + + if ((init.body !== null && init.body !== undefined || + isRequest(input) && input.body !== null) && isGETHEAD) + throw new TypeError('Request with GET/HEAD method cannot have body') + + const inputBody = init.body !== null && init.body !== undefined ? init.body + : isRequest(input) && input.body !== null ? clone(input) + : null + + super(inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0, + }) + + const headers = new Headers(init.headers || input.headers || {}) + + if (inputBody !== null && inputBody !== undefined && + !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody) + if (contentType) + headers.append('Content-Type', contentType) + } + + const signal = 'signal' in init ? init.signal + : null + + if (signal !== null && signal !== undefined && !isAbortSignal(signal)) + throw new TypeError('Expected signal must be an instanceof AbortSignal') + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = init + + this[INTERNALS] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow + : input.follow !== undefined ? input.follow + : 20 + this.compress = init.compress !== undefined ? init.compress + : input.compress !== undefined ? 
input.compress + : true + this.counter = init.counter || input.counter || 0 + this.agent = init.agent || input.agent + } + + get method() { + return this[INTERNALS].method + } + + get url() { + return formatUrl(this[INTERNALS].parsedURL) + } + + get headers() { + return this[INTERNALS].headers + } + + get redirect() { + return this[INTERNALS].redirect + } + + get signal() { + return this[INTERNALS].signal + } + + clone () { + return new Request(this) + } + + get [Symbol.toStringTag] () { + return 'Request' + } + + static getNodeRequestOptions (request) { + const parsedURL = request[INTERNALS].parsedURL + const headers = new Headers(request[INTERNALS].headers) + + // fetch step 1.3 + if (!headers.has('Accept')) + headers.set('Accept', '*/*') + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) + throw new TypeError('Only absolute URLs are supported') + + if (!/^https?:$/.test(parsedURL.protocol)) + throw new TypeError('Only HTTP(S) protocols are supported') + + if (request.signal && + Minipass.isStream(request.body) && + typeof request.body.destroy !== 'function') { + throw new Error( + 'Cancellation of streamed requests with AbortSignal is not supported') + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + const contentLengthValue = + (request.body === null || request.body === undefined) && + /^(POST|PUT)$/i.test(request.method) ? '0' + : request.body !== null && request.body !== undefined + ? getTotalBytes(request) + : null + + if (contentLengthValue) + headers.set('Content-Length', contentLengthValue + '') + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) + headers.set('User-Agent', defaultUserAgent) + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) + headers.set('Accept-Encoding', 'gzip,deflate') + + const agent = typeof request.agent === 'function' + ? 
request.agent(parsedURL) + : request.agent + + if (!headers.has('Connection') && !agent) + headers.set('Connection', 'close') + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = request[INTERNALS] + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return { + ...parsedURL, + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } + } +} + +module.exports = Request + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true }, +}) diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js b/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js new file mode 100644 index 0000000..854f789 --- /dev/null +++ b/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js @@ -0,0 +1,89 @@ +'use strict' +const http = require('http') +const { STATUS_CODES } = http + +const Headers = require('./headers.js') +const Body = require('./body.js') +const { clone, extractContentType } = Body + +const INTERNALS = Symbol('Response internals') + +class Response extends Body { + constructor (body = null, opts = {}) { + super(body, opts) + + const status = opts.status || 200 + const headers = new Headers(opts.headers) + + if (body !== null && body !== undefined && !headers.has('Content-Type')) { + const contentType = extractContentType(body) + if (contentType) + headers.append('Content-Type', contentType) + } + + this[INTERNALS] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter, + trailer: Promise.resolve(opts.trailer || new Headers()), + } + } + + get trailer () { + return this[INTERNALS].trailer + } + + get url () { + return this[INTERNALS].url || '' + } + + get status () { + return this[INTERNALS].status + } + + get ok () { + return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 + } + + get redirected () { + return this[INTERNALS].counter > 0 + } + + get statusText () { + return this[INTERNALS].statusText + } + + get headers () { + return this[INTERNALS].headers + } + + clone () { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected, + trailer: this.trailer, + }) + } + + get [Symbol.toStringTag] () { + return 'Response' + } +} + +module.exports = Response + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true }, +}) diff --git a/node_modules/node-gyp/node_modules/minipass-fetch/package.json b/node_modules/node-gyp/node_modules/minipass-fetch/package.json new file mode 100644 index 0000000..1d7d9e4 --- /dev/null +++ 
b/node_modules/node-gyp/node_modules/minipass-fetch/package.json @@ -0,0 +1,53 @@ +{ + "name": "minipass-fetch", + "version": "1.4.1", + "description": "An implementation of window.fetch in Node.js using Minipass streams", + "license": "MIT", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags" + }, + "tap": { + "coverage-map": "map.js", + "check-coverage": true + }, + "devDependencies": { + "@ungap/url-search-params": "^0.1.2", + "abort-controller": "^3.0.0", + "abortcontroller-polyfill": "~1.3.0", + "form-data": "^2.5.1", + "parted": "^0.1.1", + "string-to-arraybuffer": "^1.0.2", + "tap": "^15.0.9", + "whatwg-url": "^7.0.0" + }, + "dependencies": { + "minipass": "^3.1.0", + "minipass-sized": "^1.0.3", + "minizlib": "^2.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.12" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/minipass-fetch.git" + }, + "keywords": [ + "fetch", + "minipass", + "node-fetch", + "window.fetch" + ], + "files": [ + "index.js", + "lib/*.js" + ], + "engines": { + "node": ">=8" + } +} diff --git a/node_modules/node-gyp/node_modules/socks-proxy-agent/README.md b/node_modules/node-gyp/node_modules/socks-proxy-agent/README.md new file mode 100644 index 0000000..2c44170 --- /dev/null +++ b/node_modules/node-gyp/node_modules/socks-proxy-agent/README.md @@ -0,0 +1,152 @@ +socks-proxy-agent +================ +### A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS +[![Build Status](https://github.com/TooTallNate/node-socks-proxy-agent/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-socks-proxy-agent/actions?workflow=Node+CI) + +This module provides an `http.Agent` implementation that connects to a +specified SOCKS proxy server, and can be used with the built-in `http` +and `https` modules. + +It can also be used in conjunction with the `ws` module to establish a WebSocket +connection over a SOCKS proxy. See the "Examples" section below. 
+ +Installation +------------ + +Install with `npm`: + +``` bash +npm install socks-proxy-agent +``` + + +Examples +-------- + +#### TypeScript example + +```ts +import https from 'https'; +import { SocksProxyAgent } from 'socks-proxy-agent'; + +const info = { + hostname: 'br41.nordvpn.com', + userId: 'your-name@gmail.com', + password: 'abcdef12345124' +}; +const agent = new SocksProxyAgent(info); + +https.get('https://ipinfo.io', { agent }, (res) => { + console.log(res.headers); + res.pipe(process.stdout); +}); +``` + +#### `http` module example + +```js +var url = require('url'); +var http = require('http'); +var { SocksProxyAgent } = require('socks-proxy-agent'); + +// SOCKS proxy to connect to +var proxy = process.env.socks_proxy || 'socks://127.0.0.1:1080'; +console.log('using proxy server %j', proxy); + +// HTTP endpoint for the proxy to connect to +var endpoint = process.argv[2] || 'http://nodejs.org/api/'; +console.log('attempting to GET %j', endpoint); +var opts = url.parse(endpoint); + +// create an instance of the `SocksProxyAgent` class with the proxy server information +var agent = new SocksProxyAgent(proxy); +opts.agent = agent; + +http.get(opts, function (res) { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + +#### `https` module example + +```js +var url = require('url'); +var https = require('https'); +var { SocksProxyAgent } = require('socks-proxy-agent'); + +// SOCKS proxy to connect to +var proxy = process.env.socks_proxy || 'socks://127.0.0.1:1080'; +console.log('using proxy server %j', proxy); + +// HTTP endpoint for the proxy to connect to +var endpoint = process.argv[2] || 'https://encrypted.google.com/'; +console.log('attempting to GET %j', endpoint); +var opts = url.parse(endpoint); + +// create an instance of the `SocksProxyAgent` class with the proxy server information +var agent = new SocksProxyAgent(proxy); +opts.agent = agent; + +https.get(opts, function (res) { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + +#### `ws` WebSocket connection example + +``` js +var WebSocket = require('ws'); +var { SocksProxyAgent } = require('socks-proxy-agent'); + +// SOCKS proxy to connect to +var proxy = process.env.socks_proxy || 'socks://127.0.0.1:1080'; +console.log('using proxy server %j', proxy); + +// WebSocket endpoint for the proxy to connect to +var endpoint = process.argv[2] || 'ws://echo.websocket.org'; +console.log('attempting to connect to WebSocket %j', endpoint); + +// create an instance of the `SocksProxyAgent` class with the proxy server information +var agent = new SocksProxyAgent(proxy); + +// initiate the WebSocket connection +var socket = new WebSocket(endpoint, { agent: agent }); + +socket.on('open', function () { + console.log('"open" event!'); + socket.send('hello world'); +}); + +socket.on('message', function (data, flags) { + console.log('"message" event! 
 %j %j', data, flags); + socket.close(); +}); +``` + +License +------- + +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.d.ts b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.d.ts new file mode 100644 index 0000000..20dc42f --- /dev/null +++ b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.d.ts @@ -0,0 +1,38 @@ +/// <reference types="node" /> +import { SocksProxy } from 'socks'; +import { Agent, ClientRequest, RequestOptions } from 'agent-base'; +import { AgentOptions } from 'agent-base'; +import { Url } from 'url'; +import net from 'net'; +import tls from 'tls'; +interface BaseSocksProxyAgentOptions { + /** + * hostname is preferred over host + * + * @deprecated + */ + host?: string | null; + port?: string | number | null; + username?: string | null; + tls?: tls.ConnectionOptions | null; +} +interface SocksProxyAgentOptionsExtra { + timeout?: number; +} +export interface SocksProxyAgentOptions extends AgentOptions, BaseSocksProxyAgentOptions, Partial<Omit<Url & SocksProxy, 'host' | 'port'>> { +} +export declare class SocksProxyAgent extends Agent { + private readonly shouldLookup; + private readonly proxy; + private readonly tlsConnectionOptions; + timeout: number | null; + constructor(input: string | SocksProxyAgentOptions, options?: SocksProxyAgentOptionsExtra); + /** + * Initiates a SOCKS connection to the specified SOCKS proxy server, + * which in turn connects to the specified remote host and port. + * + * @api protected + */ + callback(req: ClientRequest, opts: RequestOptions): Promise<net.Socket | tls.TLSSocket>; +} +export {}; diff --git a/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js new file mode 100644 index 0000000..f615034 --- /dev/null +++ b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js @@ -0,0 +1,197 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ?
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SocksProxyAgent = void 0; +const socks_1 = require("socks"); +const agent_base_1 = require("agent-base"); +const debug_1 = __importDefault(require("debug")); +const dns_1 = __importDefault(require("dns")); +const tls_1 = __importDefault(require("tls")); +const debug = (0, debug_1.default)('socks-proxy-agent'); +function parseSocksProxy(opts) { + var _a; + let port = 0; + let lookup = false; + let type = 5; + const host = opts.hostname || opts.host; + if (host == null) { + throw new TypeError('No "hostname"'); + } + if (typeof opts.port === 'number') { + port = opts.port; + } + else if (typeof opts.port === 'string') { + port = parseInt(opts.port, 10); + } + // From RFC 1928, Section 3: https://tools.ietf.org/html/rfc1928#section-3 + // "The SOCKS service is conventionally located on TCP port 1080" + if (port == null) { + port = 1080; + } + // figure out if we want socks v4 or v5, based on the "protocol" used. + // Defaults to 5. + if (opts.protocol != null) { + switch (opts.protocol.replace(':', '')) { + case 'socks4': + lookup = true; + // pass through + case 'socks4a': + type = 4; + break; + case 'socks5': + lookup = true; + // pass through + case 'socks': // no version specified, default to 5h + case 'socks5h': + type = 5; + break; + default: + throw new TypeError(`A "socks" protocol must be specified! Got: ${String(opts.protocol)}`); + } + } + if (typeof opts.type !== 'undefined') { + if (opts.type === 4 || opts.type === 5) { + type = opts.type; + } + else { + throw new TypeError(`"type" must be 4 or 5, got: ${String(opts.type)}`); + } + } + const proxy = { + host, + port, + type + }; + let userId = (_a = opts.userId) !== null && _a !== void 0 ? _a : opts.username; + let password = opts.password; + if (opts.auth != null) { + const auth = opts.auth.split(':'); + userId = auth[0]; + password = auth[1]; + } + if (userId != null) { + Object.defineProperty(proxy, 'userId', { + value: userId, + enumerable: false + }); + } + if (password != null) { + Object.defineProperty(proxy, 'password', { + value: password, + enumerable: false + }); + } + return { lookup, proxy }; +} +const normalizeProxyOptions = (input) => { + let proxyOptions; + if (typeof input === 'string') { + proxyOptions = new URL(input); + } + else { + proxyOptions = input; + } + if (proxyOptions == null) { + throw new TypeError('a SOCKS proxy server `hostname` and `port` must be specified!'); + } + return proxyOptions; +}; +class SocksProxyAgent extends agent_base_1.Agent { + constructor(input, options) { + var _a; + const proxyOptions = normalizeProxyOptions(input); + super(proxyOptions); + const parsedProxy = parseSocksProxy(proxyOptions); + this.shouldLookup = parsedProxy.lookup; + this.proxy = parsedProxy.proxy; + this.tlsConnectionOptions = proxyOptions.tls != null ? proxyOptions.tls : {}; + this.timeout = (_a = options === null || options === void 0 ? void 0 : options.timeout) !== null && _a !== void 0 ? _a : null; + } + /** + * Initiates a SOCKS connection to the specified SOCKS proxy server, + * which in turn connects to the specified remote host and port. 
+ * + * @api protected + */ + callback(req, opts) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const { shouldLookup, proxy, timeout } = this; + let { host, port, lookup: lookupCallback } = opts; + if (host == null) { + throw new Error('No `host` defined!'); + } + if (shouldLookup) { + // Client-side DNS resolution for "4" and "5" socks proxy versions. + host = yield new Promise((resolve, reject) => { + // Use the request's custom lookup, if one was configured: + const lookupFn = lookupCallback !== null && lookupCallback !== void 0 ? lookupCallback : dns_1.default.lookup; + lookupFn(host, {}, (err, res) => { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + }); + } + const socksOpts = { + proxy, + destination: { host, port }, + command: 'connect', + timeout: timeout !== null && timeout !== void 0 ? timeout : undefined + }; + const cleanup = (tlsSocket) => { + req.destroy(); + socket.destroy(); + if (tlsSocket) + tlsSocket.destroy(); + }; + debug('Creating socks proxy connection: %o', socksOpts); + const { socket } = yield socks_1.SocksClient.createConnection(socksOpts); + debug('Successfully created socks proxy connection'); + if (timeout !== null) { + socket.setTimeout(timeout); + socket.on('timeout', () => cleanup()); + } + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = (_a = opts.servername) !== null && _a !== void 0 ? _a : opts.host; + const tlsSocket = tls_1.default.connect(Object.assign(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername }), this.tlsConnectionOptions)); + tlsSocket.once('error', (error) => { + debug('socket TLS error', error.message); + cleanup(tlsSocket); + }); + return tlsSocket; + } + return socket; + }); + } +} +exports.SocksProxyAgent = SocksProxyAgent; +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js.map b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js.map new file mode 100644 index 0000000..96d8fab --- /dev/null +++ b/node_modules/node-gyp/node_modules/socks-proxy-agent/dist/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,iCAAmE;AACnE,2CAAiE;AAEjE,kDAA+B;AAE/B,8CAAqB;AAErB,8CAAqB;AAkBrB,MAAM,KAAK,GAAG,IAAA,eAAW,EAAC,mBAAmB,CAAC,CAAA;AAE9C,SAAS,eAAe,CAAE,IAA4B;;IACpD,IAAI,IAAI,GAAG,CAAC,CAAA;IACZ,IAAI,MAAM,GAAG,KAAK,CAAA;IAClB,IAAI,IAAI,GAAuB,CAAC,CAAA;IAEhC,MAAM,IAAI,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,CAAA;IAEvC,IAAI,IAAI,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CAAC,eAAe,CAAC,CAAA;KACrC;IAED,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;QACjC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;KACjB;SAAM,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;QACxC,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,CAAC,CAAA;KAC/B;IAED,0EAA0E;IAC1E,iEAAiE;IACjE,IAAI,IAAI,IAAI,IAAI,EAAE;QAChB,IAAI,GAAG,IAAI,CAAA;KACZ;IAED,sEAAsE;IACtE,iBAAiB;IACjB,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,EAAE;QACzB,QAAQ,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE;YACtC,KAAK,QAAQ;gBACX,MAAM,GAAG,IAAI,CAAA;YACf,eAAe;YACf,KAAK,SAAS;gBACZ,IAAI,GAAG,CAAC,CAAA;gBACR,MAAK;YACP,KAAK,QAAQ;gBACX,MAAM,GAAG,IAAI,CAAA;YACf,eAAe;YACf,KAAK,OAAO,CAAC,CAAC,sCAAsC;YACpD,KAAK,SAAS;gBACZ,IAAI,GAAG,CAAC,CAAA;gBACR,MAAK;YACP;gBACE,MAAM,IAAI,SAAS,CAAC,8CAA8C,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAA;SAC7F;KACF;IAED,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,WAAW,EAAE;QACpC,IAAI,IAAI,CAAC,IAAI,KAAK,CAAC,IAAI,IAAI,CAAC,IAAI,KAAK,CAAC,EAAE;YACtC,IAAI,GAAG,IAAI,CAAC,IAAI,CAAA;SACjB;aAAM;YACL,MAAM,IAAI,SAAS,CAAC,+BAA+B,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;SACxE;KACF;IAED,MAAM,KAAK,GAAe;QACxB,IAAI;QACJ,IAAI;QACJ,IAAI;KACL,CAAA;IAED,IAAI,MAAM,GAAG,MAAA,IAAI,CAAC,MAAM,mCAAI,IAAI,CAAC,QAAQ,CAAA;IACzC,IAAI,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAA;IAC5B,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;QACrB,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QACjC,MAAM,GAAG,IAAI,CAAC,CAAC,CAAC,CAAA;QAChB,QAAQ,GAAG,IAAI,CAAC,CAAC,CAAC,CAAA;KACnB;IACD,IAAI,MAAM,IAAI,IAAI,EAAE;QAClB,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,QAAQ,EAAE;YACrC,KAAK,EAAE,MAAM;YACb,UAAU,EAAE,KAAK;SAClB,CAAC,CAAA;KACH;IACD,IAAI,QAAQ,IAAI,IAAI,EAAE;QACpB,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,UAAU,EAAE;YACvC,KAAK,EAAE,QAAQ;YACf,UAAU,EAAE,KAAK;SAClB,CAAC,CAAA;KACH;IAED,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,CAAA;AAC1B,CAAC;AAED,MAAM,qBAAqB,GAAG,CAAC,KAAsC,EAA0B,EAAE;IAC/F,IAAI,YAAoC,CAAA;IACxC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,YAAY,GAAG,IAAI,GAAG,CAAC,KAAK,CAAC,CAAA;KAC9B;SAAM;QACL,YAAY,GAAG,KAAK,CAAA;KACrB;IACD,IAAI,YAAY,IAAI,IAAI,EAAE;QACxB,MAAM,IAAI,SAAS,CAAC,+DAA+D,CAAC,CAAA;KACrF;IAED,OAAO,YAAY,CAAA;AACrB,CAAC,CAAA;AAID,MAAa,eAAgB,SAAQ,kBAAK;IAMxC,YAAa,KAAsC,EAAE,OAAqC;;QACxF,MAAM,YAAY,GAAG,qBAAqB,CAAC,KAAK,CAAC,CAAA;QACjD,KAAK,CAAC,YAAY,CAAC,CAAA;QAEnB,MAAM,WAAW,GAAG,eAAe,CAAC,YAAY,CAAC,CAAA;QAEjD,IAAI,CAAC,YAAY,GAAG,WAAW,CAAC,MAAM,CAAA;QACtC,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,KAAK,CAAA;QAC9B,IAAI,CAAC,oBAAoB,GAAG,YAAY,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAA;QAC5E,IAAI,CAAC,OAAO,GAAG,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,mCAAI,IAAI,CAAA;IACzC,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CAAE,GAAkB,EAAE,IAAoB;;;YACtD,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,OAAO,EAAE,GAAG,IAAI,CAAA;YAE7C,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,EAAE,cAAc,EAAE,GAAG,IAAI,CAAA;YAEjD,IAAI,IAAI,IAAI,IAAI,EAAE;gBAChB,MAAM,IAAI,KAAK,CAAC,oBAAoB,CAAC,CAAA;aACtC;YAED,IAAI,YAAY,EAAE;gBAChB,mEAAmE;gBACnE,IAAI,GAAG,MAAM,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;oBACnD,0DAA0D;oBAC1D,MAAM,QAAQ,GAAG,cAAc,aAAd,cAAc,cAAd,cAAc,GAAI,aAAG,CAAC,MAAM,CAAA;oBAC7C,QAAQ,CAAC,IAAK,EAAE,EAAE,EAAE,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;wBAC/B,IAAI,GAAG,EAAE;4BACP,MAAM,CAAC,GAAG,CAAC,CAAA;yBACZ;6BAAM;4BACL,OA
AO,CAAC,GAAG,CAAC,CAAA;yBACb;oBACH,CAAC,CAAC,CAAA;gBACJ,CAAC,CAAC,CAAA;aACH;YAED,MAAM,SAAS,GAAuB;gBACpC,KAAK;gBACL,WAAW,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE;gBAC3B,OAAO,EAAE,SAAS;gBAClB,OAAO,EAAE,OAAO,aAAP,OAAO,cAAP,OAAO,GAAI,SAAS;aAC9B,CAAA;YAED,MAAM,OAAO,GAAG,CAAC,SAAyB,EAAE,EAAE;gBAC5C,GAAG,CAAC,OAAO,EAAE,CAAA;gBACb,MAAM,CAAC,OAAO,EAAE,CAAA;gBAChB,IAAI,SAAS;oBAAE,SAAS,CAAC,OAAO,EAAE,CAAA;YACpC,CAAC,CAAA;YAED,KAAK,CAAC,qCAAqC,EAAE,SAAS,CAAC,CAAA;YACvD,MAAM,EAAE,MAAM,EAAE,GAAG,MAAM,mBAAW,CAAC,gBAAgB,CAAC,SAAS,CAAC,CAAA;YAChE,KAAK,CAAC,6CAA6C,CAAC,CAAA;YAEpD,IAAI,OAAO,KAAK,IAAI,EAAE;gBACpB,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;gBAC1B,MAAM,CAAC,EAAE,CAAC,SAAS,EAAE,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAA;aACtC;YAED,IAAI,IAAI,CAAC,cAAc,EAAE;gBACvB,sDAAsD;gBACtD,8CAA8C;gBAC9C,KAAK,CAAC,oCAAoC,CAAC,CAAA;gBAC3C,MAAM,UAAU,GAAG,MAAA,IAAI,CAAC,UAAU,mCAAI,IAAI,CAAC,IAAI,CAAA;gBAE/C,MAAM,SAAS,GAAG,aAAG,CAAC,OAAO,+CACxB,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,CAAC,KACjD,MAAM;oBACN,UAAU,KACP,IAAI,CAAC,oBAAoB,EAC5B,CAAA;gBAEF,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE;oBAChC,KAAK,CAAC,kBAAkB,EAAE,KAAK,CAAC,OAAO,CAAC,CAAA;oBACxC,OAAO,CAAC,SAAS,CAAC,CAAA;gBACpB,CAAC,CAAC,CAAA;gBAEF,OAAO,SAAS,CAAA;aACjB;YAED,OAAO,MAAM,CAAA;;KACd;CACF;AA7FD,0CA6FC;AAED,SAAS,IAAI,CACX,GAAM,EACN,GAAG,IAAO;IAIV,MAAM,GAAG,GAAG,EAAgD,CAAA;IAC5D,IAAI,GAAqB,CAAA;IACzB,KAAK,GAAG,IAAI,GAAG,EAAE;QACf,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YACvB,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAA;SACpB;KACF;IACD,OAAO,GAAG,CAAA;AACZ,CAAC"} \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/socks-proxy-agent/package.json b/node_modules/node-gyp/node_modules/socks-proxy-agent/package.json new file mode 100644 index 0000000..e9b655a --- /dev/null +++ b/node_modules/node-gyp/node_modules/socks-proxy-agent/package.json @@ -0,0 +1,181 @@ +{ + "name": "socks-proxy-agent", + "description": "A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS", + "homepage": "https://github.com/TooTallNate/node-socks-proxy-agent#readme", + "version": "6.2.1", + "main": "dist/index.js", + "author": { + "email": "nathan@tootallnate.net", + "name": "Nathan Rajlich", + "url": "http://n8.io/" + }, + "contributors": [ + { + "name": "Kiko Beats", + "email": "josefrancisco.verdu@gmail.com" + }, + { + "name": "Josh Glazebrook", + "email": "josh@joshglazebrook.com" + }, + { + "name": "talmobi", + "email": "talmobi@users.noreply.github.com" + }, + { + "name": "Indospace.io", + "email": "justin@indospace.io" + }, + { + "name": "Kilian von Pflugk", + "email": "github@jumoog.io" + }, + { + "name": "Kyle", + "email": "admin@hk1229.cn" + }, + { + "name": "Matheus Fernandes", + "email": "matheus.frndes@gmail.com" + }, + { + "name": "Ricky Miller", + "email": "richardkazuomiller@gmail.com" + }, + { + "name": "Shantanu Sharma", + "email": "shantanu34@outlook.com" + }, + { + "name": "Tim Perry", + "email": "pimterry@gmail.com" + }, + { + "name": "Vadim Baryshev", + "email": "vadimbaryshev@gmail.com" + }, + { + "name": "jigu", + "email": "luo1257857309@gmail.com" + }, + { + "name": "Alba Mendez", + "email": "me@jmendeth.com" + }, + { + "name": "Дмитрий Гуденков", + "email": "Dimangud@rambler.ru" + }, + { + "name": "Andrei Bitca", + "email": "63638922+andrei-bitca-dc@users.noreply.github.com" + }, + { + "name": "Andrew Casey", + "email": "amcasey@users.noreply.github.com" + }, + { + "name": "Brandon Ros", + "email": "brandonros1@gmail.com" + }, + { + "name": "Dang Duy Thanh", + "email": "thanhdd.it@gmail.com" + }, + { + "name": "Dimitar 
Nestorov", + "email": "8790386+dimitarnestorov@users.noreply.github.com" + } + ], + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-socks-proxy-agent.git" + }, + "bugs": { + "url": "https://github.com/TooTallNate/node-socks-proxy-agent/issues" + }, + "keywords": [ + "agent", + "http", + "https", + "proxy", + "socks", + "socks4", + "socks4a", + "socks5", + "socks5h" + ], + "dependencies": { + "agent-base": "^6.0.2", + "debug": "^4.3.3", + "socks": "^2.6.2" + }, + "devDependencies": { + "@commitlint/cli": "latest", + "@commitlint/config-conventional": "latest", + "@types/debug": "latest", + "@types/node": "latest", + "cacheable-lookup": "^6.0.4", + "conventional-github-releaser": "latest", + "dns2": "latest", + "finepack": "latest", + "git-authors-cli": "latest", + "mocha": "9", + "nano-staged": "latest", + "npm-check-updates": "latest", + "prettier-standard": "latest", + "raw-body": "latest", + "rimraf": "latest", + "simple-git-hooks": "latest", + "socksv5": "github:TooTallNate/socksv5#fix/dstSock-close-event", + "standard": "latest", + "standard-markdown": "latest", + "standard-version": "latest", + "ts-standard": "latest", + "typescript": "latest" + }, + "engines": { + "node": ">= 10" + }, + "files": [ + "dist" + ], + "scripts": { + "build": "tsc", + "clean": "rimraf node_modules", + "contributors": "(git-authors-cli && finepack && git add package.json && git commit -m 'build: contributors' --no-verify) || true", + "lint": "ts-standard", + "postrelease": "npm run release:tags && npm run release:github && (ci-publish || npm publish --access=public)", + "prebuild": "rimraf dist", + "prepublishOnly": "npm run build", + "prerelease": "npm run update:check && npm run contributors", + "release": "standard-version -a", + "release:github": "conventional-github-releaser -p angular", + "release:tags": "git push --follow-tags origin HEAD:master", + "test": "mocha --reporter spec", + "update": "ncu -u", + "update:check": "ncu -- --error-level 2" + }, + "license": "MIT", + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "nano-staged": { + "*.js": [ + "prettier-standard" + ], + "*.md": [ + "standard-markdown" + ], + "package.json": [ + "finepack" + ] + }, + "simple-git-hooks": { + "commit-msg": "npx commitlint --edit", + "pre-commit": "npx nano-staged" + }, + "typings": "dist/index.d.ts" +} diff --git a/node_modules/node-gyp/node_modules/unique-filename/.nyc_output/54942.json b/node_modules/node-gyp/node_modules/unique-filename/.nyc_output/54942.json new file mode 100644 index 0000000..9e26dfe --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/.nyc_output/54942.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/unique-filename/.nyc_output/54944.json b/node_modules/node-gyp/node_modules/unique-filename/.nyc_output/54944.json new file mode 100644 index 0000000..3ce72e5 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/.nyc_output/54944.json @@ -0,0 +1 @@ 
+{"./index.js":{"path":"./index.js","s":{"1":1,"2":1,"3":1,"4":6},"b":{"1":[4,2]},"f":{"1":6},"fnMap":{"1":{"name":"(anonymous_1)","line":6,"loc":{"start":{"line":6,"column":17},"end":{"line":6,"column":51}}}},"statementMap":{"1":{"start":{"line":2,"column":0},"end":{"line":2,"column":26}},"2":{"start":{"line":4,"column":0},"end":{"line":4,"column":39}},"3":{"start":{"line":6,"column":0},"end":{"line":8,"column":1}},"4":{"start":{"line":7,"column":2},"end":{"line":7,"column":77}}},"branchMap":{"1":{"line":7,"type":"cond-expr","locations":[{"start":{"line":7,"column":39},"end":{"line":7,"column":51}},{"start":{"line":7,"column":54},"end":{"line":7,"column":56}}]}}}} \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/unique-filename/LICENSE b/node_modules/node-gyp/node_modules/unique-filename/LICENSE new file mode 100644 index 0000000..69619c1 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/LICENSE @@ -0,0 +1,5 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/unique-filename/README.md b/node_modules/node-gyp/node_modules/unique-filename/README.md new file mode 100644 index 0000000..74b62b2 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/README.md @@ -0,0 +1,33 @@ +unique-filename +=============== + +Generate a unique filename for use in temporary directories or caches. + +``` +var uniqueFilename = require('unique-filename') + +// returns something like: /tmp/912ec803b2ce49e4a541068d495ab570 +var randomTmpfile = uniqueFilename(os.tmpdir()) + +// returns something like: /tmp/my-test-912ec803b2ce49e4a541068d495ab570 +var randomPrefixedTmpfile = uniqueFilename(os.tmpdir(), 'my-test') + +var uniqueTmpfile = uniqueFilename('/tmp', 'testing', '/my/thing/to/uniq/on') +``` + +### uniqueFilename(*dir*, *fileprefix*, *uniqstr*) → String + +Returns the full path of a unique filename that looks like: +`dir/prefix-7ddd44c0` +or `dir/7ddd44c0` + +*dir* – The path you want the filename in. `os.tmpdir()` is a good choice for this. + +*fileprefix* – A string to append prior to the unique part of the filename. +The parameter is required if *uniqstr* is also passed in but is otherwise +optional and can be `undefined`/`null`/`''`. If present and not empty +then this string plus a hyphen are prepended to the unique part. + +*uniqstr* – Optional, if not passed the unique part of the resulting +filename will be random. If passed in it will be generated from this string +in a reproducable way. 
diff --git a/node_modules/node-gyp/node_modules/unique-filename/coverage/__root__/index.html b/node_modules/node-gyp/node_modules/unique-filename/coverage/__root__/index.html new file mode 100644 index 0000000..cd55391 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/coverage/__root__/index.html @@ -0,0 +1,73 @@ [istanbul HTML coverage report for __root__/; markup lost in extraction. Recoverable summary: Statements 100% (4 / 4), Branches 100% (2 / 2), Functions 100% (1 / 1), Lines 100% (4 / 4); the files table lists index.js at 100% for all four metrics]
diff --git a/node_modules/node-gyp/node_modules/unique-filename/coverage/__root__/index.js.html b/node_modules/node-gyp/node_modules/unique-filename/coverage/__root__/index.js.html new file mode 100644 index 0000000..02e5768 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/coverage/__root__/index.js.html @@ -0,0 +1,69 @@ [istanbul HTML coverage report for index.js; markup lost in extraction. Recoverable summary: Statements 100% (4 / 4), Branches 100% (2 / 2), Functions 100% (1 / 1), Lines 100% (4 / 4), plus the annotated index.js source listing with all lines covered (line 7 executed 6 times)]
diff --git a/node_modules/node-gyp/node_modules/unique-filename/coverage/base.css b/node_modules/node-gyp/node_modules/unique-filename/coverage/base.css new file mode 100644 index 0000000..a6a2f32 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/coverage/base.css @@ -0,0 +1,182 @@ +body, html { + margin:0; padding: 0; +} +body { + font-family: Helvetica Neue, Helvetica,Arial; + font-size: 10pt; +} +div.header, div.footer { + background: #eee; + padding: 1em; +} +div.header { + z-index: 100; + position: fixed; + top: 0; + border-bottom: 1px solid #666; + width: 100%; +} +div.footer { + border-top: 1px solid #666; +} +div.body { + margin-top: 10em; +} +div.meta { + font-size: 90%; + text-align: center; +} +h1, h2, h3 { + font-weight: normal; +} +h1 { + font-size: 12pt; +} +h2 { + font-size: 10pt; +} +pre { + font-family: Consolas, Menlo, Monaco, monospace; + margin: 0; + padding: 0; + line-height: 1.3; + font-size: 14px; + -moz-tab-size: 2; + -o-tab-size: 2; + tab-size: 2; +} + +div.path { font-size: 110%; } +div.path a:link, div.path a:visited { color: #000; } +table.coverage { border-collapse: collapse; margin:0; padding: 0 } + +table.coverage td { + margin: 0; + padding: 0; + color: #111; + vertical-align: top; +} +table.coverage td.line-count { + width: 50px; + text-align: right; + padding-right: 5px; +} +table.coverage td.line-coverage { + color: #777 !important; + text-align: right; + border-left: 1px solid #666; + border-right: 1px solid #666; +} + +table.coverage td.text { +} + +table.coverage td span.cline-any { + display: inline-block; + padding: 0 5px; + width: 40px; +} +table.coverage td span.cline-neutral { + background: #eee; +} +table.coverage td span.cline-yes { + background: #b5d592; + color: #999; +} +table.coverage td span.cline-no { + background: #fc8c84; +} + +.cstat-yes { color: #111; } +.cstat-no { background: #fc8c84; color: #111; } +.fstat-no { background: #ffc520; color: #111 !important; } +.cbranch-no { background: yellow !important; color: #111; } + +.cstat-skip { background: #ddd; color: #111; } +.fstat-skip { background: #ddd; color: #111 !important; } +.cbranch-skip { background: #ddd !important; color: #111; } + +.missing-if-branch { + display: inline-block; + margin-right: 10px; + position: relative; + padding: 0 4px; + background: black; + color: yellow; +} + +.skip-if-branch { + display: none; + margin-right: 10px; + position: relative; + padding: 0 4px; + background: #ccc; + color: white; +} + +.missing-if-branch .typ, .skip-if-branch .typ { + color: inherit !important; +} + +.entity, .metric { font-weight: bold; } +.metric { display: inline-block; border: 1px solid #333; padding: 0.3em; background: white; } +.metric small { font-size: 80%; font-weight: normal; color: #666; } + +div.coverage-summary table { border-collapse: collapse; margin: 3em; font-size: 110%; } +div.coverage-summary td, div.coverage-summary table th { margin: 0; padding: 0.25em 1em; border-top: 1px solid #666; border-bottom: 1px solid #666; } +div.coverage-summary th { text-align: left; border: 1px solid #666; background: #eee; font-weight: normal; } +div.coverage-summary th.file { border-right: none !important; } +div.coverage-summary th.pic { border-left: none !important; text-align: right; } +div.coverage-summary th.pct { border-right: none !important; } +div.coverage-summary th.abs { border-left: none !important; text-align: right; } +div.coverage-summary td.pct { text-align: right; border-left: 1px solid #666; } +div.coverage-summary td.abs { 
text-align: right; font-size: 90%; color: #444; border-right: 1px solid #666; } +div.coverage-summary td.file { border-left: 1px solid #666; white-space: nowrap; } +div.coverage-summary td.pic { min-width: 120px !important; } +div.coverage-summary a:link { text-decoration: none; color: #000; } +div.coverage-summary a:visited { text-decoration: none; color: #777; } +div.coverage-summary a:hover { text-decoration: underline; } +div.coverage-summary tfoot td { border-top: 1px solid #666; } + +div.coverage-summary .sorter { + height: 10px; + width: 7px; + display: inline-block; + margin-left: 0.5em; + background: url(sort-arrow-sprite.png) no-repeat scroll 0 0 transparent; +} +div.coverage-summary .sorted .sorter { + background-position: 0 -20px; +} +div.coverage-summary .sorted-desc .sorter { + background-position: 0 -10px; +} + +.high { background: #b5d592 !important; } +.medium { background: #ffe87c !important; } +.low { background: #fc8c84 !important; } + +span.cover-fill, span.cover-empty { + display:inline-block; + border:1px solid #444; + background: white; + height: 12px; +} +span.cover-fill { + background: #ccc; + border-right: 1px solid #444; +} +span.cover-empty { + background: white; + border-left: none; +} +span.cover-full { + border-right: none !important; +} +pre.prettyprint { + border: none !important; + padding: 0 !important; + margin: 0 !important; +} +.com { color: #999 !important; } +.ignore-none { color: #999; font-weight: normal; } diff --git a/node_modules/node-gyp/node_modules/unique-filename/coverage/index.html b/node_modules/node-gyp/node_modules/unique-filename/coverage/index.html new file mode 100644 index 0000000..b10d186 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/coverage/index.html @@ -0,0 +1,73 @@ [istanbul HTML coverage report for All files; markup lost in extraction. Recoverable summary: Statements 100% (4 / 4), Branches 100% (2 / 2), Functions 100% (1 / 1), Lines 100% (4 / 4); the files table lists __root__/ at 100% for all four metrics]
+ + + + + + diff --git a/node_modules/node-gyp/node_modules/unique-filename/coverage/prettify.css b/node_modules/node-gyp/node_modules/unique-filename/coverage/prettify.css new file mode 100644 index 0000000..b317a7c --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/coverage/prettify.css @@ -0,0 +1 @@ +.pln{color:#000}@media screen{.str{color:#080}.kwd{color:#008}.com{color:#800}.typ{color:#606}.lit{color:#066}.pun,.opn,.clo{color:#660}.tag{color:#008}.atn{color:#606}.atv{color:#080}.dec,.var{color:#606}.fun{color:red}}@media print,projection{.str{color:#060}.kwd{color:#006;font-weight:bold}.com{color:#600;font-style:italic}.typ{color:#404;font-weight:bold}.lit{color:#044}.pun,.opn,.clo{color:#440}.tag{color:#006;font-weight:bold}.atn{color:#404}.atv{color:#060}}pre.prettyprint{padding:2px;border:1px solid #888}ol.linenums{margin-top:0;margin-bottom:0}li.L0,li.L1,li.L2,li.L3,li.L5,li.L6,li.L7,li.L8{list-style-type:none}li.L1,li.L3,li.L5,li.L7,li.L9{background:#eee} diff --git a/node_modules/node-gyp/node_modules/unique-filename/coverage/prettify.js b/node_modules/node-gyp/node_modules/unique-filename/coverage/prettify.js new file mode 100644 index 0000000..ef51e03 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/coverage/prettify.js @@ -0,0 +1 @@ +window.PR_SHOULD_USE_CONTINUATION=true;(function(){var h=["break,continue,do,else,for,if,return,while"];var u=[h,"auto,case,char,const,default,double,enum,extern,float,goto,int,long,register,short,signed,sizeof,static,struct,switch,typedef,union,unsigned,void,volatile"];var p=[u,"catch,class,delete,false,import,new,operator,private,protected,public,this,throw,true,try,typeof"];var l=[p,"alignof,align_union,asm,axiom,bool,concept,concept_map,const_cast,constexpr,decltype,dynamic_cast,explicit,export,friend,inline,late_check,mutable,namespace,nullptr,reinterpret_cast,static_assert,static_cast,template,typeid,typename,using,virtual,where"];var x=[p,"abstract,boolean,byte,extends,final,finally,implements,import,instanceof,null,native,package,strictfp,super,synchronized,throws,transient"];var R=[x,"as,base,by,checked,decimal,delegate,descending,dynamic,event,fixed,foreach,from,group,implicit,in,interface,internal,into,is,lock,object,out,override,orderby,params,partial,readonly,ref,sbyte,sealed,stackalloc,string,select,uint,ulong,unchecked,unsafe,ushort,var"];var r="all,and,by,catch,class,else,extends,false,finally,for,if,in,is,isnt,loop,new,no,not,null,of,off,on,or,return,super,then,true,try,unless,until,when,while,yes";var w=[p,"debugger,eval,export,function,get,null,set,undefined,var,with,Infinity,NaN"];var s="caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END";var I=[h,"and,as,assert,class,def,del,elif,except,exec,finally,from,global,import,in,is,lambda,nonlocal,not,or,pass,print,raise,try,with,yield,False,True,None"];var f=[h,"alias,and,begin,case,class,def,defined,elsif,end,ensure,false,in,module,next,nil,not,or,redo,rescue,retry,self,super,then,true,undef,unless,until,when,yield,BEGIN,END"];var H=[h,"case,done,elif,esac,eval,fi,function,in,local,set,then,until"];var A=[l,R,w,s+I,f,H];var e=/^(DIR|FILE|vector|(de|priority_)?queue|list|stack|(const_)?iterator|(multi)?(set|map)|bitset|u?(int|float)\d*)/;var C="str";var z="kwd";var j="com";var O="typ";var G="lit";var L="pun";var F="pln";var m="tag";var E="dec";var J="src";var P="atn";var n="atv";var N="nocode";var 
M="(?:^^\\.?|[+-]|\\!|\\!=|\\!==|\\#|\\%|\\%=|&|&&|&&=|&=|\\(|\\*|\\*=|\\+=|\\,|\\-=|\\->|\\/|\\/=|:|::|\\;|<|<<|<<=|<=|=|==|===|>|>=|>>|>>=|>>>|>>>=|\\?|\\@|\\[|\\^|\\^=|\\^\\^|\\^\\^=|\\{|\\||\\|=|\\|\\||\\|\\|=|\\~|break|case|continue|delete|do|else|finally|instanceof|return|throw|try|typeof)\\s*";function k(Z){var ad=0;var S=false;var ac=false;for(var V=0,U=Z.length;V122)){if(!(al<65||ag>90)){af.push([Math.max(65,ag)|32,Math.min(al,90)|32])}if(!(al<97||ag>122)){af.push([Math.max(97,ag)&~32,Math.min(al,122)&~32])}}}}af.sort(function(av,au){return(av[0]-au[0])||(au[1]-av[1])});var ai=[];var ap=[NaN,NaN];for(var ar=0;arat[0]){if(at[1]+1>at[0]){an.push("-")}an.push(T(at[1]))}}an.push("]");return an.join("")}function W(al){var aj=al.source.match(new RegExp("(?:\\[(?:[^\\x5C\\x5D]|\\\\[\\s\\S])*\\]|\\\\u[A-Fa-f0-9]{4}|\\\\x[A-Fa-f0-9]{2}|\\\\[0-9]+|\\\\[^ux0-9]|\\(\\?[:!=]|[\\(\\)\\^]|[^\\x5B\\x5C\\(\\)\\^]+)","g"));var ah=aj.length;var an=[];for(var ak=0,am=0;ak=2&&ai==="["){aj[ak]=X(ag)}else{if(ai!=="\\"){aj[ak]=ag.replace(/[a-zA-Z]/g,function(ao){var ap=ao.charCodeAt(0);return"["+String.fromCharCode(ap&~32,ap|32)+"]"})}}}}return aj.join("")}var aa=[];for(var V=0,U=Z.length;V=0;){S[ac.charAt(ae)]=Y}}var af=Y[1];var aa=""+af;if(!ag.hasOwnProperty(aa)){ah.push(af);ag[aa]=null}}ah.push(/[\0-\uffff]/);V=k(ah)})();var X=T.length;var W=function(ah){var Z=ah.sourceCode,Y=ah.basePos;var ad=[Y,F];var af=0;var an=Z.match(V)||[];var aj={};for(var ae=0,aq=an.length;ae=5&&"lang-"===ap.substring(0,5);if(am&&!(ai&&typeof ai[1]==="string")){am=false;ap=J}if(!am){aj[ag]=ap}}var ab=af;af+=ag.length;if(!am){ad.push(Y+ab,ap)}else{var al=ai[1];var ak=ag.indexOf(al);var ac=ak+al.length;if(ai[2]){ac=ag.length-ai[2].length;ak=ac-al.length}var ar=ap.substring(5);B(Y+ab,ag.substring(0,ak),W,ad);B(Y+ab+ak,al,q(ar,al),ad);B(Y+ab+ac,ag.substring(ac),W,ad)}}ah.decorations=ad};return W}function i(T){var W=[],S=[];if(T.tripleQuotedStrings){W.push([C,/^(?:\'\'\'(?:[^\'\\]|\\[\s\S]|\'{1,2}(?=[^\']))*(?:\'\'\'|$)|\"\"\"(?:[^\"\\]|\\[\s\S]|\"{1,2}(?=[^\"]))*(?:\"\"\"|$)|\'(?:[^\\\']|\\[\s\S])*(?:\'|$)|\"(?:[^\\\"]|\\[\s\S])*(?:\"|$))/,null,"'\""])}else{if(T.multiLineStrings){W.push([C,/^(?:\'(?:[^\\\']|\\[\s\S])*(?:\'|$)|\"(?:[^\\\"]|\\[\s\S])*(?:\"|$)|\`(?:[^\\\`]|\\[\s\S])*(?:\`|$))/,null,"'\"`"])}else{W.push([C,/^(?:\'(?:[^\\\'\r\n]|\\.)*(?:\'|$)|\"(?:[^\\\"\r\n]|\\.)*(?:\"|$))/,null,"\"'"])}}if(T.verbatimStrings){S.push([C,/^@\"(?:[^\"]|\"\")*(?:\"|$)/,null])}var Y=T.hashComments;if(Y){if(T.cStyleComments){if(Y>1){W.push([j,/^#(?:##(?:[^#]|#(?!##))*(?:###|$)|.*)/,null,"#"])}else{W.push([j,/^#(?:(?:define|elif|else|endif|error|ifdef|include|ifndef|line|pragma|undef|warning)\b|[^\r\n]*)/,null,"#"])}S.push([C,/^<(?:(?:(?:\.\.\/)*|\/?)(?:[\w-]+(?:\/[\w-]+)+)?[\w-]+\.h|[a-z]\w*)>/,null])}else{W.push([j,/^#[^\r\n]*/,null,"#"])}}if(T.cStyleComments){S.push([j,/^\/\/[^\r\n]*/,null]);S.push([j,/^\/\*[\s\S]*?(?:\*\/|$)/,null])}if(T.regexLiterals){var X=("/(?=[^/*])(?:[^/\\x5B\\x5C]|\\x5C[\\s\\S]|\\x5B(?:[^\\x5C\\x5D]|\\x5C[\\s\\S])*(?:\\x5D|$))+/");S.push(["lang-regex",new RegExp("^"+M+"("+X+")")])}var V=T.types;if(V){S.push([O,V])}var U=(""+T.keywords).replace(/^ | $/g,"");if(U.length){S.push([z,new RegExp("^(?:"+U.replace(/[\s,]+/g,"|")+")\\b"),null])}W.push([F,/^\s+/,null," \r\n\t\xA0"]);S.push([G,/^@[a-z_$][a-z_$@0-9]*/i,null],[O,/^(?:[@_]?[A-Z]+[a-z][A-Za-z_$@0-9]*|\w+_t\b)/,null],[F,/^[a-z_$][a-z_$@0-9]*/i,null],[G,new 
RegExp("^(?:0x[a-f0-9]+|(?:\\d(?:_\\d+)*\\d*(?:\\.\\d*)?|\\.\\d\\+)(?:e[+\\-]?\\d+)?)[a-z]*","i"),null,"0123456789"],[F,/^\\[\s\S]?/,null],[L,/^.[^\s\w\.$@\'\"\`\/\#\\]*/,null]);return g(W,S)}var K=i({keywords:A,hashComments:true,cStyleComments:true,multiLineStrings:true,regexLiterals:true});function Q(V,ag){var U=/(?:^|\s)nocode(?:\s|$)/;var ab=/\r\n?|\n/;var ac=V.ownerDocument;var S;if(V.currentStyle){S=V.currentStyle.whiteSpace}else{if(window.getComputedStyle){S=ac.defaultView.getComputedStyle(V,null).getPropertyValue("white-space")}}var Z=S&&"pre"===S.substring(0,3);var af=ac.createElement("LI");while(V.firstChild){af.appendChild(V.firstChild)}var W=[af];function ae(al){switch(al.nodeType){case 1:if(U.test(al.className)){break}if("BR"===al.nodeName){ad(al);if(al.parentNode){al.parentNode.removeChild(al)}}else{for(var an=al.firstChild;an;an=an.nextSibling){ae(an)}}break;case 3:case 4:if(Z){var am=al.nodeValue;var aj=am.match(ab);if(aj){var ai=am.substring(0,aj.index);al.nodeValue=ai;var ah=am.substring(aj.index+aj[0].length);if(ah){var ak=al.parentNode;ak.insertBefore(ac.createTextNode(ah),al.nextSibling)}ad(al);if(!ai){al.parentNode.removeChild(al)}}}break}}function ad(ak){while(!ak.nextSibling){ak=ak.parentNode;if(!ak){return}}function ai(al,ar){var aq=ar?al.cloneNode(false):al;var ao=al.parentNode;if(ao){var ap=ai(ao,1);var an=al.nextSibling;ap.appendChild(aq);for(var am=an;am;am=an){an=am.nextSibling;ap.appendChild(am)}}return aq}var ah=ai(ak.nextSibling,0);for(var aj;(aj=ah.parentNode)&&aj.nodeType===1;){ah=aj}W.push(ah)}for(var Y=0;Y=S){ah+=2}if(V>=ap){Z+=2}}}var t={};function c(U,V){for(var S=V.length;--S>=0;){var T=V[S];if(!t.hasOwnProperty(T)){t[T]=U}else{if(window.console){console.warn("cannot override language handler %s",T)}}}}function q(T,S){if(!(T&&t.hasOwnProperty(T))){T=/^\s*]*(?:>|$)/],[j,/^<\!--[\s\S]*?(?:-\->|$)/],["lang-",/^<\?([\s\S]+?)(?:\?>|$)/],["lang-",/^<%([\s\S]+?)(?:%>|$)/],[L,/^(?:<[%?]|[%?]>)/],["lang-",/^]*>([\s\S]+?)<\/xmp\b[^>]*>/i],["lang-js",/^]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\s\S]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i]]),["default-markup","htm","html","mxml","xhtml","xml","xsl"]);c(g([[F,/^[\s]+/,null," 
\t\r\n"],[n,/^(?:\"[^\"]*\"?|\'[^\']*\'?)/,null,"\"'"]],[[m,/^^<\/?[a-z](?:[\w.:-]*\w)?|\/?>$/i],[P,/^(?!style[\s=]|on)[a-z](?:[\w:-]*\w)?/i],["lang-uq.val",/^=\s*([^>\'\"\s]*(?:[^>\'\"\s\/]|\/(?=\s)))/],[L,/^[=<>\/]+/],["lang-js",/^on\w+\s*=\s*\"([^\"]+)\"/i],["lang-js",/^on\w+\s*=\s*\'([^\']+)\'/i],["lang-js",/^on\w+\s*=\s*([^\"\'>\s]+)/i],["lang-css",/^style\s*=\s*\"([^\"]+)\"/i],["lang-css",/^style\s*=\s*\'([^\']+)\'/i],["lang-css",/^style\s*=\s*([^\"\'>\s]+)/i]]),["in.tag"]);c(g([],[[n,/^[\s\S]+/]]),["uq.val"]);c(i({keywords:l,hashComments:true,cStyleComments:true,types:e}),["c","cc","cpp","cxx","cyc","m"]);c(i({keywords:"null,true,false"}),["json"]);c(i({keywords:R,hashComments:true,cStyleComments:true,verbatimStrings:true,types:e}),["cs"]);c(i({keywords:x,cStyleComments:true}),["java"]);c(i({keywords:H,hashComments:true,multiLineStrings:true}),["bsh","csh","sh"]);c(i({keywords:I,hashComments:true,multiLineStrings:true,tripleQuotedStrings:true}),["cv","py"]);c(i({keywords:s,hashComments:true,multiLineStrings:true,regexLiterals:true}),["perl","pl","pm"]);c(i({keywords:f,hashComments:true,multiLineStrings:true,regexLiterals:true}),["rb"]);c(i({keywords:w,cStyleComments:true,regexLiterals:true}),["js"]);c(i({keywords:r,hashComments:3,cStyleComments:true,multilineStrings:true,tripleQuotedStrings:true,regexLiterals:true}),["coffee"]);c(g([],[[C,/^[\s\S]+/]]),["regex"]);function d(V){var U=V.langExtension;try{var S=a(V.sourceNode);var T=S.sourceCode;V.sourceCode=T;V.spans=S.spans;V.basePos=0;q(U,T)(V);D(V)}catch(W){if("console" in window){console.log(W&&W.stack?W.stack:W)}}}function y(W,V,U){var S=document.createElement("PRE");S.innerHTML=W;if(U){Q(S,U)}var T={langExtension:V,numberLines:U,sourceNode:S};d(T);return S.innerHTML}function b(ad){function Y(af){return document.getElementsByTagName(af)}var ac=[Y("pre"),Y("code"),Y("xmp")];var T=[];for(var aa=0;aa=0){var ah=ai.match(ab);var am;if(!ah&&(am=o(aj))&&"CODE"===am.tagName){ah=am.className.match(ab)}if(ah){ah=ah[1]}var al=false;for(var ak=aj.parentNode;ak;ak=ak.parentNode){if((ak.tagName==="pre"||ak.tagName==="code"||ak.tagName==="xmp")&&ak.className&&ak.className.indexOf("prettyprint")>=0){al=true;break}}if(!al){var af=aj.className.match(/\blinenums\b(?::(\d+))?/);af=af?af[1]&&af[1].length?+af[1]:true:false;if(af){Q(aj,af)}S={langExtension:ah,sourceNode:aj,numberLines:af};d(S)}}}if(X]*(?:>|$)/],[PR.PR_COMMENT,/^<\!--[\s\S]*?(?:-\->|$)/],[PR.PR_PUNCTUATION,/^(?:<[%?]|[%?]>)/],["lang-",/^<\?([\s\S]+?)(?:\?>|$)/],["lang-",/^<%([\s\S]+?)(?:%>|$)/],["lang-",/^]*>([\s\S]+?)<\/xmp\b[^>]*>/i],["lang-handlebars",/^]*type\s*=\s*['"]?text\/x-handlebars-template['"]?\b[^>]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-js",/^]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\s\S]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i],[PR.PR_DECLARATION,/^{{[#^>/]?\s*[\w.][^}]*}}/],[PR.PR_DECLARATION,/^{{&?\s*[\w.][^}]*}}/],[PR.PR_DECLARATION,/^{{{>?\s*[\w.][^}]*}}}/],[PR.PR_COMMENT,/^{{![^}]*}}/]]),["handlebars","hbs"]);PR.registerLangHandler(PR.createSimpleLexer([[PR.PR_PLAIN,/^[ \t\r\n\f]+/,null," \t\r\n\f"]],[[PR.PR_STRING,/^\"(?:[^\n\r\f\\\"]|\\(?:\r\n?|\n|\f)|\\[\s\S])*\"/,null],[PR.PR_STRING,/^\'(?:[^\n\r\f\\\']|\\(?:\r\n?|\n|\f)|\\[\s\S])*\'/,null],["lang-css-str",/^url\(([^\)\"\']*)\)/i],[PR.PR_KEYWORD,/^(?:url|rgb|\!important|@import|@page|@media|@charset|inherit)(?=[^\-\w]|$)/i,null],["lang-css-kw",/^(-?(?:[_a-z]|(?:\\[0-9a-f]+ ?))(?:[_a-z0-9\-]|\\(?:\\[0-9a-f]+ 
?))*)\s*:/i],[PR.PR_COMMENT,/^\/\*[^*]*\*+(?:[^\/*][^*]*\*+)*\//],[PR.PR_COMMENT,/^(?:)/],[PR.PR_LITERAL,/^(?:\d+|\d*\.\d+)(?:%|[a-z]+)?/i],[PR.PR_LITERAL,/^#(?:[0-9a-f]{3}){1,2}/i],[PR.PR_PLAIN,/^-?(?:[_a-z]|(?:\\[\da-f]+ ?))(?:[_a-z\d\-]|\\(?:\\[\da-f]+ ?))*/i],[PR.PR_PUNCTUATION,/^[^\s\w\'\"]+/]]),["css"]);PR.registerLangHandler(PR.createSimpleLexer([],[[PR.PR_KEYWORD,/^-?(?:[_a-z]|(?:\\[\da-f]+ ?))(?:[_a-z\d\-]|\\(?:\\[\da-f]+ ?))*/i]]),["css-kw"]);PR.registerLangHandler(PR.createSimpleLexer([],[[PR.PR_STRING,/^[^\)\"\']+/]]),["css-str"]); diff --git a/node_modules/node-gyp/node_modules/unique-filename/coverage/sort-arrow-sprite.png b/node_modules/node-gyp/node_modules/unique-filename/coverage/sort-arrow-sprite.png new file mode 100644 index 0000000..03f704a Binary files /dev/null and b/node_modules/node-gyp/node_modules/unique-filename/coverage/sort-arrow-sprite.png differ diff --git a/node_modules/node-gyp/node_modules/unique-filename/coverage/sorter.js b/node_modules/node-gyp/node_modules/unique-filename/coverage/sorter.js new file mode 100644 index 0000000..6afb736 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/coverage/sorter.js @@ -0,0 +1,156 @@ +var addSorting = (function () { + "use strict"; + var cols, + currentSort = { + index: 0, + desc: false + }; + + // returns the summary table element + function getTable() { return document.querySelector('.coverage-summary table'); } + // returns the thead element of the summary table + function getTableHeader() { return getTable().querySelector('thead tr'); } + // returns the tbody element of the summary table + function getTableBody() { return getTable().querySelector('tbody'); } + // returns the th element for nth column + function getNthColumn(n) { return getTableHeader().querySelectorAll('th')[n]; } + + // loads all columns + function loadColumns() { + var colNodes = getTableHeader().querySelectorAll('th'), + colNode, + cols = [], + col, + i; + + for (i = 0; i < colNodes.length; i += 1) { + colNode = colNodes[i]; + col = { + key: colNode.getAttribute('data-col'), + sortable: !colNode.getAttribute('data-nosort'), + type: colNode.getAttribute('data-type') || 'string' + }; + cols.push(col); + if (col.sortable) { + col.defaultDescSort = col.type === 'number'; + colNode.innerHTML = colNode.innerHTML + ''; + } + } + return cols; + } + // attaches a data attribute to every tr element with an object + // of data values keyed by column name + function loadRowData(tableRow) { + var tableCols = tableRow.querySelectorAll('td'), + colNode, + col, + data = {}, + i, + val; + for (i = 0; i < tableCols.length; i += 1) { + colNode = tableCols[i]; + col = cols[i]; + val = colNode.getAttribute('data-value'); + if (col.type === 'number') { + val = Number(val); + } + data[col.key] = val; + } + return data; + } + // loads all row data + function loadData() { + var rows = getTableBody().querySelectorAll('tr'), + i; + + for (i = 0; i < rows.length; i += 1) { + rows[i].data = loadRowData(rows[i]); + } + } + // sorts the table using the data for the ith column + function sortByIndex(index, desc) { + var key = cols[index].key, + sorter = function (a, b) { + a = a.data[key]; + b = b.data[key]; + return a < b ? -1 : a > b ? 
1 : 0; + }, + finalSorter = sorter, + tableBody = document.querySelector('.coverage-summary tbody'), + rowNodes = tableBody.querySelectorAll('tr'), + rows = [], + i; + + if (desc) { + finalSorter = function (a, b) { + return -1 * sorter(a, b); + }; + } + + for (i = 0; i < rowNodes.length; i += 1) { + rows.push(rowNodes[i]); + tableBody.removeChild(rowNodes[i]); + } + + rows.sort(finalSorter); + + for (i = 0; i < rows.length; i += 1) { + tableBody.appendChild(rows[i]); + } + } + // removes sort indicators for current column being sorted + function removeSortIndicators() { + var col = getNthColumn(currentSort.index), + cls = col.className; + + cls = cls.replace(/ sorted$/, '').replace(/ sorted-desc$/, ''); + col.className = cls; + } + // adds sort indicators for current column being sorted + function addSortIndicators() { + getNthColumn(currentSort.index).className += currentSort.desc ? ' sorted-desc' : ' sorted'; + } + // adds event listeners for all sorter widgets + function enableUI() { + var i, + el, + ithSorter = function ithSorter(i) { + var col = cols[i]; + + return function () { + var desc = col.defaultDescSort; + + if (currentSort.index === i) { + desc = !currentSort.desc; + } + sortByIndex(i, desc); + removeSortIndicators(); + currentSort.index = i; + currentSort.desc = desc; + addSortIndicators(); + }; + }; + for (i =0 ; i < cols.length; i += 1) { + if (cols[i].sortable) { + el = getNthColumn(i).querySelector('.sorter'); + if (el.addEventListener) { + el.addEventListener('click', ithSorter(i)); + } else { + el.attachEvent('onclick', ithSorter(i)); + } + } + } + } + // adds sorting functionality to the UI + return function () { + if (!getTable()) { + return; + } + cols = loadColumns(); + loadData(cols); + addSortIndicators(); + enableUI(); + }; +})(); + +window.addEventListener('load', addSorting); diff --git a/node_modules/node-gyp/node_modules/unique-filename/index.js b/node_modules/node-gyp/node_modules/unique-filename/index.js new file mode 100644 index 0000000..02bf1e2 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/index.js @@ -0,0 +1,8 @@ +'use strict' +var path = require('path') + +var uniqueSlug = require('unique-slug') + +module.exports = function (filepath, prefix, uniq) { + return path.join(filepath, (prefix ? 
 prefix + '-' : '') + uniqueSlug(uniq)) +} diff --git a/node_modules/node-gyp/node_modules/unique-filename/package.json b/node_modules/node-gyp/node_modules/unique-filename/package.json new file mode 100644 index 0000000..bc429aa --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/package.json @@ -0,0 +1,27 @@ +{ + "name": "unique-filename", + "version": "1.1.1", + "description": "Generate a unique filename for use in temporary directories or caches.", + "main": "index.js", + "scripts": { + "test": "standard && tap test" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/unique-filename.git" + }, + "keywords": [], + "author": "Rebecca Turner (http://re-becca.org/)", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/unique-filename/issues" + }, + "homepage": "https://github.com/iarna/unique-filename", + "devDependencies": { + "standard": "^5.4.1", + "tap": "^2.3.1" + }, + "dependencies": { + "unique-slug": "^2.0.0" + } +} diff --git a/node_modules/node-gyp/node_modules/unique-filename/test/index.js b/node_modules/node-gyp/node_modules/unique-filename/test/index.js new file mode 100644 index 0000000..105b4e5 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-filename/test/index.js @@ -0,0 +1,23 @@ +'use strict' +var t = require('tap') +var uniqueFilename = require('../index.js') + +t.plan(6) + +var randomTmpfile = uniqueFilename('tmp') +t.like(randomTmpfile, /^tmp.[a-f0-9]{8}$/, 'random tmp file') + +var randomAgain = uniqueFilename('tmp') +t.notEqual(randomAgain, randomTmpfile, 'random tmp files are not the same') + +var randomPrefixedTmpfile = uniqueFilename('tmp', 'my-test') +t.like(randomPrefixedTmpfile, /^tmp.my-test-[a-f0-9]{8}$/, 'random prefixed tmp file') + +var randomPrefixedAgain = uniqueFilename('tmp', 'my-test') +t.notEqual(randomPrefixedAgain, randomPrefixedTmpfile, 'random prefixed tmp files are not the same') + +var uniqueTmpfile = uniqueFilename('tmp', 'testing', '/my/thing/to/uniq/on') +t.like(uniqueTmpfile, /^tmp.testing-7ddd44c0$/, 'unique filename') + +var uniqueAgain = uniqueFilename('tmp', 'testing', '/my/thing/to/uniq/on') +t.is(uniqueTmpfile, uniqueAgain, 'same unique string component produces same filename') diff --git a/node_modules/node-gyp/node_modules/unique-slug/.travis.yml b/node_modules/node-gyp/node_modules/unique-slug/.travis.yml new file mode 100644 index 0000000..5651fce --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-slug/.travis.yml @@ -0,0 +1,10 @@ +language: node_js +sudo: false +before_install: + - "npm -g install npm" +node_js: + - "6" + - "8" + - "10" + - "lts/*" + - "node" diff --git a/node_modules/node-gyp/node_modules/unique-slug/LICENSE b/node_modules/node-gyp/node_modules/unique-slug/LICENSE new file mode 100644 index 0000000..7953647 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-slug/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/unique-slug/README.md b/node_modules/node-gyp/node_modules/unique-slug/README.md new file mode 100644 index 0000000..87f92f1 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-slug/README.md @@ -0,0 +1,19 @@ +unique-slug +=========== + +Generate a unique character string suitable for use in files and URLs. + +``` +var uniqueSlug = require('unique-slug') + +var randomSlug = uniqueSlug() +var fileSlug = uniqueSlug('/etc/passwd') +``` + +### uniqueSlug(*str*) → String (8 chars) + +If *str* is passed in then the return value will be its murmur hash in +hex. + +If *str* is not passed in, it will be 4 randomly generated bytes +converted into 8 hexadecimal characters. diff --git a/node_modules/node-gyp/node_modules/unique-slug/index.js b/node_modules/node-gyp/node_modules/unique-slug/index.js new file mode 100644 index 0000000..fa4761a --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-slug/index.js @@ -0,0 +1,11 @@ +'use strict' +var MurmurHash3 = require('imurmurhash') + +module.exports = function (uniq) { + if (uniq) { + var hash = new MurmurHash3(uniq) + return ('00000000' + hash.result().toString(16)).substr(-8) + } else { + return (Math.random().toString(16) + '0000000').substr(2, 8) + } +} diff --git a/node_modules/node-gyp/node_modules/unique-slug/package.json b/node_modules/node-gyp/node_modules/unique-slug/package.json new file mode 100644 index 0000000..2142e68 --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-slug/package.json @@ -0,0 +1,23 @@ +{ + "name": "unique-slug", + "version": "2.0.2", + "description": "Generate a unique character string suitable for use in files and URLs.", + "main": "index.js", + "scripts": { + "test": "standard && tap --coverage test" + }, + "keywords": [], + "author": "Rebecca Turner (http://re-becca.org)", + "license": "ISC", + "devDependencies": { + "standard": "^12.0.1", + "tap": "^12.7.0" + }, + "repository": { + "type": "git", + "url": "git://github.com/iarna/unique-slug.git" + }, + "dependencies": { + "imurmurhash": "^0.1.4" + } +} diff --git a/node_modules/node-gyp/node_modules/unique-slug/test/index.js b/node_modules/node-gyp/node_modules/unique-slug/test/index.js new file mode 100644 index 0000000..0f4ccad --- /dev/null +++ b/node_modules/node-gyp/node_modules/unique-slug/test/index.js @@ -0,0 +1,13 @@ +'use strict' +var t = require('tap') +var uniqueSlug = require('../index.js') + +t.plan(5) +var slugA = uniqueSlug() +t.is(slugA.length, 8, 'random slugs are 8 chars') +t.notEqual(slugA, uniqueSlug(), "two slugs aren't the same") +var base = '/path/to/thingy' +var slugB = uniqueSlug(base) +t.is(slugB.length, 8, 'string based slugs are 8 chars') +t.is(slugB, uniqueSlug(base), 'two string based slugs, from the same string are the same') +t.notEqual(slugB, uniqueSlug(slugA), 'two string based slugs, from diff strings are different') diff --git a/node_modules/node-gyp/package.json b/node_modules/node-gyp/package.json new file mode 100644 index 0000000..fbeae5e --- /dev/null +++ b/node_modules/node-gyp/package.json @@ -0,0 +1,50 @@ +{ + "name": "node-gyp", + "description": "Node.js native addon build tool", + "license": "MIT", + "keywords": [ + "native",
"addon", + "module", + "c", + "c++", + "bindings", + "gyp" + ], + "version": "8.4.1", + "installVersion": 9, + "author": "Nathan Rajlich (http://tootallnate.net)", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/node-gyp.git" + }, + "preferGlobal": true, + "bin": "./bin/node-gyp.js", + "main": "./lib/node-gyp.js", + "dependencies": { + "env-paths": "^2.2.0", + "glob": "^7.1.4", + "graceful-fs": "^4.2.6", + "make-fetch-happen": "^9.1.0", + "nopt": "^5.0.0", + "npmlog": "^6.0.0", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.2", + "which": "^2.0.2" + }, + "engines": { + "node": ">= 10.12.0" + }, + "devDependencies": { + "bindings": "^1.5.0", + "nan": "^2.14.2", + "require-inject": "^1.4.4", + "standard": "^14.3.4", + "tap": "^12.7.0" + }, + "scripts": { + "lint": "standard */*.js test/**/*.js", + "test": "npm run lint && tap --timeout=120 test/test-*" + } +} diff --git a/node_modules/node-gyp/src/win_delay_load_hook.cc b/node_modules/node-gyp/src/win_delay_load_hook.cc new file mode 100644 index 0000000..169f802 --- /dev/null +++ b/node_modules/node-gyp/src/win_delay_load_hook.cc @@ -0,0 +1,39 @@ +/* + * When this file is linked to a DLL, it sets up a delay-load hook that + * intervenes when the DLL is trying to load the host executable + * dynamically. Instead of trying to locate the .exe file it'll just + * return a handle to the process image. + * + * This allows compiled addons to work when the host executable is renamed. + */ + +#ifdef _MSC_VER + +#pragma managed(push, off) + +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN +#endif + +#include + +#include +#include + +static FARPROC WINAPI load_exe_hook(unsigned int event, DelayLoadInfo* info) { + HMODULE m; + if (event != dliNotePreLoadLibrary) + return NULL; + + if (_stricmp(info->szDll, HOST_BINARY) != 0) + return NULL; + + m = GetModuleHandle(NULL); + return (FARPROC) m; +} + +decltype(__pfnDliNotifyHook2) __pfnDliNotifyHook2 = load_exe_hook; + +#pragma managed(pop) + +#endif diff --git a/node_modules/node-gyp/test/common.js b/node_modules/node-gyp/test/common.js new file mode 100644 index 0000000..b714ee2 --- /dev/null +++ b/node_modules/node-gyp/test/common.js @@ -0,0 +1,3 @@ +const envPaths = require('env-paths') + +module.exports.devDir = () => envPaths('node-gyp', { suffix: '' }).cache diff --git a/node_modules/node-gyp/test/fixtures/VS_2017_BuildTools_minimal.txt b/node_modules/node-gyp/test/fixtures/VS_2017_BuildTools_minimal.txt new file mode 100644 index 0000000..244f6b0 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2017_BuildTools_minimal.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual 
Studio\\2017\\BuildTools","version":"15.9.28307.665","packages":["Microsoft.VisualStudio.Product.BuildTools","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.JavaScript.LanguageService","Microsoft.VisualStudio.JavaScript.LanguageService.Resources","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.CodeAnalysis.VisualStudio.Setup.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.CodeAnalysis.ExpressionEvaluator.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","
Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.PackageGroup.CoreEditor","PortableFacades","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer","Microsoft.TeamFoundation.OfficeIntegration","Microsoft.TeamFoundation.OfficeIntegration.Resources","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Editors","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.Component.Windows10SDK.17134","Win10SDK_10.0.17134","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x
64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.Workload.MSBuildTools","Microsoft.VisualStudio.Component.CoreBuildTools","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.BuildTools.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Component.MSBuild","Microsoft.PythonTools.BuildCore.Vsix","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.VisualStudio.NativeImageSupport","Microsoft.Build"]}] diff --git a/node_modules/node-gyp/test/fixtures/VS_2017_Community_workload.txt b/node_modules/node-gyp/test/fixtures/VS_2017_Community_workload.txt new file mode 100644 index 0000000..dd5e77d --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2017_Community_workload.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual 
Studio\\2017\\Community","version":"15.9.28307.665","packages":["Microsoft.VisualStudio.Component.Windows10SDK.IpOverUsb","Win10SDK_IpOverUsb","Microsoft.VisualStudio.Component.VC.ATL.ARM64","Microsoft.VisualCpp.ATL.ARM64","Microsoft.VisualStudio.Component.VC.ATL.ARM","Microsoft.VisualCpp.ATL.ARM","Microsoft.VisualStudio.Component.VC.Tools.ARM","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualStudio.Graphics.Analyzer.Resources","Microsoft.Icecap.Analysis","Microsoft.VisualCpp.CRT.Redist.arm.OneCore.Desktop","Microsoft.VisualCpp.CRT.arm.Store","Microsoft.VisualCpp.CRT.arm.Desktop","Microsoft.VisualStudio.PackageGroup.VC.Tools.x64.ARM","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetarm","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetARM.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM.Resources","Microsoft.VisualCpp.Premium.Tools.ARM.Base","Microsoft.VisualCpp.Premium.Tools.ARM.Base.Resources","Microsoft.VisualCpp.PGO.ARM","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualStudio.Product.Community","Microsoft.VisualCpp.Tools.Hostx86.Targetarm","Microsoft.VisualStudio.Component.VC.Tools.ARM64","Microsoft.VisualStudio.VC.MSBuild.Arm64","Microsoft.VisualCpp.CRT.Redist.ARM64.OneCore.Desktop","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.CRT.ARM64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ARM64.Store","Microsoft.VisualCpp.CRT.ARM64.Desktop","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.Icecap.Analysis.Resources","Microsoft.VisualCpp.VCTip.hostX86.targetARM","Microsoft.VisualStudio.PackageGroup.VC.Tools.x64.ARM64","Microsoft.VisualCpp.Tools.Core","Microsoft.VisualCpp.PGO.ARM64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetarm64","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetARM64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM64.Resources","Microsoft.VisualCpp.Premium.Tools.ARM64.Base","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Tools.HostX86.TargetARM.Resources","Microsoft.VisualCpp.CRT.Redist.ARM64","Microsoft.VisualCpp.CRT.arm.OneCore.Desktop","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Component.WixToolset.VisualStudioExtension.Dev15","WixToolset.VisualStudioExtension.Dev15","Microsoft.VisualCpp.MFC.X64","Microsoft.VisualCpp.ATL.Headers","Microsoft.VisualStudio.Component.VC.CMake.Project","Microsoft.VisualStudio.VC.CMake","Microsoft.VisualStudio.VC.CMake.Project","Microsoft.VisualStudio.Component.Windows10SDK.17763","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","MLGen","Microsoft.VisualStudio.Graphics.Analyzer","Microsoft.VisualStudio.Component.TestTools.Core","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.NuGet.Licenses","SQLCommon","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualCpp.Tools.HostX64.TargetARM","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualCpp.HTMLHelpWorkshop.Msi","Microsoft.Icecap.Collection.Msi.Resources","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.VCTip.hostX64.targetARM","Microsoft.VisualStudio.VC.Ide.Dskx.Resources
","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.Graphics.Appid","Microsoft.VisualCpp.ATL.Source","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi","Microsoft.VisualStudio.Debugger.JustInTime","Microsoft.DiagnosticsHub.CpuSampling","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualCpp.Tools.HostX64.TargetARM.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.Component.MSBuild","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.WebToolsExtensions","Microsoft.VisualCpp.Tools.Hostx86.Targetarm64","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualCpp.VCTip.hostX86.targetARM64","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualCpp.Tools.HostX86.TargetARM64.Resources","Microsoft.VisualStudio.RazorExtension","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualCpp.MFC.Source","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.Graphics.Viewers","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.MFC.Redist.X86","Microsoft.VisualStudio.WebToolsExtensions.Chip","Microsoft.DiagnosticsHub.Runtime.Resources","Microsoft.DiagnosticsHub.CpuSampling.Targeted","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.Component.VC.DiagnosticTools","Microsoft.VisualCpp.MFC.Redist.X64","Microsoft.VisualStudio.PackageGroup.TestTools.Native","Microsoft.VisualStudio.Graphics.Viewers.Resources","Microsoft.VisualCpp.MFC.MBCS","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Component.TextTemplating","Win10SDK_10.0.17763","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualCpp.MFC.MBCS.X64","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.Graphics.EnableTools","Microsoft.VisualStudio.Graphics.Appid.Resources","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualCpp.MFC.Headers","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.Graphics.Analyzer.Targeted","Microsoft.VisualCpp.CRT.Headers","Microsoft.DiagnosticsHub.Runtime.Targeted","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetARM64","Microsoft.VisualCpp.VCTip.hostX64.targetARM64","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.Icecap.Collection.Msi","Microsoft.VisualCpp.ATL.X86","Microsoft.VisualCpp.Tools.HostX64.TargetARM64.Resources","Microsoft.VisualStudio.Component.VC.ATLMFC","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.Icecap.Collection.Msi.Resources.Targeted","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualStudio.Component.Graphics.Tools","Microsoft.VisualStudio.WebTools.Resources","Microsoft.VisualCpp.ATL.X64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualStudio.Component.Gr
aphics.Win81","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.Icecap.Analysis.Resources.Targeted","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.MFC.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.JavaScript.LanguageService","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.CredentialProvider","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.Component.NuGet","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualCpp.PGO.Headers","Microsoft.DiagnosticsHub.Collection","Microsoft.Icecap.Collection.Msi.Targeted","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualStudio.Branding.Community","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.WebToolsExtensions.Common","Microsoft.VisualStudio.WebTools.MSBuild","Microsoft.VisualStudio.NuGet.Core","Microsoft.DiagnosticsHub.Collection.Service","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.DiaSymReader.Native","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Component.Roslyn.LanguageServices","Microsoft.DiagnosticsHub.Collection.StopService.Install","Microsoft.VisualStudio.InteractiveWindow","Microsoft.PackageGroup.DiagnosticsHub.Platform","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.PackageGr
oup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.CodeAnalysis.ExpressionEvaluator.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.Debugger.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","sqlsysclrtypes","Microsoft.VisualStudio.ProTools","Component.Microsoft.VisualStudio.RazorExtension","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.Build.Dependencies","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualStudio.VC.Ide.ATL.Resources","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.Debugger.Managed","Microsoft.VisualStudio.Workload.NativeDesktop","Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest","Microsoft.VisualStudio.Debugger.JustInTime.Msi","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","sqlsysclrtypes","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.WebTools","Microsoft.VisualStudio.Component.VC.Redist.14.Latest","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.TeamFoundation.OfficeIntegration.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.JavaScript.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.Component.Windows10SDK.17134","Microsoft.VisualStudio.PackageGroup.Core","PortableFacades","Microsoft.DiaSymReader","Microsoft.DiagnosticsHub.Runtime","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.Community","Microsoft.TeamFoundation.OfficeIntegration","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.Visual
Studio.ClientDiagnostics.Resources","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualCpp.Tools.Common.UtilsPrereq","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.ServiceHub","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.TeamExplorer","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents.Resources","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.GraphProvider","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents","Microsoft.CodeAnalysis.VisualStudio.Setup.Interactive.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup.Resources","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.NuGet.Build.Tasks","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.Build","Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.ATL","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.Workload.CoreEditor","Microsoft.VisualStudio.MinShell.Interop","Microsoft.Build.FileTracker.Msi","Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core","Microsoft.MSHtml","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.Devenv.Msi","Microsoft.VisualStudio.Component.VC.ATL","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.CoreEditor","Win10SDK_10.0.17134","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.Component.Debugger.JustInTime","Microsoft.VisualStudio.VC.Ide.MFC","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.PackageGroup.TeamExplorer","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.Devenv","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualStudio.Devenv.Resources","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.MefHosting","Microsoft.DiagnosticsHub.Collection.StopService.Uninstall","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualCpp.MFC.X86","Microsoft.VisualStudio.Log.Resources","Microsoft.Icecap.Analysis.Targeted","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.MinShell.
Msi","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.Devenv.Config","Microsoft.VisualStudio.MinShell.Resources","Microsoft.VisualStudio.Initializer","Microsoft.Net.PackageGroup.4.6.Redist"]}] diff --git a/node_modules/node-gyp/test/fixtures/VS_2017_Express.txt b/node_modules/node-gyp/test/fixtures/VS_2017_Express.txt new file mode 100644 index 0000000..c4b3b5f --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2017_Express.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\WDExpress","version":"15.9.28307.858","packages":["Microsoft.VisualStudio.Product.WDExpress","Microsoft.VisualStudio.Workload.WDExpress","Microsoft.VisualStudio.Component.Windows10SDK.17763","MLGen","Win10SDK_10.0.17763","Microsoft.VisualStudio.Component.Windows10SDK.14393","Win10SDK_10.0.14393.795","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.Component.VC.Tools.ARM64","Microsoft.VisualStudio.VC.MSBuild.Arm64","Microsoft.VisualCpp.CRT.Redist.ARM64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.ARM64","Microsoft.VisualCpp.CRT.ARM64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ARM64.Store","Microsoft.VisualCpp.CRT.ARM64.Desktop","Microsoft.VisualCpp.Tools.Hostx86.Targetarm64","Microsoft.VisualCpp.VCTip.hostX86.targetARM64","Microsoft.VisualCpp.Tools.HostX86.TargetARM64.Resources","Microsoft.VisualStudio.Component.VC.Tools.ARM","Microsoft.VisualCpp.Tools.Hostx86.Targetarm","Microsoft.VisualCpp.VCTip.hostX86.targetARM","Microsoft.VisualCpp.Tools.HostX86.TargetARM.Resources","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.arm.OneCore.Desktop","Microsoft.VisualCpp.CRT.arm.OneCore.Desktop","Microsoft.VisualCpp.CRT.arm.Store","Microsoft.VisualCpp.CRT.arm.Desktop","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.JavaScript.LanguageService","Microsoft.VisualStudio.JavaScript.LanguageService.Resources","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualStudio.Component.VC.CLI.Support","Microsoft.VisualCpp.CLI.X86","Microsoft.VisualCpp.CLI.X64","Microsoft.VisualCpp.CLI.Source","Microsoft.VisualCpp.
CLI.ARM64","Microsoft.VisualCpp.CLI.ARM","Microsoft.VisualStudio.VC.Templates.CLR","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Templates.CLR.Resources","Microsoft.Component.VC.Runtime.OSSupport","Microsoft.Windows.UniversalCRT.Tools.Msi","Microsoft.Windows.UniversalCRT.Tools.Msi","Microsoft.Windows.UniversalCRT.ExtensionSDK.Msi","Microsoft.Windows.UniversalCRT.HeadersLibsSources.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.Component.HelpViewer","Microsoft.HelpViewer","Microsoft.VisualStudio.Help.Configuration.Msi","Microsoft.VisualStudio.Component.SQL.DataSources","Microsoft.VisualStudio.Component.SQL.SSDT","Microsoft.VisualStudio.Component.SQL.CMDUtils","sqlcmdlnutils","Microsoft.VisualStudio.Component.Common.Azure.Tools","Microsoft.VisualStudio.Azure.CommonAzureTools","SSDT","Microsoft.VisualStudio.Component.SQL.ADAL","sql_adalsql","Microsoft.VisualStudio.Component.NuGet","Microsoft.CredentialProvider","Microsoft.VisualStudio.NuGet.Licenses","Microsoft.VisualStudio.Component.SQL.LocalDB.Runtime","Microsoft.VisualStudio.Component.SQL.NCLI","sqllocaldb","sqlncli","Microsoft.VisualStudio.Component.EntityFramework","Microsoft.VisualStudio.PackageGroup.DslRuntime","Microsoft.VisualStudio.Dsl.Core","Microsoft.VisualStudio.Dsl.GraphObject","Microsoft.VisualStudio.Dsl.Core.Resources","Microsoft.VisualStudio.EntityFrameworkTools","Microsoft.VisualStudio.EntityFrameworkTools.Msi","Microsoft.VisualStudio.Component.Roslyn.LanguageServices","Microsoft.VisualStudio.InteractiveWindow","Microsoft.DiaSymReader.Native","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents.Resources","Microsoft.CodeAnalysis.VisualStudio.InteractiveComponents","Microsoft.CodeAnalysis.VisualStudio.Setup.Interactive.Resources","Microsoft.Net.ComponentGroup.TargetingPacks.Common","Microsoft.Net.Component.4.6.TargetingPack","Microsoft.Net.4.6.TargetingPack","Microsoft.Net.Component.4.5.2.TargetingPack","Microsoft.Net.4.5.2.TargetingPack","Microsoft.Net.Component.4.5.1.TargetingPack","Microsoft.Net.4.5.1.TargetingPack","Microsoft.Net.Component.4.5.TargetingPack","Microsoft.Ne
t.4.5.TargetingPack","Microsoft.Net.Component.4.TargetingPack","Microsoft.Net.4.TargetingPack","Microsoft.Net.ComponentGroup.DevelopmentPrerequisites","Microsoft.Net.Component.4.6.1.TargetingPack","Microsoft.Net.4.6.1.TargetingPack","Microsoft.Net.Cumulative.TargetingPack.Resources","Microsoft.Net.Component.4.6.1.SDK","Microsoft.Net.4.6.1.SDK","Microsoft.VisualStudio.Component.TextTemplating","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualStudio.Component.VisualStudioData","Microsoft.VisualStudio.Component.SQL.CLR","Microsoft.VisualStudio.ProTools","sqlsysclrtypes","sqlsysclrtypes","SQLCommon","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.XamlDiagnostics","Microsoft.VisualStudio.XamlDiagnostics.Resources","Microsoft.VisualStudio.XamlDesigner","Microsoft.VisualStudio.XamlDesigner.Resources","Microsoft.VisualStudio.XamlDesigner.Executables","Microsoft.VisualStudio.XamlShared","Microsoft.VisualStudio.XamlShared.Resources","Microsoft.VisualStudio.PackageGroup.TestTools.Managed","Microsoft.VisualStudio.PackageGroup.IntelliTrace.Core","Microsoft.IntelliTrace.Core","Microsoft.IntelliTrace.Core.Targeted","Microsoft.IntelliTrace.ProfilerProxy.Msi.x64","Microsoft.IntelliTrace.ProfilerProxy.Msi","Microsoft.VisualStudio.NuGet.Core","Microsoft.VisualStudio.TestWindow.SourceBasedTestDiscovery","Microsoft.VisualStudio.TestWindow.Dotnet","Microsoft.VisualStudio.TestTools.TestGeneration","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.PackageGroup.TestTools.Enterprise","Microsoft.VisualStudio.PackageGroup.TestTools.MSTestV2.Managed","Microsoft.VisualStudio.TestTools.MSTestV2.WizardExtension.UnitTest","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","Microsoft.Component.ClickOnce","Microsoft.VisualStudio.PackageGroup.ClickOnce.MSBuild","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.ClickOnce.SignTool.Msi","Microsoft.SQL.ClickOnceBootstrapper.Msi","Microsoft.Net.ClickOnceBootstrapper","Microsoft.ClickOnce.BootStrapper.Msi.Resources","Microsoft.ClickOnce.BootStrapper.
Msi","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.CodeAnalysis.VisualStudio.Setup.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.CodeAnalysis.ExpressionEvaluator.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.ClickOnce.Resources","Microsoft.VisualStudio.ClickOnce","Microsoft.Component.MSBuild","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.VisualStudio.TemplateEngine","Microsoft.VisualStudio.WebToolsExtensions.Common","Microsoft.NET.Sdk","Microsoft.VisualStudio.PackageGroup.TestTools.Templates.Managed","Microsoft.VisualStudio.TestTools.Templates.Managed","Microsoft.VisualStudio.TestTools.Templates.Managed.Resources","Microsoft.VisualStudio.Templates.VB.MSTestv2.Desktop.UnitTest","Microsoft.VisualStudio.Templates.CS.MSTestv2.Desktop.UnitTest","Microsoft.VisualStudio.Templates.VB.Wpf","Microsoft.VisualStudio.Templates.VB.Wpf.Resources","Microsoft.VisualStudio.Templates.VB.Winforms","Microsoft.VisualStudio.Templates.VB.ManagedCore","Microsoft.VisualStudio.Templates.VB.Shared","Microsoft.VisualStudio.Templates.VB.Shared.Resources","Microsoft.VisualStudio.Templates.VB.ManagedCore.Reso
urces","Microsoft.VisualStudio.Templates.CS.GettingStarted.Desktop.Package","Microsoft.VisualStudio.Templates.GetStarted.Desktop.Setup","Microsoft.VisualStudio.Templates.CS.GettingStarted.Console.Package","Microsoft.VisualStudio.Templates.GetStarted.Resources","Microsoft.VisualStudio.Templates.GetStarted.Common.Setup","Microsoft.VisualStudio.Templates.GetStarted.Console.Setup","Microsoft.VisualStudio.Templates.CS.Wpf","Microsoft.VisualStudio.Templates.CS.Wpf.Resources","Microsoft.VisualStudio.Templates.CS.Winforms","Microsoft.VisualStudio.Templates.CS.ManagedCore","Microsoft.VisualStudio.Templates.CS.Shared","Microsoft.VisualStudio.Templates.Editorconfig.Wizard.Setup","Templates.Editorconfig.SolutionFile.Setup","Microsoft.VisualStudio.Templates.CS.Shared.Resources","Microsoft.VisualStudio.Templates.CS.ManagedCore.Resources","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.PackageGroup.CoreEditor","PortableFacades","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Build","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer","Microsoft.TeamFoundation.OfficeIntegration","Microsoft.TeamFoundation.OfficeIntegration.Resources","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.WDExpress","Microsoft.VisualStudio.WDExpress.Resources","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.MinShell.Resources","Microsoft.Net.PackageGroup.4.6.Redist","Microsoft.VisualStudio.Branding.WDExpress"]}] diff --git a/node_modules/node-gyp/test/fixtures/VS_2017_Unusable.txt b/node_modules/node-gyp/test/fixtures/VS_2017_Unusable.txt new file mode 100644 index 0000000..fc0a257 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2017_Unusable.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual 
Studio\\2017\\BuildToolsUnusable","version":"15.9.28307.665","packages":["Microsoft.VisualStudio.Product.BuildTools","Microsoft.VisualStudio.Component.Windows10SDK.17134","Win10SDK_10.0.17134","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.Workload.MSBuildTools","Microsoft.VisualStudio.Component.CoreBuildTools","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.BuildTools.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Component.MSBuild","Microsoft.PythonTools.BuildCore.Vsix","Microsoft.NuGet.Bui
ld.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers.Resources","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.6.1.Redist","Microsoft.Net.4.6.1.FullRedist.NonThreshold","Microsoft.Windows.UniversalCRT.Msu.81","Microsoft.VisualStudio.NativeImageSupport","Microsoft.Build"]}] diff --git a/node_modules/node-gyp/test/fixtures/VS_2019_BuildTools_minimal.txt b/node_modules/node-gyp/test/fixtures/VS_2019_BuildTools_minimal.txt new file mode 100644 index 0000000..f07d254 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2019_BuildTools_minimal.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools","version":"16.1.28922.388","packages":["Microsoft.VisualStudio.Product.BuildTools","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.CodeSense.Community","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Platform.CallHierarchy","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.OpenFolder.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Platform.NavigateTo","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStud
io.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VisualC.Logging","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.VCPkgDatabase","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Agent","Microsoft.VisualStudio.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Concord","Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.DiaSymReader","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Concord.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed.Resources","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.PerfLib","Microsoft.VisualStudio.Debugger.Package.DiagHub.Client.VSx86","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.VisualStudio.Editors","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Platform.Editor","Microsoft.VisualStudio.Debugger.Concord","Microsoft.VisualStudio.Debugger.Concord.Resources","Microsoft.VisualStudio.Debugger.Resources","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.VisualStudio.Component.Windows10SDK.17134","Win10SDK_10.0.17134","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp
.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.VC.MSBuild.x86.v142","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.X64.v142","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM.v142","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.Workload.MSBuildTools","Microsoft.VisualStudio.Component.CoreBuildTools","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.BuildTools.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.
Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Component.MSBuild","Microsoft.PythonTools.BuildCore.Vsix","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers","Microsoft.Net.PackageGroup.4.7.2.Redist","Microsoft.VisualStudio.NativeImageSupport","Microsoft.Build","Microsoft.VisualStudio.PackageGroup.NuGet","Microsoft.VisualStudio.NuGet.BuildTools"]}] diff --git a/node_modules/node-gyp/test/fixtures/VS_2019_Community_workload.txt b/node_modules/node-gyp/test/fixtures/VS_2019_Community_workload.txt new file mode 100644 index 0000000..50071c2 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2019_Community_workload.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community","version":"16.1.28922.388","packages":["Microsoft.VisualStudio.Workload.NativeDesktop","Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest","Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest","Microsoft.VisualStudio.Component.VC.ATL","Microsoft.VisualStudio.VC.Ide.ATL","Microsoft.VisualStudio.VC.Ide.ATL.Resources","Microsoft.VisualCpp.ATL.X86","Microsoft.VisualCpp.ATL.X64","Microsoft.VisualCpp.ATL.Source","Microsoft.VisualCpp.ATL.Headers","Microsoft.VisualStudio.Component.VC.CMake.Project","Microsoft.VisualStudio.VC.CMake","Microsoft.VisualStudio.VC.CMake.Project","Microsoft.VisualStudio.VC.ExternalBuildFramework","Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core","Microsoft.VisualStudio.PackageGroup.TestTools.Native","Microsoft.VisualStudio.Component.VC.Redist.14.Latest","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.VC.UnitTest.Desktop.Build.Core","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.Component.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi","Microsoft.VisualStudio.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.JustInTime.Msi","Microsoft.VisualStudio.Component.Windows10SDK.17763","Win10SDK_10.0.17763","Microsoft.VisualStudio.Component.VC.DiagnosticTools","Microsoft.VisualStudio.Component.Graphics.Tools","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.
Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.Desktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.Graphics.Viewers","Microsoft.VisualStudio.Graphics.Viewers.Resources","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Analyzer","Microsoft.VisualStudio.Graphics.Analyzer.Targeted","Microsoft.VisualStudio.Graphics.Analyzer.Resources","Microsoft.VisualStudio.Graphics.Appid","Microsoft.VisualStudio.Graphics.Appid.Resources","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.VC.MSBuild.X64.v142","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM.v142","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.x86.v142","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.VCPkgDatabase","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Component.Microsoft.VisualStudio.LiveShare","Microsoft.VisualStudio.LiveShare","Microsoft.Icecap.Analysis","Microsoft.Icecap.Analysis.Targeted","Microsoft.Icecap.Analysis.Resources","Microsoft.Icecap.Analysis.Resources.Targeted","Microsoft.Icecap.Collection.Msi","Microsoft.Icecap.Collection.Msi.Targeted","Microsoft.Icecap.Collection.Msi.Resources","Microsoft.Icecap.Collection.Msi.Resources.Targeted","Microsoft.Di
agnosticsHub.Instrumentation","Microsoft.DiagnosticsHub.CpuSampling.ExternalDependencies","Microsoft.DiagnosticsHub.CpuSampling","Microsoft.DiagnosticsHub.CpuSampling.Targeted","Microsoft.PackageGroup.DiagnosticsHub.Platform","Microsoft.DiagnosticsHub.Runtime.ExternalDependencies","Microsoft.DiagnosticsHub.Runtime.ExternalDependencies.Targeted","Microsoft.DiagnosticsHub.Collection.ExternalDependencies.x64","Microsoft.DiagnosticsHub.Collection.StopService.Uninstall","Microsoft.DiagnosticsHub.Runtime","Microsoft.DiagnosticsHub.Runtime.Targeted","Microsoft.DiagnosticsHub.Collection","Microsoft.DiagnosticsHub.Collection.Service","Microsoft.DiagnosticsHub.Collection.StopService.Install","Microsoft.VisualStudio.Component.IntelliCode","Microsoft.VisualStudio.IntelliCode","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","Microsoft.VisualStudio.LiveShareApi","Microsoft.VisualStudio.Component.TextTemplating","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.Component.MSBuild","Microsoft.NuGet.Build.Tasks","Microsoft.DiagnosticsHub.KB2882822.Win7","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions","Microsoft.VisualStudio.ProTools","sqlsysclrtypes","sqlsysclrtypes","SQLCommon","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.WebToolsExtensions","Microsoft.VisualStudio.WebTools","Microsoft.VisualStudio.WebTools.Resources","Microsoft.VisualStudio.WebTools.MSBuild","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.VC.Ide.MDD","Mic
rosoft.VisualStudio.VisualC.Logging","Microsoft.WebTools.Shared","Microsoft.WebTools.DotNet.Core.ItemTemplates","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.Windows.UniversalCRT.Msu.7","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers","Microsoft.VisualStudio.Component.NuGet","Microsoft.CredentialProvider","Microsoft.VisualStudio.NuGet.PowershellBindingRedirect","Microsoft.VisualStudio.NuGet.Licenses","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.CodeSense.Community","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Agent","Microsoft.VisualStudio.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Concord","Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.VisualStudio.Debugger.Concord.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed.Resources","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.DbgHelp.Win8","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Remote.DbgHelp.Win8","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.Debugger.Package.DiagHub.Client.VSx86","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.DbgHelp.Win8","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Concord","Microsoft.VisualStudio.Debugger.Concord.Resources","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualSt
udio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Platform.CallHierarchy","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.Devenv.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.Product.Community","Microsoft.VisualStudio.Workload.CoreEditor","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualCpp.Tools.Common.UtilsPrereq","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Build","Microsoft.VisualStudio.PackageGroup.NuGet","Microsoft.VisualStudio.NuGet.Core","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.OpenFolder.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.Devenv","Microsoft.VisualStudio.Devenv.Resources","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Platform.NavigateTo","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.PerfLib","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Platform.Editor","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.Devenv.Config","Microsoft.VisualStudio.MinShell.Resources","Microsoft.Net.PackageGroup.4.7.2.Redist","Microsoft.Net.4.7.2.FullRedist","Microsoft.VisualStudio.Branding.Community"]}] diff --git a/node_modules/node-gyp/test/fixtures/VS_2019_Preview.txt b/node_modules/node-gyp/test/fixtures/VS_2019_Preview.txt new file mode 100644 index 0000000..806509e --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/VS_2019_Preview.txt @@ -0,0 +1 @@ +[{"path":"C:\\Program Files (x86)\\Microsoft Visual 
Studio\\2019\\Preview","version":"16.0.28608.199","packages":["Microsoft.VisualStudio.Product.Enterprise","Microsoft.VisualStudio.Workload.NativeDesktop","Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest","Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest","Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest","Microsoft.VisualStudio.Component.VC.ATL","Microsoft.VisualStudio.VC.Ide.ATL","Microsoft.VisualStudio.VC.Ide.ATL.Resources","Microsoft.VisualCpp.ATL.X86","Microsoft.VisualCpp.ATL.X64","Microsoft.VisualCpp.ATL.Source","Microsoft.VisualCpp.ATL.Headers","Microsoft.VisualStudio.Component.VC.CMake.Project","Microsoft.VisualStudio.VC.CMake","Microsoft.VisualStudio.VC.CMake.Project","Microsoft.VisualStudio.VC.ExternalBuildFramework","Microsoft.VisualStudio.Component.VC.DiagnosticTools","Microsoft.VisualStudio.Component.Graphics.Tools","Microsoft.VisualStudio.Graphics.Viewers","Microsoft.VisualStudio.Graphics.Viewers.Resources","Microsoft.VisualStudio.Graphics.EnableTools","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Msi","Microsoft.VisualStudio.Graphics.Analyzer","Microsoft.VisualStudio.Graphics.Analyzer.Targeted","Microsoft.VisualStudio.Graphics.Analyzer.Resources","Microsoft.VisualStudio.Graphics.Appid","Microsoft.VisualStudio.Graphics.Appid.Resources","Microsoft.VisualStudio.Component.Windows10SDK.17763","Win10SDK_10.0.17763","Microsoft.VisualStudio.Component.VC.Tools.x86.x64","Microsoft.VisualCpp.CodeAnalysis.Extensions","Microsoft.VisualCpp.CodeAnalysis.Extensions.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X86.Resources","Microsoft.VisualCpp.CodeAnalysis.Extensions.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64","Microsoft.VisualCpp.CodeAnalysis.ConcurrencyCheck.X64.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX86","Microsoft.VisualCpp.VCTip.HostX64.TargetX86","Microsoft.VisualCpp.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Tools.HostX64.TargetX64","Microsoft.VisualCpp.VCTip.HostX64.TargetX64","Microsoft.VisualCpp.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64","Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86.Resources","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64","Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64.Resources","Microsoft.VisualCpp.PGO.X86","Microsoft.VisualCpp.PGO.X64","Microsoft.VisualCpp.PGO.Headers","Microsoft.VisualCpp.CRT.x86.Store","Microsoft.VisualCpp.CRT.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.x64.Store","Microsoft.VisualCpp.CRT.x64.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop","Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop","Microsoft.VisualStudio.PackageGroup.VC.Tools.x86","Microsoft.VisualCpp.Tools.HostX86.TargetX64","Microsoft.VisualCpp.VCTip.hostX86.targetX64","Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Resources","Microsoft.VisualCpp.Tools.HostX86.TargetX86","Microsoft.VisualCpp.VCTip.hostX86.targetX86","Microsoft.VisualCpp.Tools.HostX86.TargetX86.Resources","Microsoft.VisualCpp.Tools.Core.Resources","Microsoft.VisualCpp.Tools.Core.x86","Microsoft.VisualCpp.DIA.SDK","Microsoft.VisualCpp.CRT.x86.Desktop","Microsoft.VisualCpp.CRT.x64.D
esktop","Microsoft.VisualCpp.CRT.Source","Microsoft.VisualCpp.CRT.Redist.X86","Microsoft.VisualCpp.CRT.Redist.X64","Microsoft.VisualCpp.CRT.Redist.Resources","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.RuntimeDebug.14","Microsoft.VisualCpp.CRT.Headers","Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core","Microsoft.VisualStudio.PackageGroup.TestTools.Native","Microsoft.VisualStudio.ComponentGroup.ArchitectureTools.Native","Microsoft.VisualStudio.Component.ClassDesigner","Microsoft.VisualStudio.ClassDesigner","Microsoft.VisualStudio.ClassDesigner.Resources","Microsoft.VisualStudio.Component.VC.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualCpp.Redist.14.Latest","Microsoft.VisualStudio.VC.Templates.UnitTest","Microsoft.VisualStudio.VC.UnitTest.Desktop.Build.Core","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP","Microsoft.VisualStudio.VC.Templates.UnitTest.Resources","Microsoft.VisualStudio.VC.Templates.Desktop","Microsoft.VisualStudio.Component.VC.CoreIde","Microsoft.VisualStudio.VC.Ide.Pro","Microsoft.VisualStudio.VC.Ide.Pro.Resources","Microsoft.VisualStudio.VC.Templates.Pro","Microsoft.VisualStudio.VC.Templates.Pro.Resources","Microsoft.VisualStudio.VC.Items.Pro","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced","Microsoft.VisualStudio.VC.Ide.x64","Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express","Microsoft.VisualStudio.VC.MSBuild.X64.v142","Microsoft.VisualStudio.VC.MSBuild.X64","Microsoft.VS.VC.MSBuild.X64.Resources","Microsoft.VisualStudio.VC.MSBuild.ARM.v142","Microsoft.VisualStudio.VC.MSBuild.ARM","Microsoft.VisualStudio.VC.MSBuild.x86.v142","Microsoft.VisualStudio.VC.MSBuild.X86","Microsoft.VisualStudio.VC.MSBuild.Base","Microsoft.VisualStudio.VC.MSBuild.Base.Resources","Microsoft.VisualStudio.VC.Ide.WinXPlus","Microsoft.VisualStudio.VC.Ide.Dskx","Microsoft.VisualStudio.VC.Ide.Dskx.Resources","Microsoft.VisualStudio.VC.Ide.Base","Microsoft.VisualStudio.VC.Ide.LanguageService","Microsoft.VisualStudio.VC.Ide.Core","Microsoft.VisualStudio.VC.Ide.Core.Resources","Microsoft.VisualStudio.VC.Ide.VCPkgDatabase","Microsoft.VisualStudio.VC.Ide.Progression.Enterprise","Microsoft.VisualStudio.VC.Ide.ProjectSystem","Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine","Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources","Microsoft.VisualStudio.VC.Ide.LanguageService.Resources","Microsoft.VisualStudio.VC.Ide.Base.Resources","Microsoft.VisualStudio.Component.CodeMap","Microsoft.VisualStudio.Component.GraphDocument","Microsoft.VisualStudio.Vmp","Microsoft.VisualStudio.GraphDocument","Microsoft.VisualStudio.GraphDocument.Resources","Microsoft.VisualStudio.CodeMap","Microsoft.VisualStudio.Component.SQL.LocalDB.Runtime","Microsoft.VisualStudio.Component.SQL.NCLI","sqllocaldb","sqlncli","Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions","Microsoft.VisualStudio.WebToolsExtensions","Microsoft.VisualStudio.WebTools","Microsoft.VisualStudio.WebTools.Resources","Microsoft.VisualStudio.WebTools.MSBuild","Microsoft.VisualStudio.PackageGroup.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Msi","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.Debugger.Script.Resources","Microsoft.VisualStudio.VC.Ide.MDD","Microsoft.VisualStudio.Component.NuGet","Microsoft.CredentialProvider","Component.Microsoft.VisualStudio.LiveShare","Microsoft.VisualStudio.LiveShare","Mic
rosoft.VisualStudio.Component.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi","Microsoft.VisualStudio.Debugger.JustInTime","Microsoft.VisualStudio.Debugger.JustInTime.Msi","Microsoft.VisualStudio.Component.IntelliTrace.FrontEnd","Microsoft.IntelliTrace.DiagnosticsHubAgent.Targeted","Microsoft.IntelliTrace.Debugger","Microsoft.IntelliTrace.Debugger.Targeted","Microsoft.IntelliTrace.FrontEnd","Microsoft.DiagnosticsHub.Instrumentation","Microsoft.DiagnosticsHub.CpuSampling","Microsoft.DiagnosticsHub.CpuSampling.Targeted","Microsoft.PackageGroup.DiagnosticsHub.Platform","Microsoft.DiagnosticsHub.Collection.StopService.Uninstall","Microsoft.DiagnosticsHub.Runtime","Microsoft.DiagnosticsHub.Runtime.Targeted","Microsoft.DiagnosticsHub.Runtime.Resources","Microsoft.DiagnosticsHub.Collection","Microsoft.DiagnosticsHub.Collection.Service","Microsoft.DiagnosticsHub.Collection.StopService.Install","Microsoft.VisualStudio.Dsl.GraphObject","Microsoft.Net.4.TargetingPack","Microsoft.VisualStudio.VC.Ide.ResourceEditor","Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources","Microsoft.VisualStudio.NuGet.Licenses","Microsoft.WebTools.Shared","Microsoft.VisualStudio.WebToolsExtensions.DotNet.Core.ItemTemplates","Microsoft.VisualStudio.Component.TextTemplating","Microsoft.VisualStudio.TextTemplating.MSBuild","Microsoft.VisualStudio.TextTemplating.Integration","Microsoft.VisualStudio.TextTemplating.Core","Microsoft.VisualStudio.TextTemplating.Integration.Resources","Microsoft.VisualStudio.ProTools","sqlsysclrtypes","sqlsysclrtypes","SQLCommon","Microsoft.VisualStudio.ProTools.Resources","Microsoft.VisualStudio.NuGet.Core","Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage","Microsoft.VisualStudio.PackageGroup.IntelliTrace.Core","Microsoft.IntelliTrace.Core","Microsoft.IntelliTrace.Core.Concord","Microsoft.IntelliTrace.Core.Targeted","Microsoft.IntelliTrace.ProfilerProxy.Msi.x64","Microsoft.IntelliTrace.ProfilerProxy.Msi","Microsoft.VisualStudio.TestTools.DynamicCodeCoverage","Microsoft.VisualStudio.TestTools.CodeCoverage.Msi","Microsoft.VisualStudio.TestTools.CodeCoverage","Microsoft.Icecap.Analysis","Microsoft.Icecap.Analysis.Targeted","Microsoft.Icecap.Analysis.Resources","Microsoft.Icecap.Analysis.Resources.Targeted","Microsoft.Icecap.Collection.Msi","Microsoft.Icecap.Collection.Msi.Targeted","Microsoft.Icecap.Collection.Msi.Resources","Microsoft.Icecap.Collection.Msi.Resources.Targeted","Microsoft.VisualStudio.PackageGroup.TestTools.Core","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI","Microsoft.VisualStudio.TestTools.Pex.Common","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy","Microsoft.VisualStudio.PackageGroup.MinShell.Interop","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi","Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestTools","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Remote","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Premium","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common","Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res","Micro
soft.VisualStudio.TestTools.TestPlatform.Legacy.Core","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources","Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent","Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.TestTools.TestWIExtension","Microsoft.VisualStudio.TestTools.TestPlatform.IDE","Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors","Microsoft.VisualStudio.TestTools.NE.Msi.Targeted","Microsoft.VisualStudio.TestTools.NetworkEmulation","Microsoft.VisualStudio.TestTools.DataCollectors","Microsoft.VisualCpp.CRT.ClickOnce.Msi","Microsoft.VisualStudio.WebTools.WSP.FSA","Microsoft.VisualStudio.WebTools.WSP.FSA.Resources","Microsoft.VisualStudio.Component.Static.Analysis.Tools","Microsoft.VisualCpp.Redist.14","Microsoft.VisualCpp.Redist.14","Microsoft.VisualStudio.StaticAnalysis","Microsoft.VisualStudio.StaticAnalysis.Resources","Microsoft.VisualStudio.PackageGroup.Community","Microsoft.VisualStudio.Community.Extra.Resources","Microsoft.VisualStudio.Community.Extra","Microsoft.VisualStudio.PackageGroup.Core","Microsoft.VisualStudio.CodeSense","Microsoft.VisualStudio.CodeSense.Community","Microsoft.VisualStudio.TestTools.TeamFoundationClient","Microsoft.VisualStudio.PackageGroup.Debugger.Core","Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Runtime","Microsoft.VisualStudio.Debugger.TimeTravel.Agent","Microsoft.VisualStudio.Debugger.TimeTravel.Record","Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost","Microsoft.VisualStudio.VC.Ide.Debugger","Microsoft.VisualStudio.VC.Ide.Debugger.Concord","Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources","Microsoft.VisualStudio.VC.Ide.Debugger.Resources","Microsoft.VisualStudio.VC.Ide.Common","Microsoft.VisualStudio.VC.Ide.Common.Resources","Microsoft.VisualStudio.Debugger.Parallel","Microsoft.VisualStudio.Debugger.Parallel.Resources","Microsoft.VisualStudio.Debugger.CollectionAgents","Microsoft.VisualStudio.Debugger.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed","Microsoft.VisualStudio.Debugger.Concord.Managed.Resources","Microsoft.VisualStudio.Debugger.Managed.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote","Microsoft.VisualStudio.Debugger.Concord.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger.Remote.Resources","Microsoft.VisualStudio.Debugger","Microsoft.VisualStudio.Debugger.Package.DiagHub.Client.VSx86","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.Debugger.Remote.DiagHub.Client","Microsoft.VisualStudio.VC.MSVCDis","Microsoft.VisualStudio.ScriptedHost","Microsoft.VisualStudio.ScriptedHost.Targeted","Microsoft.VisualStudio.ScriptedHost.Resources","Microsoft.IntelliTrace.DiagnosticsHub","Microsoft.VisualStudio.Debugger.Concord","Microsoft.VisualStudio.Debugger.Concord.Resources","Microsoft.VisualStudio.Debugger.Resources","Microsoft.PackageGroup.ClientDiagnostics","Microsoft.VisualStudio.AppResponsiveness","Microsoft.VisualStudio.AppResponsiveness.Targeted","Microsoft.VisualStudio.AppResponsiveness.Resources","Microsoft.VisualStudio.ClientDiagnostics","Microsoft.VisualStudio.ClientDiagnostics.Targeted","Microsoft.VisualStudio.ClientDiagnostics.Resources","Microsof
t.VisualStudio.PackageGroup.ProfessionalCore","Microsoft.VisualStudio.Professional","Microsoft.VisualStudio.Professional.Msi","Microsoft.VisualStudio.PackageGroup.EnterpriseCore","Microsoft.VisualStudio.Enterprise.Msi","Microsoft.VisualStudio.Enterprise","Microsoft.ShDocVw","Microsoft.VisualStudio.PackageGroup.CommunityCore","Microsoft.VisualStudio.ProjectSystem.Full","Microsoft.VisualStudio.ProjectSystem","Microsoft.VisualStudio.Community.x86","Microsoft.VisualStudio.Community.x64","Microsoft.VisualStudio.Community","Microsoft.IntelliTrace.CollectorCab","Microsoft.VisualStudio.Community.Resources","Microsoft.VisualStudio.WebSiteProject.DTE","Microsoft.MSHtml","Microsoft.VisualStudio.Platform.CallHierarchy","Microsoft.VisualStudio.Community.Msi.Resources","Microsoft.VisualStudio.Community.Msi","Microsoft.VisualStudio.Devenv.Msi","Microsoft.VisualStudio.MinShell.Interop.Msi","Microsoft.VisualStudio.Editors","Microsoft.VisualStudio.Net.Eula.Resources","Microsoft.CodeAnalysis.ExpressionEvaluator","Microsoft.CodeAnalysis.VisualStudio.Setup","Microsoft.Component.MSBuild","Microsoft.NuGet.Build.Tasks","Microsoft.VisualStudio.Component.Roslyn.Compiler","Microsoft.CodeAnalysis.Compilers","Microsoft.VisualStudio.Workload.CoreEditor","Microsoft.VisualStudio.Component.CoreEditor","Microsoft.VisualStudio.PackageGroup.CoreEditor","Microsoft.VisualCpp.Tools.Common.UtilsPrereq","Microsoft.VisualCpp.Tools.Common.Utils","Microsoft.VisualCpp.Tools.Common.Utils.Resources","Microsoft.VisualStudio.PackageGroup.VsDevCmd","Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk","Microsoft.VisualStudio.VsDevCmd.Core.WinSdk","Microsoft.VisualStudio.VsDevCmd.Core.DotNet","Microsoft.VisualStudio.VC.DevCmd","Microsoft.VisualStudio.VC.DevCmd.Resources","Microsoft.VisualStudio.VirtualTree","Microsoft.VisualStudio.PackageGroup.Progression","Microsoft.VisualStudio.PerformanceProvider","Microsoft.VisualStudio.GraphModel","Microsoft.VisualStudio.GraphProvider","Microsoft.DiaSymReader","Microsoft.Build.Dependencies","Microsoft.Build.FileTracker.Msi","Microsoft.Build","Microsoft.VisualStudio.TextMateGrammars","Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common","Microsoft.VisualStudio.TeamExplorer","Microsoft.ServiceHub","Microsoft.VisualStudio.ProjectServices","Microsoft.VisualStudio.SLNX.VSIX","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.FileHandler.Msi","Microsoft.VisualStudio.PackageGroup.MinShell","Microsoft.VisualStudio.MinShell.Interop","Microsoft.VisualStudio.Log","Microsoft.VisualStudio.Log.Targeted","Microsoft.VisualStudio.Log.Resources","Microsoft.VisualStudio.Finalizer","Microsoft.VisualStudio.Devenv","Microsoft.VisualStudio.Devenv.Resources","Microsoft.VisualStudio.CoreEditor","Microsoft.VisualStudio.Platform.NavigateTo","Microsoft.VisualStudio.Connected","Microsoft.VisualStudio.Connected.Resources","Microsoft.VisualStudio.MinShell","Microsoft.VisualStudio.Setup.Configuration","Microsoft.VisualStudio.Platform.Search","Microsoft.VisualStudio.MinShell.Platform","Microsoft.VisualStudio.MinShell.Platform.Resources","Microsoft.VisualStudio.MefHosting","Microsoft.VisualStudio.MefHosting.Resources","Microsoft.VisualStudio.Initializer","Microsoft.VisualStudio.ExtensionManager","Microsoft.VisualStudio.Platform.Editor","Microsoft.VisualStudio.MinShell.x86","Microsoft.VisualStudio.NativeImageSupport","Microsoft.VisualStudio.MinShell.Msi","Microsoft.VisualStudio.MinShell.Msi.Resources","Microsoft.VisualStudio.LanguageServer","Microsoft.VisualStudio.Devenv.Config","Microsoft.VisualStudio.MinShell.Resources","Mi
crosoft.Net.PackageGroup.4.7.2.Redist","Microsoft.VisualStudio.Branding.Enterprise"]}] diff --git a/node_modules/node-gyp/test/fixtures/ca-bundle.crt b/node_modules/node-gyp/test/fixtures/ca-bundle.crt new file mode 100644 index 0000000..fb1dea9 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/ca-bundle.crt @@ -0,0 +1,40 @@ +-----BEGIN CERTIFICATE----- +MIIDJjCCAg4CAhnOMA0GCSqGSIb3DQEBBQUAMH0xCzAJBgNVBAYTAlVTMQswCQYD +VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n +TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv +bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMBkxFzAV +BgNVBAMMDnN0cm9uZ2xvb3AuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB +CgKCAQEAwOYI7OZ2FX/YjRgLZoDQlbPc5UZXU/j0e1wwiJNPtPEax9Y5Uoza0Pnt +Ikzkc2SfvQ+IJrhXo385tI0W5juuqbHnE7UrjUuPjUX6NHevkxcs/flmjan5wnZM +cPsGhH71WDuUEEflvZihf2Se2x+xgZtMhc5XGmVmRuZFYKvkgUhA2/w8/QrK+jPT +n9QRJxZjWNh2RBdC1B7u4jffSmOSUljYFH1I2eTeY+Rdi6YUIYSU9gEoZxsv3Tia +SomfMF5jt2Mouo6MzA+IhLvvFjcrcph1Qxgi9RkfdCMMd+Ipm9YWELkyG1bDRpQy +0iyHD4gvVsAqz1Y2KdRSdc3Kt+nTqwIDAQABoxkwFzAVBgNVHREEDjAMhwQAAAAA +hwR/AAABMA0GCSqGSIb3DQEBBQUAA4IBAQAhy4J0hML3NgmDRHdL5/iTucBe22Mf +jJjg2aifD1S187dHm+Il4qZNO2plWwAhN0h704f+8wpsaALxUvBIu6nvlvcMP5PH +jGN5JLe2Km3UaPvYOQU2SgacLilu+uBcIo2JSHLV6O7ziqUj5Gior6YxDLCtEZie +Ea8aX5/YjuACtEMJ1JjRqjgkM66XAoUe0E8onOK3FgTIO3tGoTJwRp0zS50pFuP0 +PsZtT04ck6mmXEXXknNoAyBCvPypfms9OHqcUIW9fiQnrGbS/Ri4QSQYj0DtFk/1 +na4fY1gf3zTHxH8259b/TOOaPfTnCEsOQtjUrWNR4xhmVZ+HJy4yytUW +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIDbzCCAlcCAmm6MA0GCSqGSIb3DQEBCwUAMH0xCzAJBgNVBAYTAlVTMQswCQYD +VQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZMBcGA1UECgwQU3Ryb25n +TG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRowGAYDVQQDDBFjYS5zdHJv +bmdsb29wLmNvbTAeFw0xNTEyMDgyMzM1MzNaFw00MzA0MjQyMzM1MzNaMH0xCzAJ +BgNVBAYTAlVTMQswCQYDVQQIDAJDQTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzEZ +MBcGA1UECgwQU3Ryb25nTG9vcCwgSW5jLjESMBAGA1UECwwJU3Ryb25nT3BzMRow +GAYDVQQDDBFjYS5zdHJvbmdsb29wLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBANfj86jkvvYDjHBgiqWhk9Cj+bqiMq3MqnV0CBO4iuK33Fo6XssE +H+yVdXlIBFbFe6t655MdBVOR2Sfj7WqNh96vhu6PyDHiwcQlTaiLU6nhIed1J4Wv +lvnJHFmp8Wbtx5AgLT4UYu03ftvXEl2DLi3vhSL2tRM1ebXHB/KPbRWkb25DPX0P +foOHot3f2dgNe2x6kponf7E/QDmAu3s7Nlkfh+ryDhgGU7wocXEhXbprNqRqOGNo +xbXgUI+/9XDxYT/7Gn5LF/fPjtN+aB0SKMnTsDhprVlZie83mlqJ46fOOrR+vrsQ +mi/1m/TadrARtZoIExC/cQRdVM05EK4tUa8CAwEAATANBgkqhkiG9w0BAQsFAAOC +AQEAQ7k5WhyhDTIGYCNzRnrMHWSzGqa1y4tJMW06wafJNRqTm1cthq1ibc6Hfq5a +K10K0qMcgauRTfQ1MWrVCTW/KnJ1vkhiTOH+RvxapGn84gSaRmV6KZen0+gMsgae +KEGe/3Hn+PmDVV+PTamHgPACfpTww38WHIe/7Ce9gHfG7MZ8cKHNZhDy0IAYPln+ +YRwMLd7JNQffHAbWb2CE1mcea4H/12U8JZW5tHCF6y9V+7IuDzqwIrLKcW3lG17n +VUG6ODF/Ryqn3V5X+TL91YyXi6c34y34IpC7MQDV/67U7+5Bp5CfeDPWW2wVSrW+ +uGZtfEvhbNm6m2i4UNmpCXxUZQ== +-----END CERTIFICATE----- diff --git a/node_modules/node-gyp/test/fixtures/ca.crt b/node_modules/node-gyp/test/fixtures/ca.crt new file mode 100644 index 0000000..aaf9757 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/ca.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDZDCCAkwCCQCAzfCLqrJvuTANBgkqhkiG9w0BAQsFADB0MQswCQYDVQQGEwJV +UzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsMCG5vZGUt +Z3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEGJ1aWxkQG5v +ZGVqcy5vcmcwHhcNMTkwNjIyMDYyMjMzWhcNMjIwNDExMDYyMjMzWjB0MQswCQYD +VQQGEwJVUzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsM +CG5vZGUtZ3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEGJ1 +aWxkQG5vZGVqcy5vcmcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDS 
+CHjvtVW4HdbbUwZ/ZV9s6U4x0KSoyNQrsCZjB8kRpFPe50DS5mfmu2SNBGYKRgzk +4QEEwFB9N2o8YTWsCefSRl6ti4ToPZqulU4hhRKYrEGtMJcRzi3IN7s200JaO3UH +01Su8ruO0NESb5zEU1Ykfh8Lub8TGEAINmgI61d/5d5Aq3kDjUHQJt1Ekw03Ylnu +juQyCGZxLxnngu0mIvwzyL/UeeUgsfQLzvppUk6In7tC1zzMjSPWo0c8qu6KvrW4 +bKYnkZkzdQifzbpO5ERMEsh5HWq0uHa6+dgcVHFvlhdqF4Uat87ygNplVf0txsZB +MNVqbz1k6xkZYMnzDoydAgMBAAEwDQYJKoZIhvcNAQELBQADggEBADspZGtKpWxy +J1W3FA1aeQhMvequQTcMRz4avkm4K4HfTdV1iVD4CbvdezBphouBlyLVLDFJP7RZ +m7dBJVgBwnxufoFLne8cR2MGqDRoySbFT1AtDJdxabE6Fg+QGUpgOQfeBJ6ANlSB ++qJ+HG4QA+Ouh5hxz9mgYwkIsMUABHiwENdZ/kT8Edw4xKgd3uH0YP4iiePMD66c +rzW3uXH5J1jnKgBlpxtog4P6dHCcoq+PZJ17W5bdXNyqC1LPzQqniZ2BNcEZ4ix3 +slAZAOWD1zLLGJhBPMV1fa0sHNBWc6oicr3YK/IDb0cp9kiLvnUu1pHy+LWQGqtC +rceJuGsnJEQ= +-----END CERTIFICATE----- diff --git a/node_modules/node-gyp/test/fixtures/nodedir/include/node/config.gypi b/node_modules/node-gyp/test/fixtures/nodedir/include/node/config.gypi new file mode 100644 index 0000000..e767534 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/nodedir/include/node/config.gypi @@ -0,0 +1,6 @@ +# Test configuration +{ + 'variables': { + 'build_with_electron': true + } +} diff --git a/node_modules/node-gyp/test/fixtures/server.crt b/node_modules/node-gyp/test/fixtures/server.crt new file mode 100644 index 0000000..5d0c440 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/server.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDYjCCAkoCCQCSlmGR7KzZGTANBgkqhkiG9w0BAQsFADB0MQswCQYDVQQGEwJV +UzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsMCG5vZGUt +Z3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHzAdBgkqhkiG9w0BCQEWEGJ1aWxkQG5v +ZGVqcy5vcmcwHhcNMTkwNjIyMDYyNTU1WhcNMjkwNjE5MDYyNTU1WjByMQswCQYD +VQQGEwJVUzELMAkGA1UECAwCQ0ExEDAOBgNVBAoMB05vZGUuanMxETAPBgNVBAsM +CG5vZGUtZ3lwMRIwEAYDVQQDDAlsb2NhbGhvc3QxHTAbBgkqhkiG9w0BCQEWDmJ1 +aWxkQGlvanMub3JnMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA6S1E +2WchgmbJYqCnpN7310ZgHjIOqeJe6MpSue2u6z6mTNd5izgvQNaANmn3xLFCS5zs +uZaTvdPYPkcmSQzb1YcZSUYnAxZifjYARc6kb5GSBl3q+O70ELyFrimXfZ4JI+bd +IG9KiHY17DlvZZZj/csGYVWWg0mkeH3O5LPX6/DXQVh/9+gZ02/cdIBCAtZHQwqx +7tF/qZA/kD4GZNFpU1DYHzf9H6g9htoCqmNHQWrV2T9yFybt6mbZp9kglBmyKYCc +7hmQnb7N/mHn1yIuwhBsirCJTfKH86gN81u8M3+SVHA2VUHDllcNhpDWlmInXA+I +tHdGZHCp95ohqpCPgQIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQCdvYj6CD0ZLwT2 +3t1r+deC3TJuHlNVSeKeT7wIfFnh2FW5riGV0q/w6eXPLTHjuiS6YmpAAbdNUgX/ +sq64FqI2RLpX6pgY5yB0SKopMcJxMLKqmF4zHpIHxtYN5EmN3PR0vehneBR/nZ2T +3ikvWD5JeXlm7Dfw+tjijdxM/sEoDWErGup4mMKMd1s5s830p+ITJUa50d0DLFdH +mqPSbUZF8mMPwGJd+nu1Ht3gTLtK7+gYJgGtXMJmGC0Qg77EJHDB2NbotgDGNmSU +1H9BpAeFHHIcbh2Rr7kkTvnh/c03vFe+CsDZmezcmRpRzW1fKj3YbfqBxU4XwJrL +a5T/N9xU +-----END CERTIFICATE----- diff --git a/node_modules/node-gyp/test/fixtures/server.key b/node_modules/node-gyp/test/fixtures/server.key new file mode 100644 index 0000000..a844739 --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/server.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEA6S1E2WchgmbJYqCnpN7310ZgHjIOqeJe6MpSue2u6z6mTNd5 +izgvQNaANmn3xLFCS5zsuZaTvdPYPkcmSQzb1YcZSUYnAxZifjYARc6kb5GSBl3q ++O70ELyFrimXfZ4JI+bdIG9KiHY17DlvZZZj/csGYVWWg0mkeH3O5LPX6/DXQVh/ +9+gZ02/cdIBCAtZHQwqx7tF/qZA/kD4GZNFpU1DYHzf9H6g9htoCqmNHQWrV2T9y +Fybt6mbZp9kglBmyKYCc7hmQnb7N/mHn1yIuwhBsirCJTfKH86gN81u8M3+SVHA2 +VUHDllcNhpDWlmInXA+ItHdGZHCp95ohqpCPgQIDAQABAoIBABW8R4ewalo6dJlB ++n6O3jFt+PW3mtBRLqGqgm2cb0q0a1IMX+MPWLBFjmwEErl+AH0F4rcmBx2Ryr17 +amEy1qcf0caXyHksNAApznqzWXag7iizxnxv4cZRnHBwphNqkNWM5p3oYd04j6w2 +amDg1O9KZozaKo6QZclpiMiezwjKG+PVZLT8p7afswjv+yDWPDByhlcGiye9QD1T 
+VuZ0QCoXp6N/8JxW0gdkLp9NqFvGeGFzJ5h6L+d7A6BWw8akXrBRHHcKkyvVYBfd +myhSzSK4FPFMaxaEY/65FlVSyAO6ezGm3Umx4g7mkFjLdwKWaIOjkBkPeFgl3Pp4 +7Lo5X3UCgYEA/FrrIwmEU5ayulBVScEMKeavu5eNY4r0Sqbpov2oyTdYe8G49Pzy +ryMXfunY43moLKpajGwgTKRGvdqFtK08AAkaCssiAPkP3rZuZvMTF4sLo/vlWrjP +3er+tUqj22BzXi5XV0BAvH8Y3TL8KQ3he/8JxDvkC811/DQ9Y/Da3U8CgYEA7Itw +UM37URma08Bj9VTMoL9ZCyURewX+ZLDb2+O8sXGXJs28i1RkE6PTBlnRmedn+Jjk +byzQ5Cs5wA5uMbhYTA7kgXOs1bvgQqmlLmyL6FfHkucoMhr2Di7VeGf4OxE26JZ8 +JdY4+1MOyI3A2rR8WU+GmHxy0ay4K2xe6W0vsi8CgYBoGLEKIPDe8jkDtgOYivOT +jT9MaLXALB+dc8DIpU4swpHTaxP6qyUIrbcReTEolJSU6Ci16BxiwRkVU8D3yMYJ +VbfSX/zE3fh37FUaToa/nXHN0SjJBZdpeXhcHE//PIgaf48zxKNvnhYJmPB/luQ+ +m/PRaMsnOzfCM2JniYEe7QKBgGwjnxhB4tgDtaWCue/pcZc3gzS2IJS2e8N6mzie +l6Ajhu+FdOHZldrotUuc+la61OxwsVYmDeWR4VftAPGYDj3PPSX1RRl9R5wSRGLB +2wBASQvew6CMdNqtDIh8N56BUzHnwh/mHKzBHuwO6hDSHFsUITtLAY7bwGKRq55Z +fUmfAoGBANOYFyoJoDLcl+Jd750lyqfCifcGtkRdmZMtrPXaYnD8ZGme9vz1vsK/ +4iUkV3mi7Z9s1LXMa/tPPfKdVhCM1PXost3/si0+u1Bz5yKqEPXlyy2ltpIVyGu8 +yiy7y75asp8Iii/1cgtwyp9+VeSif8wJ+MHQoGdGxvAQP80R3EjF +-----END RSA PRIVATE KEY----- diff --git a/node_modules/node-gyp/test/fixtures/test-charmap.py b/node_modules/node-gyp/test/fixtures/test-charmap.py new file mode 100644 index 0000000..63aa77b --- /dev/null +++ b/node_modules/node-gyp/test/fixtures/test-charmap.py @@ -0,0 +1,31 @@ +import sys +import locale + +try: + reload(sys) +except NameError: # Python 3 + pass + + +def main(): + encoding = locale.getdefaultlocale()[1] + if not encoding: + return False + + try: + sys.setdefaultencoding(encoding) + except AttributeError: # Python 3 + pass + + textmap = { + "cp936": "\u4e2d\u6587", + "cp1252": "Lat\u012Bna", + "cp932": "\u306b\u307b\u3093\u3054", + } + if encoding in textmap: + print(textmap[encoding]) + return True + + +if __name__ == "__main__": + print(main()) diff --git a/node_modules/node-gyp/test/process-exec-sync.js b/node_modules/node-gyp/test/process-exec-sync.js new file mode 100644 index 0000000..21763bc --- /dev/null +++ b/node_modules/node-gyp/test/process-exec-sync.js @@ -0,0 +1,140 @@ +'use strict' + +const fs = require('graceful-fs') +const childProcess = require('child_process') + +function startsWith (str, search, pos) { + if (String.prototype.startsWith) { + return str.startsWith(search, pos) + } + + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search +} + +function processExecSync (file, args, options) { + var child, error, timeout, tmpdir, command + command = makeCommand(file, args) + + /* + this function emulates child_process.execSync for legacy node <= 0.10.x + derived from https://github.com/gvarsanyi/sync-exec/blob/master/js/sync-exec.js + */ + + options = options || {} + // init timeout + timeout = Date.now() + options.timeout + // init tmpdir + var osTempBase = '/tmp' + var os = determineOS() + osTempBase = '/tmp' + + if (process.env.TMP) { + osTempBase = process.env.TMP + } + + if (osTempBase[osTempBase.length - 1] !== '/') { + osTempBase += '/' + } + + tmpdir = osTempBase + 'processExecSync.' + Date.now() + Math.random() + fs.mkdirSync(tmpdir) + + // init command + if (os === 'linux') { + command = '(' + command + ' > ' + tmpdir + '/stdout 2> ' + tmpdir + + '/stderr); echo $? 
> ' + tmpdir + '/status' + } else { + command = '(' + command + ' > ' + tmpdir + '/stdout 2> ' + tmpdir + + '/stderr) | echo %errorlevel% > ' + tmpdir + '/status | exit' + } + + // init child + child = childProcess.exec(command, options) + + var maxTry = 100000 // increases the test time by 6 seconds on win-2016-node-0.10 + var tryCount = 0 + while (tryCount < maxTry) { + try { + var x = fs.readFileSync(tmpdir + '/status') + if (x.toString() === '0') { + break + } + } catch (ignore) {} + tryCount++ + if (Date.now() > timeout) { + error = child + break + } + } + + ['stdout', 'stderr', 'status'].forEach(function (file) { + child[file] = fs.readFileSync(tmpdir + '/' + file, options.encoding) + setTimeout(unlinkFile, 500, tmpdir + '/' + file) + }) + + child.status = Number(child.status) + if (child.status !== 0) { + error = child + } + + try { + fs.rmdirSync(tmpdir) + } catch (ignore) {} + if (error) { + throw error + } + return child.stdout +} + +function makeCommand (file, args) { + var command, quote + command = file + if (args.length > 0) { + for (var i in args) { + command = command + ' ' + if (args[i][0] === '-') { + command = command + args[i] + } else { + if (!quote) { + command = command + '"' + quote = true + } + command = command + args[i] + if (quote) { + if (args.length === (parseInt(i) + 1)) { + command = command + '"' + } + } + } + } + } + return command +} + +function determineOS () { + var os = '' + var tmpVar = '' + if (process.env.OSTYPE) { + tmpVar = process.env.OSTYPE + } else if (process.env.OS) { + tmpVar = process.env.OS + } else { + // default is linux + tmpVar = 'linux' + } + + if (startsWith(tmpVar, 'linux')) { + os = 'linux' + } + if (startsWith(tmpVar, 'win')) { + os = 'win' + } + + return os +} + +function unlinkFile (file) { + fs.unlinkSync(file) +} + +module.exports = processExecSync diff --git a/node_modules/node-gyp/test/simple-proxy.js b/node_modules/node-gyp/test/simple-proxy.js new file mode 100644 index 0000000..cb0dfcf --- /dev/null +++ b/node_modules/node-gyp/test/simple-proxy.js @@ -0,0 +1,27 @@ +'use strict' + +const http = require('http') +const https = require('https') +const server = http.createServer(handler) +const port = +process.argv[2] +const prefix = process.argv[3] +const upstream = process.argv[4] +var calls = 0 + +server.listen(port) + +function handler (req, res) { + if (req.url.indexOf(prefix) !== 0) { + throw new Error('request url [' + req.url + '] does not start with [' + prefix + ']') + } + + var upstreamUrl = upstream + req.url.substring(prefix.length) + https.get(upstreamUrl, function (ures) { + ures.on('end', function () { + if (++calls === 2) { + server.close() + } + }) + ures.pipe(res) + }) +} diff --git a/node_modules/node-gyp/test/test-addon.js b/node_modules/node-gyp/test/test-addon.js new file mode 100644 index 0000000..f79eff7 --- /dev/null +++ b/node_modules/node-gyp/test/test-addon.js @@ -0,0 +1,150 @@ +'use strict' + +const test = require('tap').test +const path = require('path') +const fs = require('graceful-fs') +const childProcess = require('child_process') +const os = require('os') +const addonPath = path.resolve(__dirname, 'node_modules', 'hello_world') +const nodeGyp = path.resolve(__dirname, '..', 'bin', 'node-gyp.js') +const execFileSync = childProcess.execFileSync || require('./process-exec-sync') +const execFile = childProcess.execFile + +function runHello (hostProcess) { + if (!hostProcess) { + hostProcess = process.execPath + } + var testCode = "console.log(require('hello_world').hello())" + return 
execFileSync(hostProcess, ['-e', testCode], { cwd: __dirname }).toString() +} + +function getEncoding () { + var code = 'import locale;print(locale.getdefaultlocale()[1])' + return execFileSync('python', ['-c', code]).toString().trim() +} + +function checkCharmapValid () { + var data + try { + data = execFileSync('python', ['fixtures/test-charmap.py'], + { cwd: __dirname }) + } catch (err) { + return false + } + var lines = data.toString().trim().split('\n') + return lines.pop() === 'True' +} + +test('build simple addon', function (t) { + t.plan(3) + + // Set the loglevel otherwise the output disappears when run via 'npm test' + var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] + var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { + var logLines = stderr.toString().trim().split(/\r?\n/) + var lastLine = logLines[logLines.length - 1] + t.strictEqual(err, null) + t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') + t.strictEqual(runHello().trim(), 'world') + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') +}) + +test('build simple addon in path with non-ascii characters', function (t) { + t.plan(1) + + if (!checkCharmapValid()) { + return t.skip('python console app can\'t encode non-ascii character.') + } + + var testDirNames = { + cp936: '文件夹', + cp1252: 'Latīna', + cp932: 'フォルダ' + } + // Select non-ascii characters by current encoding + var testDirName = testDirNames[getEncoding()] + // If encoding is UTF-8 or other then no need to test + if (!testDirName) { + return t.skip('no need to test') + } + + t.plan(3) + + var data + var configPath = path.join(addonPath, 'build', 'config.gypi') + try { + data = fs.readFileSync(configPath, 'utf8') + } catch (err) { + t.error(err) + return + } + var config = JSON.parse(data.replace(/#.+\n/, '')) + var nodeDir = config.variables.nodedir + var testNodeDir = path.join(addonPath, testDirName) + // Create symbol link to path with non-ascii characters + try { + fs.symlinkSync(nodeDir, testNodeDir, 'dir') + } catch (err) { + switch (err.code) { + case 'EEXIST': break + case 'EPERM': + t.error(err, 'Please try to running console as an administrator') + return + default: + t.error(err) + return + } + } + + var cmd = [ + nodeGyp, + 'rebuild', + '-C', + addonPath, + '--loglevel=verbose', + '-nodedir=' + testNodeDir + ] + var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { + try { + fs.unlink(testNodeDir) + } catch (err) { + t.error(err) + } + + var logLines = stderr.toString().trim().split(/\r?\n/) + var lastLine = logLines[logLines.length - 1] + t.strictEqual(err, null) + t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') + t.strictEqual(runHello().trim(), 'world') + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') +}) + +test('addon works with renamed host executable', function (t) { + // No `fs.copyFileSync` before node8. 
+ if (process.version.substr(1).split('.')[0] < 8) { + t.skip('skipping test for old node version') + t.end() + return + } + + t.plan(3) + + var notNodePath = path.join(os.tmpdir(), 'notnode' + path.extname(process.execPath)) + fs.copyFileSync(process.execPath, notNodePath) + + var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] + var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { + var logLines = stderr.toString().trim().split(/\r?\n/) + var lastLine = logLines[logLines.length - 1] + t.strictEqual(err, null) + t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') + t.strictEqual(runHello(notNodePath).trim(), 'world') + fs.unlinkSync(notNodePath) + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') +}) diff --git a/node_modules/node-gyp/test/test-configure-python.js b/node_modules/node-gyp/test/test-configure-python.js new file mode 100644 index 0000000..4290e7a --- /dev/null +++ b/node_modules/node-gyp/test/test-configure-python.js @@ -0,0 +1,82 @@ +'use strict' + +const test = require('tap').test +const path = require('path') +const devDir = require('./common').devDir() +const gyp = require('../lib/node-gyp') +const requireInject = require('require-inject') +const configure = requireInject('../lib/configure', { + 'graceful-fs': { + openSync: function () { return 0 }, + closeSync: function () { }, + writeFile: function (file, data, cb) { cb() }, + stat: function (file, cb) { cb(null, {}) }, + mkdir: function (dir, options, cb) { cb() }, + promises: { + writeFile: function (file, data) { return Promise.resolve(null) } + } + } +}) + +const EXPECTED_PYPATH = path.join(__dirname, '..', 'gyp', 'pylib') +const SEPARATOR = process.platform === 'win32' ? ';' : ':' +const SPAWN_RESULT = { on: function () { } } + +require('npmlog').level = 'warn' + +test('configure PYTHONPATH with no existing env', function (t) { + t.plan(1) + + delete process.env.PYTHONPATH + + var prog = gyp() + prog.parseArgv([]) + prog.spawn = function () { + t.equal(process.env.PYTHONPATH, EXPECTED_PYPATH) + return SPAWN_RESULT + } + prog.devDir = devDir + configure(prog, [], t.fail) +}) + +test('configure PYTHONPATH with existing env of one dir', function (t) { + t.plan(2) + + var existingPath = path.join('a', 'b') + process.env.PYTHONPATH = existingPath + + var prog = gyp() + prog.parseArgv([]) + prog.spawn = function () { + t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) + + var dirs = process.env.PYTHONPATH.split(SEPARATOR) + t.deepEqual(dirs, [EXPECTED_PYPATH, existingPath]) + + return SPAWN_RESULT + } + prog.devDir = devDir + configure(prog, [], t.fail) +}) + +test('configure PYTHONPATH with existing env of multiple dirs', function (t) { + t.plan(2) + + var pythonDir1 = path.join('a', 'b') + var pythonDir2 = path.join('b', 'c') + var existingPath = [pythonDir1, pythonDir2].join(SEPARATOR) + process.env.PYTHONPATH = existingPath + + var prog = gyp() + prog.parseArgv([]) + prog.spawn = function () { + t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) + + var dirs = process.env.PYTHONPATH.split(SEPARATOR) + t.deepEqual(dirs, [EXPECTED_PYPATH, pythonDir1, pythonDir2]) + + return SPAWN_RESULT + } + prog.devDir = devDir + configure(prog, [], t.fail) +}) diff --git a/node_modules/node-gyp/test/test-create-config-gypi.js b/node_modules/node-gyp/test/test-create-config-gypi.js new file mode 100644 index 0000000..eeac73f --- /dev/null +++ b/node_modules/node-gyp/test/test-create-config-gypi.js @@ 
-0,0 +1,70 @@ +'use strict' + +const path = require('path') +const { test } = require('tap') +const gyp = require('../lib/node-gyp') +const createConfigGypi = require('../lib/create-config-gypi') +const { parseConfigGypi, getCurrentConfigGypi } = createConfigGypi.test + +test('config.gypi with no options', async function (t) { + t.plan(2) + + const prog = gyp() + prog.parseArgv([]) + + const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) + t.equal(config.target_defaults.default_configuration, 'Release') + t.equal(config.variables.target_arch, process.arch) +}) + +test('config.gypi with --debug', async function (t) { + t.plan(1) + + const prog = gyp() + prog.parseArgv(['_', '_', '--debug']) + + const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) + t.equal(config.target_defaults.default_configuration, 'Debug') +}) + +test('config.gypi with custom options', async function (t) { + t.plan(1) + + const prog = gyp() + prog.parseArgv(['_', '_', '--shared-libxml2']) + + const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) + t.equal(config.variables.shared_libxml2, true) +}) + +test('config.gypi with nodedir', async function (t) { + t.plan(1) + + const nodeDir = path.join(__dirname, 'fixtures', 'nodedir') + + const prog = gyp() + prog.parseArgv(['_', '_', `--nodedir=${nodeDir}`]) + + const config = await getCurrentConfigGypi({ gyp: prog, nodeDir, vsInfo: {} }) + t.equal(config.variables.build_with_electron, true) +}) + +test('config.gypi with --force-process-config', async function (t) { + t.plan(1) + + const nodeDir = path.join(__dirname, 'fixtures', 'nodedir') + + const prog = gyp() + prog.parseArgv(['_', '_', '--force-process-config', `--nodedir=${nodeDir}`]) + + const config = await getCurrentConfigGypi({ gyp: prog, nodeDir, vsInfo: {} }) + t.equal(config.variables.build_with_electron, undefined) +}) + +test('config.gypi parsing', function (t) { + t.plan(1) + + const str = "# Some comments\n{'variables': {'multiline': 'A'\n'B'}}" + const config = parseConfigGypi(str) + t.deepEqual(config, { variables: { multiline: 'AB' } }) +}) diff --git a/node_modules/node-gyp/test/test-download.js b/node_modules/node-gyp/test/test-download.js new file mode 100644 index 0000000..71a3c0d --- /dev/null +++ b/node_modules/node-gyp/test/test-download.js @@ -0,0 +1,207 @@ +'use strict' + +const { test } = require('tap') +const fs = require('fs') +const path = require('path') +const util = require('util') +const http = require('http') +const https = require('https') +const install = require('../lib/install') +const semver = require('semver') +const devDir = require('./common').devDir() +const rimraf = require('rimraf') +const gyp = require('../lib/node-gyp') +const log = require('npmlog') + +log.level = 'warn' + +test('download over http', async (t) => { + t.plan(2) + + const server = http.createServer((req, res) => { + t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('ok') + }) + + t.tearDown(() => new Promise((resolve) => server.close(resolve))) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + const gyp = { + opts: {}, + version: '42' + } + const url = `http://${host}:${port}` + const res = await install.test.download(gyp, url) + t.strictEqual(await res.text(), 'ok') +}) + +test('download over https with custom ca', async (t) => { + t.plan(3) + + const cafile = path.join(__dirname, '/fixtures/ca.crt') + const [cert, key, ca] = await 
Promise.all([ + fs.promises.readFile(path.join(__dirname, 'fixtures/server.crt'), 'utf8'), + fs.promises.readFile(path.join(__dirname, 'fixtures/server.key'), 'utf8'), + install.test.readCAFile(cafile) + ]) + + t.strictEqual(ca.length, 1) + + const options = { ca: ca, cert: cert, key: key } + const server = https.createServer(options, (req, res) => { + t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('ok') + }) + + t.tearDown(() => new Promise((resolve) => server.close(resolve))) + + server.on('clientError', (err) => { throw err }) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + const gyp = { + opts: { cafile }, + version: '42' + } + const url = `https://${host}:${port}` + const res = await install.test.download(gyp, url) + t.strictEqual(await res.text(), 'ok') +}) + +test('download over http with proxy', async (t) => { + t.plan(2) + + const server = http.createServer((_, res) => { + res.end('ok') + }) + + const pserver = http.createServer((req, res) => { + t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('proxy ok') + }) + + t.tearDown(() => Promise.all([ + new Promise((resolve) => server.close(resolve)), + new Promise((resolve) => pserver.close(resolve)) + ])) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + await new Promise((resolve) => pserver.listen(port + 1, host, resolve)) + const gyp = { + opts: { + proxy: `http://${host}:${port + 1}`, + noproxy: 'bad' + }, + version: '42' + } + const url = `http://${host}:${port}` + const res = await install.test.download(gyp, url) + t.strictEqual(await res.text(), 'proxy ok') +}) + +test('download over http with noproxy', async (t) => { + t.plan(2) + + const server = http.createServer((req, res) => { + t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('ok') + }) + + const pserver = http.createServer((_, res) => { + res.end('proxy ok') + }) + + t.tearDown(() => Promise.all([ + new Promise((resolve) => server.close(resolve)), + new Promise((resolve) => pserver.close(resolve)) + ])) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + await new Promise((resolve) => pserver.listen(port + 1, host, resolve)) + const gyp = { + opts: { + proxy: `http://${host}:${port + 1}`, + noproxy: host + }, + version: '42' + } + const url = `http://${host}:${port}` + const res = await install.test.download(gyp, url) + t.strictEqual(await res.text(), 'ok') +}) + +test('download with missing cafile', async (t) => { + t.plan(1) + const gyp = { + opts: { cafile: 'no.such.file' } + } + try { + await install.test.download(gyp, {}, 'http://bad/') + } catch (e) { + t.ok(/no.such.file/.test(e.message)) + } +}) + +test('check certificate splitting', async (t) => { + const cas = await install.test.readCAFile(path.join(__dirname, 'fixtures/ca-bundle.crt')) + t.plan(2) + t.strictEqual(cas.length, 2) + t.notStrictEqual(cas[0], cas[1]) +}) + +// only run this test if we are running a version of Node with predictable version path behavior + +test('download headers (actual)', async (t) => { + if (process.env.FAST_TEST || + process.release.name !== 'node' || + semver.prerelease(process.version) !== null || + semver.satisfies(process.version, '<10')) { + return t.skip('Skipping actual download of headers due to 
test environment configuration') + } + + t.plan(12) + + const expectedDir = path.join(devDir, process.version.replace(/^v/, '')) + await util.promisify(rimraf)(expectedDir) + + const prog = gyp() + prog.parseArgv([]) + prog.devDir = devDir + log.level = 'warn' + await util.promisify(install)(prog, []) + + const data = await fs.promises.readFile(path.join(expectedDir, 'installVersion'), 'utf8') + t.strictEqual(data, '9\n', 'correct installVersion') + + const list = await fs.promises.readdir(path.join(expectedDir, 'include/node')) + t.ok(list.includes('common.gypi')) + t.ok(list.includes('config.gypi')) + t.ok(list.includes('node.h')) + t.ok(list.includes('node_version.h')) + t.ok(list.includes('openssl')) + t.ok(list.includes('uv')) + t.ok(list.includes('uv.h')) + t.ok(list.includes('v8-platform.h')) + t.ok(list.includes('v8.h')) + t.ok(list.includes('zlib.h')) + + const lines = (await fs.promises.readFile(path.join(expectedDir, 'include/node/node_version.h'), 'utf8')).split('\n') + + // extract the 3 version parts from the defines to build a valid version string + // and check them against our current env version + const version = ['major', 'minor', 'patch'].reduce((version, type) => { + const re = new RegExp(`^#define\\sNODE_${type.toUpperCase()}_VERSION`) + const line = lines.find((l) => re.test(l)) + const i = line ? parseInt(line.replace(/^[^0-9]+([0-9]+).*$/, '$1'), 10) : 'ERROR' + return `${version}${type !== 'major' ? '.' : 'v'}${i}` + }, '') + + t.strictEqual(version, process.version) +}) diff --git a/node_modules/node-gyp/test/test-find-accessible-sync.js b/node_modules/node-gyp/test/test-find-accessible-sync.js new file mode 100644 index 0000000..0a2e584 --- /dev/null +++ b/node_modules/node-gyp/test/test-find-accessible-sync.js @@ -0,0 +1,84 @@ +'use strict' + +const test = require('tap').test +const path = require('path') +const requireInject = require('require-inject') +const configure = requireInject('../lib/configure', { + 'graceful-fs': { + closeSync: function () { return undefined }, + openSync: function (path) { + if (readableFiles.some(function (f) { return f === path })) { + return 0 + } else { + var error = new Error('ENOENT - not found') + throw error + } + } + } +}) + +const dir = path.sep + 'testdir' +const readableFile = 'readable_file' +const anotherReadableFile = 'another_readable_file' +const readableFileInDir = 'somedir' + path.sep + readableFile +const readableFiles = [ + path.resolve(dir, readableFile), + path.resolve(dir, anotherReadableFile), + path.resolve(dir, readableFileInDir) +] + +test('find accessible - empty array', function (t) { + t.plan(1) + + var candidates = [] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, undefined) +}) + +test('find accessible - single item array, readable', function (t) { + t.plan(1) + + var candidates = [readableFile] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, path.resolve(dir, readableFile)) +}) + +test('find accessible - single item array, readable in subdir', function (t) { + t.plan(1) + + var candidates = [readableFileInDir] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, path.resolve(dir, readableFileInDir)) +}) + +test('find accessible - single item array, unreadable', function (t) { + t.plan(1) + + var candidates = ['unreadable_file'] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, undefined) +}) + +test('find 
accessible - multi item array, no matches', function (t) { + t.plan(1) + + var candidates = ['non_existent_file', 'unreadable_file'] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, undefined) +}) + +test('find accessible - multi item array, single match', function (t) { + t.plan(1) + + var candidates = ['non_existent_file', readableFile] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, path.resolve(dir, readableFile)) +}) + +test('find accessible - multi item array, return first match', function (t) { + t.plan(1) + + var candidates = ['non_existent_file', anotherReadableFile, readableFile] + var found = configure.test.findAccessibleSync('test', dir, candidates) + t.strictEqual(found, path.resolve(dir, anotherReadableFile)) +}) diff --git a/node_modules/node-gyp/test/test-find-node-directory.js b/node_modules/node-gyp/test/test-find-node-directory.js new file mode 100644 index 0000000..f1380d1 --- /dev/null +++ b/node_modules/node-gyp/test/test-find-node-directory.js @@ -0,0 +1,119 @@ +'use strict' + +const test = require('tap').test +const path = require('path') +const findNodeDirectory = require('../lib/find-node-directory') + +const platforms = ['darwin', 'freebsd', 'linux', 'sunos', 'win32', 'aix'] + +// we should find the directory based on the directory +// the script is running in and it should match the layout +// in a build tree where npm is installed in +// .... /deps/npm +test('test find-node-directory - node install', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + t.equal( + findNodeDirectory('/x/deps/npm/node_modules/node-gyp/lib', processObj), + path.join('/x')) + } +}) + +// we should find the directory based on the directory +// the script is running in and it should match the layout +// in an installed tree where npm is installed in +// .... 
/lib/node_modules/npm or .../node_modules/npm +// depending on the platform +test('test find-node-directory - node build', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + if (platforms[next] === 'win32') { + t.equal( + findNodeDirectory('/y/node_modules/npm/node_modules/node-gyp/lib', + processObj), path.join('/y')) + } else { + t.equal( + findNodeDirectory('/y/lib/node_modules/npm/node_modules/node-gyp/lib', + processObj), path.join('/y')) + } + } +}) + +// we should find the directory based on the execPath +// for node and match because it was in the bin directory +test('test find-node-directory - node in bin directory', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + t.equal( + findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), + path.join('/x/y')) + } +}) + +// we should find the directory based on the execPath +// for node and match because it was in the Release directory +test('test find-node-directory - node in build release dir', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj + if (platforms[next] === 'win32') { + processObj = { execPath: '/x/y/Release/node', platform: platforms[next] } + } else { + processObj = { + execPath: '/x/y/out/Release/node', + platform: platforms[next] + } + } + + t.equal( + findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), + path.join('/x/y')) + } +}) + +// we should find the directory based on the execPath +// for node and match because it was in the Debug directory +test('test find-node-directory - node in Debug release dir', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj + if (platforms[next] === 'win32') { + processObj = { execPath: '/a/b/Debug/node', platform: platforms[next] } + } else { + processObj = { execPath: '/a/b/out/Debug/node', platform: platforms[next] } + } + + t.equal( + findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), + path.join('/a/b')) + } +}) + +// we should not find it as it will not match based on the execPath nor +// the directory from which the script is running +test('test find-node-directory - not found', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/z/y', platform: platforms[next] } + t.equal(findNodeDirectory('/a/b/c/d', processObj), '') + } +}) + +// we should find the directory based on the directory +// the script is running in and it should match the layout +// in a build tree where npm is installed in +// .... 
/deps/npm +// same test as above but make sure additional directory entries +// don't cause an issue +test('test find-node-directory - node install', function (t) { + t.plan(platforms.length) + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + t.equal( + findNodeDirectory('/x/y/z/a/b/c/deps/npm/node_modules/node-gyp/lib', + processObj), path.join('/x/y/z/a/b/c')) + } +}) diff --git a/node_modules/node-gyp/test/test-find-python.js b/node_modules/node-gyp/test/test-find-python.js new file mode 100644 index 0000000..67d0b26 --- /dev/null +++ b/node_modules/node-gyp/test/test-find-python.js @@ -0,0 +1,226 @@ +'use strict' + +delete process.env.PYTHON + +const test = require('tap').test +const findPython = require('../lib/find-python') +const execFile = require('child_process').execFile +const PythonFinder = findPython.test.PythonFinder + +require('npmlog').level = 'warn' + +test('find python', function (t) { + t.plan(4) + + findPython.test.findPython(null, function (err, found) { + t.strictEqual(err, null) + var proc = execFile(found, ['-V'], function (err, stdout, stderr) { + t.strictEqual(err, null) + t.ok(/Python 3/.test(stdout)) + t.strictEqual(stderr, '') + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') + }) +}) + +function poison (object, property) { + function fail () { + console.error(Error(`Property ${property} should not have been accessed.`)) + process.abort() + } + var descriptor = { + configurable: false, + enumerable: false, + get: fail, + set: fail + } + Object.defineProperty(object, property, descriptor) +} + +function TestPythonFinder () { + PythonFinder.apply(this, arguments) +} +TestPythonFinder.prototype = Object.create(PythonFinder.prototype) +// Silence npmlog - remove for debugging +TestPythonFinder.prototype.log = { + silly: () => {}, + verbose: () => {}, + info: () => {}, + warn: () => {}, + error: () => {} +} +delete TestPythonFinder.prototype.env.NODE_GYP_FORCE_PYTHON + +test('find python - python', function (t) { + t.plan(6) + + var f = new TestPythonFinder('python', done) + f.execFile = function (program, args, opts, cb) { + f.execFile = function (program, args, opts, cb) { + poison(f, 'execFile') + t.strictEqual(program, '/path/python') + t.ok(/sys\.version_info/.test(args[1])) + cb(null, '3.9.1') + } + t.strictEqual(program, + process.platform === 'win32' ? 
'"python"' : 'python') + t.ok(/sys\.executable/.test(args[1])) + cb(null, '/path/python') + } + f.findPython() + + function done (err, python) { + t.strictEqual(err, null) + t.strictEqual(python, '/path/python') + } +}) + +test('find python - python too old', function (t) { + t.plan(2) + + var f = new TestPythonFinder(null, done) + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(null, '/path/python') + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(null, '2.3.4') + } else { + t.fail() + } + } + f.findPython() + + function done (err) { + t.ok(/Could not find any Python/.test(err)) + t.ok(/not supported/i.test(f.errorLog)) + } +}) + +test('find python - no python', function (t) { + t.plan(2) + + var f = new TestPythonFinder(null, done) + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(new Error('not a Python executable')) + } else { + t.fail() + } + } + f.findPython() + + function done (err) { + t.ok(/Could not find any Python/.test(err)) + t.ok(/not in PATH/.test(f.errorLog)) + } +}) + +test('find python - no python2, no python, unix', function (t) { + t.plan(2) + + var f = new TestPythonFinder(null, done) + f.checkPyLauncher = t.fail + f.win = false + + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else { + t.fail() + } + } + f.findPython() + + function done (err) { + t.ok(/Could not find any Python/.test(err)) + t.ok(/not in PATH/.test(f.errorLog)) + } +}) + +test('find python - no python, use python launcher', function (t) { + t.plan(4) + + var f = new TestPythonFinder(null, done) + f.win = true + + f.execFile = function (program, args, opts, cb) { + if (program === 'py.exe') { + t.notEqual(args.indexOf('-3'), -1) + t.notEqual(args.indexOf('-c'), -1) + return cb(null, 'Z:\\snake.exe') + } + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (f.winDefaultLocations.includes(program)) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + if (program === 'Z:\\snake.exe') { + cb(null, '3.9.0') + } else { + t.fail() + } + } else { + t.fail() + } + } + f.findPython() + + function done (err, python) { + t.strictEqual(err, null) + t.strictEqual(python, 'Z:\\snake.exe') + } +}) + +test('find python - no python, no python launcher, good guess', function (t) { + t.plan(2) + + var f = new TestPythonFinder(null, done) + f.win = true + const expectedProgram = f.winDefaultLocations[0] + + f.execFile = function (program, args, opts, cb) { + if (program === 'py.exe') { + return cb(new Error('not found')) + } + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (program === expectedProgram && + /sys\.version_info/.test(args[args.length - 1])) { + cb(null, '3.7.3') + } else { + t.fail() + } + } + f.findPython() + + function done (err, python) { + t.strictEqual(err, null) + t.ok(python === expectedProgram) + } +}) + +test('find python - no python, no python launcher, bad guess', function (t) { + t.plan(2) + + var f = new TestPythonFinder(null, done) + f.win = true + + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 
1])) { + cb(new Error('not a Python executable')) + } else { + t.fail() + } + } + f.findPython() + + function done (err) { + t.ok(/Could not find any Python/.test(err)) + t.ok(/not in PATH/.test(f.errorLog)) + } +}) diff --git a/node_modules/node-gyp/test/test-find-visualstudio.js b/node_modules/node-gyp/test/test-find-visualstudio.js new file mode 100644 index 0000000..1327cf8 --- /dev/null +++ b/node_modules/node-gyp/test/test-find-visualstudio.js @@ -0,0 +1,676 @@ +'use strict' + +const test = require('tap').test +const fs = require('fs') +const path = require('path') +const findVisualStudio = require('../lib/find-visualstudio') +const VisualStudioFinder = findVisualStudio.test.VisualStudioFinder + +const semverV1 = { major: 1, minor: 0, patch: 0 } + +delete process.env.VCINSTALLDIR + +function poison (object, property) { + function fail () { + console.error(Error(`Property ${property} should not have been accessed.`)) + process.abort() + } + var descriptor = { + configurable: false, + enumerable: false, + get: fail, + set: fail + } + Object.defineProperty(object, property, descriptor) +} + +function TestVisualStudioFinder () { VisualStudioFinder.apply(this, arguments) } +TestVisualStudioFinder.prototype = Object.create(VisualStudioFinder.prototype) +// Silence npmlog - remove for debugging +TestVisualStudioFinder.prototype.log = { + silly: () => {}, + verbose: () => {}, + info: () => {}, + warn: () => {}, + error: () => {} +} + +test('VS2013', function (t) { + t.plan(4) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\MSBuild12\\MSBuild.exe', + path: 'C:\\VS2013', + sdk: null, + toolset: 'v120', + version: '12.0', + versionMajor: 12, + versionMinor: 0, + versionYear: 2013 + }) + }) + + finder.findVisualStudio2017OrNewer = (cb) => { + finder.parseData(new Error(), '', '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + continue + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + t.pass(`expected search for registry value ${fullName}`) + return cb(null, 'C:\\VS2013\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': + t.pass(`expected search for registry value ${fullName}`) + return cb(null, 'C:\\MSBuild12\\') + default: + t.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } + finder.findVisualStudio() +}) + +test('VS2013 should not be found on new node versions', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder({ + major: 10, + minor: 0, + patch: 0 + }, null, (err, info) => { + t.ok(/find .* Visual Studio/i.test(err), 'expect error') + t.false(info, 'no data') + }) + + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + continue + default: + t.fail(`unexpected 
search for registry value ${fullName}`) + } + } + return cb(new Error()) + } + finder.findVisualStudio() +}) + +test('VS2015', function (t) { + t.plan(4) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\MSBuild14\\MSBuild.exe', + path: 'C:\\VS2015', + sdk: null, + toolset: 'v140', + version: '14.0', + versionMajor: 14, + versionMinor: 0, + versionYear: 2015 + }) + }) + + finder.findVisualStudio2017OrNewer = (cb) => { + finder.parseData(new Error(), '', '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + t.pass(`expected search for registry value ${fullName}`) + return cb(null, 'C:\\VS2015\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': + t.pass(`expected search for registry value ${fullName}`) + return cb(null, 'C:\\MSBuild14\\') + default: + t.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } + finder.findVisualStudio() +}) + +test('error from PowerShell', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(new Error(), '', '', (info) => { + t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('empty output from PowerShell', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, '', '', (info) => { + t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('output from PowerShell not JSON', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, 'AAAABBBB', '', (info) => { + t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('wrong JSON from PowerShell', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, '{}', '', (info) => { + t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('empty JSON from PowerShell', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, '[]', '', (info) => { + t.ok(/find .* Visual Studio/i.test(finder.errorLog[0]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('future version', function (t) { + t.plan(3) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + finder.parseData(null, JSON.stringify([{ + packages: [ + 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64', + 'Microsoft.VisualStudio.Component.Windows10SDK.17763', + 'Microsoft.VisualStudio.VC.MSBuild.Base' + ], + path: 'C:\\VS', + version: '9999.9999.9999.9999' + }]), '', (info) => { + t.ok(/unknown version/i.test(finder.errorLog[0]), 'expect error') + t.ok(/find .* Visual Studio/i.test(finder.errorLog[1]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('single unusable VS2017', function (t) { + t.plan(3) + + const finder = new TestVisualStudioFinder(semverV1, null, null) + + const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', (info) => { + 
t.ok(/checking/i.test(finder.errorLog[0]), 'expect error') + t.ok(/find .* Visual Studio/i.test(finder.errorLog[2]), 'expect error') + t.false(info, 'no data') + }) +}) + +test('minimal VS2017 Build Tools', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + + 'BuildTools\\MSBuild\\15.0\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools', + sdk: '10.0.17134.0', + toolset: 'v141', + version: '15.9.28307.665', + versionMajor: 15, + versionMinor: 9, + versionYear: 2017 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2017_BuildTools_minimal.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('VS2017 Community with C++ workload', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + + 'Community\\MSBuild\\15.0\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community', + sdk: '10.0.17763.0', + toolset: 'v141', + version: '15.9.28307.665', + versionMajor: 15, + versionMinor: 9, + versionYear: 2017 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2017_Community_workload.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('VS2017 Express', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' + + 'WDExpress\\MSBuild\\15.0\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\WDExpress', + sdk: '10.0.17763.0', + toolset: 'v141', + version: '15.9.28307.858', + versionMajor: 15, + versionMinor: 9, + versionYear: 2017 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', 'VS_2017_Express.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('VS2019 Preview with C++ workload', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + + 'Preview\\MSBuild\\Current\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Preview', + sdk: '10.0.17763.0', + toolset: 'v142', + version: '16.0.28608.199', + versionMajor: 16, + versionMinor: 0, + versionYear: 2019 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2019_Preview.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('minimal VS2019 Build Tools', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + 
t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + + 'BuildTools\\MSBuild\\Current\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools', + sdk: '10.0.17134.0', + toolset: 'v142', + version: '16.1.28922.388', + versionMajor: 16, + versionMinor: 1, + versionYear: 2019 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2019_BuildTools_minimal.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +test('VS2019 Community with C++ workload', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info, { + msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' + + 'Community\\MSBuild\\Current\\Bin\\MSBuild.exe', + path: + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community', + sdk: '10.0.17763.0', + toolset: 'v142', + version: '16.1.28922.388', + versionMajor: 16, + versionMinor: 1, + versionYear: 2019 + }) + }) + + poison(finder, 'regSearchKeys') + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', + 'VS_2019_Community_workload.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + finder.findVisualStudio() +}) + +function allVsVersions (t, finder) { + finder.findVisualStudio2017OrNewer = (cb) => { + const data0 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_Unusable.txt'))) + const data1 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_BuildTools_minimal.txt'))) + const data2 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_Community_workload.txt'))) + const data3 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2017_Express.txt'))) + const data4 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2019_Preview.txt'))) + const data5 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2019_BuildTools_minimal.txt'))) + const data6 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures', + 'VS_2019_Community_workload.txt'))) + const data = JSON.stringify(data0.concat(data1, data2, data3, data4, + data5, data6)) + finder.parseData(null, data, '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + continue + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + return cb(null, 'C:\\VS2013\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': + return cb(null, 'C:\\MSBuild12\\') + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + return cb(null, 'C:\\VS2015\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': + return cb(null, 'C:\\MSBuild14\\') + default: + t.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } +} + +test('fail when looking for invalid path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, 'AABB', (err, info) => { + t.ok(/find .* Visual 
Studio/i.test(err), 'expect error') + t.false(info, 'no data') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2013 by version number', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, '2013', (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2013) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2013 by installation path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2013', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, 'C:\\VS2013') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2015 by version number', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, '2015', (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2015) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2015 by installation path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, 'C:\\VS2015') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2017 by version number', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, '2017', (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2017) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2017 by installation path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2019 by version number', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, '2019', (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2019) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('look for VS2019 by installation path', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('msvs_version match should be case insensitive', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, + 'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('latest version should be found by default', function (t) { + t.plan(2) + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.versionYear, 2019) + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('run on a usable VS Command Prompt', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = 'C:\\VS2015\\VC' + // VSINSTALLDIR is not defined on Visual 
C++ Build Tools 2015 + delete process.env.VSINSTALLDIR + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, 'C:\\VS2015') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('VCINSTALLDIR match should be case insensitive', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = + 'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS\\VC' + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('run on an unusable VS Command Prompt', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildToolsUnusable\\VC' + + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + t.ok(/find .* Visual Studio/i.test(err), 'expect error') + t.false(info, 'no data') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('run on a VS Command Prompt with matching msvs_version', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = 'C:\\VS2015\\VC' + + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', + (err, info) => { + t.strictEqual(err, null) + t.deepEqual(info.path, 'C:\\VS2015') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) + +test('run on a VS Command Prompt with mismatched msvs_version', function (t) { + t.plan(2) + + process.env.VCINSTALLDIR = + 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools\\VC' + + const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015', + (err, info) => { + t.ok(/find .* Visual Studio/i.test(err), 'expect error') + t.false(info, 'no data') + }) + + allVsVersions(t, finder) + finder.findVisualStudio() +}) diff --git a/node_modules/node-gyp/test/test-install.js b/node_modules/node-gyp/test/test-install.js new file mode 100644 index 0000000..5039dc9 --- /dev/null +++ b/node_modules/node-gyp/test/test-install.js @@ -0,0 +1,46 @@ +'use strict' + +const { test } = require('tap') +const { test: { install } } = require('../lib/install') +const log = require('npmlog') + +log.level = 'error' // we expect a warning + +test('EACCES retry once', async (t) => { + t.plan(3) + + const fs = { + promises: { + stat (_) { + const err = new Error() + err.code = 'EACCES' + t.ok(true) + throw err + } + } + } + + const Gyp = { + devDir: __dirname, + opts: { + ensure: true + }, + commands: { + install (argv, cb) { + install(fs, Gyp, argv).then(cb, cb) + }, + remove (_, cb) { + cb() + } + } + } + + try { + await install(fs, Gyp, []) + } catch (err) { + t.ok(true) + if (/"pre" versions of node cannot be installed/.test(err.message)) { + t.ok(true) + } + } +}) diff --git a/node_modules/node-gyp/test/test-options.js b/node_modules/node-gyp/test/test-options.js new file mode 100644 index 0000000..b2ac62c --- /dev/null +++ b/node_modules/node-gyp/test/test-options.js @@ -0,0 +1,31 @@ +'use strict' + +const test = require('tap').test +const gyp = require('../lib/node-gyp') + +test('options in environment', (t) => { + t.plan(1) + + // `npm test` dumps a ton of npm_config_* variables in the environment. 
+ Object.keys(process.env) + .filter((key) => /^npm_config_/.test(key)) + .forEach((key) => { delete process.env[key] }) + + // in some platforms, certain keys are stubborn and cannot be removed + const keys = Object.keys(process.env) + .filter((key) => /^npm_config_/.test(key)) + .map((key) => key.substring('npm_config_'.length)) + .concat('argv', 'x') + + // Zero-length keys should get filtered out. + process.env.npm_config_ = '42' + // Other keys should get added. + process.env.npm_config_x = '42' + // Except loglevel. + process.env.npm_config_loglevel = 'debug' + + const g = gyp() + g.parseArgv(['rebuild']) // Also sets opts.argv. + + t.deepEqual(Object.keys(g.opts).sort(), keys.sort()) +}) diff --git a/node_modules/node-gyp/test/test-process-release.js b/node_modules/node-gyp/test/test-process-release.js new file mode 100644 index 0000000..c3ee070 --- /dev/null +++ b/node_modules/node-gyp/test/test-process-release.js @@ -0,0 +1,434 @@ +'use strict' + +const test = require('tap').test +const processRelease = require('../lib/process-release') + +test('test process release - process.version = 0.8.20', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v0.8.20', null) + + t.equal(release.semver.version, '0.8.20') + delete release.semver + + t.deepEqual(release, { + version: '0.8.20', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.8.20/', + tarballUrl: 'https://nodejs.org/dist/v0.8.20/node-v0.8.20.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.8.20/SHASUMS256.txt', + versionDir: '0.8.20', + ia32: { libUrl: 'https://nodejs.org/dist/v0.8.20/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.8.20/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.8.20/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +test('test process release - process.version = 0.10.21', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v0.10.21', null) + + t.equal(release.semver.version, '0.10.21') + delete release.semver + + t.deepEqual(release, { + version: '0.10.21', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.10.21/', + tarballUrl: 'https://nodejs.org/dist/v0.10.21/node-v0.10.21.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.10.21/SHASUMS256.txt', + versionDir: '0.10.21', + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.21/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.21/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.21/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +// prior to -headers.tar.gz +test('test process release - process.version = 0.12.9', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v0.12.9', null) + + t.equal(release.semver.version, '0.12.9') + delete release.semver + + t.deepEqual(release, { + version: '0.12.9', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.12.9/', + tarballUrl: 'https://nodejs.org/dist/v0.12.9/node-v0.12.9.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.12.9/SHASUMS256.txt', + versionDir: '0.12.9', + ia32: { libUrl: 'https://nodejs.org/dist/v0.12.9/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.12.9/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.12.9/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +// prior to -headers.tar.gz +test('test process release - process.version = 0.10.41', function (t) { + t.plan(2) + + var 
release = processRelease([], { opts: {} }, 'v0.10.41', null) + + t.equal(release.semver.version, '0.10.41') + delete release.semver + + t.deepEqual(release, { + version: '0.10.41', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.10.41/', + tarballUrl: 'https://nodejs.org/dist/v0.10.41/node-v0.10.41.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.10.41/SHASUMS256.txt', + versionDir: '0.10.41', + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.41/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.41/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.41/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +// has -headers.tar.gz +test('test process release - process.release ~ node@0.10.42', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v0.10.42', null) + + t.equal(release.semver.version, '0.10.42') + delete release.semver + + t.deepEqual(release, { + version: '0.10.42', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.10.42/', + tarballUrl: 'https://nodejs.org/dist/v0.10.42/node-v0.10.42-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.10.42/SHASUMS256.txt', + versionDir: '0.10.42', + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.42/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.42/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.42/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +// has -headers.tar.gz +test('test process release - process.release ~ node@0.12.10', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v0.12.10', null) + + t.equal(release.semver.version, '0.12.10') + delete release.semver + + t.deepEqual(release, { + version: '0.12.10', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v0.12.10/', + tarballUrl: 'https://nodejs.org/dist/v0.12.10/node-v0.12.10-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.12.10/SHASUMS256.txt', + versionDir: '0.12.10', + ia32: { libUrl: 'https://nodejs.org/dist/v0.12.10/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.12.10/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.12.10/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@4.1.23', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.1.23') + delete release.semver + + t.deepEqual(release, { + version: '4.1.23', + name: 'node', + baseUrl: 'https://nodejs.org/dist/v4.1.23/', + tarballUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v4.1.23/SHASUMS256.txt', + versionDir: '4.1.23', + ia32: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@4.1.23 / corp build', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz' + }) + + t.equal(release.semver.version, 
'4.1.23') + delete release.semver + + t.deepEqual(release, { + version: '4.1.23', + name: 'node', + baseUrl: 'https://some.custom.location/', + tarballUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz', + shasumsUrl: 'https://some.custom.location/SHASUMS256.txt', + versionDir: '4.1.23', + ia32: { libUrl: 'https://some.custom.location/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://some.custom.location/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://some.custom.location/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@12.8.0 Windows', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v12.8.0', { + name: 'node', + sourceUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz', + headersUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib' + }) + + t.equal(release.semver.version, '12.8.0') + delete release.semver + + t.deepEqual(release, { + version: '12.8.0', + name: 'node', + baseUrl: 'https://nodejs.org/download/release/v12.8.0/', + tarballUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/release/v12.8.0/SHASUMS256.txt', + versionDir: '12.8.0', + ia32: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@12.8.0 Windows ARM64', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v12.8.0', { + name: 'node', + sourceUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz', + headersUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib' + }) + + t.equal(release.semver.version, '12.8.0') + delete release.semver + + t.deepEqual(release, { + version: '12.8.0', + name: 'node', + baseUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/', + tarballUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz', + shasumsUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/SHASUMS256.txt', + versionDir: '12.8.0', + ia32: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@4.1.23 --target=0.10.40', function (t) { + t.plan(2) + + var release = processRelease([], { opts: { target: '0.10.40' } }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + }) + + t.equal(release.semver.version, '0.10.40') + delete release.semver + + t.deepEqual(release, { + version: '0.10.40', + name: 'node', + baseUrl: 
'https://nodejs.org/dist/v0.10.40/', + tarballUrl: 'https://nodejs.org/dist/v0.10.40/node-v0.10.40.tar.gz', + shasumsUrl: 'https://nodejs.org/dist/v0.10.40/SHASUMS256.txt', + versionDir: '0.10.40', + ia32: { libUrl: 'https://nodejs.org/dist/v0.10.40/node.lib', libPath: 'node.lib' }, + x64: { libUrl: 'https://nodejs.org/dist/v0.10.40/x64/node.lib', libPath: 'x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/dist/v0.10.40/arm64/node.lib', libPath: 'arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@4.1.23 --dist-url=https://foo.bar/baz', function (t) { + t.plan(2) + + var release = processRelease([], { opts: { 'dist-url': 'https://foo.bar/baz' } }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.1.23') + delete release.semver + + t.deepEqual(release, { + version: '4.1.23', + name: 'node', + baseUrl: 'https://foo.bar/baz/v4.1.23/', + tarballUrl: 'https://foo.bar/baz/v4.1.23/node-v4.1.23-headers.tar.gz', + shasumsUrl: 'https://foo.bar/baz/v4.1.23/SHASUMS256.txt', + versionDir: '4.1.23', + ia32: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ frankenstein@4.1.23', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v4.1.23', { + name: 'frankenstein', + headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.1.23') + delete release.semver + + t.deepEqual(release, { + version: '4.1.23', + name: 'frankenstein', + baseUrl: 'https://frankensteinjs.org/dist/v4.1.23/', + tarballUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz', + shasumsUrl: 'https://frankensteinjs.org/dist/v4.1.23/SHASUMS256.txt', + versionDir: 'frankenstein-4.1.23', + ia32: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' }, + x64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' }, + arm64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' } + }) +}) + +test('test process release - process.release ~ frankenstein@4.1.23 --dist-url=http://foo.bar/baz/', function (t) { + t.plan(2) + + var release = processRelease([], { opts: { 'dist-url': 'http://foo.bar/baz/' } }, 'v4.1.23', { + name: 'frankenstein', + headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23.tar.gz' + }) + + t.equal(release.semver.version, '4.1.23') + delete release.semver + + t.deepEqual(release, { + version: '4.1.23', + name: 'frankenstein', + baseUrl: 'http://foo.bar/baz/v4.1.23/', + tarballUrl: 'http://foo.bar/baz/v4.1.23/frankenstein-v4.1.23-headers.tar.gz', + shasumsUrl: 'http://foo.bar/baz/v4.1.23/SHASUMS256.txt', + versionDir: 'frankenstein-4.1.23', + ia32: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' }, + x64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' }, + arm64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' } + }) 
+}) + +test('test process release - process.release ~ node@4.0.0-rc.4', function (t) { + t.plan(2) + + var release = processRelease([], { opts: {} }, 'v4.0.0-rc.4', { + name: 'node', + headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.0.0-rc.4') + delete release.semver + + t.deepEqual(release, { + version: '4.0.0-rc.4', + name: 'node', + baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', + tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', + versionDir: '4.0.0-rc.4', + ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@4.0.0-rc.4 passed as argv[0]', function (t) { + t.plan(2) + + // note the missing 'v' on the arg, it should normalise when checking + // whether we're on the default or not + var release = processRelease(['4.0.0-rc.4'], { opts: {} }, 'v4.0.0-rc.4', { + name: 'node', + headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.0.0-rc.4') + delete release.semver + + t.deepEqual(release, { + version: '4.0.0-rc.4', + name: 'node', + baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', + tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', + versionDir: '4.0.0-rc.4', + ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - process.release ~ node@4.0.0-rc.4 - bogus string passed as argv[0]', function (t) { + t.plan(2) + + // additional arguments can be passed in on the commandline that should be ignored if they + // are not specifying a valid version @ position 0 + var release = processRelease(['this is no version!'], { opts: {} }, 'v4.0.0-rc.4', { + name: 'node', + headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.0.0-rc.4') + delete release.semver + + t.deepEqual(release, { + version: '4.0.0-rc.4', + name: 'node', + baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/', + tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz', + shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt', + versionDir: '4.0.0-rc.4', + ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) +}) + +test('test process release - NODEJS_ORG_MIRROR', function (t) { + t.plan(2) + + process.env.NODEJS_ORG_MIRROR = 'http://foo.bar' + + var release = 
processRelease([], { opts: {} }, 'v4.1.23', { + name: 'node', + headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz' + }) + + t.equal(release.semver.version, '4.1.23') + delete release.semver + + t.deepEqual(release, { + version: '4.1.23', + name: 'node', + baseUrl: 'http://foo.bar/v4.1.23/', + tarballUrl: 'http://foo.bar/v4.1.23/node-v4.1.23-headers.tar.gz', + shasumsUrl: 'http://foo.bar/v4.1.23/SHASUMS256.txt', + versionDir: '4.1.23', + ia32: { libUrl: 'http://foo.bar/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, + x64: { libUrl: 'http://foo.bar/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, + arm64: { libUrl: 'http://foo.bar/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } + }) + + delete process.env.NODEJS_ORG_MIRROR +}) diff --git a/node_modules/node-gyp/update-gyp.py b/node_modules/node-gyp/update-gyp.py new file mode 100644 index 0000000..bb84f07 --- /dev/null +++ b/node_modules/node-gyp/update-gyp.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python3 + +import argparse +import os +import shutil +import subprocess +import tarfile +import tempfile +import urllib.request + +BASE_URL = "https://github.com/nodejs/gyp-next/archive/" +CHECKOUT_PATH = os.path.dirname(os.path.realpath(__file__)) +CHECKOUT_GYP_PATH = os.path.join(CHECKOUT_PATH, "gyp") + +parser = argparse.ArgumentParser() +parser.add_argument("tag", help="gyp tag to update to") +args = parser.parse_args() + +tar_url = BASE_URL + args.tag + ".tar.gz" + +changed_files = subprocess.check_output(["git", "diff", "--name-only"]).strip() +if changed_files: + raise Exception("Can't update gyp while you have uncommitted changes in node-gyp") + +with tempfile.TemporaryDirectory() as tmp_dir: + tar_file = os.path.join(tmp_dir, "gyp.tar.gz") + unzip_target = os.path.join(tmp_dir, "gyp") + with open(tar_file, "wb") as f: + print("Downloading gyp-next@" + args.tag + " into temporary directory...") + print("From: " + tar_url) + with urllib.request.urlopen(tar_url) as in_file: + f.write(in_file.read()) + + print("Unzipping...") + with tarfile.open(tar_file, "r:gz") as tar_ref: + tar_ref.extractall(unzip_target) + + print("Moving to current checkout (" + CHECKOUT_PATH + ")...") + if os.path.exists(CHECKOUT_GYP_PATH): + shutil.rmtree(CHECKOUT_GYP_PATH) + shutil.move( + os.path.join(unzip_target, os.listdir(unzip_target)[0]), CHECKOUT_GYP_PATH + ) + +subprocess.check_output(["git", "add", "gyp"], cwd=CHECKOUT_PATH) +subprocess.check_output(["git", "commit", "-m", "feat(gyp): update gyp to " + args.tag]) diff --git a/node_modules/node-sass/LICENSE b/node_modules/node-sass/LICENSE new file mode 100644 index 0000000..6713846 --- /dev/null +++ b/node_modules/node-sass/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2013-2016 Andrew Nesbitt + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/node-sass/README.md b/node_modules/node-sass/README.md
new file mode 100644
index 0000000..125dee8
--- /dev/null
+++ b/node_modules/node-sass/README.md
@@ -0,0 +1,653 @@
+# node-sass
+
+**Warning:** [LibSass and Node Sass are deprecated](https://sass-lang.com/blog/libsass-is-deprecated).
+While they will continue to receive maintenance releases indefinitely, there are no
+plans to add additional features or compatibility with any new CSS or Sass features.
+Projects that still use it should move onto
+[Dart Sass](https://sass-lang.com/dart-sass).
+
+## Node version support policy
+
+1. Supported Node.js versions vary by release; please consult the [releases page](https://github.com/sass/node-sass/releases).
+1. Node versions that hit end of life will be dropped from support at each node-sass release (major or minor).
+1. We will stop building binaries for unsupported releases and stop testing for breakages in dependency compatibility, but we will not block installations for those that want to support themselves.
+1. New Node releases require minor internal changes along with support from CI providers (AppVeyor, GitHub Actions). We will open a single issue for interested parties to subscribe to, and close additional issues.
+
+Below is a quick guide for minimum and maximum supported versions of node-sass:
+
+NodeJS | Supported node-sass version | Node Module
+--------|-----------------------------|------------
+Node 20 | 9.0+ | 115
+Node 19 | 8.0+ | 111
+Node 18 | 8.0+ | 108
+Node 17 | 7.0+, <8.0 | 102
+Node 16 | 6.0+ | 93
+Node 15 | 5.0+, <7.0 | 88
+Node 14 | 4.14+, <9.0 | 83
+Node 13 | 4.13+, <5.0 | 79
+Node 12 | 4.12+, <8.0 | 72
+Node 11 | 4.10+, <5.0 | 67
+Node 10 | 4.9+, <6.0 | 64
+Node 8 | 4.5.3+, <5.0 | 57
+Node <8 | <5.0 | <57
+
+Sass logo
+
+![Alpine](https://github.com/sass/node-sass/workflows/Build%20bindings%20for%20Alpine%20releases/badge.svg)
+![Linux](https://github.com/sass/node-sass/workflows/Build%20bindings%20for%20Linux%20releases/badge.svg)
+![macOS](https://github.com/sass/node-sass/workflows/Build%20bindings%20for%20macOS%20releases/badge.svg)
+![Windows x64](https://github.com/sass/node-sass/workflows/Build%20bindings%20for%20Windows%20releases/badge.svg)
+![Linting](https://github.com/sass/node-sass/workflows/Lint%20JS/badge.svg)
+[![Windows x86](https://ci.appveyor.com/api/projects/status/22mjbk59kvd55m9y/branch/master?svg=true)](https://ci.appveyor.com/project/sass/node-sass/branch/master)
+[![Coverage Status](https://coveralls.io/repos/sass/node-sass/badge.svg?branch=master)](https://coveralls.io/r/sass/node-sass?branch=master)
+
+Node-sass is a library that provides a binding for Node.js to [LibSass], the C version of the popular stylesheet preprocessor, Sass.
+
+It allows you to natively compile `.scss` files to CSS at incredible speed, and automatically via a connect middleware.
+
+Find it on npm: <https://www.npmjs.com/package/node-sass>
+
+Follow @nodesass on twitter for release updates: <https://twitter.com/nodesass>
+
+## Install
+
+```shell
+npm install node-sass
+```
+
+Some users have reported issues installing on Ubuntu due to `node` being registered to another package. [Follow the official NodeJS docs](https://github.com/nodesource/distributions/blob/master/README.md#debinstall) to install NodeJS so that `#!/usr/bin/env node` correctly resolves.
+
+Compiling on Windows machines requires the [node-gyp prerequisites](https://github.com/nodejs/node-gyp#on-windows).
+
+**Are you seeing the following error? Check out our [Troubleshooting guide](https://github.com/sass/node-sass/blob/master/TROUBLESHOOTING.md#installing-node-sass-4x-with-node--4).**
+
+```
+SyntaxError: Use of const in strict mode.
+```
+
+**Having installation troubles? Check out our [Troubleshooting guide](https://github.com/sass/node-sass/blob/master/TROUBLESHOOTING.md).**
+
+### Install from mirror in China
+
+```shell
+npm install -g mirror-config-china --registry=https://registry.npmmirror.com
+npm install node-sass
+```
+
+## Usage
+
+```javascript
+var sass = require('node-sass');
+sass.render({
+  file: scss_filename
+  /* , ...options */
+}, function(err, result) { /*...*/ });
+// OR
+var result = sass.renderSync({
+  data: scss_content
+  /* , ...options */
+});
+```
+
+## Options
+
+### file
+
+* Type: `String`
+* Default: `null`
+
+**Special**: `file` or `data` must be specified
+
+Path to a file for [LibSass] to compile.
+
+### data
+
+* Type: `String`
+* Default: `null`
+
+**Special**: `file` or `data` must be specified
+
+A string to pass to [LibSass] to compile. It is recommended that you use `includePaths` in conjunction with this so that [LibSass] can find files when using the `@import` directive.
+
+### importer (>= v2.0.0) - _experimental_
+
+**This is an experimental LibSass feature. Use with caution.**
+
+* Type: `Function | Function[]` signature `function(url, prev, done)`
+* Default: `undefined`
+
+Function Parameters and Information:
+
+* `url (String)` - the path in import **as-is**, which [LibSass] encountered
+* `prev (String)` - the previously resolved path
+* `done (Function)` - a callback function to invoke on async completion, takes an object literal containing
+  * `file (String)` - an alternate path for [LibSass] to use **OR**
+  * `contents (String)` - the imported contents (for example, read from memory or the file system)
+
+Invoked when [LibSass] encounters the `@import` directive. A custom importer allows extension of the [LibSass] engine in both a synchronous and asynchronous manner. In both cases, the goal is to either `return` or call `done()` with an object literal. Depending on the value of the object literal, one of two things will happen.
+
+When returning or calling `done()` with `{ file: "String" }`, the new file path will be assumed for the `@import`. It's recommended to be mindful of the value of `prev` in instances where relative path resolution may be required.
+
+When returning or calling `done()` with `{ contents: "String" }`, the string value will be used as if the file was read in through an external source.
+
+Starting from v3.0.0:
+
+* `this` refers to a contextual scope for the immediate run of `sass.render` or `sass.renderSync`
+
+* importers can return an error, and LibSass will emit that error in response. For instance:
+
+  ```javascript
+  done(new Error('doesn\'t exist!'));
+  // or return synchronously
+  return new Error('nothing to do here');
+  ```
+
+* `importer` can be an array of functions, which will be called by LibSass in the order of their occurrence in the array. This lets you specify a special importer for a particular kind of path (filesystem, http). If an importer does not want to handle a particular path, it should return `null`. See the [functions section](#functions--v300---experimental) for more details on Sass types.
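+
+For instance, a minimal importer sketch (the `colors` redirect and the in-memory stylesheet below are illustrative, not part of the API):
+
+```javascript
+var sass = require('node-sass');
+
+sass.render({
+  data: '@import "colors"; body { background: $bg; }',
+  importer: function(url, prev, done) {
+    // Serve one import from memory, redirect everything else to disk.
+    if (url === 'colors') {
+      done({ contents: '$bg: #08c;' }); // as if read from an external source
+    } else {
+      done({ file: '/path/to/' + url + '.scss' }); // let LibSass resolve this path
+    }
+  }
+}, function(err, result) {
+  if (!err) { console.log(result.css.toString()); }
+});
+```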
+
+### functions (>= v3.0.0) - _experimental_
+
+**This is an experimental LibSass feature. Use with caution.**
+
+`functions` is an `Object` that holds a collection of custom functions that may be invoked by the sass files being compiled. They may take zero or more input parameters and must return a value either synchronously (`return ...;`) or asynchronously (`done();`). Those parameters will be instances of one of the constructors contained in the `require('node-sass').types` hash. The return value must be of one of these types as well. See the list of available types below:
+
+#### types.Number(value [, unit = ""])
+
+* `getValue()` / `setValue(value)` : gets / sets the numerical portion of the number
+* `getUnit()` / `setUnit(unit)` : gets / sets the unit portion of the number
+
+#### types.String(value)
+
+* `getValue()` / `setValue(value)` : gets / sets the enclosed string
+
+#### types.Color(r, g, b [, a = 1.0]) or types.Color(argb)
+
+* `getR()` / `setR(value)` : red component (integer from `0` to `255`)
+* `getG()` / `setG(value)` : green component (integer from `0` to `255`)
+* `getB()` / `setB(value)` : blue component (integer from `0` to `255`)
+* `getA()` / `setA(value)` : alpha component (number from `0` to `1.0`)
+
+Example:
+
+```javascript
+var Color = require('node-sass').types.Color,
+  c1 = new Color(255, 0, 0),
+  c2 = new Color(0xff0088cc);
+```
+
+#### types.Boolean(value)
+
+* `getValue()` : gets the enclosed boolean
+* `types.Boolean.TRUE` : Singleton instance of `types.Boolean` that holds "true"
+* `types.Boolean.FALSE` : Singleton instance of `types.Boolean` that holds "false"
+
+#### types.List(length [, commaSeparator = true])
+
+* `getValue(index)` / `setValue(index, value)` : `value` must itself be an instance of one of the constructors in `sass.types`.
+* `getSeparator()` / `setSeparator(isComma)` : whether to use commas as a separator
+* `getLength()`
+
+#### types.Map(length)
+
+* `getKey(index)` / `setKey(index, value)`
+* `getValue(index)` / `setValue(index, value)`
+* `getLength()`
+
+#### types.Null()
+
+* `types.Null.NULL` : Singleton instance of `types.Null`.
+
+#### Example
+
+```javascript
+sass.renderSync({
+  data: '#{headings(2,5)} { color: #08c; }',
+  functions: {
+    'headings($from: 0, $to: 6)': function(from, to) {
+      var i, f = from.getValue(), t = to.getValue(),
+        list = new sass.types.List(t - f + 1);
+
+      for (i = f; i <= t; i++) {
+        list.setValue(i - f, new sass.types.String('h' + i));
+      }
+
+      return list;
+    }
+  }
+});
+```
+
+### includePaths
+
+* Type: `Array<String>`
+* Default: `[]`
+
+An array of paths that [LibSass] can look in to attempt to resolve your `@import` declarations. When using `data`, it is recommended that you use this.
+
+### indentedSyntax
+
+* Type: `Boolean`
+* Default: `false`
+
+`true` values enable [Sass Indented Syntax](https://sass-lang.com/documentation/file.INDENTED_SYNTAX.html) for parsing the data string or file.
+
+__Note:__ node-sass/libsass will compile a mixed library of scss and indented syntax (.sass) files with the default setting (`false`), as long as `.sass` and `.scss` extensions are used in filenames.
+
+### indentType (>= v3.0.0)
+
+* Type: `String`
+* Default: `space`
+
+Used to determine whether to use a space or tab character for indentation.
+
+### indentWidth (>= v3.0.0)
+
+* Type: `Number`
+* Default: `2`
+* Maximum: `10`
+
+Used to determine the number of spaces or tabs to be used for indentation.
+
+### linefeed (>= v3.0.0)
+
+* Type: `String`
+* Default: `lf`
+
+Used to determine whether to use a `cr`, `crlf`, `lf` or `lfcr` sequence for line breaks.
+
+### omitSourceMapUrl
+
+* Type: `Boolean`
+* Default: `false`
+
+**Special:** When using this, you should also specify `outFile` to avoid unexpected behavior.
+
+`true` values disable the inclusion of source map information in the output file.
+
+### outFile
+
+* Type: `String | null`
+* Default: `null`
+
+**Special:** Required when `sourceMap` is a truthy value
+
+Specify the intended location of the output file. Strongly recommended when outputting source maps so that they can properly refer back to their intended files.
+
+**Attention:** enabling this option will **not** write the file to disk for you; it is for internal reference purposes only (to generate the source map, for example).
+
+Example of how to write it to disk:
+
+```javascript
+sass.render({
+  ...
+  outFile: yourPathTotheFile,
+}, function(error, result) { // node-style callback from v3.0.0 onwards
+  if (!error) {
+    // No errors during the compilation, write this result on the disk
+    fs.writeFile(yourPathTotheFile, result.css, function(err) {
+      if (!err) {
+        // file written on disk
+      }
+    });
+  }
+});
+```
+
+### outputStyle
+
+* Type: `String`
+* Default: `nested`
+* Values: `nested`, `expanded`, `compact`, `compressed`
+
+Determines the output format of the final CSS style.
+
+### precision
+
+* Type: `Integer`
+* Default: `5`
+
+Used to determine how many digits after the decimal will be allowed. For instance, if you had a decimal number of `1.23456789` and a precision of `5`, the result will be `1.23457` in the final CSS.
+
+### sourceComments
+
+* Type: `Boolean`
+* Default: `false`
+
+`true` enables the line number and file where a selector is defined to be emitted into the compiled CSS as a comment. Useful for debugging, especially when using imports and mixins.
+
+### sourceMap
+
+* Type: `Boolean | String | undefined`
+* Default: `undefined`
+
+Enables source map generation during `render` and `renderSync`.
+
+When `sourceMap === true`, the value of `outFile` is used as the target output location for the source map, with the suffix `.map` appended. If no `outFile` is set, the `sourceMap` parameter is ignored.
+
+When `typeof sourceMap === "string"`, the value of `sourceMap` will be used as the writing location for the file.
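+
+A short sketch of the two modes (the paths below are illustrative):
+
+```javascript
+var sass = require('node-sass');
+
+// Boolean form: the map target is derived from outFile ('out/app.css.map').
+var result = sass.renderSync({
+  file: 'src/app.scss',
+  outFile: 'out/app.css',
+  sourceMap: true
+});
+
+// String form: an explicit path is used for the map instead.
+var result2 = sass.renderSync({
+  file: 'src/app.scss',
+  outFile: 'out/app.css',
+  sourceMap: 'maps/app.css.map'
+});
+
+// In both cases node-sass only generates result.map; writing the .css and
+// .map files to disk is still up to you (see the outFile example above).
+console.log(result.map.toString());
+```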
+
+### sourceMapContents
+
+* Type: `Boolean`
+* Default: `false`
+
+`true` includes the `contents` in the source map information.
+
+### sourceMapEmbed
+
+* Type: `Boolean`
+* Default: `false`
+
+`true` embeds the source map as a data URI.
+
+### sourceMapRoot
+
+* Type: `String`
+* Default: `undefined`
+
+The value will be emitted as `sourceRoot` in the source map information.
+
+## `render` Callback (>= v3.0.0)
+
+node-sass supports standard Node-style asynchronous callbacks with the signature of `function(err, result)`. In error conditions, the `error` argument is populated with the error object. In success conditions, the `result` object is populated with an object describing the result of the render call.
+
+### Error Object
+
+* `message` (String) - The error message.
+* `line` (Number) - The line number of the error.
+* `column` (Number) - The column number of the error.
+* `status` (Number) - The status code.
+* `file` (String) - The filename of the error. In case the `file` option was not set (in favour of `data`), this will reflect the value `stdin`.
+
+### Result Object
+
+* `css` (Buffer) - The compiled CSS. Write this to a file, or serve it out as needed.
+* `map` (Buffer) - The source map
+* `stats` (Object) - An object containing information about the compile. It contains the following keys:
+  * `entry` (String) - The path to the scss file, or `data` if the source was not a file
+  * `start` (Number) - Date.now() before the compilation
+  * `end` (Number) - Date.now() after the compilation
+  * `duration` (Number) - *end* - *start*
+  * `includedFiles` (Array) - Absolute paths to all related scss files in no particular order.
+
+### Examples
+
+```javascript
+var sass = require('node-sass');
+sass.render({
+  file: '/path/to/myFile.scss',
+  data: 'body{background:blue; a{color:black;}}',
+  importer: function(url, prev, done) {
+    // url is the path in import as is, which LibSass encountered.
+    // prev is the previously resolved path.
+    // done is an optional callback, either consume it or return value synchronously.
+    // this.options contains this options hash, this.callback contains the node-style callback
+    someAsyncFunction(url, prev, function(result) {
+      done({
+        file: result.path, // only one of them is required, see section Special Behaviours.
+        contents: result.data
+      });
+    });
+    // OR
+    var result = someSyncFunction(url, prev);
+    return { file: result.path, contents: result.data };
+  },
+  includePaths: [ 'lib/', 'mod/' ],
+  outputStyle: 'compressed'
+}, function(error, result) { // node-style callback from v3.0.0 onwards
+  if (error) {
+    console.log(error.status); // used to be "code" in v2x and below
+    console.log(error.column);
+    console.log(error.message);
+    console.log(error.line);
+  }
+  else {
+    console.log(result.css.toString());
+
+    console.log(result.stats);
+
+    console.log(result.map.toString());
+    // or better
+    console.log(JSON.stringify(result.map)); // note, JSON.stringify accepts Buffer too
+  }
+});
+// OR
+var result = sass.renderSync({
+  file: '/path/to/file.scss',
+  data: 'body{background:blue; a{color:black;}}',
+  outputStyle: 'compressed',
+  outFile: '/to/my/output.css',
+  sourceMap: true, // or an absolute or relative (to outFile) path
+  importer: function(url, prev, done) {
+    // url is the path in import as is, which LibSass encountered.
+    // prev is the previously resolved path.
+    // done is an optional callback, either consume it or return value synchronously.
+    // this.options contains this options hash
+    someAsyncFunction(url, prev, function(result) {
+      done({
+        file: result.path, // only one of them is required, see section Special Behaviours.
+        contents: result.data
+      });
+    });
+    // OR
+    var result = someSyncFunction(url, prev);
+    return { file: result.path, contents: result.data };
+  }
+});
+
+console.log(result.css);
+console.log(result.map);
+console.log(result.stats);
+```
+
+### Special behaviours
+
+* In the case that both `file` and `data` options are set, node-sass will give precedence to `data` and use `file` to calculate paths in sourcemaps.
+
+### Version information (>= v2.0.0)
+
+Both `node-sass` and `libsass` version info is now exposed via the `info` method:
+
+```javascript
+var sass = require('node-sass');
+
+console.log(sass.info);
+
+/*
+  it will output something like:
+
+  node-sass 2.0.1 (Wrapper) [JavaScript]
+  libsass 3.1.0 (Sass Compiler) [C/C++]
+*/
+```
+
+Since node-sass v3.0.0, the LibSass version is determined at run time.
+
+## Integrations
+
+A listing of community uses of node-sass in build tools and frameworks.
+
+### Brackets extension
+
+[@jasonsanjose](https://github.com/jasonsanjose) has created a [Brackets](http://brackets.io) extension based on node-sass. When editing Sass files, the extension compiles changes on save. The extension also integrates with Live Preview to show Sass changes in the browser without saving or compiling.
+
+### Brunch plugin
+
+[Brunch](http://brunch.io)'s official sass plugin uses node-sass by default, and automatically falls back to ruby if use of Compass is detected.
+
+### Connect/Express middleware
+
+Recompile `.scss` files automatically for connect and express based http servers.
+
+This functionality has been moved to [`node-sass-middleware`](https://github.com/sass/node-sass-middleware) in node-sass v1.0.0.
+
+### DocPad Plugin
+
+[@10xLaCroixDrinker](https://github.com/10xLaCroixDrinker) wrote a [DocPad](http://docpad.org/) plugin that compiles `.scss` files using node-sass.
+
+### Duo.js extension
+
+[@stephenway](https://github.com/stephenway) has created an extension that transpiles Sass to CSS using node-sass with [duo.js](http://duojs.org/).
+
+### Grunt extension
+
+[@sindresorhus](https://github.com/sindresorhus/) has created a set of grunt tasks based on node-sass.
+
+### Gulp extension
+
+[@dlmanning](https://github.com/dlmanning/) has created a gulp sass plugin based on node-sass.
+
+### Harp
+
+[@sintaxi](https://github.com/sintaxi)’s Harp web server implicitly compiles `.scss` files using node-sass.
+
+### Metalsmith plugin
+
+[@stevenschobert](https://github.com/stevenschobert/) has created a metalsmith plugin based on node-sass.
+
+### Meteor plugin
+
+[@fourseven](https://github.com/fourseven) has created a meteor plugin based on node-sass.
+
+### Mimosa module
+
+[@dbashford](https://github.com/dbashford) has created a Mimosa module for sass which includes node-sass.
+
+## Example App
+
+There is also an example connect app available.
+
+## Rebuilding binaries
+
+Node-sass includes pre-compiled binaries for popular platforms. To add a binary for your platform, follow these steps.
+
+Check out the project:
+
+```bash
+git clone --recursive https://github.com/sass/node-sass.git
+cd node-sass
+npm install
+node scripts/build -f # use -d switch for debug release
+# if succeeded, it will generate and move
+# the binary in vendor directory.
+```
+
+## Command Line Interface
+
+The interface for command-line usage is fairly simplistic at this stage, as seen in the following usage section.
+
+Output will be sent to stdout if the `--output` flag is omitted.
+
+### Usage
+
+ `node-sass [options] <input> [output]`
+ Or:
+ `cat <input> | node-sass > output`
+
+Example:
+
+`node-sass src/style.scss dest/style.css`
+
+ **Options:**
+
+```bash
+    -w, --watch                Watch a directory or file
+    -r, --recursive            Recursively watch directories or files
+    -o, --output               Output directory
+    -x, --omit-source-map-url  Omit source map URL comment from output
+    -i, --indented-syntax      Treat data from stdin as sass code (versus scss)
+    -q, --quiet                Suppress log output except on error
+    -v, --version              Prints version info
+    --output-style             CSS output style (nested | expanded | compact | compressed)
+    --indent-type              Indent type for output CSS (space | tab)
+    --indent-width             Indent width; number of spaces or tabs (maximum value: 10)
+    --linefeed                 Linefeed style (cr | crlf | lf | lfcr)
+    --source-comments          Include debug info in output
+    --source-map               Emit source map
+    --source-map-contents      Embed include contents in map
+    --source-map-embed         Embed sourceMappingUrl as data URI
+    --source-map-root          Base path, will be emitted in source-map as is
+    --include-path             Path to look for imported files
+    --follow                   Follow symlinked directories
+    --precision                The amount of precision allowed in decimal numbers
+    --error-bell               Output a bell character on errors
+    --importer                 Path to .js file containing custom importer
+    --functions                Path to .js file containing custom functions
+    --help                     Print usage info
+```
+
+The `input` can be either a single `.scss` or `.sass` file, or a directory. If the input is a directory, the `--output` flag must also be supplied.
+
+Also, note `--importer` takes the (absolute or relative to pwd) path to a js file, which needs to have a default `module.exports` set to the importer function. See our test [fixtures](https://github.com/sass/node-sass/tree/974f93e76ddd08ea850e3e663cfe64bb6a059dd3/test/fixtures/extras) for an example.
+
+The `--source-map` option accepts a boolean value, in which case it replaces the destination extension with `.css.map`. It also accepts a path to a `.map` file, and even a path to the desired directory.
+When compiling a directory, `--source-map` can either be a boolean value or a directory.
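+
+For example, compiling a whole directory with source maps might look like this (the `src` and `dist` paths are illustrative):
+
+```bash
+# Compile every .scss/.sass file under src/ into dist/,
+# writing a .css.map file next to each generated .css file.
+node-sass --recursive --source-map true --output-style compressed --output dist src
+```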
+
+## Binary configuration parameters
+
+node-sass supports different configuration parameters to change settings related to the sass binary, such as the binary name, the binary path or an alternative download path. The following parameters are supported by node-sass:
+
+Variable name            | .npmrc parameter         | Process argument           | Value
+-------------------------|--------------------------|----------------------------|------
+SASS_BINARY_NAME         | sass_binary_name         | --sass-binary-name         | path
+SASS_BINARY_SITE         | sass_binary_site         | --sass-binary-site         | URL
+SASS_BINARY_PATH         | sass_binary_path         | --sass-binary-path         | path
+SASS_BINARY_DIR          | sass_binary_dir          | --sass-binary-dir          | path
+SASS_REJECT_UNAUTHORIZED | sass_reject_unauthorized | --sass-reject-unauthorized | value
+
+These parameters can be used as an environment variable:
+
+* E.g. `export SASS_BINARY_SITE=http://example.com/`
+
+As a local or global [.npmrc](https://docs.npmjs.com/misc/config) configuration file:
+
+* E.g. `sass_binary_site=http://example.com/`
+
+As a process argument:
+
+* E.g. `npm install node-sass --sass-binary-site=http://example.com/`
+
+If you are using self-signed certificates for your binary, then `SASS_REJECT_UNAUTHORIZED` will override [`rejectUnauthorized`](https://nodejs.org/docs/latest/api/tls.html#tls_tls_createserver_options_secureconnectionlistener).
+
+## Post-install Build
+
+Install runs only two Mocha tests to see if your machine can use the pre-built [LibSass], which will save some time during install. If any test fails, it will build from source.
+
+## Maintainers
+
+This module is brought to you and maintained by the following people:
+
+* Michael Mifsud - Project Lead ([Github](https://github.com/xzyfer) / [Twitter](https://twitter.com/xzyfer))
+* Andrew Nesbitt ([Github](https://github.com/andrew) / [Twitter](https://twitter.com/teabass))
+* Dean Mao ([Github](https://github.com/deanmao) / [Twitter](https://twitter.com/deanmao))
+* Brett Wilkins ([Github](https://github.com/bwilkins) / [Twitter](https://twitter.com/bjmaz))
+* Keith Cirkel ([Github](https://github.com/keithamus) / [Twitter](https://twitter.com/Keithamus))
+* Laurent Goderre ([Github](https://github.com/laurentgoderre) / [Twitter](https://twitter.com/laurentgoderre))
+* Nick Schonning ([Github](https://github.com/nschonni) / [Twitter](https://twitter.com/nschonni))
+* Adeel Mujahid ([Github](https://github.com/am11) / [Twitter](https://twitter.com/adeelbm))
+
+## Contributors
+
+We <3 our contributors! A special thanks to all those who have clocked in some dev time on this project; we really appreciate your hard work. You can find [a full list of those people here.](https://github.com/sass/node-sass/graphs/contributors)
+
+### Note on Patches/Pull Requests
+
+Check out our [Contributing guide](/.github/CONTRIBUTING.md).
+
+## Copyright
+
+Copyright (c) 2015 Andrew Nesbitt. See [LICENSE](https://github.com/sass/node-sass/blob/master/LICENSE) for details.
+ +[LibSass]: https://github.com/sass/libsass diff --git a/node_modules/node-sass/bin/node-sass b/node_modules/node-sass/bin/node-sass new file mode 100644 index 0000000..7645ecb --- /dev/null +++ b/node_modules/node-sass/bin/node-sass @@ -0,0 +1,444 @@ +#!/usr/bin/env node + +var Emitter = require('events').EventEmitter, + forEach = require('async-foreach').forEach, + Gaze = require('gaze'), + meow = require('meow'), + util = require('util'), + path = require('path'), + glob = require('glob'), + sass = require('../lib'), + render = require('../lib/render'), + watcher = require('../lib/watcher'), + stdout = require('stdout-stream'), + stdin = require('get-stdin'), + fs = require('fs'); + +/** + * Initialize CLI + */ + +var cli = meow(` + Usage: + node-sass [options] + cat | node-sass [options] > output.css + + Example: Compile foobar.scss to foobar.css + node-sass --output-style compressed foobar.scss > foobar.css + cat foobar.scss | node-sass --output-style compressed > foobar.css + + Example: Watch the sass directory for changes, compile with sourcemaps to the css directory + node-sass --watch --recursive --output css + --source-map true --source-map-contents sass + + Options + -w, --watch Watch a directory or file + -r, --recursive Recursively watch directories or files + -o, --output Output directory + -x, --omit-source-map-url Omit source map URL comment from output + -i, --indented-syntax Treat data from stdin as sass code (versus scss) + -q, --quiet Suppress log output except on error + -v, --version Prints version info + --output-style CSS output style (nested | expanded | compact | compressed) + --indent-type Indent type for output CSS (space | tab) + --indent-width Indent width; number of spaces or tabs (maximum value: 10) + --linefeed Linefeed style (cr | crlf | lf | lfcr) + --source-comments Include debug info in output + --source-map Emit source map (boolean, or path to output .map file) + --source-map-contents Embed include contents in map + --source-map-embed Embed sourceMappingUrl as data URI + --source-map-root Base path, will be emitted in source-map as is + --include-path Path to look for imported files + --follow Follow symlinked directories + --precision The amount of precision allowed in decimal numbers + --error-bell Output a bell character on errors + --importer Path to .js file containing custom importer + --functions Path to .js file containing custom functions + --help Print usage info +`, { + version: sass.info, + flags: { + errorBell: { + type: 'boolean', + }, + functions: { + type: 'string', + }, + follow: { + type: 'boolean', + }, + importer: { + type: 'string', + }, + includePath: { + type: 'string', + default: [process.cwd()], + isMultiple: true, + }, + indentType: { + type: 'string', + default: 'space', + }, + indentWidth: { + type: 'number', + default: 2, + }, + indentedSyntax: { + type: 'boolean', + alias: 'i', + }, + linefeed: { + type: 'string', + default: 'lf', + }, + omitSourceMapUrl: { + type: 'boolean', + alias: 'x', + }, + output: { + type: 'string', + alias: 'o', + }, + outputStyle: { + type: 'string', + default: 'nested', + }, + precision: { + type: 'number', + default: 5, + }, + quiet: { + type: 'boolean', + default: false, + alias: 'q', + }, + recursive: { + type: 'boolean', + default: true, + alias: 'r', + }, + sourceMapContents: { + type: 'boolean', + }, + sourceMapEmbed: { + type: 'boolean', + }, + sourceMapRoot: { + type: 'string', + }, + sourceComments: { + type: 'boolean', + alias: 'c', + }, + version: { + type: 'boolean', + alias: 'v', 
+ }, + watch: { + type: 'boolean', + alias: 'w', + }, + }, +}); + +/** + * Is a Directory + * + * @param {String} filePath + * @returns {Boolean} + * @api private + */ + +function isDirectory(filePath) { + var isDir = false; + try { + var absolutePath = path.resolve(filePath); + isDir = fs.statSync(absolutePath).isDirectory(); + } catch (e) { + isDir = e.code === 'ENOENT'; + } + return isDir; +} + +/** + * Get correct glob pattern + * + * @param {Object} options + * @returns {String} + * @api private + */ + +function globPattern(options) { + return options.recursive ? '**/*.{sass,scss}' : '*.{sass,scss}'; +} + +/** + * Create emitter + * + * @api private + */ + +function getEmitter() { + var emitter = new Emitter(); + + emitter.on('error', function(err) { + if (options.errorBell) { + err += '\x07'; + } + console.error(err); + if (!options.watch) { + process.exit(1); + } + }); + + emitter.on('warn', function(data) { + if (!options.quiet) { + console.warn(data); + } + }); + + emitter.on('info', function(data) { + if (!options.quiet) { + console.info(data); + } + }); + + emitter.on('log', stdout.write.bind(stdout)); + + return emitter; +} + +/** + * Construct options + * + * @param {Array} arguments + * @param {Object} options + * @api private + */ + +function getOptions(args, options) { + var cssDir, sassDir, file, mapDir; + options.src = args[0]; + + if (args[1]) { + options.dest = path.resolve(args[1]); + } else if (options.output) { + options.dest = path.join( + path.resolve(options.output), + [path.basename(options.src, path.extname(options.src)), '.css'].join('')); // replace ext. + } + + if (options.directory) { + sassDir = path.resolve(options.directory); + file = path.relative(sassDir, args[0]); + cssDir = path.resolve(options.output); + options.dest = path.join(cssDir, file).replace(path.extname(file), '.css'); + } + + if (options.sourceMap) { + if(!options.sourceMapOriginal) { + options.sourceMapOriginal = options.sourceMap; + } + + if (options.sourceMapOriginal === 'true') { + options.sourceMap = options.dest + '.map'; + } else { + // check if sourceMap path ends with .map to avoid isDirectory false-positive + var sourceMapIsDirectory = options.sourceMapOriginal.indexOf('.map', options.sourceMapOriginal.length - 4) === -1 && isDirectory(options.sourceMapOriginal); + + if (!sourceMapIsDirectory) { + options.sourceMap = path.resolve(options.sourceMapOriginal); + } else if (!options.directory) { + options.sourceMap = path.resolve(options.sourceMapOriginal, path.basename(options.dest) + '.map'); + } else { + sassDir = path.resolve(options.directory); + file = path.relative(sassDir, args[0]); + mapDir = path.resolve(options.sourceMapOriginal); + options.sourceMap = path.join(mapDir, file).replace(path.extname(file), '.css.map'); + } + } + } + + return options; +} + +/** + * Watch + * + * @param {Object} options + * @param {Object} emitter + * @api private + */ + +function watch(options, emitter) { + var handler = function(files) { + files.added.forEach(function(file) { + var watch = gaze.watched(); + Object.keys(watch).forEach(function (dir) { + if (watch[dir].indexOf(file) !== -1) { + gaze.add(file); + } + }); + }); + + files.changed.forEach(function(file) { + if (path.basename(file)[0] !== '_') { + renderFile(file, options, emitter); + } + }); + + files.removed.forEach(function(file) { + gaze.remove(file); + }); + }; + + var gaze = new Gaze(); + gaze.add(watcher.reset(options)); + gaze.on('error', emitter.emit.bind(emitter, 'error')); + + gaze.on('changed', function(file) { + 
handler(watcher.changed(file)); + }); + + gaze.on('added', function(file) { + handler(watcher.added(file)); + }); + + gaze.on('deleted', function(file) { + handler(watcher.removed(file)); + }); +} + +/** + * Run + * + * @param {Object} options + * @param {Object} emitter + * @api private + */ + +function run(options, emitter) { + if (options.directory) { + if (!options.output) { + emitter.emit('error', 'An output directory must be specified when compiling a directory'); + } + if (!isDirectory(options.output)) { + emitter.emit('error', 'An output directory must be specified when compiling a directory'); + } + } + + if (options.sourceMapOriginal && options.directory && !isDirectory(options.sourceMapOriginal) && options.sourceMapOriginal !== 'true') { + emitter.emit('error', 'The --source-map option must be either a boolean or directory when compiling a directory'); + } + + if (options.importer) { + if ((path.resolve(options.importer) === path.normalize(options.importer).replace(/(.+)([/|\\])$/, '$1'))) { + options.importer = require(options.importer); + } else { + options.importer = require(path.resolve(options.importer)); + } + } + + if (options.functions) { + if ((path.resolve(options.functions) === path.normalize(options.functions).replace(/(.+)([/|\\])$/, '$1'))) { + options.functions = require(options.functions); + } else { + options.functions = require(path.resolve(options.functions)); + } + } + + if (options.watch) { + watch(options, emitter); + } else if (options.directory) { + renderDir(options, emitter); + } else { + render(options, emitter); + } +} + +/** + * Render a file + * + * @param {String} file + * @param {Object} options + * @param {Object} emitter + * @api private + */ +function renderFile(file, options, emitter) { + options = getOptions([path.resolve(file)], options); + if (options.watch && !options.quiet) { + emitter.emit('info', util.format('=> changed: %s', file)); + } + render(options, emitter); +} + +/** + * Render all sass files in a directory + * + * @param {Object} options + * @param {Object} emitter + * @api private + */ +function renderDir(options, emitter) { + var globPath = path.resolve(options.directory, globPattern(options)); + glob(globPath, { ignore: '**/_*', follow: options.follow }, function(err, files) { + if (err) { + return emitter.emit('error', util.format('You do not have permission to access this path: %s.', err.path)); + } else if (!files.length) { + return emitter.emit('error', 'No input file was found.'); + } + + forEach(files, function(subject) { + emitter.once('done', this.async()); + renderFile(subject, options, emitter); + }, function(successful, arr) { + var outputDir = path.join(process.cwd(), options.output); + if (!options.quiet) { + emitter.emit('info', util.format('Wrote %s CSS files to %s', arr.length, outputDir)); + } + process.exit(); + }); + }); +} + +/** + * Arguments and options + */ + +var options = getOptions(cli.input, cli.flags); +var emitter = getEmitter(); + +/** + * Show usage if no arguments are supplied + */ + +if (!options.src && process.stdin.isTTY) { + emitter.emit('error', [ + 'Provide a Sass file to render', + '', + 'Example: Compile foobar.scss to foobar.css', + ' node-sass --output-style compressed foobar.scss > foobar.css', + ' cat foobar.scss | node-sass --output-style compressed > foobar.css', + '', + 'Example: Watch the sass directory for changes, compile with sourcemaps to the css directory', + ' node-sass --watch --recursive --output css', + ' --source-map true --source-map-contents sass', + ].join('\n')); 
+} + +/** + * Apply arguments + */ + +if (options.src) { + if (isDirectory(options.src)) { + options.directory = options.src; + } + run(options, emitter); +} else if (!process.stdin.isTTY) { + stdin(function(data) { + options.data = data; + options.stdin = true; + run(options, emitter); + }); +} diff --git a/node_modules/node-sass/binding.gyp b/node_modules/node-sass/binding.gyp new file mode 100644 index 0000000..bb87e6c --- /dev/null +++ b/node_modules/node-sass/binding.gyp @@ -0,0 +1,74 @@ +{ + 'variables': { + 'libsass_ext%': '', + }, + 'targets': [ + { + 'target_name': 'binding', + 'win_delay_load_hook': 'true', + 'sources': [ + 'src/binding.cpp', + 'src/create_string.cpp', + 'src/custom_function_bridge.cpp', + 'src/custom_importer_bridge.cpp', + 'src/sass_context_wrapper.cpp', + 'src/sass_types/boolean.cpp', + 'src/sass_types/color.cpp', + 'src/sass_types/error.cpp', + 'src/sass_types/factory.cpp', + 'src/sass_types/list.cpp', + 'src/sass_types/map.cpp', + 'src/sass_types/null.cpp', + 'src/sass_types/number.cpp', + 'src/sass_types/string.cpp' + ], + 'msvs_settings': { + 'VCLinkerTool': { + 'SetChecksum': 'true' + } + }, + 'xcode_settings': { + 'CLANG_CXX_LIBRARY': 'libc++', + 'OTHER_LDFLAGS': [], + 'GCC_ENABLE_CPP_EXCEPTIONS': 'NO', + 'MACOSX_DEPLOYMENT_TARGET': '10.11' + }, + 'include_dirs': [ + '= flags.length) { + return null; + } + + return flags[index + 1]; +} + +/** + * Get binary name. + * If environment variable SASS_BINARY_NAME, + * .npmrc variable sass_binary_name or + * process argument --binary-name is provided, + * return it as is, otherwise make default binary + * name: {platform}-{arch}-{v8 version}.node + * + * @api public + */ + +function getBinaryName() { + var binaryName, + variant, + platform = process.platform; + + if (getArgument('--sass-binary-name')) { + binaryName = getArgument('--sass-binary-name'); + } else if (process.env.SASS_BINARY_NAME) { + binaryName = process.env.SASS_BINARY_NAME; + } else if (process.env.npm_config_sass_binary_name) { + binaryName = process.env.npm_config_sass_binary_name; + } else if (pkg.nodeSassConfig && pkg.nodeSassConfig.binaryName) { + binaryName = pkg.nodeSassConfig.binaryName; + } else { + variant = getPlatformVariant(); + if (variant) { + platform += '_' + variant; + } + + binaryName = [ + platform, '-', + process.arch, '-', + process.versions.modules + ].join(''); + } + + return [binaryName, 'binding.node'].join('_'); +} + +/** + * Determine the URL to fetch binary file from. + * By default fetch from the node-sass distribution + * site on GitHub. + * + * The default URL can be overridden using + * the environment variable SASS_BINARY_SITE, + * .npmrc variable sass_binary_site or + * or a command line option --sass-binary-site: + * + * node scripts/install.js --sass-binary-site http://example.com/ + * + * The URL should to the mirror of the repository + * laid out as follows: + * + * SASS_BINARY_SITE/ + * + * v3.0.0 + * v3.0.0/freebsd-x64-14_binding.node + * .... + * v3.0.0 + * v3.0.0/freebsd-ia32-11_binding.node + * v3.0.0/freebsd-x64-42_binding.node + * ... etc. for all supported versions and platforms + * + * @api public + */ + +function getBinaryUrl() { + var site = getArgument('--sass-binary-site') || + process.env.SASS_BINARY_SITE || + process.env.npm_config_sass_binary_site || + (pkg.nodeSassConfig && pkg.nodeSassConfig.binarySite) || + 'https://github.com/sass/node-sass/releases/download'; + + return [site, 'v' + pkg.version, getBinaryName()].join('/'); +} + +/** + * Get binary dir. 
+ * If environment variable SASS_BINARY_DIR, + * .npmrc variable sass_binary_dir or + * process argument --sass-binary-dir is provided, + * select it by appending binary name, otherwise + * use default binary dir. + * Once the primary selection is made, check if + * callers wants to throw if file not exists before + * returning. + * + * @api public + */ + +function getBinaryDir() { + var binaryDir; + + if (getArgument('--sass-binary-dir')) { + binaryDir = getArgument('--sass-binary-dir'); + } else if (process.env.SASS_BINARY_DIR) { + binaryDir = process.env.SASS_BINARY_DIR; + } else if (process.env.npm_config_sass_binary_dir) { + binaryDir = process.env.npm_config_sass_binary_dir; + } else if (pkg.nodeSassConfig && pkg.nodeSassConfig.binaryDir) { + binaryDir = pkg.nodeSassConfig.binaryDir; + } else { + binaryDir = defaultBinaryDir; + } + + return binaryDir; +} + +/** + * Get binary path. + * If environment variable SASS_BINARY_PATH, + * .npmrc variable sass_binary_path or + * process argument --sass-binary-path is provided, + * select it by appending binary name, otherwise + * make default binary path using binary name. + * Once the primary selection is made, check if + * callers wants to throw if file not exists before + * returning. + * + * @api public + */ + +function getBinaryPath() { + var binaryPath; + + if (getArgument('--sass-binary-path')) { + binaryPath = getArgument('--sass-binary-path'); + } else if (process.env.SASS_BINARY_PATH) { + binaryPath = process.env.SASS_BINARY_PATH; + } else if (process.env.npm_config_sass_binary_path) { + binaryPath = process.env.npm_config_sass_binary_path; + } else if (pkg.nodeSassConfig && pkg.nodeSassConfig.binaryPath) { + binaryPath = pkg.nodeSassConfig.binaryPath; + } else { + binaryPath = path.join(getBinaryDir(), getBinaryName().replace(/_(?=binding\.node)/, '/')); + } + + try { + return trueCasePathSync(binaryPath) || binaryPath; + } catch (e) { + return binaryPath; + } +} + +/** + * An array of paths suitable for use as a local disk cache of the binding. + * + * @return {[]String} an array of paths + * @api public + */ +function getCachePathCandidates() { + return [ + process.env.npm_config_sass_binary_cache, + process.env.npm_config_cache, + ].filter(function(_) { return _; }); +} + +/** + * The most suitable location for caching the binding on disk. + * + * Given the candidates directories provided by `getCachePathCandidates()` this + * returns the first writable directory. By treating the candidate directories + * as a prioritised list this method is deterministic, assuming no change to the + * local environment. + * + * @return {String} directory to cache binding + * @api public + */ +function getBinaryCachePath() { + var i, + cachePath, + cachePathCandidates = getCachePathCandidates(); + + for (i = 0; i < cachePathCandidates.length; i++) { + cachePath = path.join(cachePathCandidates[i], pkg.name, pkg.version); + + try { + fs.mkdirSync(cachePath, {recursive: true}); + return cachePath; + } catch (e) { + // Directory is not writable, try another + } + } + + return ''; +} + +/** + * The cached binding + * + * Check the candidates directories provided by `getCachePathCandidates()` for + * the binding file, if it exists. By treating the candidate directories + * as a prioritised list this method is deterministic, assuming no change to the + * local environment. 
+ * + * @return {String} path to cached binary + * @api public + */ +function getCachedBinary() { + var i, + cachePath, + cacheBinary, + cachePathCandidates = getCachePathCandidates(), + binaryName = getBinaryName(); + + for (i = 0; i < cachePathCandidates.length; i++) { + cachePath = path.join(cachePathCandidates[i], pkg.name, pkg.version); + cacheBinary = path.join(cachePath, binaryName); + + if (fs.existsSync(cacheBinary)) { + return cacheBinary; + } + } + + return ''; +} + +/** + * Does the supplied binary path exist + * + * @param {String} binaryPath + * @api public + */ + +function hasBinary(binaryPath) { + return fs.existsSync(binaryPath); +} + +/** + * Get Sass version information + * + * @api public + */ + +function getVersionInfo(binding) { + return [ + ['node-sass', pkg.version, '(Wrapper)', '[JavaScript]'].join('\t'), + ['libsass ', binding.libsassVersion(), '(Sass Compiler)', '[C/C++]'].join('\t'), + ].join(eol); +} + +/** + * Gets the platform variant, currently either an empty string or 'musl' for Linux/musl platforms. + * + * @api public + */ + +function getPlatformVariant() { + var contents = ''; + + if (process.platform !== 'linux') { + return ''; + } + + try { + contents = fs.readFileSync(process.execPath); + + if (contents.indexOf('libc.musl-x86_64.so.1') !== -1) { + return 'musl'; + } + } catch (err) { } // eslint-disable-line no-empty + + return ''; +} + +module.exports.hasBinary = hasBinary; +module.exports.getBinaryUrl = getBinaryUrl; +module.exports.getBinaryName = getBinaryName; +module.exports.getBinaryDir = getBinaryDir; +module.exports.getBinaryPath = getBinaryPath; +module.exports.getBinaryCachePath = getBinaryCachePath; +module.exports.getCachedBinary = getCachedBinary; +module.exports.getCachePathCandidates = getCachePathCandidates; +module.exports.getVersionInfo = getVersionInfo; +module.exports.getHumanEnvironment = getHumanEnvironment; +module.exports.getInstalledBinaries = getInstalledBinaries; +module.exports.isSupportedEnvironment = isSupportedEnvironment; diff --git a/node_modules/node-sass/lib/index.js b/node_modules/node-sass/lib/index.js new file mode 100644 index 0000000..1006280 --- /dev/null +++ b/node_modules/node-sass/lib/index.js @@ -0,0 +1,458 @@ +/*! + * node-sass: lib/index.js + */ + +var path = require('path'), + clonedeep = require('lodash/cloneDeep'), + sass = require('./extensions'); + +/** + * Require binding + */ + +var binding = require('./binding')(sass); + +/** + * Get input file + * + * @param {Object} options + * @api private + */ + +function getInputFile(options) { + return options.file ? path.resolve(options.file) : null; +} + +/** + * Get output file + * + * @param {Object} options + * @api private + */ + +function getOutputFile(options) { + var outFile = options.outFile; + + if (!outFile || typeof outFile !== 'string' || (!options.data && !options.file)) { + return null; + } + + return path.resolve(outFile); +} + +/** + * Get source map + * + * @param {Object} options + * @api private + */ + +function getSourceMap(options) { + var sourceMap = options.sourceMap; + + if (sourceMap && typeof sourceMap !== 'string' && options.outFile) { + sourceMap = options.outFile + '.map'; + } + + return sourceMap && typeof sourceMap === 'string' ? 
path.resolve(sourceMap) : null; +} + +/** + * Get stats + * + * @param {Object} options + * @api private + */ + +function getStats(options) { + var stats = {}; + + stats.entry = options.file || 'data'; + stats.start = Date.now(); + + return stats; +} + +/** + * End stats + * + * @param {Object} stats + * @param {Object} sourceMap + * @api private + */ + +function endStats(stats) { + stats.end = Date.now(); + stats.duration = stats.end - stats.start; + + return stats; +} + +/** + * Get style + * + * @param {Object} options + * @api private + */ + +function getStyle(options) { + var styles = { + nested: 0, + expanded: 1, + compact: 2, + compressed: 3 + }; + + return styles[options.outputStyle] || 0; +} + +/** + * Get indent width + * + * @param {Object} options + * @api private + */ + +function getIndentWidth(options) { + var width = parseInt(options.indentWidth) || 2; + + return width > 10 ? 2 : width; +} + +/** + * Get indent type + * + * @param {Object} options + * @api private + */ + +function getIndentType(options) { + var types = { + space: 0, + tab: 1 + }; + + return types[options.indentType] || 0; +} + +/** + * Get linefeed + * + * @param {Object} options + * @api private + */ + +function getLinefeed(options) { + var feeds = { + cr: '\r', + crlf: '\r\n', + lf: '\n', + lfcr: '\n\r' + }; + + return feeds[options.linefeed] || '\n'; +} + +/** + * Build an includePaths string + * from the options.includePaths array and the SASS_PATH environment variable + * + * @param {Object} options + * @api private + */ + +function buildIncludePaths(options) { + options.includePaths = options.includePaths || []; + + if (Object.prototype.hasOwnProperty.call(process.env, 'SASS_PATH')) { + options.includePaths = options.includePaths.concat( + process.env.SASS_PATH.split(path.delimiter) + ); + } + + // Preserve the behaviour people have come to expect. + // This behaviour was removed from Sass in 3.4 and + // LibSass in 3.5. 
+ options.includePaths.unshift(process.cwd()); + + return options.includePaths.join(path.delimiter); +} + +/** + * Get options + * + * @param {Object} options + * @api private + */ + +function getOptions(opts, cb) { + if (typeof opts !== 'object') { + throw new Error('Invalid: options is not an object.'); + } + var options = clonedeep(opts || {}); + + options.sourceComments = options.sourceComments || false; + if (Object.prototype.hasOwnProperty.call(options, 'file')) { + options.file = getInputFile(options); + } + options.outFile = getOutputFile(options); + options.includePaths = buildIncludePaths(options); + options.precision = parseInt(options.precision) || 5; + options.sourceMap = getSourceMap(options); + options.style = getStyle(options); + options.indentWidth = getIndentWidth(options); + options.indentType = getIndentType(options); + options.linefeed = getLinefeed(options); + + // context object represents node-sass environment + options.context = { options: options, callback: cb }; + + options.result = { + stats: getStats(options) + }; + + return options; +} + +/** + * Executes a callback and transforms any exception raised into a sass error + * + * @param {Function} callback + * @param {Array} arguments + * @api private + */ + +function tryCallback(callback, args) { + try { + return callback.apply(this, args); + } catch (e) { + if (typeof e === 'string') { + return new binding.types.Error(e); + } else if (e instanceof Error) { + return new binding.types.Error(e.message); + } else { + return new binding.types.Error('An unexpected error occurred'); + } + } +} + +/** + * Normalizes the signature of custom functions to make it possible to just supply the + * function name and have the signature default to `fn(...)`. The callback is adjusted + * to transform the input sass list into discrete arguments. + * + * @param {String} signature + * @param {Function} callback + * @return {Object} + * @api private + */ + +function normalizeFunctionSignature(signature, callback) { + if (!/^\*|@warn|@error|@debug|\w+\(.*\)$/.test(signature)) { + if (!/\w+/.test(signature)) { + throw new Error('Invalid function signature format "' + signature + '"'); + } + + return { + signature: signature + '(...)', + callback: function() { + var args = Array.prototype.slice.call(arguments), + list = args.shift(), + i; + + for (i = list.getLength() - 1; i >= 0; i--) { + args.unshift(list.getValue(i)); + } + + return callback.apply(this, args); + } + }; + } + + return { + signature: signature, + callback: callback + }; +} + +/** + * Render + * + * @param {Object} options + * @api public + */ + +module.exports.render = function(opts, cb) { + var options = getOptions(opts, cb); + + // options.error and options.success are for libsass binding + options.error = function(err) { + var payload = Object.assign(new Error(), JSON.parse(err)); + + if (cb) { + options.context.callback.call(options.context, payload, null); + } + }; + + options.success = function() { + var result = options.result; + var stats = endStats(result.stats); + var payload = { + css: result.css, + stats: stats + }; + if (result.map) { + payload.map = result.map; + } + + if (cb) { + options.context.callback.call(options.context, null, payload); + } + }; + + var importer = options.importer; + + if (importer) { + if (Array.isArray(importer)) { + options.importer = []; + importer.forEach(function(subject, index) { + options.importer[index] = function(file, prev, bridge) { + function done(result) { + bridge.success(result === module.exports.NULL ? 
null : result); + } + + var result = subject.call(options.context, file, prev, done); + + if (result !== undefined) { + done(result); + } + }; + }); + } else { + options.importer = function(file, prev, bridge) { + function done(result) { + bridge.success(result === module.exports.NULL ? null : result); + } + + var result = importer.call(options.context, file, prev, done); + + if (result !== undefined) { + done(result); + } + }; + } + } + + var functions = clonedeep(options.functions); + + if (functions) { + options.functions = {}; + + Object.keys(functions).forEach(function(subject) { + var cb = normalizeFunctionSignature(subject, functions[subject]); + + options.functions[cb.signature] = function() { + var args = Array.prototype.slice.call(arguments), + bridge = args.pop(); + + function done(data) { + bridge.success(data); + } + + var result = tryCallback(cb.callback.bind(options.context), args.concat(done)); + + if (result) { + done(result); + } + }; + }); + } + + if (options.data) { + binding.render(options); + } else if (options.file) { + binding.renderFile(options); + } else { + cb({status: 3, message: 'No input specified: provide a file name or a source string to process' }); + } +}; + +/** + * Render sync + * + * @param {Object} options + * @api public + */ + +module.exports.renderSync = function(opts) { + var options = getOptions(opts); + var importer = options.importer; + + if (importer) { + if (Array.isArray(importer)) { + options.importer = []; + importer.forEach(function(subject, index) { + options.importer[index] = function(file, prev) { + var result = subject.call(options.context, file, prev); + + return result === module.exports.NULL ? null : result; + }; + }); + } else { + options.importer = function(file, prev) { + var result = importer.call(options.context, file, prev); + + return result === module.exports.NULL ? null : result; + }; + } + } + + var functions = clonedeep(options.functions); + + if (options.functions) { + options.functions = {}; + + Object.keys(functions).forEach(function(signature) { + var cb = normalizeFunctionSignature(signature, functions[signature]); + + options.functions[cb.signature] = function() { + return tryCallback(cb.callback.bind(options.context), arguments); + }; + }); + } + + var status; + if (options.data) { + status = binding.renderSync(options); + } else if (options.file) { + status = binding.renderFileSync(options); + } else { + throw new Error('No input specified: provide a file name or a source string to process'); + } + + var result = options.result; + + if (status) { + result.stats = endStats(result.stats); + return result; + } + + throw Object.assign(new Error(), JSON.parse(result.error)); +}; + +/** + * API Info + * + * @api public + */ + +module.exports.info = sass.getVersionInfo(binding); + +/** + * Expose sass types + */ + +module.exports.types = binding.types; +module.exports.TRUE = binding.types.Boolean.TRUE; +module.exports.FALSE = binding.types.Boolean.FALSE; +module.exports.NULL = binding.types.Null.NULL; diff --git a/node_modules/node-sass/lib/render.js b/node_modules/node-sass/lib/render.js new file mode 100644 index 0000000..3539a9a --- /dev/null +++ b/node_modules/node-sass/lib/render.js @@ -0,0 +1,120 @@ +/*! 
+ * node-sass: lib/render.js + */ + +var chalk = require('chalk'), + fs = require('fs'), + path = require('path'), + sass = require('./'); + +/** + * Render + * + * @param {Object} options + * @param {Object} emitter + * @api public + */ + +module.exports = function(options, emitter) { + var renderOptions = { + includePaths: options.includePath, + omitSourceMapUrl: options.omitSourceMapUrl, + indentedSyntax: options.indentedSyntax, + outFile: options.dest, + outputStyle: options.outputStyle, + precision: options.precision, + sourceComments: options.sourceComments, + sourceMapEmbed: options.sourceMapEmbed, + sourceMapContents: options.sourceMapContents, + sourceMap: options.sourceMap, + sourceMapRoot: options.sourceMapRoot, + importer: options.importer, + functions: options.functions, + indentWidth: options.indentWidth, + indentType: options.indentType, + linefeed: options.linefeed + }; + + if (options.data) { + renderOptions.data = options.data; + } else if (options.src) { + renderOptions.file = options.src; + } + + var sourceMap = options.sourceMap; + var destination = options.dest; + var stdin = options.stdin; + + var success = function(result) { + var todo = 1; + var done = function() { + if (--todo <= 0) { + emitter.emit('done'); + } + }; + + if (!destination || stdin) { + emitter.emit('log', result.css.toString()); + + if (sourceMap && !options.sourceMapEmbed) { + emitter.emit('log', result.map.toString()); + } + + return done(); + } + + emitter.emit('info', chalk.green('Rendering Complete, saving .css file...')); + + fs.mkdir(path.dirname(destination), {recursive: true}, function(err) { + if (err) { + return emitter.emit('error', chalk.red(err)); + } + + fs.writeFile(destination, result.css.toString(), function(err) { + if (err) { + return emitter.emit('error', chalk.red(err)); + } + + emitter.emit('info', chalk.green('Wrote CSS to ' + destination)); + emitter.emit('write', err, destination, result.css.toString()); + done(); + }); + }); + + if (sourceMap) { + todo++; + + fs.mkdir(path.dirname(sourceMap), {recursive: true}, function(err) { + if (err) { + return emitter.emit('error', chalk.red(err)); + } + fs.writeFile(sourceMap, result.map, function(err) { + if (err) { + return emitter.emit('error', chalk.red('Error' + err)); + } + + emitter.emit('info', chalk.green('Wrote Source Map to ' + sourceMap)); + emitter.emit('write-source-map', err, sourceMap, result.map); + done(); + }); + }); + } + + emitter.emit('render', result.css.toString()); + }; + + var error = function(error) { + emitter.emit('error', chalk.red(JSON.stringify(error, null, 2))); + }; + + var renderCallback = function(err, result) { + if (err) { + error(err); + } + else { + success(result); + } + }; + + sass.render(renderOptions, renderCallback); +}; diff --git a/node_modules/node-sass/lib/watcher.js b/node_modules/node-sass/lib/watcher.js new file mode 100644 index 0000000..89443b4 --- /dev/null +++ b/node_modules/node-sass/lib/watcher.js @@ -0,0 +1,93 @@ +var grapher = require('sass-graph'), + clonedeep = require('lodash/cloneDeep'), + path = require('path'), + config = {}, + watcher = {}, + graph = null; + +watcher.reset = function(opts) { + config = clonedeep(opts || config || {}); + var options = { + loadPaths: config.includePath, + extensions: ['scss', 'sass', 'css'], + follow: config.follow, + }; + + if (config.directory) { + graph = grapher.parseDir(config.directory, options); + } else { + graph = grapher.parseFile(config.src, options); + } + + return Object.keys(graph.index); +}; + +watcher.changed = 
function(absolutePath) { + var files = { + added: [], + changed: [], + removed: [], + }; + + this.reset(); + + if (absolutePath && path.basename(absolutePath)[0] !== '_') { + files.changed.push(absolutePath); + } + + graph.visitAncestors(absolutePath, function(parent) { + if (path.basename(parent)[0] !== '_') { + files.changed.push(parent); + } + }); + + graph.visitDescendents(absolutePath, function(child) { + files.added.push(child); + }); + + return files; +}; + +watcher.added = function(absolutePath) { + var files = { + added: [], + changed: [], + removed: [], + }; + + this.reset(); + + if (Object.keys(graph.index).indexOf(absolutePath) === -1) { + files.added.push(absolutePath); + } + + graph.visitDescendents(absolutePath, function(child) { + files.added.push(child); + }); + + return files; +}; + +watcher.removed = function(absolutePath) { + var files = { + added: [], + changed: [], + removed: [], + }; + + graph.visitAncestors(absolutePath, function(parent) { + if (path.basename(parent)[0] !== '_') { + files.changed.push(parent); + } + }); + + if (Object.keys(graph.index).indexOf(absolutePath) !== -1) { + files.removed.push(absolutePath); + } + + this.reset(); + + return files; +}; + +module.exports = watcher; diff --git a/node_modules/node-sass/node_modules/.bin/node-gyp b/node_modules/node-sass/node_modules/.bin/node-gyp new file mode 100644 index 0000000..82b5d7f --- /dev/null +++ b/node_modules/node-sass/node_modules/.bin/node-gyp @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../node-gyp/bin/node-gyp.js" "$@" + ret=$? +else + node "$basedir/../../../node-gyp/bin/node-gyp.js" "$@" + ret=$? +fi +exit $ret diff --git a/node_modules/node-sass/node_modules/.bin/node-gyp.cmd b/node_modules/node-sass/node_modules/.bin/node-gyp.cmd new file mode 100644 index 0000000..70c6dd0 --- /dev/null +++ b/node_modules/node-sass/node_modules/.bin/node-gyp.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\node-gyp\bin\node-gyp.js" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\node-gyp\bin\node-gyp.js" %* +) \ No newline at end of file diff --git a/node_modules/node-sass/node_modules/.bin/sassgraph b/node_modules/node-sass/node_modules/.bin/sassgraph new file mode 100644 index 0000000..cb28ffb --- /dev/null +++ b/node_modules/node-sass/node_modules/.bin/sassgraph @@ -0,0 +1,15 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*) basedir=`cygpath -w "$basedir"`;; +esac + +if [ -x "$basedir/node" ]; then + "$basedir/node" "$basedir/../../../sass-graph/bin/sassgraph" "$@" + ret=$? +else + node "$basedir/../../../sass-graph/bin/sassgraph" "$@" + ret=$? 
+fi +exit $ret diff --git a/node_modules/node-sass/node_modules/.bin/sassgraph.cmd b/node_modules/node-sass/node_modules/.bin/sassgraph.cmd new file mode 100644 index 0000000..1f0bf54 --- /dev/null +++ b/node_modules/node-sass/node_modules/.bin/sassgraph.cmd @@ -0,0 +1,7 @@ +@IF EXIST "%~dp0\node.exe" ( + "%~dp0\node.exe" "%~dp0\..\..\..\sass-graph\bin\sassgraph" %* +) ELSE ( + @SETLOCAL + @SET PATHEXT=%PATHEXT:;.JS;=;% + node "%~dp0\..\..\..\sass-graph\bin\sassgraph" %* +) \ No newline at end of file diff --git a/node_modules/node-sass/node_modules/chalk/index.d.ts b/node_modules/node-sass/node_modules/chalk/index.d.ts new file mode 100644 index 0000000..9cd88f3 --- /dev/null +++ b/node_modules/node-sass/node_modules/chalk/index.d.ts @@ -0,0 +1,415 @@ +/** +Basic foreground colors. + +[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support) +*/ +declare type ForegroundColor = + | 'black' + | 'red' + | 'green' + | 'yellow' + | 'blue' + | 'magenta' + | 'cyan' + | 'white' + | 'gray' + | 'grey' + | 'blackBright' + | 'redBright' + | 'greenBright' + | 'yellowBright' + | 'blueBright' + | 'magentaBright' + | 'cyanBright' + | 'whiteBright'; + +/** +Basic background colors. + +[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support) +*/ +declare type BackgroundColor = + | 'bgBlack' + | 'bgRed' + | 'bgGreen' + | 'bgYellow' + | 'bgBlue' + | 'bgMagenta' + | 'bgCyan' + | 'bgWhite' + | 'bgGray' + | 'bgGrey' + | 'bgBlackBright' + | 'bgRedBright' + | 'bgGreenBright' + | 'bgYellowBright' + | 'bgBlueBright' + | 'bgMagentaBright' + | 'bgCyanBright' + | 'bgWhiteBright'; + +/** +Basic colors. + +[More colors here.](https://github.com/chalk/chalk/blob/master/readme.md#256-and-truecolor-color-support) +*/ +declare type Color = ForegroundColor | BackgroundColor; + +declare type Modifiers = + | 'reset' + | 'bold' + | 'dim' + | 'italic' + | 'underline' + | 'inverse' + | 'hidden' + | 'strikethrough' + | 'visible'; + +declare namespace chalk { + /** + Levels: + - `0` - All colors disabled. + - `1` - Basic 16 colors support. + - `2` - ANSI 256 colors support. + - `3` - Truecolor 16 million colors support. + */ + type Level = 0 | 1 | 2 | 3; + + interface Options { + /** + Specify the color support for Chalk. + + By default, color support is automatically detected based on the environment. + + Levels: + - `0` - All colors disabled. + - `1` - Basic 16 colors support. + - `2` - ANSI 256 colors support. + - `3` - Truecolor 16 million colors support. + */ + level?: Level; + } + + /** + Return a new Chalk instance. + */ + type Instance = new (options?: Options) => Chalk; + + /** + Detect whether the terminal supports color. + */ + interface ColorSupport { + /** + The color level used by Chalk. + */ + level: Level; + + /** + Return whether Chalk supports basic 16 colors. + */ + hasBasic: boolean; + + /** + Return whether Chalk supports ANSI 256 colors. + */ + has256: boolean; + + /** + Return whether Chalk supports Truecolor 16 million colors. + */ + has16m: boolean; + } + + interface ChalkFunction { + /** + Use a template string. 
+ + @remarks Template literals are unsupported for nested calls (see [issue #341](https://github.com/chalk/chalk/issues/341)) + + @example + ``` + import chalk = require('chalk'); + + log(chalk` + CPU: {red ${cpu.totalPercent}%} + RAM: {green ${ram.used / ram.total * 100}%} + DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%} + `); + ``` + + @example + ``` + import chalk = require('chalk'); + + log(chalk.red.bgBlack`2 + 3 = {bold ${2 + 3}}`) + ``` + */ + (text: TemplateStringsArray, ...placeholders: unknown[]): string; + + (...text: unknown[]): string; + } + + interface Chalk extends ChalkFunction { + /** + Return a new Chalk instance. + */ + Instance: Instance; + + /** + The color support for Chalk. + + By default, color support is automatically detected based on the environment. + + Levels: + - `0` - All colors disabled. + - `1` - Basic 16 colors support. + - `2` - ANSI 256 colors support. + - `3` - Truecolor 16 million colors support. + */ + level: Level; + + /** + Use HEX value to set text color. + + @param color - Hexadecimal value representing the desired color. + + @example + ``` + import chalk = require('chalk'); + + chalk.hex('#DEADED'); + ``` + */ + hex(color: string): Chalk; + + /** + Use keyword color value to set text color. + + @param color - Keyword value representing the desired color. + + @example + ``` + import chalk = require('chalk'); + + chalk.keyword('orange'); + ``` + */ + keyword(color: string): Chalk; + + /** + Use RGB values to set text color. + */ + rgb(red: number, green: number, blue: number): Chalk; + + /** + Use HSL values to set text color. + */ + hsl(hue: number, saturation: number, lightness: number): Chalk; + + /** + Use HSV values to set text color. + */ + hsv(hue: number, saturation: number, value: number): Chalk; + + /** + Use HWB values to set text color. + */ + hwb(hue: number, whiteness: number, blackness: number): Chalk; + + /** + Use a [Select/Set Graphic Rendition](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters) (SGR) [color code number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit) to set text color. + + 30 <= code && code < 38 || 90 <= code && code < 98 + For example, 31 for red, 91 for redBright. + */ + ansi(code: number): Chalk; + + /** + Use a [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set text color. + */ + ansi256(index: number): Chalk; + + /** + Use HEX value to set background color. + + @param color - Hexadecimal value representing the desired color. + + @example + ``` + import chalk = require('chalk'); + + chalk.bgHex('#DEADED'); + ``` + */ + bgHex(color: string): Chalk; + + /** + Use keyword color value to set background color. + + @param color - Keyword value representing the desired color. + + @example + ``` + import chalk = require('chalk'); + + chalk.bgKeyword('orange'); + ``` + */ + bgKeyword(color: string): Chalk; + + /** + Use RGB values to set background color. + */ + bgRgb(red: number, green: number, blue: number): Chalk; + + /** + Use HSL values to set background color. + */ + bgHsl(hue: number, saturation: number, lightness: number): Chalk; + + /** + Use HSV values to set background color. + */ + bgHsv(hue: number, saturation: number, value: number): Chalk; + + /** + Use HWB values to set background color. 
+ */ + bgHwb(hue: number, whiteness: number, blackness: number): Chalk; + + /** + Use a [Select/Set Graphic Rendition](https://en.wikipedia.org/wiki/ANSI_escape_code#SGR_parameters) (SGR) [color code number](https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit) to set background color. + + 30 <= code && code < 38 || 90 <= code && code < 98 + For example, 31 for red, 91 for redBright. + Use the foreground code, not the background code (for example, not 41, nor 101). + */ + bgAnsi(code: number): Chalk; + + /** + Use a [8-bit unsigned number](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit) to set background color. + */ + bgAnsi256(index: number): Chalk; + + /** + Modifier: Resets the current color chain. + */ + readonly reset: Chalk; + + /** + Modifier: Make text bold. + */ + readonly bold: Chalk; + + /** + Modifier: Emitting only a small amount of light. + */ + readonly dim: Chalk; + + /** + Modifier: Make text italic. (Not widely supported) + */ + readonly italic: Chalk; + + /** + Modifier: Make text underline. (Not widely supported) + */ + readonly underline: Chalk; + + /** + Modifier: Inverse background and foreground colors. + */ + readonly inverse: Chalk; + + /** + Modifier: Prints the text, but makes it invisible. + */ + readonly hidden: Chalk; + + /** + Modifier: Puts a horizontal line through the center of the text. (Not widely supported) + */ + readonly strikethrough: Chalk; + + /** + Modifier: Prints the text only when Chalk has a color support level > 0. + Can be useful for things that are purely cosmetic. + */ + readonly visible: Chalk; + + readonly black: Chalk; + readonly red: Chalk; + readonly green: Chalk; + readonly yellow: Chalk; + readonly blue: Chalk; + readonly magenta: Chalk; + readonly cyan: Chalk; + readonly white: Chalk; + + /* + Alias for `blackBright`. + */ + readonly gray: Chalk; + + /* + Alias for `blackBright`. + */ + readonly grey: Chalk; + + readonly blackBright: Chalk; + readonly redBright: Chalk; + readonly greenBright: Chalk; + readonly yellowBright: Chalk; + readonly blueBright: Chalk; + readonly magentaBright: Chalk; + readonly cyanBright: Chalk; + readonly whiteBright: Chalk; + + readonly bgBlack: Chalk; + readonly bgRed: Chalk; + readonly bgGreen: Chalk; + readonly bgYellow: Chalk; + readonly bgBlue: Chalk; + readonly bgMagenta: Chalk; + readonly bgCyan: Chalk; + readonly bgWhite: Chalk; + + /* + Alias for `bgBlackBright`. + */ + readonly bgGray: Chalk; + + /* + Alias for `bgBlackBright`. + */ + readonly bgGrey: Chalk; + + readonly bgBlackBright: Chalk; + readonly bgRedBright: Chalk; + readonly bgGreenBright: Chalk; + readonly bgYellowBright: Chalk; + readonly bgBlueBright: Chalk; + readonly bgMagentaBright: Chalk; + readonly bgCyanBright: Chalk; + readonly bgWhiteBright: Chalk; + } +} + +/** +Main Chalk object that allows to chain styles together. +Call the last one as a method with a string argument. +Order doesn't matter, and later styles take precedent in case of a conflict. +This simply means that `chalk.red.yellow.green` is equivalent to `chalk.green`. 
+*/ +declare const chalk: chalk.Chalk & chalk.ChalkFunction & { + supportsColor: chalk.ColorSupport | false; + Level: chalk.Level; + Color: Color; + ForegroundColor: ForegroundColor; + BackgroundColor: BackgroundColor; + Modifiers: Modifiers; + stderr: chalk.Chalk & {supportsColor: chalk.ColorSupport | false}; +}; + +export = chalk; diff --git a/node_modules/node-sass/node_modules/chalk/license b/node_modules/node-sass/node_modules/chalk/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/node-sass/node_modules/chalk/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/node-sass/node_modules/chalk/package.json b/node_modules/node-sass/node_modules/chalk/package.json new file mode 100644 index 0000000..47c23f2 --- /dev/null +++ b/node_modules/node-sass/node_modules/chalk/package.json @@ -0,0 +1,68 @@ +{ + "name": "chalk", + "version": "4.1.2", + "description": "Terminal string styling done right", + "license": "MIT", + "repository": "chalk/chalk", + "funding": "https://github.com/chalk/chalk?sponsor=1", + "main": "source", + "engines": { + "node": ">=10" + }, + "scripts": { + "test": "xo && nyc ava && tsd", + "bench": "matcha benchmark.js" + }, + "files": [ + "source", + "index.d.ts" + ], + "keywords": [ + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "str", + "ansi", + "style", + "styles", + "tty", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "devDependencies": { + "ava": "^2.4.0", + "coveralls": "^3.0.7", + "execa": "^4.0.0", + "import-fresh": "^3.1.0", + "matcha": "^0.7.0", + "nyc": "^15.0.0", + "resolve-from": "^5.0.0", + "tsd": "^0.7.4", + "xo": "^0.28.2" + }, + "xo": { + "rules": { + "unicorn/prefer-string-slice": "off", + "unicorn/prefer-includes": "off", + "@typescript-eslint/member-ordering": "off", + "no-redeclare": "off", + "unicorn/string-content": "off", + "unicorn/better-regex": "off" + } + } +} diff --git a/node_modules/node-sass/node_modules/chalk/readme.md b/node_modules/node-sass/node_modules/chalk/readme.md new file mode 100644 index 0000000..a055d21 --- /dev/null +++ b/node_modules/node-sass/node_modules/chalk/readme.md @@ -0,0 +1,341 @@ +

+# Chalk
+
+> Terminal string styling done right
+
+[![Build Status](https://travis-ci.org/chalk/chalk.svg?branch=master)](https://travis-ci.org/chalk/chalk) [![Coverage Status](https://coveralls.io/repos/github/chalk/chalk/badge.svg?branch=master)](https://coveralls.io/github/chalk/chalk?branch=master) [![npm dependents](https://badgen.net/npm/dependents/chalk)](https://www.npmjs.com/package/chalk?activeTab=dependents) [![Downloads](https://badgen.net/npm/dt/chalk)](https://www.npmjs.com/package/chalk) [![](https://img.shields.io/badge/unicorn-approved-ff69b4.svg)](https://www.youtube.com/watch?v=9auOCbH5Ns4) [![XO code style](https://img.shields.io/badge/code_style-XO-5ed9c7.svg)](https://github.com/xojs/xo) ![TypeScript-ready](https://img.shields.io/npm/types/chalk.svg) [![run on repl.it](https://repl.it/badge/github/chalk/chalk)](https://repl.it/github/chalk/chalk)
+
+ +## Highlights + +- Expressive API +- Highly performant +- Ability to nest styles +- [256/Truecolor color support](#256-and-truecolor-color-support) +- Auto-detects color support +- Doesn't extend `String.prototype` +- Clean and focused +- Actively maintained +- [Used by ~50,000 packages](https://www.npmjs.com/browse/depended/chalk) as of January 1, 2020 + +## Install + +```console +$ npm install chalk +``` + +## Usage + +```js +const chalk = require('chalk'); + +console.log(chalk.blue('Hello world!')); +``` + +Chalk comes with an easy to use composable API where you just chain and nest the styles you want. + +```js +const chalk = require('chalk'); +const log = console.log; + +// Combine styled and normal strings +log(chalk.blue('Hello') + ' World' + chalk.red('!')); + +// Compose multiple styles using the chainable API +log(chalk.blue.bgRed.bold('Hello world!')); + +// Pass in multiple arguments +log(chalk.blue('Hello', 'World!', 'Foo', 'bar', 'biz', 'baz')); + +// Nest styles +log(chalk.red('Hello', chalk.underline.bgBlue('world') + '!')); + +// Nest styles of the same type even (color, underline, background) +log(chalk.green( + 'I am a green line ' + + chalk.blue.underline.bold('with a blue substring') + + ' that becomes green again!' +)); + +// ES2015 template literal +log(` +CPU: ${chalk.red('90%')} +RAM: ${chalk.green('40%')} +DISK: ${chalk.yellow('70%')} +`); + +// ES2015 tagged template literal +log(chalk` +CPU: {red ${cpu.totalPercent}%} +RAM: {green ${ram.used / ram.total * 100}%} +DISK: {rgb(255,131,0) ${disk.used / disk.total * 100}%} +`); + +// Use RGB colors in terminal emulators that support it. +log(chalk.keyword('orange')('Yay for orange colored text!')); +log(chalk.rgb(123, 45, 67).underline('Underlined reddish color')); +log(chalk.hex('#DEADED').bold('Bold gray!')); +``` + +Easily define your own themes: + +```js +const chalk = require('chalk'); + +const error = chalk.bold.red; +const warning = chalk.keyword('orange'); + +console.log(error('Error!')); +console.log(warning('Warning!')); +``` + +Take advantage of console.log [string substitution](https://nodejs.org/docs/latest/api/console.html#console_console_log_data_args): + +```js +const name = 'Sindre'; +console.log(chalk.green('Hello %s'), name); +//=> 'Hello Sindre' +``` + +## API + +### chalk.`