netlify-cli: 6.13.2 -> 12.2.4

Updates netlify-cli to 12.2.4 and switches the build to Node.js 16.
According to the upstream issue tracker there appear to be problems
running this tool on Node.js 18, so we stick with Node.js 16 for now.
panda2134 2022-11-22 12:18:51 +08:00
parent 63a66fc2ff
commit ae83b9c7c6
6 changed files with 3599 additions and 3865 deletions
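
For reference, a minimal sketch of how the regenerated composition is meant to be consumed with the Node.js 16 pin. The <nixpkgs> import and the relative path are illustrative assumptions; the nodejs-16_x attribute and the package output come from the files changed below.

# Sketch only, not part of this commit.
let
  pkgs = import <nixpkgs> { };
  nodePackages = import ./composition.nix {
    inherit pkgs;
    nodejs = pkgs."nodejs-16_x";  # matches the new default in composition.nix
  };
in
nodePackages.package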

View file

@@ -1,12 +1,12 @@
# This file has been generated by node2nix 1.9.0. Do not edit!
# This file has been generated by node2nix 1.11.1. Do not edit!
{pkgs ? import <nixpkgs> {
inherit system;
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-14_x"}:
}, system ? builtins.currentSystem, nodejs ? pkgs."nodejs-16_x"}:
let
nodeEnv = import ./node-env.nix {
inherit (pkgs) stdenv lib python2 runCommand writeTextFile;
inherit (pkgs) stdenv lib python2 runCommand writeTextFile writeShellScript;
inherit pkgs nodejs;
libtool = if pkgs.stdenv.isDarwin then pkgs.darwin.cctools else null;
};

View file

@@ -1,12 +1,18 @@
{ callPackage, fetchFromGitHub, lib, pkgs }:
let
nodePackages = import ./composition.nix { inherit pkgs; };
meta = (lib.importJSON ./netlify-cli.json);
in
nodePackages.package.override {
preRebuild = ''
export ESBUILD_BINARY_PATH="${pkgs.esbuild_netlify}/bin/esbuild"
'';
src = fetchFromGitHub (lib.importJSON ./netlify-cli.json);
src = fetchFromGitHub {
owner = meta.owner;
repo = meta.repo;
rev = meta.rev;
sha256 = meta.sha256;
};
bypassCache = true;
reconstructLock = true;
passthru.tests.test = callPackage ./test.nix { };
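
The pin file is now read field by field instead of being passed to fetchFromGitHub wholesale. As a standalone expression (illustrative only; the <nixpkgs> import and relative path are assumptions), the fetch amounts to:

# Sketch: the same four fields of netlify-cli.json that default.nix and
# generate.sh consume, handed to fetchFromGitHub explicitly.
let
  pkgs = import <nixpkgs> { };
  meta = pkgs.lib.importJSON ./netlify-cli.json;
in
pkgs.fetchFromGitHub {
  inherit (meta) owner repo rev sha256;
}

The inherit (meta) form is equivalent to spelling out owner = meta.owner; and so on, as the diff above does.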

View file

@@ -2,7 +2,7 @@
set -eu -o pipefail
cd "$( dirname "${BASH_SOURCE[0]}" )"
rm -f ./node-env.nix
src="$(nix-build --expr '(import ../../../.. {}).fetchFromGitHub (lib.importJSON ./netlify-cli.json)')"
src="$(nix-build --expr 'let pkgs = import ../../../.. {}; lib = import ../../../../lib; meta = (lib.importJSON ./netlify-cli.json); in pkgs.fetchFromGitHub {owner = meta.owner; repo = meta.repo; rev = meta.rev; sha256 = meta.sha256;}')"
echo $src
node2nix \
--input $src/package.json \
@@ -10,5 +10,5 @@ node2nix \
--output node-packages.nix \
--composition composition.nix \
--node-env node-env.nix \
--nodejs-14 \
--nodejs-16 \
;

View file

@@ -1,7 +1,9 @@
{
"owner": "netlify",
"repo": "cli",
"rev": "a50e410fddda92d3f3f256321eddefb8cb8ba6e1",
"sha256": "sisX58I5UxxEPGCh5JGtQHw72A4+pLuENpBB9WKRTZc=",
"fetchSubmodules": false
"rev": "6c7e8c9a4db4e2e408f65e6098a194497944e306",
"sha256": "YMnQrurZDJtfeHBCIzy6vToGHnqtdRGvWFPX5RcWyPg=",
"fetchSubmodules": false,
"leaveDotGit": false,
"deepClone": false
}
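
When this pin is bumped, a common workflow (an assumption, not something this commit prescribes) is to set a placeholder hash, let the failing fetch print the real one, and only then re-run generate.sh:

# Hypothetical update step: edit rev, keep a fake hash, build once, then copy
# the hash Nix reports back into netlify-cli.json.
let
  pkgs = import <nixpkgs> { };
in
pkgs.fetchFromGitHub {
  owner = "netlify";
  repo = "cli";
  rev = "vX.Y.Z";                # placeholder: the new tag or commit
  sha256 = pkgs.lib.fakeSha256;  # placeholder: replace with the reported hash
}

Afterwards ./generate.sh regenerates node-packages.nix against the new source.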

View file

@@ -1,6 +1,6 @@
# This file originates from node2nix
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile}:
{lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
let
# Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
@@ -40,36 +40,22 @@ let
'';
};
includeDependencies = {dependencies}:
lib.optionalString (dependencies != [])
(lib.concatMapStrings (dependency:
''
# Bundle the dependencies of the package
mkdir -p node_modules
cd node_modules
# Common shell logic
installPackage = writeShellScript "install-package" ''
installPackage() {
local packageName=$1 src=$2
# Only include dependencies if they don't exist. They may also be bundled in the package.
if [ ! -e "${dependency.name}" ]
then
${composePackage dependency}
fi
local strippedName
cd ..
''
) dependencies);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
DIR=$(pwd)
local DIR=$PWD
cd $TMPDIR
unpackFile ${src}
unpackFile $src
# Make the base dir in which the target dependency resides first
mkdir -p "$(dirname "$DIR/${packageName}")"
mkdir -p "$(dirname "$DIR/$packageName")"
if [ -f "${src}" ]
if [ -f "$src" ]
then
# Figure out what directory has been unpacked
packageDir="$(find . -maxdepth 1 -type d | tail -1)"
@@ -79,28 +65,53 @@ let
chmod -R u+w "$packageDir"
# Move the extracted tarball into the output folder
mv "$packageDir" "$DIR/${packageName}"
elif [ -d "${src}" ]
mv "$packageDir" "$DIR/$packageName"
elif [ -d "$src" ]
then
# Get a stripped name (without hash) of the source directory.
# On old nixpkgs it's already set internally.
if [ -z "$strippedName" ]
then
strippedName="$(stripHash ${src})"
strippedName="$(stripHash $src)"
fi
# Restore write permissions to make building work
chmod -R u+w "$strippedName"
# Move the extracted directory into the output folder
mv "$strippedName" "$DIR/${packageName}"
mv "$strippedName" "$DIR/$packageName"
fi
# Unset the stripped name to not confuse the next unpack step
unset strippedName
# Change to the package directory to install dependencies
cd "$DIR/$packageName"
}
'';
# Include the dependencies of the package
cd "$DIR/${packageName}"
# Bundle the dependencies of the package
#
# Only include dependencies if they don't exist. They may also be bundled in the package.
includeDependencies = {dependencies}:
lib.optionalString (dependencies != []) (
''
mkdir -p node_modules
cd node_modules
''
+ (lib.concatMapStrings (dependency:
''
if [ ! -e "${dependency.packageName}" ]; then
${composePackage dependency}
fi
''
) dependencies)
+ ''
cd ..
''
);
# Recursively composes the dependencies of a package
composePackage = { name, packageName, src, dependencies ? [], ... }@args:
builtins.addErrorContext "while evaluating node package '${packageName}'" ''
installPackage "${packageName}" "${src}"
${includeDependencies { inherit dependencies; }}
cd ..
${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
@@ -154,7 +165,11 @@ let
if(process.argv[2] == "development") {
replaceDependencies(packageObj.devDependencies);
}
else {
packageObj.devDependencies = {};
}
replaceDependencies(packageObj.optionalDependencies);
replaceDependencies(packageObj.peerDependencies);
/* Write the fixed package.json file */
fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
@@ -246,8 +261,8 @@ let
var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
if(![1, 2].includes(packageLock.lockfileVersion)) {
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
process.exit(1);
}
if(packageLock.dependencies !== undefined) {
@@ -259,7 +274,7 @@ let
# Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
reconstructPackageLock = writeTextFile {
name = "addintegrityfields.js";
name = "reconstructpackagelock.js";
text = ''
var fs = require('fs');
var path = require('path');
@@ -269,25 +284,43 @@ let
var lockObj = {
name: packageObj.name,
version: packageObj.version,
lockfileVersion: 1,
lockfileVersion: 2,
requires: true,
packages: {
"": {
name: packageObj.name,
version: packageObj.version,
license: packageObj.license,
bin: packageObj.bin,
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
}
},
dependencies: {}
};
function augmentPackageJSON(filePath, dependencies) {
function augmentPackageJSON(filePath, packages, dependencies) {
var packageJSON = path.join(filePath, "package.json");
if(fs.existsSync(packageJSON)) {
var packageObj = JSON.parse(fs.readFileSync(packageJSON));
packages[filePath] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: packageObj.dependencies,
engines: packageObj.engines,
optionalDependencies: packageObj.optionalDependencies
};
dependencies[packageObj.name] = {
version: packageObj.version,
integrity: "sha1-000000000000000000000000000=",
dependencies: {}
};
processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
processDependencies(path.join(filePath, "node_modules"), packages, dependencies[packageObj.name].dependencies);
}
}
function processDependencies(dir, dependencies) {
function processDependencies(dir, packages, dependencies) {
if(fs.existsSync(dir)) {
var files = fs.readdirSync(dir);
@@ -303,23 +336,84 @@ let
pkgFiles.forEach(function(entry) {
if(stats.isDirectory()) {
var pkgFilePath = path.join(filePath, entry);
augmentPackageJSON(pkgFilePath, dependencies);
augmentPackageJSON(pkgFilePath, packages, dependencies);
}
});
} else {
augmentPackageJSON(filePath, dependencies);
augmentPackageJSON(filePath, packages, dependencies);
}
}
});
}
}
processDependencies("node_modules", lockObj.dependencies);
processDependencies("node_modules", lockObj.packages, lockObj.dependencies);
fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
'';
};
# Script that links bins defined in package.json to the node_modules bin directory
# NPM does not do this for top-level packages itself anymore as of v7
linkBinsScript = writeTextFile {
name = "linkbins.js";
text = ''
var fs = require('fs');
var path = require('path');
var packageObj = JSON.parse(fs.readFileSync("package.json"));
var nodeModules = Array(packageObj.name.split("/").length).fill("..").join(path.sep);
if(packageObj.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
if(typeof packageObj.bin == "object") {
Object.keys(packageObj.bin).forEach(function(exe) {
if(fs.existsSync(packageObj.bin[exe])) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin[exe]),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
else {
if(fs.existsSync(packageObj.bin)) {
console.log("linking bin '" + packageObj.bin + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.bin),
path.join(nodeModules, ".bin", packageObj.name.split("/").pop())
);
}
else {
console.log("skipping non-existent bin '" + packageObj.bin + "'");
}
}
}
else if(packageObj.directories !== undefined && packageObj.directories.bin !== undefined) {
fs.mkdirSync(path.join(nodeModules, ".bin"))
fs.readdirSync(packageObj.directories.bin).forEach(function(exe) {
if(fs.existsSync(path.join(packageObj.directories.bin, exe))) {
console.log("linking bin '" + exe + "'");
fs.symlinkSync(
path.join("..", packageObj.name, packageObj.directories.bin, exe),
path.join(nodeModules, ".bin", exe)
);
}
else {
console.log("skipping non-existent bin '" + exe + "'");
}
})
}
'';
};
prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
let
forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
@@ -366,20 +460,25 @@ let
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
runHook postRebuild
if [ "''${dontNpmInstall-}" != "1" ]
then
# NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
rm -f npm-shrinkwrap.json
npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
npm ${forceOfflineFlag} --nodedir=${nodeSources} --no-bin-links --ignore-scripts ${npmFlags} ${lib.optionalString production "--production"} install
fi
# Link executables defined in package.json
node ${linkBinsScript}
'';
# Builds and composes an NPM package including all its dependencies
buildNodePackage =
{ name
, packageName
, version
, version ? null
, dependencies ? []
, buildInputs ? []
, production ? true
@@ -398,7 +497,7 @@ let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
in
stdenv.mkDerivation ({
name = "${name}-${version}";
name = "${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
++ lib.optional (stdenv.isDarwin) libtool
@@ -415,6 +514,8 @@ let
passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
# Create and enter a root node_modules/ folder
mkdir -p $out/lib/node_modules
cd $out/lib/node_modules
@@ -428,6 +529,14 @@ let
if [ -d "$out/lib/node_modules/.bin" ]
then
ln -s $out/lib/node_modules/.bin $out/bin
# Patch the shebang lines of all the executables
ls $out/bin/* | while read i
do
file="$(readlink -f "$i")"
chmod u+rwx "$file"
patchShebangs "$file"
done
fi
# Create symlinks to the deployed manual page folders, if applicable
@@ -458,7 +567,7 @@ let
buildNodeDependencies =
{ name
, packageName
, version
, version ? null
, src
, dependencies ? []
, buildInputs ? []
@@ -476,7 +585,7 @@ let
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
in
stdenv.mkDerivation ({
name = "node-dependencies-${name}-${version}";
name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ tarWrapper python nodejs ]
++ lib.optional (stdenv.isLinux) utillinux
@@ -492,6 +601,8 @@ let
passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
installPhase = ''
source ${installPackage}
mkdir -p $out/${packageName}
cd $out/${packageName}
@@ -504,6 +615,7 @@ let
if [ -f ${src}/package-lock.json ]
then
cp ${src}/package-lock.json .
chmod 644 package-lock.json
fi
''}
@@ -526,7 +638,7 @@ let
buildNodeShell =
{ name
, packageName
, version
, version ? null
, src
, dependencies ? []
, buildInputs ? []
@@ -542,9 +654,10 @@ let
let
nodeDependencies = buildNodeDependencies args;
extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
in
stdenv.mkDerivation {
name = "node-shell-${name}-${version}";
stdenv.mkDerivation ({
name = "node-shell-${name}${if version == null then "" else "-${version}"}";
buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
buildCommand = ''
@@ -563,7 +676,7 @@ let
export NODE_PATH=${nodeDependencies}/lib/node_modules
export PATH="${nodeDependencies}/bin:$PATH"
'';
};
} // extraArgs);
in
{
buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;

File diff suppressed because it is too large