ln-ws-proxy

websockets to lightning proxy
git clone git://jb55.com/ln-ws-proxy

node-env.nix (21290B)


      1 # This file originates from node2nix
      2 
      3 {lib, stdenv, nodejs, python2, pkgs, libtool, runCommand, writeTextFile, writeShellScript}:
      4 
      5 let
      6   # Workaround to cope with utillinux in Nixpkgs 20.09 and util-linux in Nixpkgs master
      7   utillinux = if pkgs ? utillinux then pkgs.utillinux else pkgs.util-linux;
      8 
      9   python = if nodejs ? python then nodejs.python else python2;
     10 
     11   # Create a tar wrapper that filters all the 'Ignoring unknown extended header keyword' noise
     12   tarWrapper = runCommand "tarWrapper" {} ''
     13     mkdir -p $out/bin
     14 
     15     cat > $out/bin/tar <<EOF
     16     #! ${stdenv.shell} -e
     17     $(type -p tar) "\$@" --warning=no-unknown-keyword --delay-directory-restore
     18     EOF
     19 
     20     chmod +x $out/bin/tar
     21   '';
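
  # Because this derivation appears in the buildInputs of the builders below, its
  # bin/ directory ends up on PATH ahead of the default tar, so the tar invocations
  # made by unpackFile go through the wrapper and the extended-header warnings are
  # suppressed.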
     22 
     23   # Function that generates a TGZ file from a NPM project
     24   buildNodeSourceDist =
     25     { name, version, src, ... }:
     26 
     27     stdenv.mkDerivation {
     28       name = "node-tarball-${name}-${version}";
     29       inherit src;
     30       buildInputs = [ nodejs ];
     31       buildPhase = ''
     32         export HOME=$TMPDIR
     33         tgzFile=$(npm pack | tail -n 1) # Hooks to the pack command will add output (https://docs.npmjs.com/misc/scripts)
     34       '';
     35       installPhase = ''
     36         mkdir -p $out/tarballs
     37         mv $tgzFile $out/tarballs
     38         mkdir -p $out/nix-support
     39         echo "file source-dist $out/tarballs/$tgzFile" >> $out/nix-support/hydra-build-products
     40       '';
     41     };
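
  # A minimal usage sketch (not referenced anywhere else in this file): the name,
  # version and ./. source path below are hypothetical placeholders, meant only to
  # show the attributes buildNodeSourceDist expects. The result contains the tarball
  # produced by `npm pack` under $out/tarballs.
  exampleSourceDist = buildNodeSourceDist {
    name = "example-package";
    version = "1.0.0";
    src = ./.;
  };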
     42 
     43   # Common shell logic
     44   installPackage = writeShellScript "install-package" ''
     45     installPackage() {
     46       local packageName=$1 src=$2
     47 
     48       local strippedName
     49 
     50       local DIR=$PWD
     51       cd $TMPDIR
     52 
     53       unpackFile $src
     54 
     55       # First create the base directory in which the target dependency resides
     56       mkdir -p "$(dirname "$DIR/$packageName")"
     57 
     58       if [ -f "$src" ]
     59       then
     60           # Figure out what directory has been unpacked
     61           packageDir="$(find . -maxdepth 1 -type d | tail -1)"
     62 
     63           # Restore write permissions to make building work
     64           find "$packageDir" -type d -exec chmod u+x {} \;
     65           chmod -R u+w "$packageDir"
     66 
     67           # Move the extracted tarball into the output folder
     68           mv "$packageDir" "$DIR/$packageName"
     69       elif [ -d "$src" ]
     70       then
     71           # Get a stripped name (without hash) of the source directory.
     72           # On old nixpkgs it's already set internally.
     73           if [ -z "$strippedName" ]
     74           then
     75               strippedName="$(stripHash $src)"
     76           fi
     77 
     78           # Restore write permissions to make building work
     79           chmod -R u+w "$strippedName"
     80 
     81           # Move the extracted directory into the output folder
     82           mv "$strippedName" "$DIR/$packageName"
     83       fi
     84 
     85       # Change to the package directory to install dependencies
     86       cd "$DIR/$packageName"
     87     }
     88   '';
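
  # The shell function above is sourced by the install phases below and is invoked
  # through the snippets generated by composePackage, e.g. (hypothetical package
  # name and store path):
  #
  #   installPackage "left-pad" "/nix/store/<hash>-left-pad-1.3.0.tgz"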
     89 
     90   # Bundle the dependencies of the package
     91   #
     92   # Only include dependencies that are not already present; they may come bundled with the package itself.
     93   includeDependencies = {dependencies}:
     94     lib.optionalString (dependencies != []) (
     95       ''
     96         mkdir -p node_modules
     97         cd node_modules
     98       ''
     99       + (lib.concatMapStrings (dependency:
    100         ''
    101           if [ ! -e "${dependency.packageName}" ]; then
    102               ${composePackage dependency}
    103           fi
    104         ''
    105       ) dependencies)
    106       + ''
    107         cd ..
    108       ''
    109     );
    110 
    111   # Recursively composes the dependencies of a package
    112   composePackage = { name, packageName, src, dependencies ? [], ... }@args:
    113     builtins.addErrorContext "while evaluating node package '${packageName}'" ''
    114       installPackage "${packageName}" "${src}"
    115       ${includeDependencies { inherit dependencies; }}
    116       cd ..
    117       ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
    118     '';
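
  # composePackage and includeDependencies are mutually recursive: composePackage
  # unpacks one package and then delegates to includeDependencies, which descends
  # into node_modules/ and composes every dependency that is not already bundled.
  # The extra "cd .." for names starting with "@" compensates for the additional
  # scope directory that scoped packages (e.g. "@scope/name") are unpacked into.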
    119 
    120   pinpointDependencies = {dependencies, production}:
    121     let
    122       pinpointDependenciesFromPackageJSON = writeTextFile {
    123         name = "pinpointDependencies.js";
    124         text = ''
    125           var fs = require('fs');
    126           var path = require('path');
    127 
    128           function resolveDependencyVersion(location, name) {
    129               if(location == process.env['NIX_STORE']) {
    130                   return null;
    131               } else {
    132                   var dependencyPackageJSON = path.join(location, "node_modules", name, "package.json");
    133 
    134                   if(fs.existsSync(dependencyPackageJSON)) {
    135                       var dependencyPackageObj = JSON.parse(fs.readFileSync(dependencyPackageJSON));
    136 
    137                       if(dependencyPackageObj.name == name) {
    138                           return dependencyPackageObj.version;
    139                       }
    140                   } else {
    141                       return resolveDependencyVersion(path.resolve(location, ".."), name);
    142                   }
    143               }
    144           }
    145 
    146           function replaceDependencies(dependencies) {
    147               if(typeof dependencies == "object" && dependencies !== null) {
    148                   for(var dependency in dependencies) {
    149                       var resolvedVersion = resolveDependencyVersion(process.cwd(), dependency);
    150 
    151                       if(resolvedVersion === null) {
    152                           process.stderr.write("WARNING: cannot pinpoint dependency: "+dependency+", context: "+process.cwd()+"\n");
    153                       } else {
    154                           dependencies[dependency] = resolvedVersion;
    155                       }
    156                   }
    157               }
    158           }
    159 
    160           /* Read the package.json configuration */
    161           var packageObj = JSON.parse(fs.readFileSync('./package.json'));
    162 
    163           /* Pinpoint all dependencies */
    164           replaceDependencies(packageObj.dependencies);
    165           if(process.argv[2] == "development") {
    166               replaceDependencies(packageObj.devDependencies);
    167           }
    168           replaceDependencies(packageObj.optionalDependencies);
    169 
    170           /* Write the fixed package.json file */
    171           fs.writeFileSync("package.json", JSON.stringify(packageObj, null, 2));
    172         '';
    173       };
    174     in
    175     ''
    176       node ${pinpointDependenciesFromPackageJSON} ${if production then "production" else "development"}
    177 
    178       ${lib.optionalString (dependencies != [])
    179         ''
    180           if [ -d node_modules ]
    181           then
    182               cd node_modules
    183               ${lib.concatMapStrings (dependency: pinpointDependenciesOfPackage dependency) dependencies}
    184               cd ..
    185           fi
    186         ''}
    187     '';
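
  # Illustration of the pinpointing step (hypothetical package name and versions):
  # given a package.json containing
  #
  #   "dependencies": { "minimist": "^1.2.0" }
  #
  # and minimist 1.2.6 actually present under node_modules/, the script above
  # rewrites the range to the exact version that was deployed:
  #
  #   "dependencies": { "minimist": "1.2.6" }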
    188 
    189   # Recursively traverses all dependencies of a package and pinpoints all
    190   # dependencies in the package.json file to the versions that are actually
    191   # being used.
    192 
    193   pinpointDependenciesOfPackage = { packageName, dependencies ? [], production ? true, ... }@args:
    194     ''
    195       if [ -d "${packageName}" ]
    196       then
    197           cd "${packageName}"
    198           ${pinpointDependencies { inherit dependencies production; }}
    199           cd ..
    200           ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
    201       fi
    202     '';
    203 
    204   # Extract the Node.js source code which is used to compile packages with
    205   # native bindings
    206   nodeSources = runCommand "node-sources" {} ''
    207     tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
    208     mv node-* $out
    209   '';
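
  # The extracted tree is handed to npm below via --nodedir, so that node-gyp can
  # build native add-ons against the matching headers instead of trying to download
  # them at build time.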
    210 
    211   # Script that adds _integrity fields to all package.json files to prevent NPM from consulting the cache (which is empty)
    212   addIntegrityFieldsScript = writeTextFile {
    213     name = "addintegrityfields.js";
    214     text = ''
    215       var fs = require('fs');
    216       var path = require('path');
    217 
    218       function augmentDependencies(baseDir, dependencies) {
    219           for(var dependencyName in dependencies) {
    220               var dependency = dependencies[dependencyName];
    221 
    222               // Open package.json and augment metadata fields
    223               var packageJSONDir = path.join(baseDir, "node_modules", dependencyName);
    224               var packageJSONPath = path.join(packageJSONDir, "package.json");
    225 
    226               if(fs.existsSync(packageJSONPath)) { // Only augment packages that exist. Sometimes we may have production installs in which development dependencies can be ignored
    227                   console.log("Adding metadata fields to: "+packageJSONPath);
    228                   var packageObj = JSON.parse(fs.readFileSync(packageJSONPath));
    229 
    230                   if(dependency.integrity) {
    231                       packageObj["_integrity"] = dependency.integrity;
    232                   } else {
    233                       packageObj["_integrity"] = "sha1-000000000000000000000000000="; // When no _integrity string has been provided (e.g. by Git dependencies), add a dummy one. It does not seem to do any harm and it bypasses downloads.
    234                   }
    235 
    236                   if(dependency.resolved) {
    237                       packageObj["_resolved"] = dependency.resolved; // Adopt the resolved property if one has been provided
    238                   } else {
    239                       packageObj["_resolved"] = dependency.version; // Set the resolved version to the version identifier. This prevents NPM from cloning Git repositories.
    240                   }
    241 
    242                   if(dependency.from !== undefined) { // Adopt from property if one has been provided
    243                       packageObj["_from"] = dependency.from;
    244                   }
    245 
    246                   fs.writeFileSync(packageJSONPath, JSON.stringify(packageObj, null, 2));
    247               }
    248 
    249               // Augment transitive dependencies
    250               if(dependency.dependencies !== undefined) {
    251                   augmentDependencies(packageJSONDir, dependency.dependencies);
    252               }
    253           }
    254       }
    255 
    256       if(fs.existsSync("./package-lock.json")) {
    257           var packageLock = JSON.parse(fs.readFileSync("./package-lock.json"));
    258 
    259           if(![1, 2].includes(packageLock.lockfileVersion)) {
    260             process.stderr.write("Sorry, I only understand lock file versions 1 and 2!\n");
    261             process.exit(1);
    262           }
    263 
    264           if(packageLock.dependencies !== undefined) {
    265               augmentDependencies(".", packageLock.dependencies);
    266           }
    267       }
    268     '';
    269   };
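
  # Illustration (hypothetical lock entry): given
  #
  #   "accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", "integrity": "sha512-<hash>" }
  #
  # the script above copies the integrity and resolved values into
  # node_modules/accepts/package.json as "_integrity" and "_resolved" (plus "_from"
  # when present), so npm considers the package already fetched and never consults
  # its empty cache.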
    270 
    271   # Reconstructs a package-lock file from the node_modules/ folder structure and package.json files with dummy sha1 hashes
    272   reconstructPackageLock = writeTextFile {
    273     name = "reconstructpackagelock.js";
    274     text = ''
    275       var fs = require('fs');
    276       var path = require('path');
    277 
    278       var packageObj = JSON.parse(fs.readFileSync("package.json"));
    279 
    280       var lockObj = {
    281           name: packageObj.name,
    282           version: packageObj.version,
    283           lockfileVersion: 1,
    284           requires: true,
    285           dependencies: {}
    286       };
    287 
    288       function augmentPackageJSON(filePath, dependencies) {
    289           var packageJSON = path.join(filePath, "package.json");
    290           if(fs.existsSync(packageJSON)) {
    291               var packageObj = JSON.parse(fs.readFileSync(packageJSON));
    292               dependencies[packageObj.name] = {
    293                   version: packageObj.version,
    294                   integrity: "sha1-000000000000000000000000000=",
    295                   dependencies: {}
    296               };
    297               processDependencies(path.join(filePath, "node_modules"), dependencies[packageObj.name].dependencies);
    298           }
    299       }
    300 
    301       function processDependencies(dir, dependencies) {
    302           if(fs.existsSync(dir)) {
    303               var files = fs.readdirSync(dir);
    304 
    305               files.forEach(function(entry) {
    306                   var filePath = path.join(dir, entry);
    307                   var stats = fs.statSync(filePath);
    308 
    309                   if(stats.isDirectory()) {
    310                       if(entry.substr(0, 1) == "@") {
    311                           // When we encounter a namespace folder, augment all packages belonging to the scope
    312                           var pkgFiles = fs.readdirSync(filePath);
    313 
    314                           pkgFiles.forEach(function(entry) {
    315                               var pkgFilePath = path.join(filePath, entry);
    316                               if(fs.statSync(pkgFilePath).isDirectory()) {
    317                                   augmentPackageJSON(pkgFilePath, dependencies);
    318                               }
    319                           });
    320                       } else {
    321                           augmentPackageJSON(filePath, dependencies);
    322                       }
    323                   }
    324               });
    325           }
    326       }
    327 
    328       processDependencies("node_modules", lockObj.dependencies);
    329 
    330       fs.writeFileSync("package-lock.json", JSON.stringify(lockObj, null, 2));
    331     '';
    332   };
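
  # Illustration (hypothetical contents of node_modules/): if it contains a single
  # package "ms" at version 2.1.3, the script above emits roughly:
  #
  #   { "name": "...", "version": "...", "lockfileVersion": 1, "requires": true,
  #     "dependencies": { "ms": { "version": "2.1.3",
  #       "integrity": "sha1-000000000000000000000000000=", "dependencies": {} } } }
  #
  # i.e. a version-1 lock file whose integrity fields are dummies; the real hashes
  # are irrelevant here because all sources are already provided by Nix.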
    333 
    334   prepareAndInvokeNPM = {packageName, bypassCache, reconstructLock, npmFlags, production}:
    335     let
    336       forceOfflineFlag = if bypassCache then "--offline" else "--registry http://www.example.com";
    337     in
    338     ''
    339         # Pinpoint the versions of all dependencies to the ones that are actually being used
    340         echo "pinpointing versions of dependencies..."
    341         source $pinpointDependenciesScriptPath
    342 
    343         # Patch the shebangs of the bundled modules to prevent them from
    344         # calling executables outside the Nix store as much as possible
    345         patchShebangs .
    346 
    347         # Deploy the Node.js package by running npm install. Since we have
    348         # already provided the dependencies ourselves, it should not attempt
    349         # to install them again, which is good, because we want that to be
    350         # Nix's responsibility. If it needs to install any dependencies
    351         # anyway (e.g. because the dependency parameters are
    352         # incomplete/incorrect), it fails.
    353         #
    354         # The other responsibilities of NPM are kept -- version checks, build
    355         # steps, postprocessing etc.
    356 
    357         export HOME=$TMPDIR
    358         cd "${packageName}"
    359         runHook preRebuild
    360 
    361         ${lib.optionalString bypassCache ''
    362           ${lib.optionalString reconstructLock ''
    363             if [ -f package-lock.json ]
    364             then
    365                 echo "WARNING: Reconstruct lock option enabled, but a lock file already exists!"
    366                 echo "This will most likely result in version mismatches! We will remove the lock file and regenerate it!"
    367                 rm package-lock.json
    368             else
    369                 echo "No package-lock.json file found, reconstructing..."
    370             fi
    371 
    372             node ${reconstructPackageLock}
    373           ''}
    374 
    375           node ${addIntegrityFieldsScript}
    376         ''}
    377 
    378         npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} rebuild
    379 
    380         if [ "''${dontNpmInstall-}" != "1" ]
    381         then
    382             # NPM tries to download packages even when they already exist if npm-shrinkwrap is used.
    383             rm -f npm-shrinkwrap.json
    384 
    385             npm ${forceOfflineFlag} --nodedir=${nodeSources} ${npmFlags} ${lib.optionalString production "--production"} install
    386         fi
    387     '';
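
  # When bypassCache is set, npm runs with --offline; otherwise it is pointed at the
  # unreachable dummy registry above, so any attempt to fetch a package that Nix did
  # not already provide fails loudly instead of silently downloading it. With
  # bypassCache and production enabled and no extra npmFlags, the rebuild step ends
  # up looking roughly like (store path abbreviated):
  #
  #   npm --offline --nodedir=/nix/store/<hash>-node-sources --production rebuild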
    388 
    389   # Builds and composes an NPM package including all its dependencies
    390   buildNodePackage =
    391     { name
    392     , packageName
    393     , version ? null
    394     , dependencies ? []
    395     , buildInputs ? []
    396     , production ? true
    397     , npmFlags ? ""
    398     , dontNpmInstall ? false
    399     , bypassCache ? false
    400     , reconstructLock ? false
    401     , preRebuild ? ""
    402     , dontStrip ? true
    403     , unpackPhase ? "true"
    404     , buildPhase ? "true"
    405     , meta ? {}
    406     , ... }@args:
    407 
    408     let
    409       extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "preRebuild" "unpackPhase" "buildPhase" "meta" ];
    410     in
    411     stdenv.mkDerivation ({
    412       name = "${name}${if version == null then "" else "-${version}"}";
    413       buildInputs = [ tarWrapper python nodejs ]
    414         ++ lib.optional (stdenv.isLinux) utillinux
    415         ++ lib.optional (stdenv.isDarwin) libtool
    416         ++ buildInputs;
    417 
    418       inherit nodejs;
    419 
    420       inherit dontStrip; # Stripping may fail a build for some package deployments
    421       inherit dontNpmInstall preRebuild unpackPhase buildPhase;
    422 
    423       compositionScript = composePackage args;
    424       pinpointDependenciesScript = pinpointDependenciesOfPackage args;
    425 
    426       passAsFile = [ "compositionScript" "pinpointDependenciesScript" ];
    427 
    428       installPhase = ''
    429         source ${installPackage}
    430 
    431         # Create and enter a root node_modules/ folder
    432         mkdir -p $out/lib/node_modules
    433         cd $out/lib/node_modules
    434 
    435         # Compose the package and all its dependencies
    436         source $compositionScriptPath
    437 
    438         ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
    439 
    440         # Create symlink to the deployed executable folder, if applicable
    441         if [ -d "$out/lib/node_modules/.bin" ]
    442         then
    443             ln -s $out/lib/node_modules/.bin $out/bin
    444 
    445             # Patch the shebang lines of all the executables
    446             ls $out/bin/* | while read i
    447             do
    448                 file="$(readlink -f "$i")"
    449                 chmod u+rwx "$file"
    450                 patchShebangs "$file"
    451             done
    452         fi
    453 
    454         # Create symlinks to the deployed manual page folders, if applicable
    455         if [ -d "$out/lib/node_modules/${packageName}/man" ]
    456         then
    457             mkdir -p $out/share
    458             for dir in "$out/lib/node_modules/${packageName}/man/"*
    459             do
    460                 mkdir -p $out/share/man/$(basename "$dir")
    461                 for page in "$dir"/*
    462                 do
    463                     ln -s $page $out/share/man/$(basename "$dir")
    464                 done
    465             done
    466         fi
    467 
    468         # Run post install hook, if provided
    469         runHook postInstall
    470       '';
    471 
    472       meta = {
    473         # default to Node.js' platforms
    474         platforms = nodejs.meta.platforms;
    475       } // meta;
    476     } // extraArgs);
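
  # A minimal usage sketch, not referenced elsewhere in this file. Every attribute
  # value below (name, packageName, version, the ./. source path, the empty
  # dependency list) is a hypothetical placeholder; real node2nix output passes
  # fetched tarballs plus a complete dependency tree here. The result provides the
  # package under $out/lib/node_modules and its executables (if any) under $out/bin.
  examplePackage = buildNodePackage {
    name = "example-package";
    packageName = "example-package";
    version = "1.0.0";
    src = ./.;
    dependencies = [];
    production = true;
  };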
    477 
    478   # Builds a node environment (a node_modules folder and a set of binaries)
    479   buildNodeDependencies =
    480     { name
    481     , packageName
    482     , version ? null
    483     , src
    484     , dependencies ? []
    485     , buildInputs ? []
    486     , production ? true
    487     , npmFlags ? ""
    488     , dontNpmInstall ? false
    489     , bypassCache ? false
    490     , reconstructLock ? false
    491     , dontStrip ? true
    492     , unpackPhase ? "true"
    493     , buildPhase ? "true"
    494     , ... }@args:
    495 
    496     let
    497       extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" ];
    498     in
    499       stdenv.mkDerivation ({
    500         name = "node-dependencies-${name}${if version == null then "" else "-${version}"}";
    501 
    502         buildInputs = [ tarWrapper python nodejs ]
    503           ++ lib.optional (stdenv.isLinux) utillinux
    504           ++ lib.optional (stdenv.isDarwin) libtool
    505           ++ buildInputs;
    506 
    507         inherit dontStrip; # Stripping may fail a build for some package deployments
    508         inherit dontNpmInstall unpackPhase buildPhase;
    509 
    510         includeScript = includeDependencies { inherit dependencies; };
    511         pinpointDependenciesScript = pinpointDependenciesOfPackage args;
    512 
    513         passAsFile = [ "includeScript" "pinpointDependenciesScript" ];
    514 
    515         installPhase = ''
    516           source ${installPackage}
    517 
    518           mkdir -p $out/${packageName}
    519           cd $out/${packageName}
    520 
    521           source $includeScriptPath
    522 
    523           # Create fake package.json to make the npm commands work properly
    524           cp ${src}/package.json .
    525           chmod 644 package.json
    526           ${lib.optionalString bypassCache ''
    527             if [ -f ${src}/package-lock.json ]
    528             then
    529                 cp ${src}/package-lock.json .
    530                 chmod 644 package-lock.json
    531             fi
    532           ''}
    533 
    534           # Go to the parent folder to make sure that all packages are pinpointed
    535           cd ..
    536           ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
    537 
    538           ${prepareAndInvokeNPM { inherit packageName bypassCache reconstructLock npmFlags production; }}
    539 
    540           # Expose the executables that were installed
    541           cd ..
    542           ${lib.optionalString (builtins.substring 0 1 packageName == "@") "cd .."}
    543 
    544           mv ${packageName} lib
    545           ln -s $out/lib/node_modules/.bin $out/bin
    546         '';
    547       } // extraArgs);
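
  # A minimal usage sketch with hypothetical placeholder values, mirroring the
  # example above. The resulting store path exposes the dependency tree under
  # lib/node_modules and is what buildNodeShell below puts on NODE_PATH.
  exampleDependencies = buildNodeDependencies {
    name = "example-package";
    packageName = "example-package";
    version = "1.0.0";
    src = ./.;
    dependencies = [];
  };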
    548 
    549   # Builds a development shell
    550   buildNodeShell =
    551     { name
    552     , packageName
    553     , version ? null
    554     , src
    555     , dependencies ? []
    556     , buildInputs ? []
    557     , production ? true
    558     , npmFlags ? ""
    559     , dontNpmInstall ? false
    560     , bypassCache ? false
    561     , reconstructLock ? false
    562     , dontStrip ? true
    563     , unpackPhase ? "true"
    564     , buildPhase ? "true"
    565     , ... }@args:
    566 
    567     let
    568       nodeDependencies = buildNodeDependencies args;
    569       extraArgs = removeAttrs args [ "name" "dependencies" "buildInputs" "dontStrip" "dontNpmInstall" "unpackPhase" "buildPhase" ];
    570     in
    571     stdenv.mkDerivation ({
    572       name = "node-shell-${name}${if version == null then "" else "-${version}"}";
    573 
    574       buildInputs = [ python nodejs ] ++ lib.optional (stdenv.isLinux) utillinux ++ buildInputs;
    575       buildCommand = ''
    576         mkdir -p $out/bin
    577         cat > $out/bin/shell <<EOF
    578         #! ${stdenv.shell} -e
    579         $shellHook
    580         exec ${stdenv.shell}
    581         EOF
    582         chmod +x $out/bin/shell
    583       '';
    584 
    585       # Provide the dependencies in a development shell through the NODE_PATH environment variable
    586       inherit nodeDependencies;
    587       shellHook = lib.optionalString (dependencies != []) ''
    588         export NODE_PATH=${nodeDependencies}/lib/node_modules
    589         export PATH="${nodeDependencies}/bin:$PATH"
    590       '';
    591     } // extraArgs);
    592 in
    593 {
    594   buildNodeSourceDist = lib.makeOverridable buildNodeSourceDist;
    595   buildNodePackage = lib.makeOverridable buildNodePackage;
    596   buildNodeDependencies = lib.makeOverridable buildNodeDependencies;
    597   buildNodeShell = lib.makeOverridable buildNodeShell;
    598 }
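
# A hedged sketch of how this file is typically consumed (file names and attribute
# values below are illustrative, not defined here): a node2nix-generated default.nix
# imports it with the parameters declared at the top and then uses the returned
# builders, e.g.
#
#   let
#     pkgs = import <nixpkgs> {};
#     nodeEnv = import ./node-env.nix {
#       inherit (pkgs) lib stdenv nodejs python2 libtool runCommand writeTextFile writeShellScript;
#       inherit pkgs;
#     };
#   in
#   nodeEnv.buildNodeShell {
#     name = "example-package";
#     packageName = "example-package";
#     version = "1.0.0";
#     src = ./.;
#     dependencies = [];
#   }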