finally some progress
parent bab975ef06
commit f74ee35d6d
@@ -1,2 +1,2 @@
-{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7101" }:
+{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7102" }:
 nixpkgs.pkgs.haskell.packages.${compiler}.callPackage ./ldap-client.nix {}
npm/.nixfromnpm-version (new file, 1 line)
@@ -0,0 +1 @@
0.7.0
npm/default.nix (new file, 9 lines)
@@ -0,0 +1,9 @@
{ nodejsVersion ? "4.1", npm3 ? false, pkgs ? import <nixpkgs> {} }:
let
  nodeLib = import ./nodeLib {
    inherit pkgs npm3 nodejsVersion;
    self = nodeLib;
  };
in nodeLib.generatePackages {
  rootPath = ./nodePackages;
}
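How this entry point is consumed can be seen in the shell.nix hunk at the bottom of this commit; roughly (a minimal sketch, attribute names as generated by nixfromnpm):

    let npm = import ./npm {}; in npm.nodePackages.ldapjs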
npm/nodeLib/buildNodePackage.nix (new file, 394 lines)
@@ -0,0 +1,394 @@
{
  # Provides the mkDerivation function.
  stdenv,
  # Lets us run a command.
  runCommand,
  # Derivation for nodejs and npm.
  nodejs,
  # Which version of npm to use.
  npm ? nodejs,
  # List of required native build inputs.
  neededNatives,
  # Self-reference for overriding purposes.
  buildNodePackage
}:

let
  # The path within $out/lib to find a package. If the package does not
  # have a namespace, it will simply be in `node_modules`, and otherwise it
  # will appear in `node_modules/@namespace`.
  modulePath = pkg: if pkg.namespace == null then "node_modules"
                    else "node_modules/@${pkg.namespace}";

  # The path to the package within its modulePath. Just appending the name
  # of the package.
  pathInModulePath = pkg: "${modulePath pkg}/${pkg.basicName}";
in

{
  # Used for private packages. Indicated in the name field of the
  # package.json, e.g. "@mynamespace/mypackage". Public packages will not
  # need this.
  namespace ? null,

  # The name of the package. If it's a private package with a namespace,
  # this should not contain the namespace.
  name,

  # Version of the package. This should follow the semver standard, although
  # we don't explicitly enforce that in this function.
  version,

  # Source of the package; can be a tarball or a folder on the filesystem.
  src,

  # Prefix for the derivation name; by default the name of the nodejs
  # derivation, giving e.g. "nodejs-<version>-<name>".
  namePrefix ? "${nodejs.name}-" +
               (if namespace == null then "" else "${namespace}-"),

  # List or attribute set of dependencies
  deps ? {},

  # List or attribute set of peer dependencies
  peerDependencies ? {},

  # List or attribute set of optional dependencies
  optionalDependencies ? {},

  # List of optional dependencies to skip
  skipOptionalDependencies ? [],

  # List or set of development dependencies (or null).
  devDependencies ? null,

  # If true and devDependencies are not null, the package will be
  # installed contingent on successfully running tests.
  doCheck ? devDependencies != null,

  # Additional flags passed to npm install
  flags ? "",

  # Command to be run before shell hook
  preShellHook ? "",

  # Command to be run after shell hook
  postShellHook ? "",

  # Same as https://docs.npmjs.com/files/package.json#os
  os ? [],

  # Same as https://docs.npmjs.com/files/package.json#cpu
  cpu ? [],

  # Attribute set of already resolved deps (internal),
  # for avoiding infinite recursion
  resolvedDeps ? {},

  ...
} @ args:

let
  inherit (stdenv.lib) fold removePrefix hasPrefix subtractLists isList flip
                       intersectLists isAttrs listToAttrs nameValuePair
                       mapAttrs filterAttrs attrNames elem concatMapStrings
                       attrValues getVersion flatten remove concatStringsSep;

  # Whether we should run tests.
  shouldTest = doCheck && devDependencies != null;

  # The package name as it appears in the package.json. This contains a
  # namespace if there is one, so it will be a distinct identifier for
  # different packages.
  pkgName = if namespace == null then name else "@${namespace}/${name}";

  # We create a `self` object for self-referential expressions. It
  # bottoms out in a call to `mkDerivation` at the end.
  self = let
    sources = runCommand "node-sources" {} ''
      tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
      mv $(find . -type d -mindepth 1 -maxdepth 1) $out
    '';

    platforms = if os == [] then nodejs.meta.platforms else
      fold (entry: platforms:
        let
          filterPlatforms =
            stdenv.lib.platforms.${removePrefix "!" entry} or [];
        in
          # Ignore unknown platforms
          if filterPlatforms == [] then (if platforms == [] then nodejs.meta.platforms else platforms)
          else
            if hasPrefix "!" entry then
              subtractLists (intersectLists filterPlatforms nodejs.meta.platforms) platforms
            else
              platforms ++ (intersectLists filterPlatforms nodejs.meta.platforms)
      ) [] os;

    toAttrSet = obj: if isAttrs obj then obj else
      (listToAttrs (map (x: nameValuePair x.name x) obj));

    mapDependencies = deps: filterFunc: let
      attrDeps = toAttrSet deps;
    in rec {
      # All required node modules, without already resolved dependencies
      # Also override with already resolved dependencies
      requiredDeps = mapAttrs (name: dep:
        dep.override {resolvedDeps = resolvedDeps // { "${name}" = self; };}
      ) (filterAttrs filterFunc
          (removeAttrs attrDeps (attrNames resolvedDeps)));

      # Recursive dependencies that we want to avoid with shim creation
      recursiveDeps = filterAttrs filterFunc
        (removeAttrs attrDeps (attrNames requiredDeps));
    };

    # Filter out self-referential dependencies.
    _dependencies = mapDependencies deps (name: dep:
      dep.pkgName != pkgName);

    # Filter out self-referential peer dependencies.
    _peerDependencies = mapDependencies peerDependencies (name: dep:
      dep.pkgName != pkgName);

    # Filter out any optional dependencies which don't build correctly.
    _optionalDependencies = mapDependencies optionalDependencies (name: dep:
      (builtins.tryEval dep).success &&
      !(elem dep.pkgName skipOptionalDependencies)
    );

    # Required dependencies are those that we haven't filtered yet.
    requiredDependencies =
      _dependencies.requiredDeps //
      _optionalDependencies.requiredDeps //
      _peerDependencies.requiredDeps;

    recursiveDependencies =
      _dependencies.recursiveDeps //
      _optionalDependencies.recursiveDeps //
      _peerDependencies.recursiveDeps;

    npmFlags = concatStringsSep " " ([
      # We point the registry at something that doesn't exist. This will
      # mean that NPM will fail if any of the dependencies aren't met, as it
      # will attempt to hit this registry for the missing dependency.
      "--registry=fakeprotocol://notaregistry.$UNIQNAME.derp"
      # These flags make failure fast, as otherwise NPM will spin for a while.
      "--fetch-retry-mintimeout=0"
      "--fetch-retry-maxtimeout=10"
      # This will disable any user-level npm configuration.
      "--userconfig=/dev/null"
      # This flag is used for packages which link against the node headers.
      "--nodedir=${sources}"
    ] ++ (if isList flags then flags else [flags]));

    # A bit of bash to check that variables are set.
    checkSet = vars: concatStringsSep "\n" (flip map vars (var: ''
      [[ -z $${var} ]] && { echo "${var} is not set."; exit 1; }
    ''));

    mkDerivationArgs = {
      inherit src;

      # Define some environment variables that we will use in the build.
      prePatch = ''
        export HASHEDNAME=$(echo "$propagatedNativeBuildInputs $name" \
                            | md5sum | awk '{print $1}')
        export UNIQNAME="''${HASHEDNAME:0:10}-${name}-${version}"
        export BUILD_DIR=$TMPDIR/$UNIQNAME-build
      '';

      patchPhase = ''
        runHook prePatch
        patchShebangs $PWD

        # Remove any impure dependencies from the package.json (see script
        # for details)
        node ${./removeImpureDependencies.js}

        # We do not handle shrinkwraps yet
        rm npm-shrinkwrap.json 2>/dev/null || true

        # Repackage source into a tarball, so npm pre/post publish hooks are
        # not triggered.
        mkdir -p $BUILD_DIR
        GZIP=-1 tar -czf $BUILD_DIR/package.tgz ./
        export PATCHED_SRC=$BUILD_DIR/package.tgz
        runHook postPatch
      '';

      configurePhase = ''
        runHook preConfigure
        (
        ${checkSet ["BUILD_DIR"]}
        mkdir -p $BUILD_DIR
        cd $BUILD_DIR
        # Symlink or copy dependencies for node modules
        # copy is needed if dependency has recursive dependencies,
        # because node can't follow symlinks while resolving recursive deps.
        ${
          let
            link = dep: ''
              ${if dep.recursiveDeps == [] then "ln -sfv" else "cp -rf"} \
                ${dep}/lib/${pathInModulePath dep} ${modulePath dep}
            '';
          in
            flip concatMapStrings (attrValues requiredDependencies) (dep: ''
              mkdir -p ${modulePath dep}
              ${link dep}
              ${concatMapStrings link (attrValues dep.peerDependencies)}
            '')}

        # Create shims for recursive dependencies
        ${concatMapStrings (dep: ''
          mkdir -p ${modulePath dep}
          cat > ${pathInModulePath dep}/package.json <<EOF
          {
          "name": "${dep.pkgName}",
          "version": "${getVersion dep}"
          }
          EOF
        '') (attrValues recursiveDependencies)}

        # Create dummy package.json file
        cat <<EOF > package.json
        {"name":"dummy-for-$UNIQNAME","version":"0.0.0", "license":"MIT",
        "description":"Dummy package file for building $name",
        "repository":{"type":"git","url":"http://$UNIQNAME.com"}}
        EOF

        # Create dummy readme
        echo "Dummy package" > README.md
        )

        export HOME=$BUILD_DIR
        runHook postConfigure
      '';

      buildPhase = ''
        runHook preBuild

        # Install package
        (
        ${checkSet ["BUILD_DIR" "PATCHED_SRC"]}

        echo "Building $name in $BUILD_DIR"
        cd $BUILD_DIR
        HOME=$PWD npm install $PATCHED_SRC ${npmFlags} || {
          npm list
          exit 1
        }
        )

        runHook postBuild
      '';

      installPhase = ''
        runHook preInstall

        (
        cd $BUILD_DIR

        # Remove shims
        ${concatMapStrings (dep: ''
          rm ${pathInModulePath dep}/package.json
          rmdir ${modulePath dep}
        '') (attrValues recursiveDependencies)}

        # Install the package that we just built.
        mkdir -p $out/lib/${modulePath self}

        # Move the folder that was created for this path to $out/lib.
        mv ${pathInModulePath self} $out/lib/${pathInModulePath self}

        # Remove the node_modules subfolder from there, and instead put things
        # in $PWD/node_modules into that folder.
        rm -rf $out/lib/${pathInModulePath self}/node_modules
        cp -r node_modules $out/lib/${pathInModulePath self}/node_modules

        if [ -e "$out/lib/${pathInModulePath self}/man" ]; then
          mkdir -p $out/share
          for dir in $out/lib/${pathInModulePath self}/man/*; do #*/
            mkdir -p $out/share/man/$(basename "$dir")
            for page in $dir/*; do #*/
              ln -sv $page $out/share/man/$(basename "$dir")
            done
          done
        fi

        # Move peer dependencies to node_modules
        ${concatMapStrings (dep: ''
          mkdir -p ${modulePath dep}
          mv ${pathInModulePath dep} $out/lib/${modulePath dep}
        '') (attrValues _peerDependencies.requiredDeps)}

        # Install binaries and patch shebangs. These are always found in
        # node_modules/.bin, regardless of a package namespace.
        mv node_modules/.bin $out/lib/node_modules 2>/dev/null || true
        if [ -d "$out/lib/node_modules/.bin" ]; then
          ln -sv $out/lib/node_modules/.bin $out/bin
          patchShebangs $out/lib/node_modules/.bin
        fi
        )

        runHook postInstall
      '';

      shellHook = ''
        ${preShellHook}
        export PATH=${npm}/bin:${nodejs}/bin:$(pwd)/node_modules/.bin:$PATH
        mkdir -p node_modules
        ${concatMapStrings (dep: ''
          mkdir -p ${modulePath dep}
          ln -sfv ${dep}/lib/${pathInModulePath dep} ${pathInModulePath dep}
        '') (attrValues requiredDependencies)}
        ${postShellHook}
      '';

      # Stripping does not make a lot of sense in node packages
      dontStrip = true;

      meta = {
        inherit platforms;
        maintainers = [ stdenv.lib.maintainers.offline ];
      };

      # Propagate pieces of information about the package so that downstream
      # packages can reflect on them.
      passthru.pkgName = pkgName;
      passthru.basicName = name;
      passthru.namespace = namespace;
      passthru.version = version;
      passthru.peerDependencies = _peerDependencies.requiredDeps;
      passthru.recursiveDeps =
        (flatten (
          map (dep: remove name dep.recursiveDeps) (attrValues requiredDependencies)
        )) ++
        (attrNames recursiveDependencies);

      # Add an 'override' attribute, which will call `buildNodePackage` with the
      # given arguments overridden.
      passthru.override = newArgs: buildNodePackage (args // newArgs);
    } // (removeAttrs args ["deps" "resolvedDeps" "optionalDependencies"
                            "devDependencies"]) // {
      name = "${namePrefix}${name}-${version}";

      # Run the node setup hook when this package is a build input
      propagatedNativeBuildInputs = (args.propagatedNativeBuildInputs or []) ++
        [ npm nodejs ];

      nativeBuildInputs =
        (args.nativeBuildInputs or []) ++ neededNatives ++
        (attrValues requiredDependencies);

      # Expose list of recursive dependencies upstream, up to the package that
      # caused recursive dependency
      recursiveDeps =
        (flatten (
          map (dep: remove name dep.recursiveDeps) (attrValues requiredDependencies)
        )) ++
        (attrNames recursiveDependencies);
    };

  in stdenv.mkDerivation mkDerivationArgs;

in self
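Since every result carries passthru.override, a downstream expression can rebuild a package with different arguments to this function. A minimal sketch, assuming the nodePackages set generated below is in scope:

    nodePackages.ldapjs.override { doCheck = false; }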
npm/nodeLib/default.nix (new file, 210 lines)
@@ -0,0 +1,210 @@
/*
  A set of tools for generating node packages, such as to be imported by
  default.nix files generated by nixfromnpm.
*/

{
  # Self-reference so that we can pass through to downstream libraries
  self,
  # Base set of packages, i.e. nixpkgs.
  pkgs,
  # Version of nodejs.
  nodejsVersion ? "4.1",
  # Whether to use npm3 (requires a prebuilt tarball of npm3).
  npm3 ? true
}:

let
  # Function to replace dots with the given character.
  replaceDots = c: replaceChars ["."] [c];
  inherit (builtins) readDir removeAttrs length getEnv elemAt hasAttr;
  inherit (pkgs.lib) attrNames attrValues filterAttrs flip foldl
                     hasSuffix hasPrefix removeSuffix replaceChars
                     optional optionals stringToCharacters
                     concatStrings tail splitString;
  inherit (pkgs.stdenv) isLinux;

  # Function to remove the first character of a string.
  dropFirstChar = str: concatStrings (tail (stringToCharacters str));

  # Like a for loop.
  for = flip map;

  # Concatenate a list of sets.
  joinSets = foldl (a: b: a // b) {};

  # Extracts a tarball containing a bootstrapped version of npm 3.
  # This tarball must have been previously generated by an invocation
  # of nixfromnpm, but one of these should be included in the
  # nixfromnpm distribution (if not, run the `gen_npm3` script).
  npm3-src = pkgs.runCommand "npm3" {src=./npm3.tar.gz;} ''
    mkdir -p $out && cd $out && tar -xf $src
  '';

  # Builds the extracted nix file. It can't use npm3 itself, since npm3
  # hasn't been built yet at this point, so we disable npm3 here.
  _npm3 = import npm3-src {
    inherit pkgs nodejsVersion;
    npm3 = false;
  };

  # Parse the `NPM_AUTH_TOKENS` environment variable to discover
  # namespace-token associations and turn them into an attribute set
  # which we can use as an input to the fetchPrivateNpm function.
  # Split the variable on ':', then turn each k=v element in
  # the list into an attribute set and join all of those sets.
  namespaceTokens = joinSets (
    for (splitString ":" (getEnv "NPM_AUTH_TOKENS")) (kvPair:
      let kv = splitString "=" kvPair; in
      if length kv != 2 then {}
      else {"${elemAt kv 0}" = elemAt kv 1;}));

  # A function similar to fetchUrl but allows setting of custom headers.
  fetchUrlWithHeaders = pkgs.callPackage ./fetchUrlWithHeaders.nix {};

  # Uses the parsed namespace tokens to create a function that can
  # fetch a private package from an npm repo.
  fetchPrivateNpm = {namespace, headers ? {}, ...}@args:
    if !(hasAttr namespace namespaceTokens)
    then throw "NPM_AUTH_TOKENS does not contain namespace ${namespace}"
    else let
      Authorization = "Bearer ${namespaceTokens.${namespace}}";
      # Merge the namespace token header with any caller-supplied headers.
      allHeaders = {inherit Authorization;} // headers;
    in
      fetchUrlWithHeaders (removeAttrs args ["namespace"] // {headers = allHeaders;});
in

rec {
  nodejs = pkgs."nodejs-${replaceDots "_" nodejsVersion}" or (
    throw "The given nodejs version ${nodejsVersion} has not been defined."
  );

  buildNodePackage = import ./buildNodePackage.nix ({
    inherit (pkgs) stdenv runCommand;
    inherit nodejs buildNodePackage;
    neededNatives = [pkgs.python] ++ optionals isLinux [pkgs.utillinux];
  } // (if npm3 then {npm = _npm3;} else {}));

  # A generic package that will fail to build. This is used to indicate
  # packages that are broken, without failing the entire generation of
  # a package expression.
  brokenPackage = {name, reason}:
    let
      deriv = pkgs.stdenv.mkDerivation {
        name = "BROKEN-${name}";
        buildCommand = ''
          echo "Package ${name} is broken: ${reason}"
          exit 1
        '';
        passthru.withoutTests = deriv;
        passthru.pkgName = name;
        passthru.basicName = "BROKEN";
        passthru.namespace = null;
        passthru.version = "BROKEN";
        passthru.override = _: deriv;
        passthru.recursiveDeps = [];
        passthru.peerDependencies = {};
      };
    in
      deriv;

  # List a directory after filtering the files.
  lsFilter = pred: dir: attrNames (filterAttrs pred (readDir dir));

  # Checks the name and type of a listing to grab non-dotfile dirs.
  isRegDir = name: type: type == "directory" && !(hasPrefix "." name);

  # Discover all of the node packages in a folder and turn them into a set
  # mapping `<name>_<version>` to the expression to build that package.
  discoverPackages = {callPackage, rootPath}:
    # if true then throw "huh? ${rootPath}" else
    let
      # Names of NPM packages defined in this directory. Don't take
      # files that start with '@'.
      nodeDirs = lsFilter (n: t: isRegDir n t && !(hasPrefix "@" n))
                          (/. + rootPath);
      # Generate the package expression from a package name and .nix path.
      toPackage = name: filepath: let
        versionRaw = removeSuffix ".nix" filepath; # Raw version, i.e. "1.2.4"
        # Join with package name to make the variable name.
        varName = "${replaceDots "-" name}_${replaceDots "-" versionRaw}";
      in
        # Return the singleton set which maps that name to the actual expression.
        {"${varName}" = callPackage (/. + rootPath + "/${name}/${filepath}") {};};
    in
      # For each directory, and each .nix file in it, create a package from that.
      joinSets (for nodeDirs (pkgName: let
        pkgDir = /. + rootPath + "/${pkgName}";
        # List of .nix files in the directory (excluding symlinks).
        versionFiles = lsFilter (name: type: type == "regular" &&
                                             hasSuffix ".nix" name)
                                pkgDir;
        # Check if there is a `latest.nix` file
        hasLatest = lsFilter (n: _: n == "latest.nix") pkgDir != [];
      in
        joinSets (
          # Find all of the versions listed in the folder.
          map (toPackage pkgName) versionFiles ++
          # If the folder has a `latest.nix` file, link the bare name of
          # the package to that file.
          optional hasLatest {
            "${replaceDots "-" pkgName}" = callPackage
              (/. + rootPath + "/${pkgName}/latest.nix") {};
          })));

  # Same as above, except that we take all of the namespaced packages;
  # these packages are in folders prefaced with `@`, and contain
  # packages in that folder. So, for example the path `@foo/bar` is
  # the path to all of the versions of the `bar` package under the
  # namespace `foo`.
  discoverNamespacePackages = {callPackage, rootPath}: let
    isNsDir = name: type: type == "directory" && hasPrefix "@" name;
    # Names of NPM packages defined in this directory.
    namespaceDirs = lsFilter isNsDir (/. + rootPath);
  in
    # For each namespace directory, each package folder in it, and
    # each .nix file in that, create a package from that and then
    # create a namespace out of that.
    joinSets (for namespaceDirs (nsDirName: {
      "${dropFirstChar nsDirName}" = discoverPackages {
        inherit callPackage;
        rootPath = /. + rootPath + "/${nsDirName}";
      };
    }));

  # The function that a default.nix can call into which will scan its
  # directory for all of the package files and generate a big attribute set
  # for all of them. Re-exports the `callPackage` function and all of the
  # attribute sets, as well as the nodeLib.
  generatePackages = {rootPath, extensions ? []}:
    let
      callPackageWith = pkgSet: path: overridingArgs: let
        inherit (builtins) intersectAttrs functionArgs;
        inherit (pkgs.lib) filterAttrs;
        # The path must be a function; import it here.
        func = import path;
        # Get the arguments to the function; e.g. "{a=false; b=true;}", where
        # a false value is an argument that has no default.
        funcArgs = functionArgs func;
        # Take only the arguments that don't have a default.
        noDefaults = filterAttrs (_: v: v == false) funcArgs;
        # Intersect this set with the package set to create the arguments to
        # the function.
        satisfyingArgs = intersectAttrs noDefaults pkgSet;
        # Override these arguments with whatever's passed in.
        actualArgs = satisfyingArgs // overridingArgs;
        # Call the function with these args to get a derivation.
        deriv = func actualArgs;
      in deriv;

      callPackage = callPackageWith {
        inherit fetchUrlWithHeaders namespaces namespaceTokens;
        inherit pkgs nodePackages buildNodePackage brokenPackage;
      };
      nodePackages = joinSets (map (e: e.nodePackages) extensions) //
        discoverPackages {inherit callPackage rootPath;};
      namespaces = joinSets (map (e: e.namespaces) extensions) //
        discoverNamespacePackages {inherit callPackage rootPath;};
    in {
      inherit nodePackages callPackage namespaces namespaceTokens pkgs;
      nodeLib = self;
    };
}
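The NPM_AUTH_TOKENS parsing above expects a colon-separated list of namespace=token pairs. A sketch with hypothetical values:

    # With NPM_AUTH_TOKENS="myns=abc123:otherns=def456" in the environment,
    # namespaceTokens evaluates to { myns = "abc123"; otherns = "def456"; },
    # and fetchPrivateNpm { namespace = "myns"; ... } sends "Authorization: Bearer abc123".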
npm/nodeLib/fetch.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import os
import requests
out = os.environ['out']
url = os.environ['url']
headers = {"User-Agent": "nix-fetchurl"}
header_names = os.environ.get("headerNames", "")
for name in header_names.split():
    if "__HTTP_HEADER_{}".format(name) not in os.environ:
        exit("FATAL: no corresponding value set for header {}"
             .format(name))
    headers[name] = os.environ["__HTTP_HEADER_{}".format(name)]
print('GET {} with headers {}'.format(url, headers))
response = requests.get(url, headers=headers)
if response.status_code != 200:
    exit("Received a {} response. :(\nContent: {}"
         .format(response.status_code, response.content))
else:
    print('Response: {} ({} bytes)'
          .format(response.status_code, len(response.content)))
with open(out, 'wb') as f:
    f.write(response.content)
npm/nodeLib/fetchUrlWithHeaders.nix (new file, 71 lines)
@@ -0,0 +1,71 @@
# A python-based fetchurl function, allowing the passage of custom headers.
# Just calls into `requests` under the hood.
{
  pythonPackages, stdenv
}:

{ # URL to fetch.
  url ? ""

, # Additional curl options needed for the download to succeed.
  curlOpts ? ""

, # Name of the file. If empty, use the basename of `url' (or of the
  # first element of `urls').
  name ? ""

  # Different ways of specifying the hash.
, outputHash ? ""
, outputHashAlgo ? ""
, md5 ? ""
, sha1 ? ""
, sha256 ? ""

, # Meta information, if any.
  meta ? {}

  # Headers to set, if any.
, headers ? {}
}:

let
  inherit (stdenv.lib) flip mapAttrs' nameValuePair;
  hasHash = (outputHash != "" && outputHashAlgo != "")
    || md5 != "" || sha1 != "" || sha256 != "";

  # Create an attribute set translating each header name and value into
  # the header name prefixed with __HTTP_HEADER. When the derivation is
  # built, the fetch script will pick up these environment variables and
  # use them to produce the actual headers.
  headerValues = flip mapAttrs' headers (headerName: headerValue:
    nameValuePair "__HTTP_HEADER_${headerName}" headerValue);
in

if !hasHash
then throw "You must specify the output hash for ${url}"
else

stdenv.mkDerivation ({
  inherit url;
  name = if name != "" then name else baseNameOf (toString url);

  outputHashAlgo = if outputHashAlgo != "" then outputHashAlgo else
    if sha256 != "" then "sha256" else if sha1 != "" then "sha1" else "md5";
  outputHash = if outputHash != "" then outputHash else
    if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;

  # Only flat hashing, which is the normal mode if you're fetching a file.
  outputHashMode = "flat";

  # Doing the download on a remote machine just duplicates network
  # traffic, so don't do that.
  preferLocalBuild = true;

  headerNames = builtins.attrNames headers;

  buildInputs = with pythonPackages; [python requests2];
  buildCommand = ''
    python ${./fetch.py}
  '';
} // headerValues)
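A minimal usage sketch; the URL, hash and token below are placeholders, not values from this commit:

    fetchUrlWithHeaders {
      url = "https://registry.example.com/@myns/mypkg/-/mypkg-1.0.0.tgz";
      sha1 = "0000000000000000000000000000000000000000";
      headers.Authorization = "Bearer my-token";
    }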
npm/nodeLib/parseNpmAuthTokens.nix (new file, 16 lines)
@@ -0,0 +1,16 @@
# Parses the `NPM_AUTH_TOKENS` environment variable to discover
# namespace-token associations and turn them into an attribute set
# which we can use as an input to the fetchPrivateNpm function.
{pkgs, joinSets}:

let
  inherit (pkgs.lib) flip length elemAt splitString;
  npmAuthTokens = builtins.getEnv "NPM_AUTH_TOKENS";
in

# Split the variable on ':', then turn each k=v element in
# the list into an attribute set and join all of those sets.
joinSets (
  flip map (splitString ":" npmAuthTokens) (kvPair:
    let kv = splitString "=" kvPair; in
    if length kv != 2 then {} else {"${elemAt kv 0}" = elemAt kv 1;}))
npm/nodeLib/removeImpureDependencies.js (new file, 46 lines)
@@ -0,0 +1,46 @@
// These packages come packaged with nodejs.
var fs = require('fs');
var url = require('url');

function versionSpecIsImpure(versionSpec) {
  // Returns true if a version spec is impure.
  return (versionSpec == "latest" || versionSpec == "unstable" ||
          // file path references
          versionSpec.substr(0, 2) == ".." ||
          versionSpec.substr(0, 2) == "./" ||
          versionSpec.substr(0, 2) == "~/" ||
          versionSpec.substr(0, 1) == '/' ||
          // github owner/repo references
          /^[^/]+\/[^/]+(#.*)?$/.test(versionSpec) ||
          // is a URL
          url.parse(versionSpec).protocol);
}

// Load up the package object.
var packageObj = JSON.parse(fs.readFileSync('./package.json'));

// Purify dependencies.
var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies'];
for (var i in depTypes) {
  var depType = depTypes[i];
  var depSet = packageObj[depType];
  if (depSet !== undefined) {
    for (var depName in depSet) {
      if (versionSpecIsImpure(depSet[depName])) {
        depSet[depName] = '*';
      }
    }
  }
}

/* Remove peer dependencies */
if (process.env.removePeerDependencies && packageObj.peerDependencies) {
  console.log("WARNING: removing the following peer dependencies:");
  for (var key in packageObj.peerDependencies) {
    console.log("  " + key + ": " + packageObj.peerDependencies[key]);
  }
  delete packageObj.peerDependencies;
}

/* Write the fixed JSON file */
fs.writeFileSync("package.json", JSON.stringify(packageObj));
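To illustrate the effect, a hypothetical before/after of a dependencies entry handled by this script:

    // Hypothetical input:  { "dependencies": { "foo": "someuser/foo#master", "bar": "1.2.3" } }
    // After this script:   { "dependencies": { "foo": "*", "bar": "1.2.3" } }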
npm/nodePackages/abbrev/1.0.7.nix (new file, 14 lines)
@@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "abbrev";
  version = "1.0.7";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/abbrev/-/abbrev-1.0.7.tgz";
    sha1 = "5b6035b2ee9d4fb5cf859f08a9be81b208491843";
  };
  deps = [];
  meta = {
    homepage = "https://github.com/isaacs/abbrev-js#readme";
    description = "Like ruby's abbrev module, but in js";
  };
}
npm/nodePackages/abbrev/latest.nix (new symbolic link)
@@ -0,0 +1 @@
1.0.7.nix
npm/nodePackages/asn1/0.2.1.nix (new file, 14 lines)
@@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "asn1";
  version = "0.2.1";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/asn1/-/asn1-0.2.1.tgz";
    sha1 = "ecc73f75d31ea3c6ed9d47428db35fecc7b2c6dc";
  };
  deps = [];
  meta = {
    homepage = "https://github.com/mcavage/node-asn1";
    description = "Contains parsers and serializers for ASN.1 (currently BER only)";
  };
}
npm/nodePackages/asn1/latest.nix (new symbolic link)
@@ -0,0 +1 @@
0.2.1.nix
npm/nodePackages/assert-plus/0.1.5.nix (new file, 14 lines)
@@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "assert-plus";
  version = "0.1.5";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz";
    sha1 = "ee74009413002d84cec7219c6ac811812e723160";
  };
  deps = [];
  devDependencies = [];
  meta = {
    description = "Extra assertions on top of node's assert module";
  };
}
npm/nodePackages/assert-plus/latest.nix (new symbolic link)
@@ -0,0 +1 @@
0.1.5.nix
npm/nodePackages/bunyan/0.22.1.nix (new file, 26 lines)
@@ -0,0 +1,26 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "bunyan";
  version = "0.22.1";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/bunyan/-/bunyan-0.22.1.tgz";
    sha1 = "020c383bed625af5c6c8834dd8c4aca0dd0f765c";
  };
  deps = with nodePackages; [
    dtrace-provider_0-2-8
    mv_0-0-5
  ];
  optionalDependencies = with nodePackages; [
    dtrace-provider_0-2-8
    mv_0-0-5
  ];
  meta = {
    description = "a JSON Logger library for node.js services";
    keywords = [
      "log"
      "logging"
      "log4j"
      "json"
    ];
  };
}
npm/nodePackages/bunyan/latest.nix (new symbolic link)
@@ -0,0 +1 @@
0.22.1.nix
npm/nodePackages/dtrace-provider/0.2.8.nix (new file, 15 lines)
@@ -0,0 +1,15 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "dtrace-provider";
  version = "0.2.8";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.2.8.tgz";
    sha1 = "e243f19219aa95fbf0d8f2ffb07f5bd64e94fe20";
  };
  deps = [];
  meta = {
    homepage = "https://github.com/chrisa/node-dtrace-provider#readme";
    description = "Native DTrace providers for node.js applications";
    keywords = [ "dtrace" ];
  };
}
npm/nodePackages/dtrace-provider/latest.nix (new symbolic link)
@@ -0,0 +1 @@
0.2.8.nix
npm/nodePackages/extsprintf/1.0.0.nix (new file, 14 lines)
@@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "extsprintf";
  version = "1.0.0";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/extsprintf/-/extsprintf-1.0.0.tgz";
    sha1 = "4d58b815ace5bebfc4ebf03cf98b0a7604a99b86";
  };
  deps = [];
  devDependencies = [];
  meta = {
    description = "extended POSIX-style sprintf";
  };
}
npm/nodePackages/extsprintf/latest.nix (new symbolic link)
@@ -0,0 +1 @@
1.0.0.nix
npm/nodePackages/json-schema/0.2.2.nix (new file, 14 lines)
@@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "json-schema";
  version = "0.2.2";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/json-schema/-/json-schema-0.2.2.tgz";
    sha1 = "50354f19f603917c695f70b85afa77c3b0f23506";
  };
  deps = [];
  meta = {
    description = "JSON Schema validation and specifications";
    keywords = [ "json" "schema" ];
  };
}
npm/nodePackages/json-schema/latest.nix (new symbolic link)
@@ -0,0 +1 @@
0.2.2.nix
npm/nodePackages/jsprim/0.3.0.nix (new file, 18 lines)
@@ -0,0 +1,18 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "jsprim";
  version = "0.3.0";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/jsprim/-/jsprim-0.3.0.tgz";
    sha1 = "cd13466ea2480dbd8396a570d47d31dda476f8b1";
  };
  deps = with nodePackages; [
    json-schema_0-2-2
    extsprintf_1-0-0
    verror_1-3-3
  ];
  devDependencies = [];
  meta = {
    description = "utilities for primitive JavaScript types";
  };
}
npm/nodePackages/jsprim/latest.nix (new symbolic link)
@@ -0,0 +1 @@
0.3.0.nix
npm/nodePackages/ldapjs/0.7.1.nix (new file, 24 lines)
@@ -0,0 +1,24 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "ldapjs";
  version = "0.7.1";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/ldapjs/-/ldapjs-0.7.1.tgz";
    sha1 = "684798a687640bab1afbd802cf532f30492dfb56";
  };
  deps = with nodePackages; [
    asn1_0-2-1
    nopt_2-1-1
    bunyan_0-22-1
    dtrace-provider_0-2-8
    pooling_0-4-6
    assert-plus_0-1-5
  ];
  optionalDependencies = with nodePackages; [
    dtrace-provider_0-2-8
  ];
  meta = {
    homepage = "http://ldapjs.org";
    description = "LDAP client and server APIs";
  };
}
npm/nodePackages/ldapjs/latest.nix (new symbolic link)
@@ -0,0 +1 @@
0.7.1.nix
npm/nodePackages/mv/0.0.5.nix (new file, 21 lines)
@@ -0,0 +1,21 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "mv";
  version = "0.0.5";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/mv/-/mv-0.0.5.tgz";
    sha1 = "15eac759479884df1131d6de56bce20b654f5391";
  };
  deps = [];
  meta = {
    description = "fs.rename but works across devices. same as the unix utility 'mv'";
    keywords = [
      "mv"
      "move"
      "rename"
      "device"
      "recursive"
      "folder"
    ];
  };
}
npm/nodePackages/mv/latest.nix (new symbolic link)
@@ -0,0 +1 @@
0.0.5.nix
npm/nodePackages/nopt/2.1.1.nix (new file, 16 lines)
@@ -0,0 +1,16 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "nopt";
  version = "2.1.1";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/nopt/-/nopt-2.1.1.tgz";
    sha1 = "91eb7c4b017e7c00adcad1fd6d63944d0fdb75c1";
  };
  deps = with nodePackages; [
    abbrev_1-0-7
  ];
  devDependencies = [];
  meta = {
    description = "Option parsing for Node, supporting types, shorthands, etc. Used by npm.";
  };
}
npm/nodePackages/nopt/latest.nix (new symbolic link)
@@ -0,0 +1 @@
2.1.1.nix
npm/nodePackages/once/1.3.0.nix (new file, 19 lines)
@@ -0,0 +1,19 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "once";
  version = "1.3.0";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/once/-/once-1.3.0.tgz";
    sha1 = "151af86bfc1f08c4b9f07d06ab250ffcbeb56581";
  };
  deps = [];
  meta = {
    description = "Run a function exactly one time";
    keywords = [
      "once"
      "function"
      "one"
      "single"
    ];
  };
}
npm/nodePackages/once/latest.nix (new symbolic link)
@@ -0,0 +1 @@
1.3.0.nix
npm/nodePackages/pooling/0.4.6.nix (new file, 23 lines)
@@ -0,0 +1,23 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "pooling";
  version = "0.4.6";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/pooling/-/pooling-0.4.6.tgz";
    sha1 = "76a317371ea8a363b4858fa4799e60245f30e664";
  };
  deps = with nodePackages; [
    bunyan_0-22-1
    once_1-3-0
    vasync_1-4-0
    dtrace-provider_0-2-8
    assert-plus_0-1-5
  ];
  optionalDependencies = with nodePackages; [
    dtrace-provider_0-2-8
  ];
  meta = {
    homepage = "https://github.com/mcavage/node-pooling";
    description = "General purpose resource pool API";
  };
}
npm/nodePackages/pooling/latest.nix (new symbolic link)
@@ -0,0 +1 @@
0.4.6.nix
npm/nodePackages/vasync/1.4.0.nix (new file, 17 lines)
@@ -0,0 +1,17 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "vasync";
  version = "1.4.0";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/vasync/-/vasync-1.4.0.tgz";
    sha1 = "6ea5a63582358868d8743cbdd6ffadc9083b910f";
  };
  deps = with nodePackages; [
    jsprim_0-3-0
    verror_1-1-0
  ];
  devDependencies = [];
  meta = {
    description = "utilities for observable asynchronous control flow";
  };
}
npm/nodePackages/vasync/latest.nix (new symbolic link)
@@ -0,0 +1 @@
1.4.0.nix
npm/nodePackages/verror/1.1.0.nix (new file, 16 lines)
@@ -0,0 +1,16 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "verror";
  version = "1.1.0";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/verror/-/verror-1.1.0.tgz";
    sha1 = "2a4b4eb14a207051e75a6f94ee51315bf173a1b0";
  };
  deps = with nodePackages; [
    extsprintf_1-0-0
  ];
  devDependencies = [];
  meta = {
    description = "richer JavaScript errors";
  };
}
npm/nodePackages/verror/1.3.3.nix (new file, 16 lines)
@@ -0,0 +1,16 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "verror";
  version = "1.3.3";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/verror/-/verror-1.3.3.tgz";
    sha1 = "8a6a4ac3a8c774b6f687fece49bdffd78552e2cd";
  };
  deps = with nodePackages; [
    extsprintf_1-0-0
  ];
  devDependencies = [];
  meta = {
    description = "richer JavaScript errors";
  };
}
npm/nodePackages/verror/latest.nix (new symbolic link)
@@ -0,0 +1 @@
1.3.3.nix
shell.nix
@@ -1,17 +1,17 @@
-{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7101" }: let
+{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7102" }: let
   inherit (nixpkgs) pkgs;
-  ghc = pkgs.haskell.packages.${compiler}.ghcWithPackages( ps: with ps; [
-    hdevtools doctest
+  ghc = pkgs.haskell.packages.${compiler}.ghcWithPackages(ps: [
+    ps.hdevtools ps.doctest ps.hspec-discover ps.hlint ps.ghc-mod
   ]);
   cabal-install = pkgs.haskell.packages.${compiler}.cabal-install;
   pkg = (import ./default.nix { inherit nixpkgs compiler; });
+  npm = (import ./npm {});
 in
 pkgs.stdenv.mkDerivation rec {
   name = pkg.pname;
-  buildInputs = [ ghc cabal-install ] ++ pkg.env.buildInputs;
+  buildInputs = [ ghc cabal-install npm.nodePackages.ldapjs ] ++ pkg.env.buildInputs;
   shellHook = ''
     ${pkg.env.shellHook}
-    export IN_WHICH_NIX_SHELL=${name}
-    cabal configure --package-db=$NIX_GHC_LIBDIR/package.conf.d
+    cabal configure --enable-tests --package-db=$NIX_GHC_LIBDIR/package.conf.d
   '';
 }
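With the new npm attribute, the dev shell also builds the Nix-generated ldapjs package used by the test LDAP server below. If a specific version were wanted instead of the latest, the generated attribute names would allow it; a sketch of that assumed usage (not something this commit does):

    buildInputs = [ ghc cabal-install npm.nodePackages.ldapjs_0-7-1 ] ++ pkg.env.buildInputs;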
@@ -23,6 +23,7 @@ module SpecHelper
   ) where

 import Control.Exception (bracket)
+import System.Environment (getEnvironment)
 import System.IO (hGetLine)
 import System.Process (runInteractiveProcess, terminateProcess, waitForProcess)

@@ -31,11 +32,12 @@ import Ldap.Client as Ldap

 locally :: (Ldap -> IO a) -> IO (Either LdapError a)
 locally f =
-  bracket (do (_, out, _, h) <- runInteractiveProcess "./test/ldap.js" [] Nothing
-                                                      (Just [ ("PORT", show port)
-                                                            , ("SSL_CERT", "./ssl/cert.pem")
-                                                            , ("SSL_KEY", "./ssl/key.pem")
-                                                            ])
+  bracket (do env <- getEnvironment
+              (_, out, _, h) <- runInteractiveProcess "./test/ldap.js" [] Nothing
+                                                      (Just (("PORT", show port) :
+                                                             ("SSL_CERT", "./ssl/cert.pem") :
+                                                             ("SSL_KEY", "./ssl/key.pem") :
+                                                             env))
               hGetLine out
               return h)
          (\h -> do terminateProcess h
@@ -1,4 +1,4 @@
-#!/usr/bin/env js
+#!/usr/bin/env node

 var fs = require('fs');
 var ldapjs = require('ldapjs');
@@ -81,8 +81,9 @@ function authorize(req, res, next) {

 server.search('o=localhost', [authorize], function(req, res, next) {
   for (var i = 0; i < pokemon.length; i++) {
-    if (req.filter.matches(pokemon[i].attributes))
+    if (req.filter.matches(pokemon[i].attributes)) {
       res.send(pokemon[i]);
+    }
   };

   res.end();