Compare commits
33 Commits
feature/re...master

| Author | SHA1 | Date |
|---|---|---|
| | 4241ed7d77 | |
| | 0b884c9b68 | |
| | d5d719513a | |
| | 91a17947aa | |
| | a68eaf8e89 | |
| | 115214b480 | |
| | 8eef50dd5d | |
| | e787457cb8 | |
| | 46aead0578 | |
| | 1dc492e6a2 | |
| | 238ebb7913 | |
| | e69fc50d73 | |
| | e8e70a00f8 | |
| | 3c7e4585da | |
| | 824b44ac22 | |
| | b92564e783 | |
| | f2d0a73aa8 | |
| | ce4e99b7d6 | |
| | c98518ba97 | |
| | 9921b3178e | |
| | cbeafaf99a | |
| | fcaf49c7c8 | |
| | b411ecedc0 | |
| | e9e9f35276 | |
| | 889c66a046 | |
| | 7a2cf37141 | |
| | 8e144e01d9 | |
| | 6b44408394 | |
| | 85dec4c73d | |
| | f60e9e5f4d | |
| | 7a1214f773 | |
| | c94763606b | |
| | cc03a13711 | |

.github_changelog_generator (9, Normal file)
@@ -0,0 +1,9 @@
user=alasconnect
project=ldap-client
output=CHANGELOG.md

release-branch=master
since-tag=0.2.0

header=# LDAP Client Changelog
exclude-labels=documentation

.gitignore (4, vendored)
@@ -1,5 +1,9 @@
dist/
dist-newstyle/
.cabal-sandbox/
cabal.sandbox.config
node_modules
Gemfile.lock
*~
*.swp
/result

.travis.yml (49)
@@ -1,35 +1,26 @@
env:
  - CABALVER=1.18 GHCVER=7.6.3
  - CABALVER=1.18 GHCVER=7.8.4
  - CABALVER=1.22 GHCVER=7.10.1
language: haskell

before_install:
  - travis_retry sudo add-apt-repository -y ppa:hvr/ghc
  - travis_retry sudo apt-get update
  - travis_retry sudo apt-get install cabal-install-$CABALVER ghc-$GHCVER npm
  - export PATH=/opt/ghc/$GHCVER/bin:/opt/cabal/$CABALVER/bin:$PATH
sudo: false

git:
  depth: 5

cache:
  directories:
    - "$HOME/.cabal/store"

matrix:
  include:
    - ghc: 8.0.1
    - ghc: 8.2.2
    - ghc: 8.4.4
    - ghc: 8.6.5

install:
  - cabal --version
  - echo "$(ghc --version) [$(ghc --print-project-git-commit-id 2> /dev/null || echo '?')]"
  - travis_retry cabal update
  - cabal install --only-dependencies --enable-tests --enable-benchmarks
  - cabal update
  - cabal install --only-dependencies --enable-tests ldap-client.cabal
  - npm install ldapjs

script:
  - cabal configure --enable-tests -v2
  - cabal build
  - cabal test
  - |
    if [ $GHCVER = "7.10.1" ]; then
      cabal check
    fi
  - cabal sdist
  - export SRC_TGZ=$(cabal info . | awk '{print $2 ".tar.gz";exit}') ;
    cd dist/;
    if [ -f "$SRC_TGZ" ]; then
      cabal install --force-reinstalls "$SRC_TGZ";
    else
      echo "expected '$SRC_TGZ' not found";
      exit 1;
    fi
  - cabal install --enable-tests $RUN_TESTS ldap-client.cabal
  - cabal sdist && cabal install --enable-tests dist/ldap-client-*.tar.gz

.vim.custom (13)
@@ -1,14 +1,5 @@
function s:hdevtools_options(rgs)
return join(map(a:rgs, "'-g ' . v:val"))
endfunction

function s:discover_cabal_sandbox(glob)
let l:sandboxes = split(glob(a:glob, "."), "\n")
if len(l:sandboxes) > 0
return ['-no-user-package-db', '-package-db=' . l:sandboxes[-1]]
else
return []
endif
return join(["-s", "/tmp/" . substitute(system("sha1sum <<< $PWD | cut -d' ' -f1"), '\n\+$', '', '') . ".sock"] + map(a:rgs, "'-g ' . v:val"))
endfunction

let g:syntastic_haskell_hdevtools_args = s:hdevtools_options
@@ -23,5 +14,5 @@ let g:syntastic_haskell_hdevtools_args = s:hdevtools_options
\ , '-Wall'
\ , '-fno-warn-unused-do-bind'
\ , '-fno-warn-type-defaults'
\ ] + s:discover_cabal_sandbox(".cabal-sandbox/*.conf.d")
\ ]
\ )

CHANGELOG.md (18, Normal file)
@@ -0,0 +1,18 @@
# LDAP Client Changelog


## [0.4.0](https://github.com/alasconnect/ldap-client/tree/0.4.0) (2019-11-07)

[Full Changelog](https://github.com/alasconnect/ldap-client/compare/0.3.0...0.4.0)

**Merged pull requests:**

- Poolable [\#1](https://github.com/alasconnect/ldap-client/pull/1) ([dminuoso](https://github.com/dminuoso))

## [0.3.0](https://github.com/alasconnect/ldap-client/tree/0.3.0) (2019-11-06)

[Full Changelog](https://github.com/alasconnect/ldap-client/compare/0.2.0...0.3.0)



\* *This Changelog was automatically generated by [github_changelog_generator](https://github.com/github-changelog-generator/github-changelog-generator)*

LICENSE (2)
@@ -1,4 +1,4 @@
Copyright (c) 2015, Matvey Aksenov
Copyright (c) 2015-2017, Matvey Aksenov
All rights reserved.

Redistribution and use in source and binary forms, with or without

@@ -1,26 +1,27 @@
ldap-client
===========
[](https://travis-ci.org/supki/ldap-client)
[](https://hackage.haskell.org/package/ldap-client)
[](https://travis-ci.org/alasconnect/ldap-client)

This library implements (the parts of) [RFC 4511][rfc4511]

Feature | RFC Section | Support
:--------------------------- |:---------------:|:-----------:
Bind Operation | [4.2][4.2] | ✔
Unbind Operation | [4.3][4.3] | ✔
Unsolicited Notification | [4.4][4.4] | ✔
Notice of Disconnection | [4.4.1][4.4.1] | ✔
Search Operation | [4.5][4.5] | ✔\*
Modify Operation | [4.6][4.6] | ✔
Add Operation | [4.7][4.7] | ✔
Delete Operation | [4.8][4.8] | ✔
Modify DN Operation | [4.9][4.9] | ✔
Compare Operation | [4.10][4.10] | ✔
Abandon Operation | [4.11][4.11] | ✘
Extended Operation | [4.12][4.12] | ✔
IntermediateResponse Message | [4.13][4.13] | ✔
StartTLS Operation | [4.14][4.14] | ✔†
LDAP over TLS | - | ✔
| Feature | RFC Section | Support
|:---------------------------- |:---------------:|:-----------:
| Bind Operation | [4.2][4.2] | ✔
| Unbind Operation | [4.3][4.3] | ✔
| Unsolicited Notification | [4.4][4.4] | ✔
| Notice of Disconnection | [4.4.1][4.4.1] | ✔
| Search Operation | [4.5][4.5] | ✔\*
| Modify Operation | [4.6][4.6] | ✔
| Add Operation | [4.7][4.7] | ✔
| Delete Operation | [4.8][4.8] | ✔
| Modify DN Operation | [4.9][4.9] | ✔
| Compare Operation | [4.10][4.10] | ✔
| Abandon Operation | [4.11][4.11] | ✘
| Extended Operation | [4.12][4.12] | ✔
| IntermediateResponse Message | [4.13][4.13] | ✔
| StartTLS Operation | [4.14][4.14] | ✔†
| LDAP over TLS | - | ✔

\* The `:dn` thing is unsupported in Extensible matches
† Only serves as an example of Extended Operation. It's useless for all practical purposes as it does not actually enable TLS. In other words, use LDAP over TLS instead.

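For orientation alongside the feature table above, a minimal usage sketch follows. It relies on the `Ldap.Client` entry points that appear elsewhere in this changeset (`with`, `Tls`, `defaultTlsSettings`, `bind`); the `search` call and its combinators are assumptions about the library's interface, not something taken from this diff, and the host, DN, and password are placeholders.

```haskell
{-# LANGUAGE OverloadedStrings #-}
module Main (main) where

-- Hypothetical client session: connect over TLS, bind, and look up a user.
import qualified Ldap.Client as Ldap
import           Ldap.Client (Attr(..), Dn(..), Filter(..), Password(..), Scope(..))

main :: IO ()
main = do
  res <- Ldap.with (Ldap.Tls "ldap.example.com" Ldap.defaultTlsSettings) 636 $ \l -> do
    -- Simple bind with a (placeholder) service account.
    Ldap.bind l (Dn "cn=admin,dc=example,dc=com") (Password "secret")
    -- Assumed search interface: base DN, scope modifier, filter, requested attributes.
    Ldap.search l (Dn "dc=example,dc=com")
                  (Ldap.scope WholeSubtree)
                  (Attr "uid" := "jdoe")
                  [Attr "cn", Attr "mail"]
  print res
```
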
default.nix (9, Normal file)
@@ -0,0 +1,9 @@
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc802" }: let
  ghc = nixpkgs.pkgs.haskell.packages.${compiler};
  npm = import ./npm {};
in
ghc.callPackage ./package.nix {
  mkDerivation = args: ghc.mkDerivation(args // {
    buildTools = (if args ? buildTools then args.buildTools else []) ++ [ npm.nodePackages.ldapjs ];
  });
}

@@ -31,7 +31,7 @@ import qualified System.IO as IO -- base

data Conf = Conf
  { host :: String
  , port :: PortNumber
  , port :: Int
  , dn :: Dn
  , password :: Password
  , base :: Dn
@@ -55,7 +55,7 @@ main = do

login :: Conf -> IO (Either LdapError ())
login conf =
  Ldap.with (Ldap.Secure (host conf)) (port conf) $ \l -> do
  Ldap.with (Ldap.Tls (host conf) Ldap.defaultTlsSettings) (port conf) $ \l -> do
    Ldap.bind l (dn conf) (password conf)
    fix $ \loop -> do
      uid <- prompt "Username: "

@@ -1,30 +1,37 @@
name: ldap-client
version: 0.1.0
version: 0.4.0
synopsis: Pure Haskell LDAP Client Library
description:
  Pure Haskell LDAP client library implementing (the parts of) RFC 4511.
homepage: https://supki.github.io/ldap-client
homepage: https://github.com/alasconnect/ldap-client
license: BSD2
license-file: LICENSE
author: Matvey Aksenov
maintainer: matvey.aksenov@gmail.com
copyright: 2015 Matvey Aksenov
author: Matvey Aksenov, AlasConnect LLC
maintainer: matvey.aksenov@gmail.com, software@alasconnect.com
copyright: 2015 Matvey Aksenov, 2019 AlasConnect LLC
category: Network
build-type: Simple
cabal-version: >= 1.10
tested-with:
  GHC == 7.6.3
  , GHC == 7.8.4
  , GHC == 7.10.1
  GHC == 8.0.1
  , GHC == 8.2.2
  , GHC == 8.4.4
  , GHC == 8.6.5
extra-source-files:
  README.markdown
  README.md
  CHANGELOG.md

source-repository head
  type: git
  location: git@github.com:supki/ldap-client
  tag: 0.1.0
  location: git@github.com:alasconnect/ldap-client
  tag: 0.4.0

library
  ghc-options:
    -Wall
    -fwarn-incomplete-uni-patterns
    -fwarn-incomplete-record-updates
    -fwarn-unrecognised-pragmas
  default-language:
    Haskell2010
  hs-source-dirs:
@@ -50,12 +57,18 @@ library
    , bytestring
    , connection >= 0.2
    , containers
    , fail
    , network >= 2.6
    , semigroups >= 0.16
    , stm
    , text

test-suite spec
  ghc-options:
    -Wall
    -fwarn-incomplete-uni-patterns
    -fwarn-incomplete-record-updates
    -fwarn-unrecognised-pragmas
  default-language:
    Haskell2010
  type:

npm/.nixfromnpm-version (1, Normal file)
@@ -0,0 +1 @@
0.7.0

npm/default.nix (9, Normal file)
@@ -0,0 +1,9 @@
{ nodejsVersion ? "4.1", npm3 ? false, pkgs ? import <nixpkgs> {} }:
let
  nodeLib = import ./nodeLib {
    inherit pkgs npm3 nodejsVersion;
    self = nodeLib;
  };
in nodeLib.generatePackages {
  rootPath = ./nodePackages;
}

394
npm/nodeLib/buildNodePackage.nix
Normal file
394
npm/nodeLib/buildNodePackage.nix
Normal file
@ -0,0 +1,394 @@
|
||||
{
|
||||
# Provides the mkDerivation function.
|
||||
stdenv,
|
||||
# Lets us run a command.
|
||||
runCommand,
|
||||
# Derivation for nodejs and npm.
|
||||
nodejs,
|
||||
# Which version of npm to use.
|
||||
npm ? nodejs,
|
||||
# List of required native build inputs.
|
||||
neededNatives,
|
||||
# Self-reference for overriding purposes.
|
||||
buildNodePackage
|
||||
}:
|
||||
|
||||
let
|
||||
# The path within $out/lib to find a package. If the package does not
|
||||
# have a namespace, it will simply be in `node_modules`, and otherwise it
|
||||
# will appear in `node_modules/@namespace`.
|
||||
modulePath = pkg: if pkg.namespace == null then "node_modules"
|
||||
else "node_modules/@${pkg.namespace}";
|
||||
|
||||
# The path to the package within its modulePath. Just appending the name
|
||||
# of the package.
|
||||
pathInModulePath = pkg: "${modulePath pkg}/${pkg.basicName}";
|
||||
in
|
||||
|
||||
{
|
||||
# Used for private packages. Indicated in the name field of the
|
||||
# package.json, e.g. "@mynamespace/mypackage". Public packages will not
|
||||
# need this.
|
||||
namespace ? null,
|
||||
|
||||
# The name of the package. If it's a private package with a namespace,
|
||||
# this should not contain the namespace.
|
||||
name,
|
||||
|
||||
# Version of the package. This should follow the semver standard, although
|
||||
# we don't explicitly enforce that in this function.
|
||||
version,
|
||||
|
||||
# Source of the package; can be a tarball or a folder on the filesystem.
|
||||
src,
|
||||
|
||||
# by default name of nodejs interpreter e.g. "nodejs-<version>-${name}"
|
||||
namePrefix ? "${nodejs.name}-" +
|
||||
(if namespace == null then "" else "${namespace}-"),
|
||||
|
||||
# List or attribute set of dependencies
|
||||
deps ? {},
|
||||
|
||||
# List or attribute set of peer depencies
|
||||
peerDependencies ? {},
|
||||
|
||||
# List or attribute set of optional dependencies
|
||||
optionalDependencies ? {},
|
||||
|
||||
# List of optional dependencies to skip
|
||||
skipOptionalDependencies ? [],
|
||||
|
||||
# List or set of development dependencies (or null).
|
||||
devDependencies ? null,
|
||||
|
||||
# If true and devDependencies are not null, the package will be
|
||||
# installed contingent on successfully running tests.
|
||||
doCheck ? devDependencies != null,
|
||||
|
||||
# Additional flags passed to npm install
|
||||
flags ? "",
|
||||
|
||||
# Command to be run before shell hook
|
||||
preShellHook ? "",
|
||||
|
||||
# Command to be run after shell hook
|
||||
postShellHook ? "",
|
||||
|
||||
# Same as https://docs.npmjs.com/files/package.json#os
|
||||
os ? [],
|
||||
|
||||
# Same as https://docs.npmjs.com/files/package.json#cpu
|
||||
cpu ? [],
|
||||
|
||||
# Attribute set of already resolved deps (internal),
|
||||
# for avoiding infinite recursion
|
||||
resolvedDeps ? {},
|
||||
|
||||
...
|
||||
} @ args:
|
||||
|
||||
let
|
||||
inherit (stdenv.lib) fold removePrefix hasPrefix subtractLists isList flip
|
||||
intersectLists isAttrs listToAttrs nameValuePair
|
||||
mapAttrs filterAttrs attrNames elem concatMapStrings
|
||||
attrValues getVersion flatten remove concatStringsSep;
|
||||
|
||||
# whether we should run tests.
|
||||
shouldTest = doCheck && devDependencies != null;
|
||||
|
||||
# The package name as it appears in the package.json. This contains a
|
||||
# namespace if there is one, so it will be a distinct identifier for
|
||||
# different packages.
|
||||
pkgName = if namespace == null then name else "@${namespace}/${name}";
|
||||
|
||||
# We create a `self` object for self-referential expressions. It
|
||||
# bottoms out in a call to `mkDerivation` at the end.
|
||||
self = let
|
||||
sources = runCommand "node-sources" {} ''
|
||||
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
|
||||
mv $(find . -type d -mindepth 1 -maxdepth 1) $out
|
||||
'';
|
||||
|
||||
platforms = if os == [] then nodejs.meta.platforms else
|
||||
fold (entry: platforms:
|
||||
let
|
||||
filterPlatforms =
|
||||
stdenv.lib.platforms.${removePrefix "!" entry} or [];
|
||||
in
|
||||
# Ignore unknown platforms
|
||||
if filterPlatforms == [] then (if platforms == [] then nodejs.meta.platforms else platforms)
|
||||
else
|
||||
if hasPrefix "!" entry then
|
||||
subtractLists (intersectLists filterPlatforms nodejs.meta.platforms) platforms
|
||||
else
|
||||
platforms ++ (intersectLists filterPlatforms nodejs.meta.platforms)
|
||||
) [] os;
|
||||
|
||||
toAttrSet = obj: if isAttrs obj then obj else
|
||||
(listToAttrs (map (x: nameValuePair x.name x) obj));
|
||||
|
||||
mapDependencies = deps: filterFunc: let
|
||||
attrDeps = toAttrSet deps;
|
||||
in rec {
|
||||
# All required node modules, without already resolved dependencies
|
||||
# Also override with already resolved dependencies
|
||||
requiredDeps = mapAttrs (name: dep:
|
||||
dep.override {resolvedDeps = resolvedDeps // { "${name}" = self; };}
|
||||
) (filterAttrs filterFunc
|
||||
(removeAttrs attrDeps (attrNames resolvedDeps)));
|
||||
|
||||
# Recursive dependencies that we want to avoid with shim creation
|
||||
recursiveDeps = filterAttrs filterFunc
|
||||
(removeAttrs attrDeps (attrNames requiredDeps));
|
||||
};
|
||||
|
||||
# Filter out self-referential dependencies.
|
||||
_dependencies = mapDependencies deps (name: dep:
|
||||
dep.pkgName != pkgName);
|
||||
|
||||
# Filter out self-referential peer dependencies.
|
||||
_peerDependencies = mapDependencies peerDependencies (name: dep:
|
||||
dep.pkgName != pkgName);
|
||||
|
||||
# Filter out any optional dependencies which don't build correctly.
|
||||
_optionalDependencies = mapDependencies optionalDependencies (name: dep:
|
||||
(builtins.tryEval dep).success &&
|
||||
!(elem dep.pkgName skipOptionalDependencies)
|
||||
);
|
||||
|
||||
# Required dependencies are those that we haven't filtered yet.
|
||||
requiredDependencies =
|
||||
_dependencies.requiredDeps //
|
||||
_optionalDependencies.requiredDeps //
|
||||
_peerDependencies.requiredDeps;
|
||||
|
||||
recursiveDependencies =
|
||||
_dependencies.recursiveDeps //
|
||||
_optionalDependencies.recursiveDeps //
|
||||
_peerDependencies.recursiveDeps;
|
||||
|
||||
npmFlags = concatStringsSep " " ([
|
||||
# We point the registry at something that doesn't exist. This will
|
||||
# mean that NPM will fail if any of the dependencies aren't met, as it
|
||||
# will attempt to hit this registry for the missing dependency.
|
||||
"--registry=fakeprotocol://notaregistry.$UNIQNAME.derp"
|
||||
# These flags make failure fast, as otherwise NPM will spin for a while.
|
||||
"--fetch-retry-mintimeout=0"
|
||||
"--fetch-retry-maxtimeout=10"
|
||||
# This will disable any user-level npm configuration.
|
||||
"--userconfig=/dev/null"
|
||||
# This flag is used for packages which link against the node headers.
|
||||
"--nodedir=${sources}"
|
||||
] ++ (if isList flags then flags else [flags]));
|
||||
|
||||
# A bit of bash to check that variables are set.
|
||||
checkSet = vars: concatStringsSep "\n" (flip map vars (var: ''
|
||||
[[ -z $${var} ]] && { echo "${var} is not set."; exit 1; }
|
||||
''));
|
||||
|
||||
mkDerivationArgs = {
|
||||
inherit src;
|
||||
|
||||
# Define some environment variables that we will use in the build.
|
||||
prePatch = ''
|
||||
export HASHEDNAME=$(echo "$propagatedNativeBuildInputs $name" \
|
||||
| md5sum | awk '{print $1}')
|
||||
export UNIQNAME="''${HASHEDNAME:0:10}-${name}-${version}"
|
||||
export BUILD_DIR=$TMPDIR/$UNIQNAME-build
|
||||
'';
|
||||
|
||||
patchPhase = ''
|
||||
runHook prePatch
|
||||
patchShebangs $PWD
|
||||
|
||||
# Remove any impure dependencies from the package.json (see script
|
||||
# for details)
|
||||
node ${./removeImpureDependencies.js}
|
||||
|
||||
# We do not handle shrinkwraps yet
|
||||
rm npm-shrinkwrap.json 2>/dev/null || true
|
||||
|
||||
# Repackage source into a tarball, so npm pre/post publish hooks are
|
||||
# not triggered,
|
||||
mkdir -p $BUILD_DIR
|
||||
GZIP=-1 tar -czf $BUILD_DIR/package.tgz ./
|
||||
export PATCHED_SRC=$BUILD_DIR/package.tgz
|
||||
runHook postPatch
|
||||
'';
|
||||
|
||||
configurePhase = ''
|
||||
runHook preConfigure
|
||||
(
|
||||
${checkSet ["BUILD_DIR"]}
|
||||
mkdir -p $BUILD_DIR
|
||||
cd $BUILD_DIR
|
||||
# Symlink or copy dependencies for node modules
|
||||
# copy is needed if dependency has recursive dependencies,
|
||||
# because node can't follow symlinks while resolving recursive deps.
|
||||
${
|
||||
let
|
||||
link = dep: ''
|
||||
${if dep.recursiveDeps == [] then "ln -sfv" else "cp -rf"} \
|
||||
${dep}/lib/${pathInModulePath dep} ${modulePath dep}
|
||||
'';
|
||||
in
|
||||
flip concatMapStrings (attrValues requiredDependencies) (dep: ''
|
||||
mkdir -p ${modulePath dep}
|
||||
${link dep}
|
||||
${concatMapStrings link (attrValues dep.peerDependencies)}
|
||||
'')}
|
||||
|
||||
# Create shims for recursive dependenceies
|
||||
${concatMapStrings (dep: ''
|
||||
mkdir -p ${modulePath dep}
|
||||
cat > ${pathInModulePath dep}/package.json <<EOF
|
||||
{
|
||||
"name": "${dep.pkgName}",
|
||||
"version": "${getVersion dep}"
|
||||
}
|
||||
EOF
|
||||
'') (attrValues recursiveDependencies)}
|
||||
|
||||
# Create dummy package.json file
|
||||
cat <<EOF > package.json
|
||||
{"name":"dummy-for-$UNIQNAME","version":"0.0.0", "license":"MIT",
|
||||
"description":"Dummy package file for building $name",
|
||||
"repository":{"type":"git","url":"http://$UNIQNAME.com"}}
|
||||
EOF
|
||||
|
||||
# Create dummy readme
|
||||
echo "Dummy package" > README.md
|
||||
)
|
||||
|
||||
export HOME=$BUILD_DIR
|
||||
runHook postConfigure
|
||||
'';
|
||||
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
|
||||
# Install package
|
||||
(
|
||||
${checkSet ["BUILD_DIR" "PATCHED_SRC"]}
|
||||
|
||||
echo "Building $name in $BUILD_DIR"
|
||||
cd $BUILD_DIR
|
||||
HOME=$PWD npm install $PATCHED_SRC ${npmFlags} || {
|
||||
npm list
|
||||
exit 1
|
||||
}
|
||||
)
|
||||
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
(
|
||||
cd $BUILD_DIR
|
||||
|
||||
# Remove shims
|
||||
${concatMapStrings (dep: ''
|
||||
rm ${pathInModulePath dep}/package.json
|
||||
rmdir ${modulePath dep}
|
||||
'') (attrValues recursiveDependencies)}
|
||||
|
||||
# Install the package that we just built.
|
||||
mkdir -p $out/lib/${modulePath self}
|
||||
|
||||
# Move the folder that was created for this path to $out/lib.
|
||||
mv ${pathInModulePath self} $out/lib/${pathInModulePath self}
|
||||
|
||||
# Remove the node_modules subfolder from there, and instead put things
|
||||
# in $PWD/node_modules into that folder.
|
||||
rm -rf $out/lib/${pathInModulePath self}/node_modules
|
||||
cp -r node_modules $out/lib/${pathInModulePath self}/node_modules
|
||||
|
||||
if [ -e "$out/lib/${pathInModulePath self}/man" ]; then
|
||||
mkdir -p $out/share
|
||||
for dir in $out/lib/${pathInModulePath self}/man/*; do #*/
|
||||
mkdir -p $out/share/man/$(basename "$dir")
|
||||
for page in $dir/*; do #*/
|
||||
ln -sv $page $out/share/man/$(basename "$dir")
|
||||
done
|
||||
done
|
||||
fi
|
||||
|
||||
# Move peer dependencies to node_modules
|
||||
${concatMapStrings (dep: ''
|
||||
mkdir -p ${modulePath dep}
|
||||
mv ${pathInModulePath dep} $out/lib/${modulePath dep}
|
||||
'') (attrValues _peerDependencies.requiredDeps)}
|
||||
|
||||
# Install binaries and patch shebangs. These are always found in
|
||||
# node_modules/.bin, regardless of a package namespace.
|
||||
mv node_modules/.bin $out/lib/node_modules 2>/dev/null || true
|
||||
if [ -d "$out/lib/node_modules/.bin" ]; then
|
||||
ln -sv $out/lib/node_modules/.bin $out/bin
|
||||
patchShebangs $out/lib/node_modules/.bin
|
||||
fi
|
||||
)
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
|
||||
shellHook = ''
|
||||
${preShellHook}
|
||||
export PATH=${npm}/bin:${nodejs}/bin:$(pwd)/node_modules/.bin:$PATH
|
||||
mkdir -p node_modules
|
||||
${concatMapStrings (dep: ''
|
||||
mkdir -p ${modulePath dep}
|
||||
ln -sfv ${dep}/lib/${pathInModulePath dep} ${pathInModulePath dep}
|
||||
'') (attrValues requiredDependencies)}
|
||||
${postShellHook}
|
||||
'';
|
||||
|
||||
# Stipping does not make a lot of sense in node packages
|
||||
dontStrip = true;
|
||||
|
||||
meta = {
|
||||
inherit platforms;
|
||||
maintainers = [ stdenv.lib.maintainers.offline ];
|
||||
};
|
||||
|
||||
# Propagate pieces of information about the package so that downstream
|
||||
# packages can reflect on them.
|
||||
passthru.pkgName = pkgName;
|
||||
passthru.basicName = name;
|
||||
passthru.namespace = namespace;
|
||||
passthru.version = version;
|
||||
passthru.peerDependencies = _peerDependencies.requiredDeps;
|
||||
passthru.recursiveDeps =
|
||||
(flatten (
|
||||
map (dep: remove name dep.recursiveDeps) (attrValues requiredDependencies)
|
||||
)) ++
|
||||
(attrNames recursiveDependencies);
|
||||
|
||||
# Add an 'override' attribute, which will call `buildNodePackage` with the
|
||||
# given arguments overridden.
|
||||
passthru.override = newArgs: buildNodePackage (args // newArgs);
|
||||
} // (removeAttrs args ["deps" "resolvedDeps" "optionalDependencies"
|
||||
"devDependencies"]) // {
|
||||
name = "${namePrefix}${name}-${version}";
|
||||
|
||||
# Run the node setup hook when this package is a build input
|
||||
propagatedNativeBuildInputs = (args.propagatedNativeBuildInputs or []) ++
|
||||
[ npm nodejs ];
|
||||
|
||||
nativeBuildInputs =
|
||||
(args.nativeBuildInputs or []) ++ neededNatives ++
|
||||
(attrValues requiredDependencies);
|
||||
|
||||
# Expose list of recursive dependencies upstream, up to the package that
|
||||
# caused recursive dependency
|
||||
recursiveDeps =
|
||||
(flatten (
|
||||
map (dep: remove name dep.recursiveDeps) (attrValues requiredDependencies)
|
||||
)) ++
|
||||
(attrNames recursiveDependencies);
|
||||
};
|
||||
|
||||
in stdenv.mkDerivation mkDerivationArgs;
|
||||
|
||||
in self
|
||||
210
npm/nodeLib/default.nix
Normal file
210
npm/nodeLib/default.nix
Normal file
@ -0,0 +1,210 @@
|
||||
/*
|
||||
A set of tools for generating node packages, such as to be imported by
|
||||
default.nix files generated by nixfromnpm.
|
||||
*/
|
||||
|
||||
{
|
||||
# Self-reference so that we can pass through to downstream libraries
|
||||
self,
|
||||
# Base set of packages, i.e. nixpkgs.
|
||||
pkgs,
|
||||
# Version of nodejs.
|
||||
nodejsVersion ? "4.1",
|
||||
# Whether to use npm3 (requires a prebuilt tarball of npm3).
|
||||
npm3 ? true
|
||||
}:
|
||||
|
||||
let
|
||||
# Function to replace dots with something
|
||||
replaceDots = c: replaceChars ["."] [c];
|
||||
inherit (builtins) readDir removeAttrs length getEnv elemAt hasAttr;
|
||||
inherit (pkgs.lib) attrNames attrValues filterAttrs flip foldl
|
||||
hasSuffix hasPrefix removeSuffix replaceChars
|
||||
optional optionals stringToCharacters
|
||||
concatStrings tail splitString;
|
||||
inherit (pkgs.stdenv) isLinux;
|
||||
|
||||
# Function to remove the first character of a string.
|
||||
dropFirstChar = str: concatStrings (tail (stringToCharacters str));
|
||||
|
||||
# Like a for loop.
|
||||
for = flip map;
|
||||
|
||||
# Concatenate a list of sets.
|
||||
joinSets = foldl (a: b: a // b) {};
|
||||
|
||||
# Extracts a tarball containing a bootstrapped version of npm 3.
|
||||
# This tarball must have been previously generated by an invocation
|
||||
# of nixfromnpm, but one of these should be included in the
|
||||
# nixfromnpm distribution (if not, run the `gen_npm3` script).
|
||||
npm3-src = pkgs.runCommand "npm3" {src=./npm3.tar.gz;} ''
|
||||
mkdir -p $out && cd $out && tar -xf $src
|
||||
'';
|
||||
|
||||
# Builds the extracted nix file. Since of course it can't use npm3,
|
||||
# being that it hasn't been built yet, we disable npm3 for this.
|
||||
_npm3 = import npm3-src {
|
||||
inherit pkgs nodejsVersion;
|
||||
npm3 = false;
|
||||
};
|
||||
|
||||
# Parse the `NPM_AUTH_TOKENS` environment variable to discover
|
||||
# namespace-token associations and turn them into an attribute set
|
||||
# which we can use as an input to the fetchPrivateNpm function.
|
||||
# Split the variable on ':', then turn each k=v element in
|
||||
# the list into an attribute set and join all of those sets.
|
||||
namespaceTokens = joinSets (
|
||||
for (splitString ":" (getEnv "NPM_AUTH_TOKENS")) (kvPair:
|
||||
let kv = splitString "=" kvPair; in
|
||||
if length kv != 2 then {}
|
||||
else {"${elemAt kv 0}" = elemAt kv 1;}));
|
||||
|
||||
# A function similar to fetchUrl but allows setting of custom headers.
|
||||
fetchUrlWithHeaders = pkgs.callPackage ./fetchUrlWithHeaders.nix {};
|
||||
|
||||
# Uses the parsed namespace tokens to create a function that can
|
||||
# fetch a private package from an npm repo.
|
||||
fetchPrivateNpm = {namespace, headers ? {}, ...}@args:
|
||||
if !(hasAttr namespace namespaceTokens)
|
||||
then throw "NPM_AUTH_TOKENS does not contain namespace ${namespace}"
|
||||
else let
|
||||
Authorization = "Bearer ${namespaceTokens.${namespace}}";
|
||||
headers = {inherit Authorization;} // headers;
|
||||
in
|
||||
fetchUrlWithHeaders (removeAttrs args ["namespace"] // {inherit headers;});
|
||||
in
|
||||
|
||||
rec {
|
||||
nodejs = pkgs.nodejs or (
|
||||
throw "The given nodejs version ${nodejsVersion} has not been defined."
|
||||
);
|
||||
buildNodePackage = import ./buildNodePackage.nix ({
|
||||
inherit (pkgs) stdenv runCommand;
|
||||
inherit nodejs buildNodePackage;
|
||||
neededNatives = [pkgs.python] ++ optionals isLinux [pkgs.utillinux];
|
||||
} // (if npm3 then {npm = _npm3;} else {}));
|
||||
# A generic package that will fail to build. This is used to indicate
|
||||
# packages that are broken, without failing the entire generation of
|
||||
# a package expression.
|
||||
brokenPackage = {name, reason}:
|
||||
let
|
||||
deriv = pkgs.stdenv.mkDerivation {
|
||||
name = "BROKEN-${name}";
|
||||
buildCommand = ''
|
||||
echo "Package ${name} is broken: ${reason}"
|
||||
exit 1
|
||||
'';
|
||||
passthru.withoutTests = deriv;
|
||||
passthru.pkgName = name;
|
||||
passthru.basicName = "BROKEN";
|
||||
passthru.namespace = null;
|
||||
passthru.version = "BROKEN";
|
||||
passthru.override = _: deriv;
|
||||
passthru.recursiveDeps = [];
|
||||
passthru.peerDependencies = {};
|
||||
};
|
||||
in
|
||||
deriv;
|
||||
|
||||
# List a directory after filtering the files.
|
||||
lsFilter = pred: dir: attrNames (filterAttrs pred (readDir dir));
|
||||
|
||||
# Checks the name and type of a listing to grab non-dotfile dirs.
|
||||
isRegDir = name: type: type == "directory" && !(hasPrefix "." name);
|
||||
|
||||
# Discover all of the node packages in a folder and turn them into a set
|
||||
# mapping `<name>_<version>` to the expression to build that package.
|
||||
discoverPackages = {callPackage, rootPath}:
|
||||
# if true then throw "huh? ${rootPath}" else
|
||||
let
|
||||
# Names of NPM packages defined in this directory. Don't take
|
||||
# files that start with '@'.
|
||||
nodeDirs = lsFilter (n: t: isRegDir n t && !(hasPrefix "@" n))
|
||||
(/. + rootPath);
|
||||
# Generate the package expression from a package name and .nix path.
|
||||
toPackage = name: filepath: let
|
||||
versionRaw = removeSuffix ".nix" filepath; # Raw version, i.e. "1.2.4"
|
||||
# Join with package name to make the variable name.
|
||||
varName = "${replaceDots "-" name}_${replaceDots "-" versionRaw}";
|
||||
in
|
||||
# Return the singleton set which maps that name to the actual expression.
|
||||
{"${varName}" = callPackage (/. + rootPath + "/${name}/${filepath}") {};};
|
||||
in
|
||||
# For each directory, and each .nix file in it, create a package from that.
|
||||
joinSets (for nodeDirs (pkgName: let
|
||||
pkgDir = /. + rootPath + "/${pkgName}";
|
||||
# List of .nix files in the directory (excluding symlinks).
|
||||
versionFiles = lsFilter (name: type: type == "regular" &&
|
||||
hasSuffix ".nix" name)
|
||||
pkgDir;
|
||||
# Check if there is a `latest.nix` file
|
||||
hasLatest = lsFilter (n: _: n == "latest.nix") pkgDir != [];
|
||||
in
|
||||
joinSets (
|
||||
# Find all of the versions listed in the folder.
|
||||
map (toPackage pkgName) versionFiles ++
|
||||
# If the folder has a `latest.nix` file, link the bare name of
|
||||
# the package to that file.
|
||||
optional hasLatest {
|
||||
"${replaceDots "-" pkgName}" = callPackage
|
||||
(/. + rootPath + "/${pkgName}/latest.nix") {};
|
||||
})));
|
||||
|
||||
# Same as above, except that we take all of the namespaced packages;
|
||||
# these packages are in folders prefaced with `@`, and contain
|
||||
# packages in that folder. So, for example the path `@foo/bar` is
|
||||
# the path to all of the versions of the `bar` package under the
|
||||
# namespace `foo`.
|
||||
discoverNamespacePackages = {callPackage, rootPath}: let
|
||||
isNsDir = name: type: type == "directory" && hasPrefix "@" name;
|
||||
# Names of NPM packages defined in this directory.
|
||||
namespaceDirs = lsFilter isNsDir (/. + rootPath);
|
||||
in
|
||||
# For each namespace directory, each package folder in it, and
|
||||
# each .nix file in that, create a package from that and then
|
||||
# create a namespace out of that.
|
||||
joinSets (for namespaceDirs (nsDirName: {
|
||||
"${dropFirstChar nsDirName}" = discoverPackages {
|
||||
inherit callPackage;
|
||||
rootPath = /. + rootPath + "/${nsDirName}";
|
||||
};
|
||||
}));
|
||||
|
||||
# The function that a default.nix can call into which will scan its
|
||||
# directory for all of the package files and generate a big attribute set
|
||||
# for all of them. Re-exports the `callPackage` function and all of the
|
||||
# attribute sets, as well as the nodeLib.
|
||||
generatePackages = {rootPath, extensions ? []}:
|
||||
let
|
||||
callPackageWith = pkgSet: path: overridingArgs: let
|
||||
inherit (builtins) intersectAttrs functionArgs;
|
||||
inherit (pkgs.lib) filterAttrs;
|
||||
# The path must be a function; import it here.
|
||||
func = import path;
|
||||
# Get the arguments to the function; e.g. "{a=false; b=true;}", where
|
||||
# a false value is an argument that has no default.
|
||||
funcArgs = functionArgs func;
|
||||
# Take only the arguments that don't have a default.
|
||||
noDefaults = filterAttrs (_: v: v == false) funcArgs;
|
||||
# Intersect this set with the package set to create the arguments to
|
||||
# the function.
|
||||
satisfyingArgs = intersectAttrs noDefaults pkgSet;
|
||||
# Override these arguments with whatever's passed in.
|
||||
actualArgs = satisfyingArgs // overridingArgs;
|
||||
# Call the function with these args to get a derivation.
|
||||
deriv = func actualArgs;
|
||||
in deriv;
|
||||
|
||||
callPackage = callPackageWith {
|
||||
inherit fetchUrlWithHeaders namespaces namespaceTokens;
|
||||
inherit pkgs nodePackages buildNodePackage brokenPackage;
|
||||
};
|
||||
nodePackages = joinSets (map (e: e.nodePackages) extensions) //
|
||||
discoverPackages {inherit callPackage rootPath;};
|
||||
namespaces = joinSets (map (e: e.namespaces) extensions) //
|
||||
discoverNamespacePackages {inherit callPackage rootPath;};
|
||||
in {
|
||||
inherit nodePackages callPackage namespaces namespaceTokens pkgs;
|
||||
nodeLib = self;
|
||||
};
|
||||
}
|
||||
21
npm/nodeLib/fetch.py
Normal file
21
npm/nodeLib/fetch.py
Normal file
@ -0,0 +1,21 @@
|
||||
import os
|
||||
import requests
|
||||
out = os.environ['out']
|
||||
url = os.environ['url']
|
||||
headers = {"User-Agent": "nix-fetchurl"}
|
||||
header_names = os.environ.get("headerNames", "")
|
||||
for name in header_names.split():
|
||||
if "__HTTP_HEADER_{}".format(name) not in os.environ:
|
||||
exit("FATAL: no corresponding value set for header {}"
|
||||
.format(name))
|
||||
headers[name] = os.environ["__HTTP_HEADER_{}".format(name)]
|
||||
print('GET {} with headers {}'.format(url, headers))
|
||||
response = requests.get(url, headers=headers)
|
||||
if response.status_code != 200:
|
||||
exit("Received a {} response. :(\nContent: {}"
|
||||
.format(response.status_code, response.content))
|
||||
else:
|
||||
print('Response: {} ({} bytes)'
|
||||
.format(response.status_code, len(response.content)))
|
||||
with open(out, 'wb') as f:
|
||||
f.write(response.content)
|
||||
71
npm/nodeLib/fetchUrlWithHeaders.nix
Normal file
71
npm/nodeLib/fetchUrlWithHeaders.nix
Normal file
@ -0,0 +1,71 @@
|
||||
# A python-based fetchurl function, allowing the passage of custom headers.
|
||||
# Just calls into `requests` under the hood.
|
||||
{
|
||||
pythonPackages, stdenv
|
||||
}:
|
||||
|
||||
|
||||
{ # URL to fetch.
|
||||
url ? ""
|
||||
|
||||
, # Additional curl options needed for the download to succeed.
|
||||
curlOpts ? ""
|
||||
|
||||
, # Name of the file. If empty, use the basename of `url' (or of the
|
||||
# first element of `urls').
|
||||
name ? ""
|
||||
|
||||
# Different ways of specifying the hash.
|
||||
, outputHash ? ""
|
||||
, outputHashAlgo ? ""
|
||||
, md5 ? ""
|
||||
, sha1 ? ""
|
||||
, sha256 ? ""
|
||||
|
||||
, # Meta information, if any.
|
||||
meta ? {}
|
||||
|
||||
# Headers to set, if any.
|
||||
, headers ? {}
|
||||
}:
|
||||
|
||||
let
|
||||
inherit (stdenv.lib) flip mapAttrs' nameValuePair;
|
||||
hasHash = (outputHash != "" && outputHashAlgo != "")
|
||||
|| md5 != "" || sha1 != "" || sha256 != "";
|
||||
|
||||
# Create an attribute set translating each header name and value into
|
||||
# the header name prefixed with __HTTP_HEADER. When the derivation is
|
||||
# evaluated, the script will pick up these environment variables and use
|
||||
# them to produce the actual headers.
|
||||
headerValues = flip mapAttrs' headers (headerName: headerValue:
|
||||
nameValuePair "__HTTP_HEADER_${headerName}" headerValue);
|
||||
in
|
||||
|
||||
if !hasHash
|
||||
then throw "You must specify the output hash for ${url}"
|
||||
else
|
||||
|
||||
stdenv.mkDerivation ({
|
||||
inherit url;
|
||||
name = if name != "" then name else baseNameOf (toString url);
|
||||
|
||||
outputHashAlgo = if outputHashAlgo != "" then outputHashAlgo else
|
||||
if sha256 != "" then "sha256" else if sha1 != "" then "sha1" else "md5";
|
||||
outputHash = if outputHash != "" then outputHash else
|
||||
if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;
|
||||
|
||||
# Only flat hashing, which is the normal mode if you're fetching a file.
|
||||
outputHashMode = "flat";
|
||||
|
||||
# Doing the download on a remote machine just duplicates network
|
||||
# traffic, so don't do that.
|
||||
preferLocalBuild = true;
|
||||
|
||||
headerNames = builtins.attrNames headers;
|
||||
|
||||
buildInputs = with pythonPackages; [python requests2];
|
||||
buildCommand = ''
|
||||
python ${./fetch.py}
|
||||
'';
|
||||
} // headerValues)
|
||||
16
npm/nodeLib/parseNpmAuthTokens.nix
Normal file
16
npm/nodeLib/parseNpmAuthTokens.nix
Normal file
@ -0,0 +1,16 @@
|
||||
# Parses the `NPM_AUTH_TOKENS` environment variable to discover
|
||||
# namespace-token associations and turn them into an attribute set
|
||||
# which we can use as an input to the fetchPrivateNpm function.
|
||||
{pkgs, joinSets}:
|
||||
|
||||
let
|
||||
inherit (pkgs.lib) flip length elemAt;
|
||||
npmAuthTokens = builtins.getEnv "NPM_AUTH_TOKENS";
|
||||
in
|
||||
|
||||
# Split the variable on ':', then turn each k=v element in
|
||||
# the list into an attribute set and join all of those sets.
|
||||
joinSets (
|
||||
flip map (split ":" npmAuthTokens) (kvPair:
|
||||
if length (split "=" kvPair) != 2 then {}
|
||||
else {"${elemAt kvPair 0}" = elemAt kvPair 1;}))
|
||||
46
npm/nodeLib/removeImpureDependencies.js
Normal file
46
npm/nodeLib/removeImpureDependencies.js
Normal file
@ -0,0 +1,46 @@
|
||||
// These packages come packaged with nodejs.
|
||||
var fs = require('fs');
|
||||
var url = require('url');
|
||||
|
||||
function versionSpecIsImpure(versionSpec) {
|
||||
// Returns true if a version spec is impure.
|
||||
return (versionSpec == "latest" || versionSpec == "unstable" ||
|
||||
// file path references
|
||||
versionSpec.substr(0, 2) == ".." ||
|
||||
versionSpec.substr(0, 2) == "./" ||
|
||||
versionSpec.substr(0, 2) == "~/" ||
|
||||
versionSpec.substr(0, 1) == '/' ||
|
||||
// github owner/repo references
|
||||
/^[^/]+\/[^/]+(#.*)?$/.test(versionSpec) ||
|
||||
// is a URL
|
||||
url.parse(versionSpec).protocol);
|
||||
}
|
||||
|
||||
// Load up the package object.
|
||||
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
|
||||
|
||||
// Purify dependencies.
|
||||
var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies'];
|
||||
for (var i in depTypes) {
|
||||
var depType = depTypes[i];
|
||||
var depSet = packageObj[depType];
|
||||
if (depSet !== undefined) {
|
||||
for (var depName in depSet) {
|
||||
if (versionSpecIsImpure(depSet[depName])) {
|
||||
depSet[depName] = '*';
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Remove peer dependencies */
|
||||
if (process.env.removePeerDependencies && packageObj.peerDependencies) {
|
||||
console.log("WARNING: removing the following peer dependencies:");
|
||||
for (key in packageObj.peerDependencies) {
|
||||
console.log(" " + key + ": " + packageObj.peerDependencies[key]);
|
||||
}
|
||||
delete packageObj.peerDependencies;
|
||||
}
|
||||
|
||||
/* Write the fixed JSON file */
|
||||
fs.writeFileSync("package.json", JSON.stringify(packageObj));
|
||||
14
npm/nodePackages/asn1/0.2.3.nix
Normal file
14
npm/nodePackages/asn1/0.2.3.nix
Normal file
@ -0,0 +1,14 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "asn1";
|
||||
version = "0.2.3";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz";
|
||||
sha1 = "dac8787713c9966849fc8180777ebe9c1ddf3b86";
|
||||
};
|
||||
deps = [];
|
||||
meta = {
|
||||
homepage = "https://github.com/mcavage/node-asn1";
|
||||
description = "Contains parsers and serializers for ASN.1 (currently BER only)";
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/asn1/latest.nix
Symbolic link
1
npm/nodePackages/asn1/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
0.2.3.nix
|
||||
14
npm/nodePackages/assert-plus/0.1.5.nix
Normal file
14
npm/nodePackages/assert-plus/0.1.5.nix
Normal file
@ -0,0 +1,14 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "assert-plus";
|
||||
version = "0.1.5";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz";
|
||||
sha1 = "ee74009413002d84cec7219c6ac811812e723160";
|
||||
};
|
||||
deps = [];
|
||||
devDependencies = [];
|
||||
meta = {
|
||||
description = "Extra assertions on top of node's assert module";
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/assert-plus/latest.nix
Symbolic link
1
npm/nodePackages/assert-plus/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
0.1.5.nix
|
||||
21
npm/nodePackages/backoff/2.4.1.nix
Normal file
21
npm/nodePackages/backoff/2.4.1.nix
Normal file
@ -0,0 +1,21 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "backoff";
|
||||
version = "2.4.1";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/backoff/-/backoff-2.4.1.tgz";
|
||||
sha1 = "2f68c50e0dd789dbefe24200a62efb04d2456d68";
|
||||
};
|
||||
deps = with nodePackages; [
|
||||
precond_0-2-3
|
||||
];
|
||||
meta = {
|
||||
description = "Fibonacci and exponential backoffs.";
|
||||
keywords = [
|
||||
"backoff"
|
||||
"retry"
|
||||
"fibonacci"
|
||||
"exponential"
|
||||
];
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/backoff/latest.nix
Symbolic link
1
npm/nodePackages/backoff/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
2.4.1.nix
|
||||
30
npm/nodePackages/bunyan/1.5.1.nix
Normal file
30
npm/nodePackages/bunyan/1.5.1.nix
Normal file
@ -0,0 +1,30 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "bunyan";
|
||||
version = "1.5.1";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz";
|
||||
sha1 = "5f6e7d44c43b952f56b0f41309e3ab12391b4e2d";
|
||||
};
|
||||
deps = with nodePackages; [
|
||||
dtrace-provider_0-6-0
|
||||
safe-json-stringify_1-0-3
|
||||
mv_2-0-3
|
||||
];
|
||||
optionalDependencies = with nodePackages; [
|
||||
dtrace-provider_0-6-0
|
||||
safe-json-stringify_1-0-3
|
||||
mv_2-0-3
|
||||
];
|
||||
meta = {
|
||||
homepage = "https://github.com/trentm/node-bunyan";
|
||||
description = "a JSON logging library for node.js services";
|
||||
keywords = [
|
||||
"log"
|
||||
"logging"
|
||||
"log4j"
|
||||
"json"
|
||||
"bunyan"
|
||||
];
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/bunyan/latest.nix
Symbolic link
1
npm/nodePackages/bunyan/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
1.5.1.nix
|
||||
24
npm/nodePackages/dashdash/1.10.1.nix
Normal file
24
npm/nodePackages/dashdash/1.10.1.nix
Normal file
@ -0,0 +1,24 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "dashdash";
|
||||
version = "1.10.1";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/dashdash/-/dashdash-1.10.1.tgz";
|
||||
sha1 = "0abf1af89a8f5129a81f18c2b35b21df22622f60";
|
||||
};
|
||||
deps = with nodePackages; [
|
||||
assert-plus_0-1-5
|
||||
];
|
||||
meta = {
|
||||
homepage = "https://github.com/trentm/node-dashdash";
|
||||
description = "A light, featureful and explicit option parsing library.";
|
||||
keywords = [
|
||||
"option"
|
||||
"parser"
|
||||
"parsing"
|
||||
"cli"
|
||||
"command"
|
||||
"args"
|
||||
];
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/dashdash/latest.nix
Symbolic link
1
npm/nodePackages/dashdash/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
1.10.1.nix
|
||||
17
npm/nodePackages/dtrace-provider/0.6.0.nix
Normal file
17
npm/nodePackages/dtrace-provider/0.6.0.nix
Normal file
@ -0,0 +1,17 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "dtrace-provider";
|
||||
version = "0.6.0";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz";
|
||||
sha1 = "0b078d5517937d873101452d9146737557b75e51";
|
||||
};
|
||||
deps = with nodePackages; [
|
||||
nan_2-1-0
|
||||
];
|
||||
meta = {
|
||||
homepage = "https://github.com/chrisa/node-dtrace-provider#readme";
|
||||
description = "Native DTrace providers for node.js applications";
|
||||
keywords = [ "dtrace" ];
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/dtrace-provider/latest.nix
Symbolic link
1
npm/nodePackages/dtrace-provider/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
0.6.0.nix
|
||||
15
npm/nodePackages/extsprintf/1.2.0.nix
Normal file
15
npm/nodePackages/extsprintf/1.2.0.nix
Normal file
@ -0,0 +1,15 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "extsprintf";
|
||||
version = "1.2.0";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/extsprintf/-/extsprintf-1.2.0.tgz";
|
||||
sha1 = "5ad946c22f5b32ba7f8cd7426711c6e8a3fc2529";
|
||||
};
|
||||
deps = [];
|
||||
devDependencies = [];
|
||||
meta = {
|
||||
homepage = "https://github.com/davepacheco/node-extsprintf";
|
||||
description = "extended POSIX-style sprintf";
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/extsprintf/latest.nix
Symbolic link
1
npm/nodePackages/extsprintf/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
1.2.0.nix
|
||||
16
npm/nodePackages/ldap-filter/0.2.2.nix
Normal file
16
npm/nodePackages/ldap-filter/0.2.2.nix
Normal file
@ -0,0 +1,16 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "ldap-filter";
|
||||
version = "0.2.2";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/ldap-filter/-/ldap-filter-0.2.2.tgz";
|
||||
sha1 = "f2b842be0b86da3352798505b31ebcae590d77d0";
|
||||
};
|
||||
deps = with nodePackages; [
|
||||
assert-plus_0-1-5
|
||||
];
|
||||
meta = {
|
||||
homepage = "http://ldapjs.org";
|
||||
description = "API for handling LDAP-style filters";
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/ldap-filter/latest.nix
Symbolic link
1
npm/nodePackages/ldap-filter/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
0.2.2.nix
|
||||
28
npm/nodePackages/ldapjs/1.0.0.nix
Normal file
28
npm/nodePackages/ldapjs/1.0.0.nix
Normal file
@ -0,0 +1,28 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "ldapjs";
|
||||
version = "1.0.0";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/ldapjs/-/ldapjs-1.0.0.tgz";
|
||||
sha1 = "1da2cd5bfb9cb103c1ba516938da971bc2bbc3f2";
|
||||
};
|
||||
deps = with nodePackages; [
|
||||
ldap-filter_0-2-2
|
||||
asn1_0-2-3
|
||||
bunyan_1-5-1
|
||||
once_1-3-2
|
||||
vasync_1-6-3
|
||||
dtrace-provider_0-6-0
|
||||
backoff_2-4-1
|
||||
assert-plus_0-1-5
|
||||
verror_1-6-0
|
||||
dashdash_1-10-1
|
||||
];
|
||||
optionalDependencies = with nodePackages; [
|
||||
dtrace-provider_0-6-0
|
||||
];
|
||||
meta = {
|
||||
homepage = "http://ldapjs.org";
|
||||
description = "LDAP client and server APIs";
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/ldapjs/latest.nix
Symbolic link
1
npm/nodePackages/ldapjs/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
1.0.0.nix
|
||||
20
npm/nodePackages/minimist/0.0.8.nix
Normal file
20
npm/nodePackages/minimist/0.0.8.nix
Normal file
@ -0,0 +1,20 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "minimist";
|
||||
version = "0.0.8";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz";
|
||||
sha1 = "857fcabfc3397d2625b8228262e86aa7a011b05d";
|
||||
};
|
||||
deps = [];
|
||||
meta = {
|
||||
homepage = "https://github.com/substack/minimist";
|
||||
description = "parse argument options";
|
||||
keywords = [
|
||||
"argv"
|
||||
"getopt"
|
||||
"parser"
|
||||
"optimist"
|
||||
];
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/minimist/latest.nix
Symbolic link
1
npm/nodePackages/minimist/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
0.0.8.nix
|
||||
20
npm/nodePackages/mkdirp/0.5.1.nix
Normal file
20
npm/nodePackages/mkdirp/0.5.1.nix
Normal file
@ -0,0 +1,20 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "mkdirp";
|
||||
version = "0.5.1";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz";
|
||||
sha1 = "30057438eac6cf7f8c4767f38648d6697d75c903";
|
||||
};
|
||||
deps = with nodePackages; [
|
||||
minimist_0-0-8
|
||||
];
|
||||
meta = {
|
||||
homepage = "https://github.com/substack/node-mkdirp#readme";
|
||||
description = "Recursively mkdir, like `mkdir -p`";
|
||||
keywords = [
|
||||
"mkdir"
|
||||
"directory"
|
||||
];
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/mkdirp/latest.nix
Symbolic link
1
npm/nodePackages/mkdirp/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
0.5.1.nix
|
||||
26
npm/nodePackages/mv/2.0.3.nix
Normal file
26
npm/nodePackages/mv/2.0.3.nix
Normal file
@ -0,0 +1,26 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "mv";
|
||||
version = "2.0.3";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/mv/-/mv-2.0.3.tgz";
|
||||
sha1 = "e9ab707d71dc38de24edcc637a8e2f5f480c7f32";
|
||||
};
|
||||
deps = with nodePackages; [
|
||||
ncp_0-6-0
|
||||
mkdirp_0-5-1
|
||||
rimraf_2-2-8
|
||||
];
|
||||
meta = {
|
||||
homepage = "https://github.com/andrewrk/node-mv";
|
||||
description = "fs.rename but works across devices. same as the unix utility 'mv'";
|
||||
keywords = [
|
||||
"mv"
|
||||
"move"
|
||||
"rename"
|
||||
"device"
|
||||
"recursive"
|
||||
"folder"
|
||||
];
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/mv/latest.nix
Symbolic link
1
npm/nodePackages/mv/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
2.0.3.nix
|
||||
14
npm/nodePackages/nan/2.1.0.nix
Normal file
14
npm/nodePackages/nan/2.1.0.nix
Normal file
@ -0,0 +1,14 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "nan";
|
||||
version = "2.1.0";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/nan/-/nan-2.1.0.tgz";
|
||||
sha1 = "020a7ccedc63fdee85f85967d5607849e74abbe8";
|
||||
};
|
||||
deps = [];
|
||||
meta = {
|
||||
homepage = "https://github.com/nodejs/nan#readme";
|
||||
description = "Native Abstractions for Node.js: C++ header for Node 0.8 -> 4 compatibility";
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/nan/latest.nix
Symbolic link
1
npm/nodePackages/nan/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
2.1.0.nix
|
||||
15
npm/nodePackages/ncp/0.6.0.nix
Normal file
15
npm/nodePackages/ncp/0.6.0.nix
Normal file
@ -0,0 +1,15 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "ncp";
|
||||
version = "0.6.0";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/ncp/-/ncp-0.6.0.tgz";
|
||||
sha1 = "df8ce021e262be21b52feb3d3e5cfaab12491f0d";
|
||||
};
|
||||
deps = [];
|
||||
meta = {
|
||||
homepage = "https://github.com/AvianFlu/ncp";
|
||||
description = "Asynchronous recursive file copy utility.";
|
||||
keywords = [ "cli" "copy" ];
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/ncp/latest.nix
Symbolic link
1
npm/nodePackages/ncp/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
0.6.0.nix
|
||||
22
npm/nodePackages/once/1.3.2.nix
Normal file
22
npm/nodePackages/once/1.3.2.nix
Normal file
@ -0,0 +1,22 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "once";
|
||||
version = "1.3.2";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/once/-/once-1.3.2.tgz";
|
||||
sha1 = "d8feeca93b039ec1dcdee7741c92bdac5e28081b";
|
||||
};
|
||||
deps = with nodePackages; [
|
||||
wrappy_1-0-1
|
||||
];
|
||||
meta = {
|
||||
homepage = "https://github.com/isaacs/once#readme";
|
||||
description = "Run a function exactly one time";
|
||||
keywords = [
|
||||
"once"
|
||||
"function"
|
||||
"one"
|
||||
"single"
|
||||
];
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/once/latest.nix
Symbolic link
1
npm/nodePackages/once/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
1.3.2.nix
|
||||
20
npm/nodePackages/precond/0.2.3.nix
Normal file
20
npm/nodePackages/precond/0.2.3.nix
Normal file
@ -0,0 +1,20 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "precond";
|
||||
version = "0.2.3";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/precond/-/precond-0.2.3.tgz";
|
||||
sha1 = "aa9591bcaa24923f1e0f4849d240f47efc1075ac";
|
||||
};
|
||||
deps = [];
|
||||
meta = {
|
||||
description = "Precondition checking utilities.";
|
||||
keywords = [
|
||||
"precondition"
|
||||
"assert"
|
||||
"invariant"
|
||||
"contract"
|
||||
"condition"
|
||||
];
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/precond/latest.nix
Symbolic link
1
npm/nodePackages/precond/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
0.2.3.nix
|
||||
15
npm/nodePackages/rimraf/2.2.8.nix
Normal file
15
npm/nodePackages/rimraf/2.2.8.nix
Normal file
@ -0,0 +1,15 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "rimraf";
|
||||
version = "2.2.8";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz";
|
||||
sha1 = "e439be2aaee327321952730f99a8929e4fc50582";
|
||||
};
|
||||
deps = [];
|
||||
devDependencies = [];
|
||||
meta = {
|
||||
homepage = "https://github.com/isaacs/rimraf";
|
||||
description = "A deep deletion module for node (like `rm -rf`)";
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/rimraf/latest.nix
Symbolic link
1
npm/nodePackages/rimraf/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
2.2.8.nix
|
||||
14
npm/nodePackages/safe-json-stringify/1.0.3.nix
Normal file
14
npm/nodePackages/safe-json-stringify/1.0.3.nix
Normal file
@ -0,0 +1,14 @@
|
||||
{ buildNodePackage, nodePackages, pkgs }:
|
||||
buildNodePackage {
|
||||
name = "safe-json-stringify";
|
||||
version = "1.0.3";
|
||||
src = pkgs.fetchurl {
|
||||
url = "http://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz";
|
||||
sha1 = "3cb6717660a086d07cb5bd9b7a6875bcf67bd05e";
|
||||
};
|
||||
deps = [];
|
||||
meta = {
|
||||
homepage = "https://github.com/e-conomic/safe-json-stringify";
|
||||
description = "Prevent defined property getters from throwing errors";
|
||||
};
|
||||
}
|
||||
1
npm/nodePackages/safe-json-stringify/latest.nix
Symbolic link
1
npm/nodePackages/safe-json-stringify/latest.nix
Symbolic link
@ -0,0 +1 @@
|
||||
1.0.3.nix
|
||||
16
npm/nodePackages/vasync/1.6.3.nix
Normal file
@ -0,0 +1,16 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "vasync";
  version = "1.6.3";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/vasync/-/vasync-1.6.3.tgz";
    sha1 = "4a69d7052a47f4ce85503d7641df1cbf40432a94";
  };
  deps = with nodePackages; [
    verror_1-6-0
  ];
  meta = {
    homepage = "https://github.com/davepacheco/node-vasync";
    description = "utilities for observable asynchronous control flow";
  };
}
1
npm/nodePackages/vasync/latest.nix
Symbolic link
@ -0,0 +1 @@
1.6.3.nix
17
npm/nodePackages/verror/1.6.0.nix
Normal file
@ -0,0 +1,17 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "verror";
  version = "1.6.0";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/verror/-/verror-1.6.0.tgz";
    sha1 = "7d13b27b1facc2e2da90405eb5ea6e5bdd252ea5";
  };
  deps = with nodePackages; [
    extsprintf_1-2-0
  ];
  devDependencies = [];
  meta = {
    homepage = "https://github.com/davepacheco/node-verror";
    description = "richer JavaScript errors";
  };
}
1
npm/nodePackages/verror/latest.nix
Symbolic link
@ -0,0 +1 @@
1.6.0.nix
14
npm/nodePackages/wrappy/1.0.1.nix
Normal file
@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
  name = "wrappy";
  version = "1.0.1";
  src = pkgs.fetchurl {
    url = "http://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz";
    sha1 = "1e65969965ccbc2db4548c6b84a6f2c5aedd4739";
  };
  deps = [];
  meta = {
    homepage = "https://github.com/npm/wrappy";
    description = "Callback wrapping utility";
  };
}
1
npm/nodePackages/wrappy/latest.nix
Symbolic link
@ -0,0 +1 @@
1.0.1.nix
17
package.nix
Normal file
@ -0,0 +1,17 @@
{ mkDerivation, asn1-encoding, asn1-types, async, base, bytestring
, connection, containers, doctest, hspec, network, process
, semigroups, stdenv, stm, text
}:
mkDerivation {
  pname = "ldap-client";
  version = "0.4.0";
  src = ./.;
  buildDepends = [
    asn1-encoding asn1-types async base bytestring connection
    containers network semigroups stm text
  ];
  testDepends = [ base bytestring doctest hspec process semigroups ];
  homepage = "https://supki.github.io/ldap-client";
  description = "Pure Haskell LDAP Client Library";
  license = stdenv.lib.licenses.bsd2;
}
23
shell.nix
Normal file
@ -0,0 +1,23 @@
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc802" }: let
  inherit (nixpkgs) pkgs;
  haskell = pkgs.haskell.packages.${compiler};

  ghc = haskell.ghcWithPackages(ps: [
    ps.hdevtools ps.doctest ps.hspec-discover ps.hlint ps.ghc-mod
  ]);
  npm = import ./npm {};

  this = import ./default.nix { inherit nixpkgs compiler; };
in
pkgs.stdenv.mkDerivation rec {
  name = this.pname;
  buildInputs = [
    ghc
    haskell.cabal-install
    npm.nodePackages.ldapjs
  ] ++ this.env.buildInputs;
  shellHook = ''
    ${this.env.shellHook}
    cabal configure --enable-tests --package-db=$NIX_GHC_LIBDIR/package.conf.d
  '';
}
@ -11,6 +11,9 @@ import Control.Applicative (Alternative(..), liftA2, optional)
import Control.Applicative (Applicative(..), Alternative(..), liftA2, optional)
#endif
import Control.Monad (MonadPlus(..), (>=>), guard)
#if __GLASGOW_HASKELL__ >= 86
import Control.Monad.Fail (MonadFail, fail)
#endif
import Data.ASN1.Types (ASN1)
import qualified Data.ASN1.Types as Asn1
import Data.Foldable (asum)
@ -19,8 +22,8 @@ import qualified Data.Text.Encoding as Text

import Ldap.Asn1.Type

{-# ANN module "HLint: ignore Use const" #-}
{-# ANN module "HLint: ignore Avoid lambda" #-}
{-# ANN module ("HLint: ignore Use const" :: String) #-}
{-# ANN module ("HLint: ignore Avoid lambda" :: String) #-}

-- | Convert a part of ASN.1 stream to a LDAP type returning the remainder of the stream.
@ -416,6 +419,11 @@ instance MonadPlus (Parser s) where
Parser ma `mplus` Parser mb =
Parser (\s -> ma s `mplus` mb s)

#if __GLASGOW_HASKELL__ >= 86
instance MonadFail (Parser s) where
fail _ = mzero
#endif

parse :: Parser s a -> s -> Maybe (s, a)
parse = unParser
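Note on the MonadFail shim above: on GHC 8.6 and later a refutable pattern bind in a do-block goes through 'fail', and routing 'fail' to 'mzero' turns such a mismatch into an ordinary soft parse failure. A hedged sketch of the effect; the helper 'next' is hypothetical and merely assumed to pop one ASN1 element off the stream:

expectBool :: Parser [ASN1] Bool
expectBool = do
  Asn1.Boolean b <- next  -- hypothetical: next :: Parser [ASN1] ASN1
  pure b
-- If the head of the stream is not a Boolean, the failed pattern match calls
-- 'fail', i.e. 'mzero', so 'parse expectBool stream' yields Nothing instead
-- of raising an error.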
@ -1,3 +1,4 @@
{-# LANGUAGE OverloadedStrings #-}
-- | This module contains convertions from LDAP types to ASN.1.
--
-- Various hacks are employed because "asn1-encoding" only encodes to DER, but
@ -309,12 +310,22 @@ instance ToAsn1 ProtocolClientOp where
@
AuthenticationChoice ::= CHOICE {
simple [0] OCTET STRING,
sasl [3] SaslCredentials,
... }

SaslCredentials ::= SEQUENCE {
mechanism LDAPString,
credentials OCTET STRING OPTIONAL }
@
-}
instance ToAsn1 AuthenticationChoice where
toAsn1 (Simple s) = other Asn1.Context 0 s

toAsn1 (Sasl External c) =
context 3 (fold
[ toAsn1 (LdapString "EXTERNAL")
, maybe mempty (toAsn1 . LdapString) c
])
{- |
@
AttributeSelection ::= SEQUENCE OF selector LDAPString
@ -37,7 +37,7 @@ data ProtocolServerOp =
BindResponse !LdapResult !(Maybe ByteString)
| SearchResultEntry !LdapDn !PartialAttributeList
| SearchResultReference !(NonEmpty Uri)
| SearchResultDone !(LdapResult)
| SearchResultDone !LdapResult
| ModifyResponse !LdapResult
| AddResponse !LdapResult
| DeleteResponse !LdapResult
@ -48,7 +48,14 @@ data ProtocolServerOp =
deriving (Show, Eq)

-- | Not really a choice until SASL is supported.
newtype AuthenticationChoice = Simple ByteString
data AuthenticationChoice =
Simple !ByteString
| Sasl !SaslMechanism !(Maybe Text)
deriving (Show, Eq)

-- | SASL Mechanism, for now only SASL EXTERNAL is supported
data SaslMechanism =
External
deriving (Show, Eq)

-- | Scope of the search to be performed.
@ -70,16 +77,16 @@ data DerefAliases =

-- | Conditions that must be fulfilled in order for the Search to match a given entry.
data Filter =
And !(NonEmpty Filter) -- ^ All filters evaluate to @TRUE@
| Or !(NonEmpty Filter) -- ^ Any filter evaluates to @TRUE@
| Not Filter -- ^ Filter evaluates to @FALSE@
| EqualityMatch AttributeValueAssertion -- ^ @EQUALITY@ rule returns @TRUE@
| Substrings SubstringFilter -- ^ @SUBSTR@ rule returns @TRUE@
| GreaterOrEqual AttributeValueAssertion -- ^ @ORDERING@ rule returns @FALSE@
| LessOrEqual AttributeValueAssertion -- ^ @ORDERING@ or @EQUALITY@ rule returns @TRUE@
| Present AttributeDescription -- ^ Attribute is present in the entry
| ApproxMatch AttributeValueAssertion -- ^ Same as 'EqualityMatch' for most servers
| ExtensibleMatch MatchingRuleAssertion
And !(NonEmpty Filter) -- ^ All filters evaluate to @TRUE@
| Or !(NonEmpty Filter) -- ^ Any filter evaluates to @TRUE@
| Not !Filter -- ^ Filter evaluates to @FALSE@
| EqualityMatch !AttributeValueAssertion -- ^ @EQUALITY@ rule returns @TRUE@
| Substrings !SubstringFilter -- ^ @SUBSTR@ rule returns @TRUE@
| GreaterOrEqual !AttributeValueAssertion -- ^ @ORDERING@ rule returns @FALSE@
| LessOrEqual !AttributeValueAssertion -- ^ @ORDERING@ or @EQUALITY@ rule returns @TRUE@
| Present !AttributeDescription -- ^ Attribute is present in the entry
| ApproxMatch !AttributeValueAssertion -- ^ Same as 'EqualityMatch' for most servers
| ExtensibleMatch !MatchingRuleAssertion
deriving (Show, Eq)

data SubstringFilter = SubstringFilter !AttributeDescription !(NonEmpty Substring)
@ -2,6 +2,8 @@
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE ScopedTypeVariables #-}

-- | This module is intended to be imported qualified
--
-- @
@ -9,15 +11,24 @@
-- @
module Ldap.Client
( with
, with'
, runsIn
, runsInEither
, open
, close
, Host(..)
, defaultTlsSettings
, insecureTlsSettings
, PortNumber
, Ldap
, LdapH
, LdapError(..)
, ResponseError(..)
, Type.ResultCode(..)
-- * Bind
, Password(..)
, bind
, externalBind
-- * Search
, search
, SearchEntry(..)
@ -63,8 +74,9 @@ import qualified Control.Concurrent.Async as Async
import Control.Concurrent.STM (atomically, throwSTM)
import Control.Concurrent.STM.TMVar (putTMVar)
import Control.Concurrent.STM.TQueue (TQueue, newTQueueIO, writeTQueue, readTQueue)
import Control.Exception (Exception, Handler(..), bracket, throwIO, catch, catches)
import Control.Exception (Exception, bracket, throwIO, SomeException, fromException, throw, Handler(..))
import Control.Monad (forever)
import Data.Void (Void)
import qualified Data.ASN1.BinaryEncoding as Asn1
import qualified Data.ASN1.Encoding as Asn1
import qualified Data.ASN1.Error as Asn1
@ -75,7 +87,6 @@ import Data.Function (fix)
import Data.List.NonEmpty (NonEmpty((:|)))
import qualified Data.Map.Strict as Map
import Data.Monoid (Endo(appEndo))
import Data.String (fromString)
import Data.Text (Text)
#if __GLASGOW_HASKELL__ < 710
import Data.Traversable (traverse)
@ -90,7 +101,7 @@ import Ldap.Asn1.ToAsn1 (ToAsn1(toAsn1))
import Ldap.Asn1.FromAsn1 (FromAsn1, parseAsn1)
import qualified Ldap.Asn1.Type as Type
import Ldap.Client.Internal
import Ldap.Client.Bind (Password(..), bind)
import Ldap.Client.Bind (Password(..), bind, externalBind)
import Ldap.Client.Search
( search
, Search
@ -107,160 +118,219 @@ import Ldap.Client.Modify (Operation(..), modify, RelativeDn(..), modi
import Ldap.Client.Add (add)
import Ldap.Client.Delete (delete)
import Ldap.Client.Compare (compare)
import Ldap.Client.Extended (Oid(..), extended)
import Ldap.Client.Extended (Oid(..), extended, noticeOfDisconnectionOid)

{-# ANN module "HLint: ignore Use first" #-}
{-# ANN module ("HLint: ignore Use first" :: String) #-}

newLdap :: IO Ldap
newLdap = Ldap
<$> newTQueueIO

-- | Various failures that can happen when working with LDAP.
data LdapError =
IOError IOError -- ^ Network failure.
| ParseError Asn1.ASN1Error -- ^ Invalid ASN.1 data received from the server.
| ResponseError ResponseError -- ^ An LDAP operation failed.
| DisconnectError Disconnect -- ^ Notice of Disconnection has been received.
data LdapError
= IOError !IOError -- ^ Network failure.
| ParseError !Asn1.ASN1Error -- ^ Invalid ASN.1 data received from the server.
| ResponseError !ResponseError -- ^ An LDAP operation failed.
| DisconnectError !Disconnect -- ^ Notice of Disconnection has been received.
deriving (Show, Eq)

newtype WrappedIOError = WrappedIOError IOError
deriving (Show, Eq, Typeable)
instance Exception LdapError

instance Exception WrappedIOError

data Disconnect = Disconnect Type.ResultCode Dn Text
data Disconnect = Disconnect !Type.ResultCode !Dn !Text
deriving (Show, Eq, Typeable)

instance Exception Disconnect

newtype LdapH = LdapH Ldap

-- | Provide a 'LdapH' to a function needing an 'Ldap' handle.
runsIn :: (Ldap -> IO a)
-> LdapH
-> IO a
runsIn act (LdapH ldap) = do
actor <- Async.async (act ldap)
r <- Async.waitEitherCatch (workers ldap) actor
case r of
Left (Right _a) -> error "Unreachable"
Left (Left e) -> throwIO =<< catchesHandler workerErr e
Right (Right r') -> pure r'
Right (Left e) -> throwIO =<< catchesHandler respErr e

-- | Provide a 'LdapH' to a function needing an 'Ldap' handle
runsInEither :: (Ldap -> IO a)
-> LdapH
-> IO (Either LdapError a)
runsInEither act (LdapH ldap) = do
actor <- Async.async (act ldap)
r <- Async.waitEitherCatch (workers ldap) actor
case r of
Left (Right _a) -> error "Unreachable"
Left (Left e) -> do Left <$> catchesHandler workerErr e
Right (Right r') -> pure (Right r')
Right (Left e) -> do Left <$> catchesHandler respErr e

workerErr :: [Handler LdapError]
workerErr = [ Handler (\(ex :: IOError) -> pure (IOError ex))
, Handler (\(ex :: Asn1.ASN1Error) -> pure (ParseError ex))
, Handler (\(ex :: Disconnect) -> pure (DisconnectError ex))
]

respErr :: [Handler LdapError]
respErr = [ Handler (\(ex :: ResponseError) -> pure (ResponseError ex))
]

catchesHandler :: [Handler a] -> SomeException -> IO a
catchesHandler handlers e = foldr tryHandler (throw e) handlers
where tryHandler (Handler handler) res
= case fromException e of
Just e' -> handler e'
Nothing -> res

-- | The entrypoint into LDAP.
--
-- It catches all LDAP-related exceptions.
with' :: Host -> PortNumber -> (Ldap -> IO a) -> IO a
with' host port act = bracket (open host port) close (runsIn act)

with :: Host -> PortNumber -> (Ldap -> IO a) -> IO (Either LdapError a)
with host port f = do
with host port act = bracket (open host port) close (runsInEither act)

-- | Creates an LDAP handle. This action is useful for creating your own resource
-- management, such as with 'resource-pool'. The handle must be manually closed
-- with 'close'.
open :: Host -> PortNumber -> IO (LdapH)
open host port = do
context <- Conn.initConnectionContext
bracket (Conn.connectTo context params) Conn.connectionClose (\conn ->
bracket newLdap unbindAsync (\l -> do
inq <- newTQueueIO
outq <- newTQueueIO
as <- traverse Async.async
[ input inq conn
, output outq conn
, dispatch l inq outq
, f l
]
fmap (Right . snd) (Async.waitAnyCancel as)))
`catches`
[ Handler (\(WrappedIOError e) -> return (Left (IOError e)))
, Handler (return . Left . ParseError)
, Handler (return . Left . ResponseError)
]
conn <- Conn.connectTo context params
reqQ <- newTQueueIO
inQ <- newTQueueIO
outQ <- newTQueueIO

-- The input worker that reads data off the network.
(inW :: Async.Async Void) <- Async.async (input inQ conn)

-- The output worker that sends data onto the network.
(outW :: Async.Async Void) <- Async.async (output outQ conn)

-- The dispatch worker that sends data between the three queues.
(dispW :: Async.Async Void) <- Async.async (dispatch reqQ inQ outQ)

-- We use this to propagate exceptions between the workers. The `workers` Async is just a tool to
-- exchange exceptions between the entire worker group and another thread.
workers <- Async.async (snd <$> Async.waitAnyCancel [inW, outW, dispW])

pure (LdapH (Ldap reqQ workers conn))
where
params = Conn.ConnectionParams
{ Conn.connectionHostname =
case host of
Plain h -> h
Secure h -> h
Insecure h -> h
Plain h -> h
Tls h _ -> h
, Conn.connectionPort = port
, Conn.connectionUseSecure =
case host of
Plain _ -> Nothing
Secure _ -> Just Conn.TLSSettingsSimple
{ Conn.settingDisableCertificateValidation = False
, Conn.settingDisableSession = False
, Conn.settingUseServerName = False
}
Insecure _ -> Just Conn.TLSSettingsSimple
{ Conn.settingDisableCertificateValidation = True
, Conn.settingDisableSession = False
, Conn.settingUseServerName = False
}
Tls _ settings -> pure settings
, Conn.connectionUseSocks = Nothing
}

input :: FromAsn1 a => TQueue a -> Connection -> IO b
input inq conn = wrap . flip fix [] $ \loop chunks -> do
chunk <- Conn.connectionGet conn 8192
case ByteString.length chunk of
0 -> throwIO (IO.mkIOError IO.eofErrorType "Ldap.Client.input" Nothing Nothing)
_ -> do
let chunks' = chunk : chunks
case Asn1.decodeASN1 Asn1.DER (ByteString.Lazy.fromChunks (reverse chunks')) of
Left Asn1.ParsingPartial
-> loop chunks'
Left e -> throwIO e
Right asn1 -> do
flip fix asn1 $ \loop' asn1' ->
case parseAsn1 asn1' of
Nothing -> return ()
Just (asn1'', a) -> do
atomically (writeTQueue inq a)
loop' asn1''
loop []
-- | Closes an LDAP connection.
-- This is to be used in together with 'open'.
close :: LdapH -> IO ()
close (LdapH ldap) = do
unbindAsync ldap
Conn.connectionClose (conn ldap)
Async.cancel (workers ldap)

defaultTlsSettings :: Conn.TLSSettings
defaultTlsSettings = Conn.TLSSettingsSimple
{ Conn.settingDisableCertificateValidation = False
, Conn.settingDisableSession = False
, Conn.settingUseServerName = False
}

insecureTlsSettings :: Conn.TLSSettings
insecureTlsSettings = Conn.TLSSettingsSimple
{ Conn.settingDisableCertificateValidation = True
, Conn.settingDisableSession = False
, Conn.settingUseServerName = False
}

-- | Reads Asn1 BER encoded chunks off a connection into a TQueue.
input :: FromAsn1 a => TQueue a -> Connection -> IO b
input inq conn = loop []
where
loop chunks = do
chunk <- Conn.connectionGet conn 8192
case ByteString.length chunk of
0 -> throwIO (IO.mkIOError IO.eofErrorType "Ldap.Client.input" Nothing Nothing)
_ -> do
let chunks' = chunk : chunks
case Asn1.decodeASN1 Asn1.BER (ByteString.Lazy.fromChunks (reverse chunks')) of
Left Asn1.ParsingPartial
-> loop chunks'
Left e -> throwIO e
Right asn1 -> do
flip fix asn1 $ \loop' asn1' ->
case parseAsn1 asn1' of
Nothing -> return ()
Just (asn1'', a) -> do
atomically (writeTQueue inq a)
loop' asn1''
loop []

-- | Transmits Asn1 DER encoded data from a TQueue into a Connection.
output :: ToAsn1 a => TQueue a -> Connection -> IO b
output out conn = wrap . forever $ do
output out conn = forever $ do
msg <- atomically (readTQueue out)
Conn.connectionPut conn (encode (toAsn1 msg))
where
encode x = Asn1.encodeASN1' Asn1.DER (appEndo x [])

dispatch
:: Ldap
:: TQueue ClientMessage
-> TQueue (Type.LdapMessage Type.ProtocolServerOp)
-> TQueue (Type.LdapMessage Request)
-> IO a
dispatch Ldap { client } inq outq =
flip fix (Map.empty, 1) $ \loop (!req, !counter) ->
loop =<< atomically (asum
[ do New new var <- readTQueue client
writeTQueue outq (Type.LdapMessage (Type.Id counter) new Nothing)
return (Map.insert (Type.Id counter) ([], var) req, counter + 1)
, do Type.LdapMessage mid op _
<- readTQueue inq
res <- case op of
Type.BindResponse {} -> done mid op req
Type.SearchResultEntry {} -> saveUp mid op req
Type.SearchResultReference {} -> return req
Type.SearchResultDone {} -> done mid op req
Type.ModifyResponse {} -> done mid op req
Type.AddResponse {} -> done mid op req
Type.DeleteResponse {} -> done mid op req
Type.ModifyDnResponse {} -> done mid op req
Type.CompareResponse {} -> done mid op req
Type.ExtendedResponse {} -> probablyDisconnect mid op req
Type.IntermediateResponse {} -> saveUp mid op req
return (res, counter)
])
where
saveUp mid op res =
return (Map.adjust (\(stack, var) -> (op : stack, var)) mid res)
dispatch reqq inq outq = loop (Map.empty, 1)
where
saveUp mid op res = return (Map.adjust (\(stack, var) -> (op : stack, var)) mid res)

done mid op req =
case Map.lookup mid req of
Nothing -> return req
Just (stack, var) -> do
putTMVar var (op :| stack)
return (Map.delete mid req)
loop (!req, !counter) =
loop =<< atomically (asum
[ do New new var <- readTQueue reqq
writeTQueue outq (Type.LdapMessage (Type.Id counter) new Nothing)
return (Map.insert (Type.Id counter) ([], var) req, counter + 1)
, do Type.LdapMessage mid op _
<- readTQueue inq
res <- case op of
Type.BindResponse {} -> done mid op req
Type.SearchResultEntry {} -> saveUp mid op req
Type.SearchResultReference {} -> return req
Type.SearchResultDone {} -> done mid op req
Type.ModifyResponse {} -> done mid op req
Type.AddResponse {} -> done mid op req
Type.DeleteResponse {} -> done mid op req
Type.ModifyDnResponse {} -> done mid op req
Type.CompareResponse {} -> done mid op req
Type.ExtendedResponse {} -> probablyDisconnect mid op req
Type.IntermediateResponse {} -> saveUp mid op req
return (res, counter)
])

probablyDisconnect (Type.Id 0)
(Type.ExtendedResponse
(Type.LdapResult code
(Type.LdapDn (Type.LdapString dn))
(Type.LdapString reason)
_)
moid _)
req =
case moid of
Just (Type.LdapOid oid)
| oid == noticeOfDisconnection -> throwSTM (Disconnect code (Dn dn) reason)
_ -> return req
probablyDisconnect mid op req = done mid op req
done mid op req =
case Map.lookup mid req of
Nothing -> return req
Just (stack, var) -> do
putTMVar var (op :| stack)
return (Map.delete mid req)

noticeOfDisconnection :: Text
noticeOfDisconnection = fromString "1.3.6.1.4.1.1466.20036"

wrap :: IO a -> IO a
wrap m = m `catch` (throwIO . WrappedIOError)
probablyDisconnect (Type.Id 0)
(Type.ExtendedResponse
(Type.LdapResult code
(Type.LdapDn (Type.LdapString dn))
(Type.LdapString reason)
_)
moid _)
req =
case moid of
Just (Type.LdapOid oid)
| Oid oid == noticeOfDisconnectionOid -> throwSTM (Disconnect code (Dn dn) reason)
_ -> return req
probablyDisconnect mid op req = done mid op req
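The reworked lifecycle splits into two styles: 'with' (and the new 'with'') still bracket the connection for you, while 'open', 'runsIn'/'runsInEither' and 'close' expose an 'LdapH' so callers can manage the resource themselves, for example in a pool. A usage sketch; the host, credentials and the classic resource-pool 0.2 'createPool'/'withResource' interface are assumptions, not part of this change:

{-# LANGUAGE OverloadedStrings #-}
import qualified Ldap.Client as Ldap
import           Ldap.Client (Dn(..), Host(..), Password(..))
import           Data.Pool (createPool, withResource)

main :: IO ()
main = do
  -- One-shot use: 'with' brackets open/close and returns Either LdapError a.
  res <- Ldap.with (Tls "ldap.example.com" Ldap.defaultTlsSettings) 636 $ \l ->
           Ldap.bind l (Dn "cn=admin,dc=example,dc=com") (Password "secret")
  print res

  -- Caller-managed handles: 'open' yields an LdapH that must be 'close'd.
  pool <- createPool (Ldap.open (Tls "ldap.example.com" Ldap.defaultTlsSettings) 636)
                     Ldap.close
                     1   -- stripes
                     60  -- seconds an idle handle is kept around
                     4   -- handles per stripe
  withResource pool (Ldap.runsIn (\l ->
    Ldap.bind l (Dn "cn=admin,dc=example,dc=com") (Password "secret")))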
@ -31,7 +31,7 @@ import Ldap.Client.Internal
-- | Perform the Add operation synchronously. Raises 'ResponseError' on failures.
add :: Ldap -> Dn -> AttrList NonEmpty -> IO ()
add l dn as =
raise =<< addEither l dn as
eitherToIO =<< addEither l dn as

-- | Perform the Add operation synchronously. Returns @Left e@ where
-- @e@ is a 'ResponseError' on failures.
@ -17,6 +17,10 @@ module Ldap.Client.Bind
, bindEither
, bindAsync
, bindAsyncSTM
, externalBind
, externalBindEither
, externalBindAsync
, externalBindAsyncSTM
, Async
, wait
, waitSTM
@ -24,6 +28,7 @@ module Ldap.Client.Bind

import Control.Monad.STM (STM, atomically)
import Data.ByteString (ByteString)
import Data.Text (Text)
import Data.List.NonEmpty (NonEmpty((:|)))

import qualified Ldap.Asn1.Type as Type
@ -37,7 +42,7 @@ newtype Password = Password ByteString
-- | Perform the Bind operation synchronously. Raises 'ResponseError' on failures.
bind :: Ldap -> Dn -> Password -> IO ()
bind l username password =
raise =<< bindEither l username password
eitherToIO =<< bindEither l username password

-- | Perform the Bind operation synchronously. Returns @Left e@ where
-- @e@ is a 'ResponseError' on failures.
@ -73,3 +78,37 @@ bindResult req (Type.BindResponse (Type.LdapResult code (Type.LdapDn (Type.LdapS
| Type.Success <- code = Right ()
| otherwise = Left (ResponseErrorCode req code (Dn dn) msg)
bindResult req res = Left (ResponseInvalid req res)

-- | Perform a SASL EXTERNAL Bind operation synchronously. Raises 'ResponseError' on failures.
externalBind :: Ldap -> Dn -> Maybe Text -> IO ()
externalBind l username mCredentials =
eitherToIO =<< externalBindEither l username mCredentials

-- | Perform a SASL EXTERNAL Bind operation synchronously. Returns @Left e@ where
-- @e@ is a 'ResponseError' on failures.
externalBindEither :: Ldap -> Dn -> Maybe Text -> IO (Either ResponseError ())
externalBindEither l username mCredentials =
wait =<< externalBindAsync l username mCredentials

-- | Perform the SASL EXTERNAL Bind operation asynchronously. Call 'Ldap.Client.wait' to wait
-- for its completion.
externalBindAsync :: Ldap -> Dn -> Maybe Text -> IO (Async ())
externalBindAsync l username mCredentials =
atomically (externalBindAsyncSTM l username mCredentials)

-- | Perform the SASL EXTERNAL Bind operation asynchronously.
--
-- Don't wait for its completion (with 'Ldap.Client.waitSTM') in the
-- same transaction you've performed it in.
externalBindAsyncSTM :: Ldap -> Dn -> Maybe Text -> STM (Async ())
externalBindAsyncSTM l username mCredentials =
let req = externalBindRequest username mCredentials in sendRequest l (bindResult req) req

externalBindRequest :: Dn -> Maybe Text -> Request
externalBindRequest (Dn username) mCredentials =
Type.BindRequest ldapVersion
(Type.LdapDn (Type.LdapString username))
(Type.Sasl Type.External mCredentials)
where
ldapVersion = 3
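externalBind issues a SASL EXTERNAL bind: no password travels in the request; the identity comes from a lower layer (typically the TLS client certificate), and the optional Text argument is the authorization identity to assume. A hedged sketch with placeholder DNs:

{-# LANGUAGE OverloadedStrings #-}
import qualified Ldap.Client as Ldap
import           Ldap.Client (Dn(..), Password(..))

-- Simple bind, unchanged:
simpleLogin :: Ldap.Ldap -> IO ()
simpleLogin l = Ldap.bind l (Dn "cn=admin,dc=example,dc=com") (Password "secret")

-- SASL EXTERNAL bind; the optional authorization identity uses the "dn:" form
-- and the Dn field itself is left empty.
certLogin :: Ldap.Ldap -> IO ()
certLogin l = Ldap.externalBind l (Dn "") (Just "dn:cn=admin,dc=example,dc=com")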
@ -33,7 +33,7 @@ import qualified Ldap.Asn1.Type as Type
-- | Perform the Compare operation synchronously. Raises 'ResponseError' on failures.
compare :: Ldap -> Dn -> Attr -> AttrValue -> IO Bool
compare l dn k v =
raise =<< compareEither l dn k v
eitherToIO =<< compareEither l dn k v

-- | Perform the Compare operation synchronously. Returns @Left e@ where
-- @e@ is a 'ResponseError' on failures.

@ -31,7 +31,7 @@ import Ldap.Client.Internal
-- | Perform the Delete operation synchronously. Raises 'ResponseError' on failures.
delete :: Ldap -> Dn -> IO ()
delete l dn =
raise =<< deleteEither l dn
eitherToIO =<< deleteEither l dn

-- | Perform the Delete operation synchronously. Returns @Left e@ where
-- @e@ is a 'ResponseError' on failures.
@ -1,3 +1,4 @@
{-# LANGUAGE OverloadedStrings #-}
-- | <https://tools.ietf.org/html/rfc4511#section-4.12 Extended> operation.
--
-- This operation comes in four flavours:
@ -18,11 +19,14 @@ module Ldap.Client.Extended
, extendedEither
, extendedAsync
, extendedAsyncSTM
-- ** StartTLS Operation
-- * StartTLS Operation
, startTls
, startTlsEither
, startTlsAsync
, startTlsAsyncSTM
-- * OIDs
, noticeOfDisconnectionOid
, startTlsOid
, Async
, wait
, waitSTM
@ -32,7 +36,7 @@ import Control.Monad ((<=<))
import Control.Monad.STM (STM, atomically)
import Data.ByteString (ByteString)
import Data.List.NonEmpty (NonEmpty((:|)))
import Data.String (fromString)
import Data.String (IsString(fromString))
import Data.Text (Text)

import qualified Ldap.Asn1.Type as Type
@ -43,10 +47,14 @@ import Ldap.Client.Internal
newtype Oid = Oid Text
deriving (Show, Eq)

instance IsString Oid where
fromString =
Oid . fromString

-- | Perform the Extended operation synchronously. Raises 'ResponseError' on failures.
extended :: Ldap -> Oid -> Maybe ByteString -> IO ()
extended l oid mv =
raise =<< extendedEither l oid mv
eitherToIO =<< extendedEither l oid mv

-- | Perform the Extended operation synchronously. Returns @Left e@ where
-- @e@ is a 'ResponseError' on failures.
@ -84,7 +92,7 @@ extendedResult req res = Left (ResponseInvalid req res)
-- | An example of @Extended Operation@, cf. 'extended'.
startTls :: Ldap -> IO ()
startTls =
raise <=< startTlsEither
eitherToIO <=< startTlsEither

-- | An example of @Extended Operation@, cf. 'extendedEither'.
startTlsEither :: Ldap -> IO (Either ResponseError ())
@ -99,5 +107,10 @@ startTlsAsync =
-- | An example of @Extended Operation@, cf. 'extendedAsyncSTM'.
startTlsAsyncSTM :: Ldap -> STM (Async ())
startTlsAsyncSTM l =
extendedAsyncSTM l (Oid (fromString "1.3.6.1.4.1.1466.20037"))
Nothing
extendedAsyncSTM l startTlsOid Nothing

noticeOfDisconnectionOid :: Oid
noticeOfDisconnectionOid = "1.3.6.1.4.1.1466.20036"

startTlsOid :: Oid
startTlsOid = "1.3.6.1.4.1.1466.20037"
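With the new IsString instance an Oid can be written as a string literal (under OverloadedStrings), and the well-known OIDs the library uses internally are now exported for reuse. A sketch; the RFC 4532 "Who am I?" OID is only an illustration, and 'extended' discards any response payload:

{-# LANGUAGE OverloadedStrings #-}
import qualified Ldap.Client as Ldap
import           Ldap.Client.Extended (extended, noticeOfDisconnectionOid, startTlsOid)

whoAmI :: Ldap.Ldap -> IO ()
whoAmI l = extended l "1.3.6.1.4.1.4203.1.11.3" Nothing  -- the literal becomes an Oid

-- The exported constants can replace repeated numeric literals, e.g. when
-- recognising unsolicited notifications via noticeOfDisconnectionOid or
-- issuing StartTLS via startTlsOid.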
@ -1,3 +1,4 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE NamedFieldPuns #-}
module Ldap.Client.Internal
@ -15,7 +16,7 @@ module Ldap.Client.Internal
, Response
, ResponseError(..)
, Request
, raise
, eitherToIO
, sendRequest
, Dn(..)
, Attr(..)
@ -26,6 +27,7 @@ module Ldap.Client.Internal
, unbindAsyncSTM
) where

import qualified Control.Concurrent.Async as Async (Async)
import Control.Concurrent.STM (STM, atomically)
import Control.Concurrent.STM.TMVar (TMVar, newEmptyTMVar, readTMVar)
import Control.Concurrent.STM.TQueue (TQueue, writeTQueue)
@ -35,31 +37,37 @@ import Data.ByteString (ByteString)
import Data.List.NonEmpty (NonEmpty)
import Data.Text (Text)
import Data.Typeable (Typeable)
#if __GLASGOW_HASKELL__ >= 84
import Network.Socket (PortNumber)
#else
import Network (PortNumber)
#endif
import Network.Connection (TLSSettings, Connection)
import Data.Void (Void)

import qualified Ldap.Asn1.Type as Type

-- | LDAP host.
data Host =
Plain String -- ^ Plain LDAP. Do not use!
| Insecure String -- ^ LDAP over TLS without the certificate validity check.
-- Only use for testing!
| Secure String -- ^ LDAP over TLS. Use!
deriving (Show, Eq, Ord)
Plain String -- ^ Plain LDAP.
| Tls String TLSSettings -- ^ LDAP over TLS.
deriving (Show)

-- | A token. All functions that interact with the Directory require one.
-- | An LDAP connection handle
data Ldap = Ldap
{ client :: TQueue ClientMessage
} deriving (Eq)
{ reqQ :: !(TQueue ClientMessage) -- ^ Request queue for client messages to be send.
, workers :: !(Async.Async Void) -- ^ Workers group for communicating with the server.
, conn :: !Connection -- ^ Network connection to the server.
}

data ClientMessage = New Request (TMVar (NonEmpty Type.ProtocolServerOp))
data ClientMessage = New !Request !(TMVar (NonEmpty Type.ProtocolServerOp))
type Request = Type.ProtocolClientOp
type InMessage = Type.ProtocolServerOp
type Response = NonEmpty InMessage

-- | Asynchronous LDAP operation. Use 'wait' or 'waitSTM' to wait for its completion.
data Async a = Async (STM (Either ResponseError a))
newtype Async a = Async (STM (Either ResponseError a))

instance Functor Async where
fmap f (Async stm) = Async (fmap (fmap f) stm)
@ -70,8 +78,8 @@ newtype Dn = Dn Text

-- | Response indicates a failed operation.
data ResponseError =
ResponseInvalid Request Response -- ^ LDAP server did not follow the protocol, so @ldap-client@ couldn't make sense of the response.
| ResponseErrorCode Request Type.ResultCode Dn Text -- ^ The response contains a result code indicating failure and an error message.
ResponseInvalid !Request !Response -- ^ LDAP server did not follow the protocol, so @ldap-client@ couldn't make sense of the response.
| ResponseErrorCode !Request !Type.ResultCode !Dn !Text -- ^ The response contains a result code indicating failure and an error message.
deriving (Show, Eq, Typeable)

instance Exception ResponseError
@ -112,11 +120,10 @@ sendRequest l p msg =
return (Async (fmap p (readTMVar var)))

writeRequest :: Ldap -> TMVar Response -> Request -> STM ()
writeRequest Ldap { client } var msg = writeTQueue client (New msg var)

raise :: Exception e => Either e a -> IO a
raise = either throwIO return
writeRequest Ldap { reqQ } var msg = writeTQueue reqQ (New msg var)

eitherToIO :: Exception e => Either e a -> IO a
eitherToIO = either throwIO pure

-- | Terminate the connection to the Directory.
--
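Migration note for the Host change: Secure and Insecure are gone; TLS settings are now passed explicitly through the Tls constructor, with defaultTlsSettings and insecureTlsSettings (exported from Ldap.Client) reproducing the old behaviours. A sketch with placeholder host names:

import Ldap.Client (Host(..), defaultTlsSettings, insecureTlsSettings)

-- was: Secure "ldap.example.com"
validated :: Host
validated = Tls "ldap.example.com" defaultTlsSettings

-- was: Insecure "localhost" (certificate checks disabled; tests only)
unvalidated :: Host
unvalidated = Tls "localhost" insecureTlsSettings

-- unchanged:
plain :: Host
plain = Plain "ldap.example.com"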
@ -40,15 +40,15 @@ import Ldap.Client.Internal

-- | Type of modification being performed.
data Operation =
Delete Attr [AttrValue] -- ^ Delete values from the attribute. Deletes the attribute if the list is empty or all current values are listed.
| Add Attr [AttrValue] -- ^ Add values to the attribute, creating it if necessary.
| Replace Attr [AttrValue] -- ^ Replace all existing values of the attribute with the new list. Deletes the attribute if the list is empty.
Delete !Attr ![AttrValue] -- ^ Delete values from the attribute. Deletes the attribute if the list is empty or all current values are listed.
| Add !Attr ![AttrValue] -- ^ Add values to the attribute, creating it if necessary.
| Replace !Attr ![AttrValue] -- ^ Replace all existing values of the attribute with the new list. Deletes the attribute if the list is empty.
deriving (Show, Eq)

-- | Perform the Modify operation synchronously. Raises 'ResponseError' on failures.
modify :: Ldap -> Dn -> [Operation] -> IO ()
modify l dn as =
raise =<< modifyEither l dn as
eitherToIO =<< modifyEither l dn as

-- | Perform the Modify operation synchronously. Returns @Left e@ where
-- @e@ is a 'ResponseError' on failures.
@ -98,7 +98,7 @@ newtype RelativeDn = RelativeDn Text
-- | Perform the Modify DN operation synchronously. Raises 'ResponseError' on failures.
modifyDn :: Ldap -> Dn -> RelativeDn -> Bool -> Maybe Dn -> IO ()
modifyDn l dn rdn del new =
raise =<< modifyDnEither l dn rdn del new
eitherToIO =<< modifyDnEither l dn rdn del new

-- | Perform the Modify DN operation synchronously. Returns @Left e@ where
-- @e@ is a 'ResponseError' on failures.
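The Operation constructors only gained strictness annotations; call sites look the same as before. A small usage sketch with placeholder attribute names:

{-# LANGUAGE OverloadedStrings #-}
import qualified Ldap.Client as Ldap
import           Ldap.Client (Attr(..), Dn(..), Operation(..))

retype :: Ldap.Ldap -> IO ()
retype l =
  Ldap.modify l (Dn "cn=pikachu,o=localhost")
    [ Replace (Attr "type") ["electric"]  -- replace all values
    , Add     (Attr "nickname") ["pika"]  -- add, creating the attribute if needed
    , Delete  (Attr "obsolete") []        -- drop the attribute entirely
    ]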
@ -52,7 +52,7 @@ import Ldap.Client.Internal
-- | Perform the Search operation synchronously. Raises 'ResponseError' on failures.
search :: Ldap -> Dn -> Mod Search -> Filter -> [Attr] -> IO [SearchEntry]
search l base opts flt attributes =
raise =<< searchEither l base opts flt attributes
eitherToIO =<< searchEither l base opts flt attributes

-- | Perform the Search operation synchronously. Returns @Left e@ where
-- @e@ is a 'ResponseError' on failures.
@ -215,7 +215,7 @@ data Filter =
| !Attr :~= !AttrValue -- ^ Attribute's value approximately matches the assertion
| !Attr :=* !(Maybe AttrValue, [AttrValue], Maybe AttrValue)
-- ^ Glob match
| (Maybe Attr, Maybe Attr, Bool) ::= AttrValue
| !(Maybe Attr, Maybe Attr, Bool) ::= !AttrValue
-- ^ Extensible match

-- | Entry found during the Search.
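The user-facing Filter in Ldap.Client.Search likewise only changed in strictness; queries are written exactly as before. A search sketch against the test suite's o=localhost data, assuming the usual re-exports ('scope', 'WholeSubtree') from Ldap.Client:

{-# LANGUAGE OverloadedStrings #-}
import qualified Ldap.Client as Ldap
import           Ldap.Client (Attr(..), Dn(..), Filter(..), SearchEntry(..))

electric :: Ldap.Ldap -> IO [SearchEntry]
electric l =
  Ldap.search l (Dn "o=localhost")
              (Ldap.scope Ldap.WholeSubtree)  -- Mod Search option
              (Attr "type" := "electric")     -- same Filter syntax as before
              [Attr "cn", Attr "evolution"]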
@ -30,7 +30,7 @@ spec = do
(Ldap.Type.Simple "public"))
Ldap.InvalidCredentials
(Dn "cn=admin")
"Invalid Credentials"))
"InvalidCredentialsError"))

it "binds as ‘pikachu’" $ do
res <- locally $ \l -> do

@ -16,7 +16,7 @@ spec = do
res `shouldBe` True
res `shouldBe` Right ()

it "compares and looses" $ do
it "compares and loses" $ do
res <- locally $ \l -> do
res <- Ldap.compare l charmander (Attr "type") "flying"
res `shouldBe` False

@ -47,7 +47,7 @@ spec = do
(Ldap.ResponseErrorCode req
Ldap.InsufficientAccessRights
(Dn "o=localhost")
"Insufficient Access Rights"))
"InsufficientAccessRightsError"))

it "‘present’ filter" $ do
res <- locally $ \l -> do
@ -1,3 +1,4 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module SpecHelper
( locally
@ -22,8 +23,15 @@ module SpecHelper
, oddish
) where

#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$))
#endif
import Control.Monad (forever)
import Control.Concurrent (forkIO)
import Control.Exception (bracket)
import System.Environment (getEnvironment)
import System.IO (hGetLine)
import System.IO.Error (tryIOError)
import System.Process (runInteractiveProcess, terminateProcess, waitForProcess)

import Ldap.Client as Ldap
@ -31,19 +39,21 @@ import Ldap.Client as Ldap

locally :: (Ldap -> IO a) -> IO (Either LdapError a)
locally f =
bracket (do (_, out, _, h) <- runInteractiveProcess "./test/ldap.js" [] Nothing
(Just [ ("PORT", show port)
, ("SSL_CERT", "./ssl/cert.pem")
, ("SSL_KEY", "./ssl/key.pem")
])
hGetLine out
bracket (do env <- getEnvironment
(_, out, _, h) <- runInteractiveProcess "./test/ldap.js" [] Nothing
(Just (("PORT", show (port :: Int)) :
("SSL_CERT", "./ssl/cert.pem") :
("SSL_KEY", "./ssl/key.pem") :
env))
_ <- hGetLine out
_ <- forkIO (() <$ tryIOError (forever (hGetLine out >>= putStrLn)))
return h)
(\h -> do terminateProcess h
waitForProcess h)
(\_ -> Ldap.with localhost port f)

localhost :: Host
localhost = Insecure "localhost"
localhost = Tls "localhost" insecureTlsSettings

port :: Num a => a
port = 24620
19
test/ldap.js
@ -1,8 +1,20 @@
#!/usr/bin/env js
#!/usr/bin/env node

var fs = require('fs');
var ldapjs = require('ldapjs');

// Stub unimplemented functionality.
ldapjs.ExtensibleFilter.prototype.matches = ldapjs.EqualityFilter.prototype.matches;
ldapjs.ApproximateFilter.prototype.matches = ldapjs.EqualityFilter.prototype.matches;

// Remove superfluous spaces from DNs.
var wrappee = ldapjs.DN.prototype.format;
ldapjs.DN.prototype.format = function(options) {
options = options || this._format;
options['skipSpace'] = true;
return (wrappee.bind(this))(options);
};

var port = process.env.PORT;
var certificate = fs.readFileSync(process.env.SSL_CERT, "utf-8");
var key = fs.readFileSync(process.env.SSL_KEY, "utf-8");
@ -81,8 +93,9 @@ function authorize(req, res, next) {

server.search('o=localhost', [authorize], function(req, res, next) {
for (var i = 0; i < pokemon.length; i++) {
if (req.filter.matches(pokemon[i].attributes))
if (req.filter.matches(pokemon[i].attributes)) {
res.send(pokemon[i]);
}
};

res.end();
@ -163,7 +176,7 @@ server.modifyDN('o=localhost', [], function(req, res, next) {
if (req.dn.toString() === pokemon[i].dn) {
req.dn.rdns[0] = req.newRdn.rdns[0];
pokemon[i].dn = req.dn.toString();
pokemon[i].attributes.cn = req.newRdn.rdns[0].cn;
pokemon[i].attributes.cn = req.newRdn.rdns[0].attrs.cn.value;
}
}