Compare commits

...

10 Commits

Author SHA1 Message Date
Matvey Aksenov
5d8b2fc5a9 fix nix-build 2015-11-24 18:23:43 +00:00
Matvey Aksenov
c1123fb1a0 M 2015-11-24 17:56:32 +00:00
Matvey Aksenov
064fd9b377 shame 2015-11-23 22:14:49 +00:00
Matvey Aksenov
990a1021b8 fix stuff that can be fixed 2015-11-23 22:04:59 +00:00
Matvey Aksenov
82b1769fc9 the latest ldapjs is firmly in the insane shit camp, but I think I've massaged it into a working condition 2015-11-23 21:27:00 +00:00
Matvey Aksenov
f74ee35d6d finally some progress 2015-11-23 20:37:06 +00:00
Matvey Aksenov
bab975ef06 Nix configuration 2015-06-08 20:24:37 +00:00
Matvey Aksenov
da54207774 Implement the Abandon operation 2015-04-23 20:26:31 +00:00
Matvey Aksenov
fcaf02b044 Homegrown ASN.1 encoding.
The main purpose of this is to allow `ToAsn1` instances to match
the spec closer. It also lets us implement Abandon operation fairly
easily (see the subsequent commit).
2015-04-23 20:23:44 +00:00
Matvey Aksenov
57bf3c066a Hackage badge 2015-04-11 17:01:17 +00:00
67 changed files with 1956 additions and 449 deletions

View File

@ -1,14 +1,5 @@
function s:hdevtools_options(rgs)
return join(map(a:rgs, "'-g ' . v:val"))
endfunction
function s:discover_cabal_sandbox(glob)
let l:sandboxes = split(glob(a:glob, "."), "\n")
if len(l:sandboxes) > 0
return ['-no-user-package-db', '-package-db=' . l:sandboxes[-1]]
else
return []
endif
return join(["-s", "/tmp/" . substitute(system("sha1sum <<< $PWD | cut -d' ' -f1"), '\n\+$', '', '') . ".sock"] + map(a:rgs, "'-g ' . v:val"))
endfunction
let g:syntastic_haskell_hdevtools_args = s:hdevtools_options
@ -23,5 +14,5 @@ let g:syntastic_haskell_hdevtools_args = s:hdevtools_options
\ , '-Wall'
\ , '-fno-warn-unused-do-bind'
\ , '-fno-warn-type-defaults'
\ ] + s:discover_cabal_sandbox(".cabal-sandbox/*.conf.d")
\ ]
\ )

View File

@ -1,5 +1,6 @@
ldap-client
===========
[![Hackage](https://budueba.com/hackage/ldap-client)](https://hackage.haskell.org/package/ldap-client)
[![Build Status](https://travis-ci.org/supki/ldap-client.svg?branch=master)](https://travis-ci.org/supki/ldap-client)
This library implements (the parts of) [RFC 4511][rfc4511]
@ -10,19 +11,18 @@ Bind Operation | [4.2][4.2] | ✔
Unbind Operation | [4.3][4.3] | ✔
Unsolicited Notification | [4.4][4.4] | ✔
Notice of Disconnection | [4.4.1][4.4.1] | ✔
Search Operation | [4.5][4.5] | ✔\*
Search Operation | [4.5][4.5] | ✔
Modify Operation | [4.6][4.6] | ✔
Add Operation | [4.7][4.7] | ✔
Delete Operation | [4.8][4.8] | ✔
Modify DN Operation | [4.9][4.9] | ✔
Compare Operation | [4.10][4.10] | ✔
Abandon Operation | [4.11][4.11] |
Abandon Operation | [4.11][4.11] | ✔
Extended Operation | [4.12][4.12] | ✔
IntermediateResponse Message | [4.13][4.13] | ✔
StartTLS Operation | [4.14][4.14] | ✔†
LDAP over TLS | - | ✔
\* The `:dn` thing is unsupported in Extensible matches
† Only serves as an example of Extended Operation. It's useless for all practical purposes as it does not actually enable TLS. In other words, use LDAP over TLS instead.
[rfc4511]: https://tools.ietf.org/html/rfc4511

9
default.nix Normal file
View File

@ -0,0 +1,9 @@
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7102" }: let
ghc = nixpkgs.pkgs.haskell.packages.${compiler};
npm = import ./npm {};
in
ghc.callPackage ./package.nix {
mkDerivation = args: ghc.mkDerivation(args // {
buildTools = (if args ? buildTools then args.buildTools else []) ++ [ npm.nodePackages.ldapjs ];
});
}

View File

@ -34,7 +34,9 @@ library
Ldap.Asn1.ToAsn1
Ldap.Asn1.Type
Ldap.Client
Ldap.Client.Abandon
Ldap.Client.Add
Ldap.Client.Asn1.ToAsn1
Ldap.Client.Bind
Ldap.Client.Compare
Ldap.Client.Delete
@ -81,3 +83,16 @@ test-suite spec
, ldap-client
, process
, semigroups
test-suite doctests
default-language:
Haskell2010
type:
exitcode-stdio-1.0
hs-source-dirs:
test
main-is:
Doctests.hs
build-depends:
base >= 4.6 && < 5
, doctest

1
npm/.nixfromnpm-version Normal file
View File

@ -0,0 +1 @@
0.7.0

9
npm/default.nix Normal file
View File

@ -0,0 +1,9 @@
{ nodejsVersion ? "4.1", npm3 ? false, pkgs ? import <nixpkgs> {} }:
let
nodeLib = import ./nodeLib {
inherit pkgs npm3 nodejsVersion;
self = nodeLib;
};
in nodeLib.generatePackages {
rootPath = ./nodePackages;
}

View File

@ -0,0 +1,394 @@
{
# Provides the mkDerivation function.
stdenv,
# Lets us run a command.
runCommand,
# Derivation for nodejs and npm.
nodejs,
# Which version of npm to use.
npm ? nodejs,
# List of required native build inputs.
neededNatives,
# Self-reference for overriding purposes.
buildNodePackage
}:
let
# The path within $out/lib to find a package. If the package does not
# have a namespace, it will simply be in `node_modules`, and otherwise it
# will appear in `node_modules/@namespace`.
modulePath = pkg: if pkg.namespace == null then "node_modules"
else "node_modules/@${pkg.namespace}";
# The path to the package within its modulePath. Just appending the name
# of the package.
pathInModulePath = pkg: "${modulePath pkg}/${pkg.basicName}";
in
{
# Used for private packages. Indicated in the name field of the
# package.json, e.g. "@mynamespace/mypackage". Public packages will not
# need this.
namespace ? null,
# The name of the package. If it's a private package with a namespace,
# this should not contain the namespace.
name,
# Version of the package. This should follow the semver standard, although
# we don't explicitly enforce that in this function.
version,
# Source of the package; can be a tarball or a folder on the filesystem.
src,
# Prefix for the derivation name; defaults to the nodejs interpreter name (plus the namespace, if any), giving names like "nodejs-<version>-${name}"
namePrefix ? "${nodejs.name}-" +
(if namespace == null then "" else "${namespace}-"),
# List or attribute set of dependencies
deps ? {},
# List or attribute set of peer dependencies
peerDependencies ? {},
# List or attribute set of optional dependencies
optionalDependencies ? {},
# List of optional dependencies to skip
skipOptionalDependencies ? [],
# List or set of development dependencies (or null).
devDependencies ? null,
# If true and devDependencies are not null, the package will be
# installed contingent on successfully running tests.
doCheck ? devDependencies != null,
# Additional flags passed to npm install
flags ? "",
# Command to be run before shell hook
preShellHook ? "",
# Command to be run after shell hook
postShellHook ? "",
# Same as https://docs.npmjs.com/files/package.json#os
os ? [],
# Same as https://docs.npmjs.com/files/package.json#cpu
cpu ? [],
# Attribute set of already resolved deps (internal),
# for avoiding infinite recursion
resolvedDeps ? {},
...
} @ args:
let
inherit (stdenv.lib) fold removePrefix hasPrefix subtractLists isList flip
intersectLists isAttrs listToAttrs nameValuePair
mapAttrs filterAttrs attrNames elem concatMapStrings
attrValues getVersion flatten remove concatStringsSep;
# whether we should run tests.
shouldTest = doCheck && devDependencies != null;
# The package name as it appears in the package.json. This contains a
# namespace if there is one, so it will be a distinct identifier for
# different packages.
pkgName = if namespace == null then name else "@${namespace}/${name}";
# We create a `self` object for self-referential expressions. It
# bottoms out in a call to `mkDerivation` at the end.
self = let
sources = runCommand "node-sources" {} ''
tar --no-same-owner --no-same-permissions -xf ${nodejs.src}
mv $(find . -type d -mindepth 1 -maxdepth 1) $out
'';
platforms = if os == [] then nodejs.meta.platforms else
fold (entry: platforms:
let
filterPlatforms =
stdenv.lib.platforms.${removePrefix "!" entry} or [];
in
# Ignore unknown platforms
if filterPlatforms == [] then (if platforms == [] then nodejs.meta.platforms else platforms)
else
if hasPrefix "!" entry then
subtractLists (intersectLists filterPlatforms nodejs.meta.platforms) platforms
else
platforms ++ (intersectLists filterPlatforms nodejs.meta.platforms)
) [] os;
toAttrSet = obj: if isAttrs obj then obj else
(listToAttrs (map (x: nameValuePair x.name x) obj));
mapDependencies = deps: filterFunc: let
attrDeps = toAttrSet deps;
in rec {
# All required node modules, without already resolved dependencies
# Also override with already resolved dependencies
requiredDeps = mapAttrs (name: dep:
dep.override {resolvedDeps = resolvedDeps // { "${name}" = self; };}
) (filterAttrs filterFunc
(removeAttrs attrDeps (attrNames resolvedDeps)));
# Recursive dependencies that we want to avoid with shim creation
recursiveDeps = filterAttrs filterFunc
(removeAttrs attrDeps (attrNames requiredDeps));
};
# Filter out self-referential dependencies.
_dependencies = mapDependencies deps (name: dep:
dep.pkgName != pkgName);
# Filter out self-referential peer dependencies.
_peerDependencies = mapDependencies peerDependencies (name: dep:
dep.pkgName != pkgName);
# Filter out any optional dependencies which don't build correctly.
_optionalDependencies = mapDependencies optionalDependencies (name: dep:
(builtins.tryEval dep).success &&
!(elem dep.pkgName skipOptionalDependencies)
);
# Required dependencies are those that we haven't filtered yet.
requiredDependencies =
_dependencies.requiredDeps //
_optionalDependencies.requiredDeps //
_peerDependencies.requiredDeps;
recursiveDependencies =
_dependencies.recursiveDeps //
_optionalDependencies.recursiveDeps //
_peerDependencies.recursiveDeps;
npmFlags = concatStringsSep " " ([
# We point the registry at something that doesn't exist. This will
# mean that NPM will fail if any of the dependencies aren't met, as it
# will attempt to hit this registry for the missing dependency.
"--registry=fakeprotocol://notaregistry.$UNIQNAME.derp"
# These flags make failure fast, as otherwise NPM will spin for a while.
"--fetch-retry-mintimeout=0"
"--fetch-retry-maxtimeout=10"
# This will disable any user-level npm configuration.
"--userconfig=/dev/null"
# This flag is used for packages which link against the node headers.
"--nodedir=${sources}"
] ++ (if isList flags then flags else [flags]));
# A bit of bash to check that variables are set.
checkSet = vars: concatStringsSep "\n" (flip map vars (var: ''
[[ -z $${var} ]] && { echo "${var} is not set."; exit 1; }
''));
mkDerivationArgs = {
inherit src;
# Define some environment variables that we will use in the build.
prePatch = ''
export HASHEDNAME=$(echo "$propagatedNativeBuildInputs $name" \
| md5sum | awk '{print $1}')
export UNIQNAME="''${HASHEDNAME:0:10}-${name}-${version}"
export BUILD_DIR=$TMPDIR/$UNIQNAME-build
'';
patchPhase = ''
runHook prePatch
patchShebangs $PWD
# Remove any impure dependencies from the package.json (see script
# for details)
node ${./removeImpureDependencies.js}
# We do not handle shrinkwraps yet
rm npm-shrinkwrap.json 2>/dev/null || true
# Repackage source into a tarball, so npm pre/post publish hooks are
# not triggered.
mkdir -p $BUILD_DIR
GZIP=-1 tar -czf $BUILD_DIR/package.tgz ./
export PATCHED_SRC=$BUILD_DIR/package.tgz
runHook postPatch
'';
configurePhase = ''
runHook preConfigure
(
${checkSet ["BUILD_DIR"]}
mkdir -p $BUILD_DIR
cd $BUILD_DIR
# Symlink or copy dependencies for node modules
# copy is needed if dependency has recursive dependencies,
# because node can't follow symlinks while resolving recursive deps.
${
let
link = dep: ''
${if dep.recursiveDeps == [] then "ln -sfv" else "cp -rf"} \
${dep}/lib/${pathInModulePath dep} ${modulePath dep}
'';
in
flip concatMapStrings (attrValues requiredDependencies) (dep: ''
mkdir -p ${modulePath dep}
${link dep}
${concatMapStrings link (attrValues dep.peerDependencies)}
'')}
# Create shims for recursive dependencies
${concatMapStrings (dep: ''
mkdir -p ${modulePath dep}
cat > ${pathInModulePath dep}/package.json <<EOF
{
"name": "${dep.pkgName}",
"version": "${getVersion dep}"
}
EOF
'') (attrValues recursiveDependencies)}
# Create dummy package.json file
cat <<EOF > package.json
{"name":"dummy-for-$UNIQNAME","version":"0.0.0", "license":"MIT",
"description":"Dummy package file for building $name",
"repository":{"type":"git","url":"http://$UNIQNAME.com"}}
EOF
# Create dummy readme
echo "Dummy package" > README.md
)
export HOME=$BUILD_DIR
runHook postConfigure
'';
buildPhase = ''
runHook preBuild
# Install package
(
${checkSet ["BUILD_DIR" "PATCHED_SRC"]}
echo "Building $name in $BUILD_DIR"
cd $BUILD_DIR
HOME=$PWD npm install $PATCHED_SRC ${npmFlags} || {
npm list
exit 1
}
)
runHook postBuild
'';
installPhase = ''
runHook preInstall
(
cd $BUILD_DIR
# Remove shims
${concatMapStrings (dep: ''
rm ${pathInModulePath dep}/package.json
rmdir ${modulePath dep}
'') (attrValues recursiveDependencies)}
# Install the package that we just built.
mkdir -p $out/lib/${modulePath self}
# Move the folder that was created for this path to $out/lib.
mv ${pathInModulePath self} $out/lib/${pathInModulePath self}
# Remove the node_modules subfolder from there, and instead put things
# in $PWD/node_modules into that folder.
rm -rf $out/lib/${pathInModulePath self}/node_modules
cp -r node_modules $out/lib/${pathInModulePath self}/node_modules
if [ -e "$out/lib/${pathInModulePath self}/man" ]; then
mkdir -p $out/share
for dir in $out/lib/${pathInModulePath self}/man/*; do #*/
mkdir -p $out/share/man/$(basename "$dir")
for page in $dir/*; do #*/
ln -sv $page $out/share/man/$(basename "$dir")
done
done
fi
# Move peer dependencies to node_modules
${concatMapStrings (dep: ''
mkdir -p ${modulePath dep}
mv ${pathInModulePath dep} $out/lib/${modulePath dep}
'') (attrValues _peerDependencies.requiredDeps)}
# Install binaries and patch shebangs. These are always found in
# node_modules/.bin, regardless of a package namespace.
mv node_modules/.bin $out/lib/node_modules 2>/dev/null || true
if [ -d "$out/lib/node_modules/.bin" ]; then
ln -sv $out/lib/node_modules/.bin $out/bin
patchShebangs $out/lib/node_modules/.bin
fi
)
runHook postInstall
'';
shellHook = ''
${preShellHook}
export PATH=${npm}/bin:${nodejs}/bin:$(pwd)/node_modules/.bin:$PATH
mkdir -p node_modules
${concatMapStrings (dep: ''
mkdir -p ${modulePath dep}
ln -sfv ${dep}/lib/${pathInModulePath dep} ${pathInModulePath dep}
'') (attrValues requiredDependencies)}
${postShellHook}
'';
# Stripping does not make a lot of sense in node packages
dontStrip = true;
meta = {
inherit platforms;
maintainers = [ stdenv.lib.maintainers.offline ];
};
# Propagate pieces of information about the package so that downstream
# packages can reflect on them.
passthru.pkgName = pkgName;
passthru.basicName = name;
passthru.namespace = namespace;
passthru.version = version;
passthru.peerDependencies = _peerDependencies.requiredDeps;
passthru.recursiveDeps =
(flatten (
map (dep: remove name dep.recursiveDeps) (attrValues requiredDependencies)
)) ++
(attrNames recursiveDependencies);
# Add an 'override' attribute, which will call `buildNodePackage` with the
# given arguments overridden.
passthru.override = newArgs: buildNodePackage (args // newArgs);
} // (removeAttrs args ["deps" "resolvedDeps" "optionalDependencies"
"devDependencies"]) // {
name = "${namePrefix}${name}-${version}";
# Run the node setup hook when this package is a build input
propagatedNativeBuildInputs = (args.propagatedNativeBuildInputs or []) ++
[ npm nodejs ];
nativeBuildInputs =
(args.nativeBuildInputs or []) ++ neededNatives ++
(attrValues requiredDependencies);
# Expose list of recursive dependencies upstream, up to the package that
# caused recursive dependency
recursiveDeps =
(flatten (
map (dep: remove name dep.recursiveDeps) (attrValues requiredDependencies)
)) ++
(attrNames recursiveDependencies);
};
in stdenv.mkDerivation mkDerivationArgs;
in self

210
npm/nodeLib/default.nix Normal file
View File

@ -0,0 +1,210 @@
/*
A set of tools for generating node packages, such as to be imported by
default.nix files generated by nixfromnpm.
*/
{
# Self-reference so that we can pass through to downstream libraries
self,
# Base set of packages, i.e. nixpkgs.
pkgs,
# Version of nodejs.
nodejsVersion ? "4.1",
# Whether to use npm3 (requires a prebuilt tarball of npm3).
npm3 ? true
}:
let
# Function to replace dots with something
replaceDots = c: replaceChars ["."] [c];
inherit (builtins) readDir removeAttrs length getEnv elemAt hasAttr;
inherit (pkgs.lib) attrNames attrValues filterAttrs flip foldl
hasSuffix hasPrefix removeSuffix replaceChars
optional optionals stringToCharacters
concatStrings tail splitString;
inherit (pkgs.stdenv) isLinux;
# Function to remove the first character of a string.
dropFirstChar = str: concatStrings (tail (stringToCharacters str));
# Like a for loop.
for = flip map;
# Concatenate a list of sets.
joinSets = foldl (a: b: a // b) {};
# Extracts a tarball containing a bootstrapped version of npm 3.
# This tarball must have been previously generated by an invocation
# of nixfromnpm, but one of these should be included in the
# nixfromnpm distribution (if not, run the `gen_npm3` script).
npm3-src = pkgs.runCommand "npm3" {src=./npm3.tar.gz;} ''
mkdir -p $out && cd $out && tar -xf $src
'';
# Build the extracted nix expression. It can't itself use npm3 (npm3
# hasn't been built yet at this point), so npm3 is disabled for it.
_npm3 = import npm3-src {
inherit pkgs nodejsVersion;
npm3 = false;
};
# Parse the `NPM_AUTH_TOKENS` environment variable to discover
# namespace-token associations and turn them into an attribute set
# which we can use as an input to the fetchPrivateNpm function.
# Split the variable on ':', then turn each k=v element in
# the list into an attribute set and join all of those sets.
namespaceTokens = joinSets (
for (splitString ":" (getEnv "NPM_AUTH_TOKENS")) (kvPair:
let kv = splitString "=" kvPair; in
if length kv != 2 then {}
else {"${elemAt kv 0}" = elemAt kv 1;}));
# A function similar to fetchUrl but allows setting of custom headers.
fetchUrlWithHeaders = pkgs.callPackage ./fetchUrlWithHeaders.nix {};
# Uses the parsed namespace tokens to create a function that can
# fetch a private package from an npm repo.
fetchPrivateNpm = {namespace, headers ? {}, ...}@args:
if !(hasAttr namespace namespaceTokens)
then throw "NPM_AUTH_TOKENS does not contain namespace ${namespace}"
else let
Authorization = "Bearer ${namespaceTokens.${namespace}}";
headers = {inherit Authorization;} // headers;
in
fetchUrlWithHeaders (removeAttrs args ["namespace"] // {inherit headers;});
in
rec {
nodejs = pkgs."nodejs-${replaceDots "_" nodejsVersion}" or (
throw "The given nodejs version ${nodejsVersion} has not been defined."
);
buildNodePackage = import ./buildNodePackage.nix ({
inherit (pkgs) stdenv runCommand;
inherit nodejs buildNodePackage;
neededNatives = [pkgs.python] ++ optionals isLinux [pkgs.utillinux];
} // (if npm3 then {npm = _npm3;} else {}));
# A generic package that will fail to build. This is used to indicate
# packages that are broken, without failing the entire generation of
# a package expression.
brokenPackage = {name, reason}:
let
deriv = pkgs.stdenv.mkDerivation {
name = "BROKEN-${name}";
buildCommand = ''
echo "Package ${name} is broken: ${reason}"
exit 1
'';
passthru.withoutTests = deriv;
passthru.pkgName = name;
passthru.basicName = "BROKEN";
passthru.namespace = null;
passthru.version = "BROKEN";
passthru.override = _: deriv;
passthru.recursiveDeps = [];
passthru.peerDependencies = {};
};
in
deriv;
# List a directory after filtering the files.
lsFilter = pred: dir: attrNames (filterAttrs pred (readDir dir));
# Checks the name and type of a listing to grab non-dotfile dirs.
isRegDir = name: type: type == "directory" && !(hasPrefix "." name);
# Discover all of the node packages in a folder and turn them into a set
# mapping `<name>_<version>` to the expression to build that package.
discoverPackages = {callPackage, rootPath}:
# if true then throw "huh? ${rootPath}" else
let
# Names of NPM packages defined in this directory. Don't take
# files that start with '@'.
nodeDirs = lsFilter (n: t: isRegDir n t && !(hasPrefix "@" n))
(/. + rootPath);
# Generate the package expression from a package name and .nix path.
toPackage = name: filepath: let
versionRaw = removeSuffix ".nix" filepath; # Raw version, i.e. "1.2.4"
# Join with package name to make the variable name.
varName = "${replaceDots "-" name}_${replaceDots "-" versionRaw}";
in
# Return the singleton set which maps that name to the actual expression.
{"${varName}" = callPackage (/. + rootPath + "/${name}/${filepath}") {};};
in
# For each directory, and each .nix file in it, create a package from that.
joinSets (for nodeDirs (pkgName: let
pkgDir = /. + rootPath + "/${pkgName}";
# List of .nix files in the directory (excluding symlinks).
versionFiles = lsFilter (name: type: type == "regular" &&
hasSuffix ".nix" name)
pkgDir;
# Check if there is a `latest.nix` file
hasLatest = lsFilter (n: _: n == "latest.nix") pkgDir != [];
in
joinSets (
# Find all of the versions listed in the folder.
map (toPackage pkgName) versionFiles ++
# If the folder has a `latest.nix` file, link the bare name of
# the package to that file.
optional hasLatest {
"${replaceDots "-" pkgName}" = callPackage
(/. + rootPath + "/${pkgName}/latest.nix") {};
})));
# Same as above, except that we take all of the namespaced packages;
# these packages are in folders prefaced with `@`, and contain
# packages in that folder. So, for example the path `@foo/bar` is
# the path to all of the versions of the `bar` package under the
# namespace `foo`.
discoverNamespacePackages = {callPackage, rootPath}: let
isNsDir = name: type: type == "directory" && hasPrefix "@" name;
# Names of NPM packages defined in this directory.
namespaceDirs = lsFilter isNsDir (/. + rootPath);
in
# For each namespace directory, each package folder in it, and
# each .nix file in that, create a package from that and then
# create a namespace out of that.
joinSets (for namespaceDirs (nsDirName: {
"${dropFirstChar nsDirName}" = discoverPackages {
inherit callPackage;
rootPath = /. + rootPath + "/${nsDirName}";
};
}));
# The function that a default.nix can call into which will scan its
# directory for all of the package files and generate a big attribute set
# for all of them. Re-exports the `callPackage` function and all of the
# attribute sets, as well as the nodeLib.
generatePackages = {rootPath, extensions ? []}:
let
callPackageWith = pkgSet: path: overridingArgs: let
inherit (builtins) intersectAttrs functionArgs;
inherit (pkgs.lib) filterAttrs;
# The path must be a function; import it here.
func = import path;
# Get the arguments to the function; e.g. "{a=false; b=true;}", where
# a false value is an argument that has no default.
funcArgs = functionArgs func;
# Take only the arguments that don't have a default.
noDefaults = filterAttrs (_: v: v == false) funcArgs;
# Intersect this set with the package set to create the arguments to
# the function.
satisfyingArgs = intersectAttrs noDefaults pkgSet;
# Override these arguments with whatever's passed in.
actualArgs = satisfyingArgs // overridingArgs;
# Call the function with these args to get a derivation.
deriv = func actualArgs;
in deriv;
callPackage = callPackageWith {
inherit fetchUrlWithHeaders namespaces namespaceTokens;
inherit pkgs nodePackages buildNodePackage brokenPackage;
};
nodePackages = joinSets (map (e: e.nodePackages) extensions) //
discoverPackages {inherit callPackage rootPath;};
namespaces = joinSets (map (e: e.namespaces) extensions) //
discoverNamespacePackages {inherit callPackage rootPath;};
in {
inherit nodePackages callPackage namespaces namespaceTokens pkgs;
nodeLib = self;
};
}

21
npm/nodeLib/fetch.py Normal file
View File

@ -0,0 +1,21 @@
import os
import requests
out = os.environ['out']
url = os.environ['url']
headers = {"User-Agent": "nix-fetchurl"}
header_names = os.environ.get("headerNames", "")
for name in header_names.split():
    if "__HTTP_HEADER_{}".format(name) not in os.environ:
        exit("FATAL: no corresponding value set for header {}"
             .format(name))
    headers[name] = os.environ["__HTTP_HEADER_{}".format(name)]
print('GET {} with headers {}'.format(url, headers))
response = requests.get(url, headers=headers)
if response.status_code != 200:
    exit("Received a {} response. :(\nContent: {}"
         .format(response.status_code, response.content))
else:
    print('Response: {} ({} bytes)'
          .format(response.status_code, len(response.content)))
with open(out, 'wb') as f:
    f.write(response.content)

View File

@ -0,0 +1,71 @@
# A python-based fetchurl function, allowing the passage of custom headers.
# Just calls into `requests` under the hood.
{
pythonPackages, stdenv
}:
{ # URL to fetch.
url ? ""
, # Additional curl options needed for the download to succeed.
curlOpts ? ""
, # Name of the file. If empty, use the basename of `url' (or of the
# first element of `urls').
name ? ""
# Different ways of specifying the hash.
, outputHash ? ""
, outputHashAlgo ? ""
, md5 ? ""
, sha1 ? ""
, sha256 ? ""
, # Meta information, if any.
meta ? {}
# Headers to set, if any.
, headers ? {}
}:
let
inherit (stdenv.lib) flip mapAttrs' nameValuePair;
hasHash = (outputHash != "" && outputHashAlgo != "")
|| md5 != "" || sha1 != "" || sha256 != "";
# Create an attribute set translating each header name and value into
# the header name prefixed with __HTTP_HEADER. When the derivation is
# evaluated, the script will pick up these environment variables and use
# them to produce the actual headers.
headerValues = flip mapAttrs' headers (headerName: headerValue:
nameValuePair "__HTTP_HEADER_${headerName}" headerValue);
in
if !hasHash
then throw "You must specify the output hash for ${url}"
else
stdenv.mkDerivation ({
inherit url;
name = if name != "" then name else baseNameOf (toString url);
outputHashAlgo = if outputHashAlgo != "" then outputHashAlgo else
if sha256 != "" then "sha256" else if sha1 != "" then "sha1" else "md5";
outputHash = if outputHash != "" then outputHash else
if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;
# Only flat hashing, which is the normal mode if you're fetching a file.
outputHashMode = "flat";
# Doing the download on a remote machine just duplicates network
# traffic, so don't do that.
preferLocalBuild = true;
headerNames = builtins.attrNames headers;
buildInputs = with pythonPackages; [python requests2];
buildCommand = ''
python ${./fetch.py}
'';
} // headerValues)

View File

@ -0,0 +1,16 @@
# Parses the `NPM_AUTH_TOKENS` environment variable to discover
# namespace-token associations and turn them into an attribute set
# which we can use as an input to the fetchPrivateNpm function.
{pkgs, joinSets}:
let
inherit (pkgs.lib) flip length elemAt;
npmAuthTokens = builtins.getEnv "NPM_AUTH_TOKENS";
in
# Split the variable on ':', then turn each k=v element in
# the list into an attribute set and join all of those sets.
joinSets (
flip map (split ":" npmAuthTokens) (kvPair:
if length (split "=" kvPair) != 2 then {}
else {"${elemAt kvPair 0}" = elemAt kvPair 1;}))

View File

@ -0,0 +1,46 @@
// These packages come packaged with nodejs.
var fs = require('fs');
var url = require('url');
function versionSpecIsImpure(versionSpec) {
// Returns true if a version spec is impure.
return (versionSpec == "latest" || versionSpec == "unstable" ||
// file path references
versionSpec.substr(0, 2) == ".." ||
versionSpec.substr(0, 2) == "./" ||
versionSpec.substr(0, 2) == "~/" ||
versionSpec.substr(0, 1) == '/' ||
// github owner/repo references
/^[^/]+\/[^/]+(#.*)?$/.test(versionSpec) ||
// is a URL
url.parse(versionSpec).protocol);
}
// Load up the package object.
var packageObj = JSON.parse(fs.readFileSync('./package.json'));
// Purify dependencies.
var depTypes = ['dependencies', 'devDependencies', 'optionalDependencies'];
for (var i in depTypes) {
var depType = depTypes[i];
var depSet = packageObj[depType];
if (depSet !== undefined) {
for (var depName in depSet) {
if (versionSpecIsImpure(depSet[depName])) {
depSet[depName] = '*';
}
}
}
}
/* Remove peer dependencies */
if (process.env.removePeerDependencies && packageObj.peerDependencies) {
console.log("WARNING: removing the following peer dependencies:");
for (key in packageObj.peerDependencies) {
console.log(" " + key + ": " + packageObj.peerDependencies[key]);
}
delete packageObj.peerDependencies;
}
/* Write the fixed JSON file */
fs.writeFileSync("package.json", JSON.stringify(packageObj));

View File

@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "asn1";
version = "0.2.3";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/asn1/-/asn1-0.2.3.tgz";
sha1 = "dac8787713c9966849fc8180777ebe9c1ddf3b86";
};
deps = [];
meta = {
homepage = "https://github.com/mcavage/node-asn1";
description = "Contains parsers and serializers for ASN.1 (currently BER only)";
};
}

View File

@ -0,0 +1 @@
0.2.3.nix

View File

@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "assert-plus";
version = "0.1.5";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/assert-plus/-/assert-plus-0.1.5.tgz";
sha1 = "ee74009413002d84cec7219c6ac811812e723160";
};
deps = [];
devDependencies = [];
meta = {
description = "Extra assertions on top of node's assert module";
};
}

View File

@ -0,0 +1 @@
0.1.5.nix

View File

@ -0,0 +1,21 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "backoff";
version = "2.4.1";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/backoff/-/backoff-2.4.1.tgz";
sha1 = "2f68c50e0dd789dbefe24200a62efb04d2456d68";
};
deps = with nodePackages; [
precond_0-2-3
];
meta = {
description = "Fibonacci and exponential backoffs.";
keywords = [
"backoff"
"retry"
"fibonacci"
"exponential"
];
};
}

View File

@ -0,0 +1 @@
2.4.1.nix

View File

@ -0,0 +1,30 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "bunyan";
version = "1.5.1";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/bunyan/-/bunyan-1.5.1.tgz";
sha1 = "5f6e7d44c43b952f56b0f41309e3ab12391b4e2d";
};
deps = with nodePackages; [
dtrace-provider_0-6-0
safe-json-stringify_1-0-3
mv_2-0-3
];
optionalDependencies = with nodePackages; [
dtrace-provider_0-6-0
safe-json-stringify_1-0-3
mv_2-0-3
];
meta = {
homepage = "https://github.com/trentm/node-bunyan";
description = "a JSON logging library for node.js services";
keywords = [
"log"
"logging"
"log4j"
"json"
"bunyan"
];
};
}

View File

@ -0,0 +1 @@
1.5.1.nix

View File

@ -0,0 +1,24 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "dashdash";
version = "1.10.1";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/dashdash/-/dashdash-1.10.1.tgz";
sha1 = "0abf1af89a8f5129a81f18c2b35b21df22622f60";
};
deps = with nodePackages; [
assert-plus_0-1-5
];
meta = {
homepage = "https://github.com/trentm/node-dashdash";
description = "A light, featureful and explicit option parsing library.";
keywords = [
"option"
"parser"
"parsing"
"cli"
"command"
"args"
];
};
}

View File

@ -0,0 +1 @@
1.10.1.nix

View File

@ -0,0 +1,17 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "dtrace-provider";
version = "0.6.0";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/dtrace-provider/-/dtrace-provider-0.6.0.tgz";
sha1 = "0b078d5517937d873101452d9146737557b75e51";
};
deps = with nodePackages; [
nan_2-1-0
];
meta = {
homepage = "https://github.com/chrisa/node-dtrace-provider#readme";
description = "Native DTrace providers for node.js applications";
keywords = [ "dtrace" ];
};
}

View File

@ -0,0 +1 @@
0.6.0.nix

View File

@ -0,0 +1,15 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "extsprintf";
version = "1.2.0";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/extsprintf/-/extsprintf-1.2.0.tgz";
sha1 = "5ad946c22f5b32ba7f8cd7426711c6e8a3fc2529";
};
deps = [];
devDependencies = [];
meta = {
homepage = "https://github.com/davepacheco/node-extsprintf";
description = "extended POSIX-style sprintf";
};
}

View File

@ -0,0 +1 @@
1.2.0.nix

View File

@ -0,0 +1,16 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "ldap-filter";
version = "0.2.2";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/ldap-filter/-/ldap-filter-0.2.2.tgz";
sha1 = "f2b842be0b86da3352798505b31ebcae590d77d0";
};
deps = with nodePackages; [
assert-plus_0-1-5
];
meta = {
homepage = "http://ldapjs.org";
description = "API for handling LDAP-style filters";
};
}

View File

@ -0,0 +1 @@
0.2.2.nix

View File

@ -0,0 +1,28 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "ldapjs";
version = "1.0.0";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/ldapjs/-/ldapjs-1.0.0.tgz";
sha1 = "1da2cd5bfb9cb103c1ba516938da971bc2bbc3f2";
};
deps = with nodePackages; [
ldap-filter_0-2-2
asn1_0-2-3
bunyan_1-5-1
once_1-3-2
vasync_1-6-3
dtrace-provider_0-6-0
backoff_2-4-1
assert-plus_0-1-5
verror_1-6-0
dashdash_1-10-1
];
optionalDependencies = with nodePackages; [
dtrace-provider_0-6-0
];
meta = {
homepage = "http://ldapjs.org";
description = "LDAP client and server APIs";
};
}

View File

@ -0,0 +1 @@
1.0.0.nix

View File

@ -0,0 +1,20 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "minimist";
version = "0.0.8";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz";
sha1 = "857fcabfc3397d2625b8228262e86aa7a011b05d";
};
deps = [];
meta = {
homepage = "https://github.com/substack/minimist";
description = "parse argument options";
keywords = [
"argv"
"getopt"
"parser"
"optimist"
];
};
}

View File

@ -0,0 +1 @@
0.0.8.nix

View File

@ -0,0 +1,20 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "mkdirp";
version = "0.5.1";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz";
sha1 = "30057438eac6cf7f8c4767f38648d6697d75c903";
};
deps = with nodePackages; [
minimist_0-0-8
];
meta = {
homepage = "https://github.com/substack/node-mkdirp#readme";
description = "Recursively mkdir, like `mkdir -p`";
keywords = [
"mkdir"
"directory"
];
};
}

View File

@ -0,0 +1 @@
0.5.1.nix

View File

@ -0,0 +1,26 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "mv";
version = "2.0.3";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/mv/-/mv-2.0.3.tgz";
sha1 = "e9ab707d71dc38de24edcc637a8e2f5f480c7f32";
};
deps = with nodePackages; [
ncp_0-6-0
mkdirp_0-5-1
rimraf_2-2-8
];
meta = {
homepage = "https://github.com/andrewrk/node-mv";
description = "fs.rename but works across devices. same as the unix utility 'mv'";
keywords = [
"mv"
"move"
"rename"
"device"
"recursive"
"folder"
];
};
}

View File

@ -0,0 +1 @@
2.0.3.nix

View File

@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "nan";
version = "2.1.0";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/nan/-/nan-2.1.0.tgz";
sha1 = "020a7ccedc63fdee85f85967d5607849e74abbe8";
};
deps = [];
meta = {
homepage = "https://github.com/nodejs/nan#readme";
description = "Native Abstractions for Node.js: C++ header for Node 0.8 -> 4 compatibility";
};
}

View File

@ -0,0 +1 @@
2.1.0.nix

View File

@ -0,0 +1,15 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "ncp";
version = "0.6.0";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/ncp/-/ncp-0.6.0.tgz";
sha1 = "df8ce021e262be21b52feb3d3e5cfaab12491f0d";
};
deps = [];
meta = {
homepage = "https://github.com/AvianFlu/ncp";
description = "Asynchronous recursive file copy utility.";
keywords = [ "cli" "copy" ];
};
}

View File

@ -0,0 +1 @@
0.6.0.nix

View File

@ -0,0 +1,22 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "once";
version = "1.3.2";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/once/-/once-1.3.2.tgz";
sha1 = "d8feeca93b039ec1dcdee7741c92bdac5e28081b";
};
deps = with nodePackages; [
wrappy_1-0-1
];
meta = {
homepage = "https://github.com/isaacs/once#readme";
description = "Run a function exactly one time";
keywords = [
"once"
"function"
"one"
"single"
];
};
}

View File

@ -0,0 +1 @@
1.3.2.nix

View File

@ -0,0 +1,20 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "precond";
version = "0.2.3";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/precond/-/precond-0.2.3.tgz";
sha1 = "aa9591bcaa24923f1e0f4849d240f47efc1075ac";
};
deps = [];
meta = {
description = "Precondition checking utilities.";
keywords = [
"precondition"
"assert"
"invariant"
"contract"
"condition"
];
};
}

View File

@ -0,0 +1 @@
0.2.3.nix

View File

@ -0,0 +1,15 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "rimraf";
version = "2.2.8";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/rimraf/-/rimraf-2.2.8.tgz";
sha1 = "e439be2aaee327321952730f99a8929e4fc50582";
};
deps = [];
devDependencies = [];
meta = {
homepage = "https://github.com/isaacs/rimraf";
description = "A deep deletion module for node (like `rm -rf`)";
};
}

View File

@ -0,0 +1 @@
2.2.8.nix

View File

@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "safe-json-stringify";
version = "1.0.3";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/safe-json-stringify/-/safe-json-stringify-1.0.3.tgz";
sha1 = "3cb6717660a086d07cb5bd9b7a6875bcf67bd05e";
};
deps = [];
meta = {
homepage = "https://github.com/e-conomic/safe-json-stringify";
description = "Prevent defined property getters from throwing errors";
};
}

View File

@ -0,0 +1 @@
1.0.3.nix

View File

@ -0,0 +1,16 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "vasync";
version = "1.6.3";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/vasync/-/vasync-1.6.3.tgz";
sha1 = "4a69d7052a47f4ce85503d7641df1cbf40432a94";
};
deps = with nodePackages; [
verror_1-6-0
];
meta = {
homepage = "https://github.com/davepacheco/node-vasync";
description = "utilities for observable asynchronous control flow";
};
}

View File

@ -0,0 +1 @@
1.6.3.nix

View File

@ -0,0 +1,17 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "verror";
version = "1.6.0";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/verror/-/verror-1.6.0.tgz";
sha1 = "7d13b27b1facc2e2da90405eb5ea6e5bdd252ea5";
};
deps = with nodePackages; [
extsprintf_1-2-0
];
devDependencies = [];
meta = {
homepage = "https://github.com/davepacheco/node-verror";
description = "richer JavaScript errors";
};
}

View File

@ -0,0 +1 @@
1.6.0.nix

View File

@ -0,0 +1,14 @@
{ buildNodePackage, nodePackages, pkgs }:
buildNodePackage {
name = "wrappy";
version = "1.0.1";
src = pkgs.fetchurl {
url = "http://registry.npmjs.org/wrappy/-/wrappy-1.0.1.tgz";
sha1 = "1e65969965ccbc2db4548c6b84a6f2c5aedd4739";
};
deps = [];
meta = {
homepage = "https://github.com/npm/wrappy";
description = "Callback wrapping utility";
};
}

View File

@ -0,0 +1 @@
1.0.1.nix

17
package.nix Normal file
View File

@ -0,0 +1,17 @@
{ mkDerivation, asn1-encoding, asn1-types, async, base, bytestring
, connection, containers, doctest, hspec, network, process
, semigroups, stdenv, stm, text
}:
mkDerivation {
pname = "ldap-client";
version = "0.1.0";
src = ./.;
buildDepends = [
asn1-encoding asn1-types async base bytestring connection
containers network semigroups stm text
];
testDepends = [ base bytestring doctest hspec process semigroups ];
homepage = "https://supki.github.io/ldap-client";
description = "Pure Haskell LDAP Client Library";
license = stdenv.lib.licenses.bsd2;
}

17
shell.nix Normal file
View File

@ -0,0 +1,17 @@
{ nixpkgs ? import <nixpkgs> {}, compiler ? "ghc7102" }: let
inherit (nixpkgs) pkgs;
ghc = pkgs.haskell.packages.${compiler}.ghcWithPackages(ps: [
ps.hdevtools ps.doctest ps.hspec-discover ps.hlint ps.ghc-mod
]);
cabal-install = pkgs.haskell.packages.${compiler}.cabal-install;
pkg = import ./default.nix { inherit nixpkgs compiler; };
npm = import ./npm {};
in
pkgs.stdenv.mkDerivation rec {
name = pkg.pname;
buildInputs = [ ghc cabal-install npm.nodePackages.ldapjs ] ++ pkg.env.buildInputs;
shellHook = ''
${pkg.env.shellHook}
cabal configure --enable-tests --package-db=$NIX_GHC_LIBDIR/package.conf.d
'';
}

View File

@ -1,429 +1,217 @@
-- | This module contains conversions from LDAP types to ASN.1.
--
-- Various hacks are employed because "asn1-encoding" only encodes to DER, but
-- LDAP demands BER-encoding. So, when a definition looks suspiciously different
-- from the spec in the comment, that's why. I hope all that will be fixed
-- eventually.
{-# LANGUAGE CPP #-}
module Ldap.Asn1.ToAsn1
( ToAsn1(toAsn1)
( Ber
, encode
, bool
, int32
, enum
, octetstring
, null
, sequence
, set
, tagged
, Mod
, Tag
, application
, context
, tag
) where
import Data.ASN1.Types (ASN1, ASN1Class, ASN1Tag, ASN1ConstructionType)
import qualified Data.ASN1.Types as Asn1
import Data.ByteString (ByteString)
import Data.Foldable (fold, foldMap)
import Data.List.NonEmpty (NonEmpty)
import Data.Maybe (maybe)
import Data.Monoid (Endo(Endo), (<>), mempty)
import qualified Data.Text.Encoding as Text
import Prelude (Integer, (.), fromIntegral)
import Data.Bits (Bits((.&.), (.|.), shiftR))
import qualified Data.ByteString as ByteString
import qualified Data.ByteString.Lazy as ByteString.Lazy
import Data.ByteString.Lazy (ByteString)
import Data.ByteString.Lazy.Builder (Builder)
import qualified Data.ByteString.Lazy.Builder as Builder
import Data.Int (Int64, Int32)
import Data.List.NonEmpty (NonEmpty((:|)))
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid (Monoid(..))
#endif
import Data.Semigroup (Semigroup(..))
import Data.Word (Word8)
import Prelude hiding (null, sequence)
import Ldap.Asn1.Type
-- $setup
-- >>> :set -XOverloadedStrings
data Ber = Ber !Int64 !Builder
-- | Convert a LDAP type to ASN.1.
instance Semigroup Ber where
Ber l b <> Ber l' b' = Ber (l + l') (b <> b')
instance Monoid Ber where
mempty = Ber 0 mempty
mappend = (<>)
encode :: Ber -> ByteString
encode (Ber _ b) = Builder.toLazyByteString b
-- | Encoding of a boolean value.
--
-- When it's relevant, instances include the part of RFC describing the encoding.
class ToAsn1 a where
toAsn1 :: a -> Endo [ASN1]
-- >>> encode (bool mempty True)
-- "\SOH\SOH\255"
--
-- >>> encode (bool mempty False)
-- "\SOH\SOH\NUL"
bool :: Mod -> Bool -> Ber
bool f b = fromBytes ((t .|. classBit f) : ts ++ [0x01, if b then 0xFF else 0x00])
where
t :| ts = tagBits (tag 0x01 <> f)
{- |
@
LDAPMessage ::= SEQUENCE {
messageID MessageID,
protocolOp CHOICE {
bindRequest BindRequest,
bindResponse BindResponse,
unbindRequest UnbindRequest,
searchRequest SearchRequest,
searchResEntry SearchResultEntry,
searchResDone SearchResultDone,
searchResRef SearchResultReference,
addRequest AddRequest,
addResponse AddResponse,
... },
controls [0] Controls OPTIONAL }
@
-}
instance ToAsn1 op => ToAsn1 (LdapMessage op) where
toAsn1 (LdapMessage i op mc) =
sequence (toAsn1 i <> toAsn1 op <> maybe mempty (context 0 . toAsn1) mc)
{- |
@
MessageID ::= INTEGER (0 .. maxInt)
@
-}
instance ToAsn1 Id where
toAsn1 (Id i) = single (Asn1.IntVal (fromIntegral i))
{- |
@
LDAPString ::= OCTET STRING -- UTF-8 encoded
@
-}
instance ToAsn1 LdapString where
toAsn1 (LdapString s) = single (Asn1.OctetString (Text.encodeUtf8 s))
{- |
@
LDAPOID ::= OCTET STRING -- Constrained to \<numericoid\>
@
-}
instance ToAsn1 LdapOid where
toAsn1 (LdapOid s) = single (Asn1.OctetString (Text.encodeUtf8 s))
{- |
@
LDAPDN ::= LDAPString -- Constrained to \<distinguishedName\>
@
-}
instance ToAsn1 LdapDn where
toAsn1 (LdapDn s) = toAsn1 s
{- |
@
RelativeLDAPDN ::= LDAPString -- Constrained to \<name-component\>
@
-}
instance ToAsn1 RelativeLdapDn where
toAsn1 (RelativeLdapDn s) = toAsn1 s
{- |
@
AttributeDescription ::= LDAPString
@
-}
instance ToAsn1 AttributeDescription where
toAsn1 (AttributeDescription s) = toAsn1 s
{- |
@
AttributeValue ::= OCTET STRING
@
-}
instance ToAsn1 AttributeValue where
toAsn1 (AttributeValue s) = single (Asn1.OctetString s)
{- |
@
AttributeValueAssertion ::= SEQUENCE {
attributeDesc AttributeDescription,
assertionValue AssertionValue }
@
-}
instance ToAsn1 AttributeValueAssertion where
toAsn1 (AttributeValueAssertion d v) = toAsn1 d <> toAsn1 v
{- |
@
AssertionValue ::= OCTET STRING
@
-}
instance ToAsn1 AssertionValue where
toAsn1 (AssertionValue s) = single (Asn1.OctetString s)
{- |
@
PartialAttribute ::= SEQUENCE {
type AttributeDescription,
vals SET OF value AttributeValue }
@
-}
instance ToAsn1 PartialAttribute where
toAsn1 (PartialAttribute d xs) = sequence (toAsn1 d <> set (toAsn1 xs))
{- |
@
Attribute ::= PartialAttribute(WITH COMPONENTS {
...,
vals (SIZE(1..MAX))})
@
-}
instance ToAsn1 Attribute where
toAsn1 (Attribute d xs) = sequence (toAsn1 d <> set (toAsn1 xs))
{- |
@
MatchingRuleId ::= LDAPString
@
-}
instance ToAsn1 MatchingRuleId where
toAsn1 (MatchingRuleId s) = toAsn1 s
{- |
@
Controls ::= SEQUENCE OF control Control
@
-}
instance ToAsn1 Controls where
toAsn1 (Controls cs) = sequence (toAsn1 cs)
{- |
@
Control ::= SEQUENCE {
controlType LDAPOID,
criticality BOOLEAN DEFAULT FALSE,
controlValue OCTET STRING OPTIONAL }
@
-}
instance ToAsn1 Control where
toAsn1 (Control t c v) =
sequence (fold
[ toAsn1 t
, single (Asn1.Boolean c)
, maybe mempty (single . Asn1.OctetString) v
])
{- |
@
BindRequest ::= [APPLICATION 0] SEQUENCE {
version INTEGER (1 .. 127),
name LDAPDN,
authentication AuthenticationChoice }
@
@
UnbindRequest ::= [APPLICATION 2] NULL
@
@
SearchRequest ::= [APPLICATION 3] SEQUENCE {
baseObject LDAPDN,
scope ENUMERATED {
baseObject (0),
singleLevel (1),
wholeSubtree (2),
... },
derefAliases ENUMERATED {
neverDerefAliases (0),
derefInSearching (1),
derefFindingBaseObj (2),
derefAlways (3) },
sizeLimit INTEGER (0 .. maxInt),
timeLimit INTEGER (0 .. maxInt),
typesOnly BOOLEAN,
filter Filter,
attributes AttributeSelection }
@
@
ModifyRequest ::= [APPLICATION 6] SEQUENCE {
object LDAPDN,
changes SEQUENCE OF change SEQUENCE {
operation ENUMERATED {
add (0),
delete (1),
replace (2),
... },
modification PartialAttribute } }
@
@
AddRequest ::= [APPLICATION 8] SEQUENCE {
entry LDAPDN,
attributes AttributeList }
@
@
DelRequest ::= [APPLICATION 10] LDAPDN
@
@
ModifyDNRequest ::= [APPLICATION 12] SEQUENCE {
entry LDAPDN,
newrdn RelativeLDAPDN,
deleteoldrdn BOOLEAN,
newSuperior [0] LDAPDN OPTIONAL }
@
@
CompareRequest ::= [APPLICATION 14] SEQUENCE {
entry LDAPDN,
ava AttributeValueAssertion }
@
@
ExtendedRequest ::= [APPLICATION 23] SEQUENCE {
requestName [0] LDAPOID,
requestValue [1] OCTET STRING OPTIONAL }
@
-}
instance ToAsn1 ProtocolClientOp where
toAsn1 (BindRequest v n a) =
application 0 (single (Asn1.IntVal (fromIntegral v)) <> toAsn1 n <> toAsn1 a)
toAsn1 UnbindRequest =
other Asn1.Application 2 mempty
toAsn1 (SearchRequest bo s da sl tl to f a) =
application 3 (fold
[ toAsn1 bo
, enum s'
, enum da'
, single (Asn1.IntVal (fromIntegral sl))
, single (Asn1.IntVal (fromIntegral tl))
, single (Asn1.Boolean to)
, toAsn1 f
, toAsn1 a
])
-- | Encoding of an integer value.
--
-- >>> encode (int32 mempty 0)
-- "\STX\SOH\NUL"
--
-- >>> encode (int32 mempty 127)
-- "\STX\SOH\DEL"
--
-- >>> encode (int32 mempty 128)
-- "\STX\STX\NUL\128"
int32 :: Mod -> Int32 -> Ber
int32 f n = fromBytes ((t .|. classBit f) : ts ++ fromIntegral (length bytes) : bytes)
where
t :| ts = tagBits (tag 0x02 <> f)
bytes
| n .&. 0x80 == 0x80 = 0x00 : reverse (go n)
| otherwise = reverse (go n)
where
s' = case s of
BaseObject -> 0
SingleLevel -> 1
WholeSubtree -> 2
da' = case da of
NeverDerefAliases -> 0
DerefInSearching -> 1
DerefFindingBaseObject -> 2
DerefAlways -> 3
toAsn1 (ModifyRequest dn xs) =
application 6 (fold
[ toAsn1 dn
, sequence (foldMap (\(op, pa) -> sequence (enum (case op of
Add -> 0
Delete -> 1
Replace -> 2) <> toAsn1 pa)) xs)
])
toAsn1 (AddRequest dn as) =
application 8 (toAsn1 dn <> toAsn1 as)
toAsn1 (DeleteRequest (LdapDn (LdapString dn))) =
other Asn1.Application 10 (Text.encodeUtf8 dn)
toAsn1 (ModifyDnRequest dn rdn del new) =
application 12 (fold
[ toAsn1 dn
, toAsn1 rdn
, single (Asn1.Boolean del)
, maybe mempty
(\(LdapDn (LdapString dn')) -> other Asn1.Context 0 (Text.encodeUtf8 dn'))
new
])
toAsn1 (CompareRequest dn av) =
application 14 (toAsn1 dn <> sequence (toAsn1 av))
toAsn1 (ExtendedRequest (LdapOid oid) mv) =
application 23 (fold
[ other Asn1.Context 0 (Text.encodeUtf8 oid)
, maybe mempty (other Asn1.Context 1) mv
])
go i
| i <= 0xff = return (fromIntegral i)
| otherwise = (fromIntegral i .&. 0xff) : go (i `shiftR` 8)
{- |
@
AuthenticationChoice ::= CHOICE {
simple [0] OCTET STRING,
... }
@
-}
instance ToAsn1 AuthenticationChoice where
toAsn1 (Simple s) = other Asn1.Context 0 s
-- | Encoding of an enumerated value.
--
-- It is encoded exactly the same as an integer value, but the tag number is different.
enum :: Mod -> Int32 -> Ber
enum f = int32 (tag 0x0a <> f)
{- |
@
AttributeSelection ::= SEQUENCE OF selector LDAPString
@
-}
instance ToAsn1 AttributeSelection where
toAsn1 (AttributeSelection as) = sequence (toAsn1 as)
-- | Encoding of an octet string.
octetstring :: Mod -> ByteString.ByteString -> Ber
octetstring f bs = Ber
(fromIntegral (ByteString.length bs) + 2 + fromIntegral (length ts))
(Builder.word8 (t .|. classBit f) <> Builder.lazyByteString (ByteString.Lazy.pack ts) <>
Builder.byteString (ByteString.pack (encodeLength (ByteString.length bs))) <>
Builder.byteString bs)
where
t :| ts = tagBits (tag 0x04 <> f)
{- |
@
Filter ::= CHOICE {
and [0] SET SIZE (1..MAX) OF filter Filter,
or [1] SET SIZE (1..MAX) OF filter Filter,
not [2] Filter,
equalityMatch [3] AttributeValueAssertion,
substrings [4] SubstringFilter,
greaterOrEqual [5] AttributeValueAssertion,
lessOrEqual [6] AttributeValueAssertion,
present [7] AttributeDescription,
approxMatch [8] AttributeValueAssertion,
extensibleMatch [9] MatchingRuleAssertion,
... }
@
-}
instance ToAsn1 Filter where
toAsn1 f = case f of
And xs -> context 0 (toAsn1 xs)
Or xs -> context 1 (toAsn1 xs)
Not x -> context 2 (toAsn1 x)
EqualityMatch x -> context 3 (toAsn1 x)
Substrings x -> context 4 (toAsn1 x)
GreaterOrEqual x -> context 5 (toAsn1 x)
LessOrEqual x -> context 6 (toAsn1 x)
Present (AttributeDescription (LdapString x))
-> other Asn1.Context 7 (Text.encodeUtf8 x)
ApproxMatch x -> context 8 (toAsn1 x)
ExtensibleMatch x -> context 9 (toAsn1 x)
-- | Encoding of NULL
--
-- >>> encode (null mempty)
-- "\ENQ\NUL"
null :: Mod -> Ber
null f = fromBytes ((t .|. classBit f) : ts ++ [0])
where
t :| ts = tagBits (tag 0x05 <> f)
{- |
@
SubstringFilter ::= SEQUENCE {
type AttributeDescription,
substrings SEQUENCE SIZE (1..MAX) OF substring CHOICE {
initial [0] AssertionValue, -- can occur at most once
any [1] AssertionValue,
final [2] AssertionValue } -- can occur at most once
}
@
-}
instance ToAsn1 SubstringFilter where
toAsn1 (SubstringFilter ad ss) =
toAsn1 ad <> sequence (foldMap (\s -> case s of
Initial (AssertionValue v) -> other Asn1.Context 0 v
Any (AssertionValue v) -> other Asn1.Context 1 v
Final (AssertionValue v) -> other Asn1.Context 2 v) ss)
-- | Encoding of a sequence [of].
--
-- >>> encode (sequence mempty (octetstring mempty "Smith" <> bool mempty True))
-- "0\n\EOT\ENQSmith\SOH\SOH\255"
sequence :: Mod -> Ber -> Ber
sequence m = tagged (tag 0x10 <> m)
{- |
@
MatchingRuleAssertion ::= SEQUENCE {
matchingRule [1] MatchingRuleId OPTIONAL,
type [2] AttributeDescription OPTIONAL,
matchValue [3] AssertionValue,
dnAttributes [4] BOOLEAN DEFAULT FALSE }
@
-}
instance ToAsn1 MatchingRuleAssertion where
toAsn1 (MatchingRuleAssertion mmr mad (AssertionValue av) _) = fold
[ maybe mempty f mmr
, maybe mempty g mad
, other Asn1.Context 3 av
]
where
f (MatchingRuleId (LdapString x)) = other Asn1.Context 1 (Text.encodeUtf8 x)
g (AttributeDescription (LdapString x)) = other Asn1.Context 2 (Text.encodeUtf8 x)
-- | Encoding of a set [of].
--
-- >>> encode (set mempty (octetstring mempty "Smith" <> bool mempty True))
-- "1\n\EOT\ENQSmith\SOH\SOH\255"
set :: Mod -> Ber -> Ber
set m = tagged (tag 0x11 <> m)
{- |
@
AttributeList ::= SEQUENCE OF attribute Attribute
@
-}
instance ToAsn1 AttributeList where
toAsn1 (AttributeList xs) = sequence (toAsn1 xs)
-- | Encoding of a (possibly tagged) constructed value.
tagged :: Mod -> Ber -> Ber
tagged f b@(Ber l _) = fromBytes ((t .|. constructedTag .|. classBit f) : ts ++ encodeLength l) <> b
where
t :| ts = tagBits f
constructedTag = 0x20
instance ToAsn1 a => ToAsn1 [a] where
toAsn1 = foldMap toAsn1
fromBytes :: [Word8] -> Ber
fromBytes xs = let bs = ByteString.Lazy.pack xs in Ber (ByteString.Lazy.length bs) (Builder.lazyByteString bs)
instance ToAsn1 a => ToAsn1 (NonEmpty a) where
toAsn1 = foldMap toAsn1
defaultTag :: Tag
defaultTag = Tag Universal (Number 0)
sequence :: Endo [ASN1] -> Endo [ASN1]
sequence = construction Asn1.Sequence
newtype Mod = Mod (Tag -> Tag)
set :: Endo [ASN1] -> Endo [ASN1]
set = construction Asn1.Set
instance Semigroup Mod where
Mod f <> Mod g = Mod (g . f)
application :: ASN1Tag -> Endo [ASN1] -> Endo [ASN1]
application = construction . Asn1.Container Asn1.Application
instance Monoid Mod where
mappend = (<>)
mempty = Mod id
context :: ASN1Tag -> Endo [ASN1] -> Endo [ASN1]
context = construction . Asn1.Container Asn1.Context
data Class =
Universal
| Application
| Context
deriving (Show, Eq)
construction :: ASN1ConstructionType -> Endo [ASN1] -> Endo [ASN1]
construction t x = single (Asn1.Start t) <> x <> single (Asn1.End t)
data Tag = Tag !Class !Number
deriving (Show, Eq)
other :: ASN1Class -> ASN1Tag -> ByteString -> Endo [ASN1]
other c t = single . Asn1.Other c t
newtype Number = Number Word8
deriving (Show, Eq)
enum :: Integer -> Endo [ASN1]
enum = single . Asn1.Enumerated
classBit :: Mod -> Word8
classBit (Mod f) = case f defaultTag of
Tag Universal _ -> 0x00
Tag Application _ -> 0x40
Tag Context _ -> 0x80
single :: a -> Endo [a]
single x = Endo (x :)
tagBits :: Mod -> NonEmpty Word8
tagBits (Mod f) = case f defaultTag of Tag _ t -> encodeTagNumber t
application, context :: Mod
application = class_ Application
context = class_ Context
class_ :: Class -> Mod
class_ c = Mod (\(Tag _ t) -> Tag c t)
tag :: Word8 -> Mod
tag t = Mod (\(Tag c _) -> Tag c (Number t))
-- | Small tag numbers (up to and including 30) are bit-OR'd
-- directly with the first Identifier byte, while the bigger ones
-- are encoded idiosyncratically.
--
-- >>> encodeTagNumber (Number 19)
-- 19 :| []
--
-- >>> encodeTagNumber (Number 31)
-- 31 :| [31]
--
-- >>> encodeTagNumber (Number 137)
-- 31 :| [129,9]
encodeTagNumber :: Number -> NonEmpty Word8
encodeTagNumber (Number n)
| n < 30 = return n
| otherwise = 0x1f :| reverse (go n)
where
go x = fromIntegral (x .&. 0x7f) : go' (x `shiftR` 7)
go' 0 = []
go' x = (fromIntegral (x .&. 0x7f) .|. 0x80) : go' (x `shiftR` 7)
-- | Small lengths (up to and including 127) are returned as a single
-- byte equal to the length itself, while the bigger ones are encoded
-- idiosyncratically.
--
-- >>> encodeLength 7
-- [7]
--
-- >>> encodeLength 12238
-- [130,47,206]
--
-- @
-- encodeLength :: (Integral a, Bits a) => a -> NonEmpty Word8
-- @
encodeLength :: (Integral a, Bits a) => a -> [Word8]
encodeLength n
| n < 0x80 = [fromIntegral n]
| otherwise = let (l, xs) = go n in (l .|. 0x80) : reverse xs
where
go x
| x <= 0xff = (1, [fromIntegral x])
| otherwise = let (l, xs) = go (x `shiftR` 8) in (l + 1, (fromIntegral x .&. 0xff) : xs)
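To make the identifier and length octet rules above concrete, here is a small self-contained sketch (an editorial illustration, not part of this compare; the helper names identifier and lengthOctets are mine, not the module's). It reproduces the same arithmetic and matches the doctest values shown for sequence and encodeLength:

import Data.Bits ((.&.), (.|.), shiftR)
import Data.Word (Word8)

-- Identifier octet: class bits (universal 0x00, application 0x40, context 0x80)
-- OR'd with 0x20 for constructed encodings and with the tag number (here <= 30).
identifier :: Word8 -> Bool -> Word8 -> Word8
identifier cls constructed tagNum =
  cls .|. (if constructed then 0x20 else 0x00) .|. tagNum

-- Length octets: lengths up to 127 fit in one byte; longer lengths use
-- 0x80 + number-of-length-bytes, followed by the length in big-endian bytes.
lengthOctets :: Int -> [Word8]
lengthOctets n
  | n < 0x80  = [fromIntegral n]
  | otherwise = let bs = bigEndian n in (0x80 .|. fromIntegral (length bs)) : bs
  where
    bigEndian 0 = []
    bigEndian x = bigEndian (x `shiftR` 8) ++ [fromIntegral (x .&. 0xff)]

main :: IO ()
main = do
  -- A universal, constructed SEQUENCE (tag 0x10) starts with 0x30, which is
  -- why the sequence doctest output begins with the character '0' (code 48).
  print (identifier 0x00 True 0x10)   -- 48
  -- [APPLICATION 3] (SearchRequest) as a constructed value: 0x40 .|. 0x20 .|. 3.
  print (identifier 0x40 True 0x03)   -- 99
  -- Long-form length, matching the doctest: encodeLength 12238 == [130,47,206].
  print (lengthOctets 12238)          -- [130,47,206]
  print (lengthOctets 7)              -- [7]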

View File

@ -28,6 +28,7 @@ data ProtocolClientOp =
| DeleteRequest !LdapDn
| ModifyDnRequest !LdapDn !RelativeLdapDn !Bool !(Maybe LdapDn)
| CompareRequest !LdapDn !AttributeValueAssertion
| AbandonRequest !Id
| ExtendedRequest !LdapOid !(Maybe ByteString)
deriving (Show, Eq)

View File

@ -57,12 +57,13 @@ module Ldap.Client
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$>))
import Control.Applicative ((<$>), (<*>))
#endif
import qualified Control.Concurrent.Async as Async
import Control.Concurrent.STM (atomically, throwSTM)
import Control.Concurrent.STM.TMVar (putTMVar)
import Control.Concurrent.STM.TQueue (TQueue, newTQueueIO, writeTQueue, readTQueue)
import Control.Concurrent.STM.TVar (newTVarIO)
import Control.Exception (Exception, Handler(..), bracket, throwIO, catch, catches)
import Control.Monad (forever)
import qualified Data.ASN1.BinaryEncoding as Asn1
@ -74,7 +75,9 @@ import Data.Foldable (asum)
import Data.Function (fix)
import Data.List.NonEmpty (NonEmpty((:|)))
import qualified Data.Map.Strict as Map
import Data.Monoid (Endo(appEndo))
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid (mempty)
#endif
import Data.String (fromString)
import Data.Text (Text)
#if __GLASGOW_HASKELL__ < 710
@ -86,9 +89,10 @@ import qualified Network.Connection as Conn
import Prelude hiding (compare)
import qualified System.IO.Error as IO
import Ldap.Asn1.ToAsn1 (ToAsn1(toAsn1))
import Ldap.Asn1.FromAsn1 (FromAsn1, parseAsn1)
import Ldap.Asn1.ToAsn1 (encode)
import qualified Ldap.Asn1.Type as Type
import Ldap.Client.Asn1.ToAsn1 (ToAsn1(toAsn1))
import Ldap.Client.Internal
import Ldap.Client.Bind (Password(..), bind)
import Ldap.Client.Search
@ -115,6 +119,7 @@ import Ldap.Client.Extended (Oid(..), extended)
newLdap :: IO Ldap
newLdap = Ldap
<$> newTQueueIO
<*> newTVarIO (Type.Id 0)
-- | Various failures that can happen when working with LDAP.
data LdapError =
@ -203,9 +208,7 @@ input inq conn = wrap . flip fix [] $ \loop chunks -> do
output :: ToAsn1 a => TQueue a -> Connection -> IO b
output out conn = wrap . forever $ do
msg <- atomically (readTQueue out)
Conn.connectionPut conn (encode (toAsn1 msg))
where
encode x = Asn1.encodeASN1' Asn1.DER (appEndo x [])
Conn.connectionPut conn (ByteString.Lazy.toStrict (encode (toAsn1 mempty msg)))
dispatch
:: Ldap
@ -213,11 +216,11 @@ dispatch
-> TQueue (Type.LdapMessage Request)
-> IO a
dispatch Ldap { client } inq outq =
flip fix (Map.empty, 1) $ \loop (!req, !counter) ->
flip fix Map.empty $ \loop !req ->
loop =<< atomically (asum
[ do New new var <- readTQueue client
writeTQueue outq (Type.LdapMessage (Type.Id counter) new Nothing)
return (Map.insert (Type.Id counter) ([], var) req, counter + 1)
[ do New mid new var <- readTQueue client
writeTQueue outq (Type.LdapMessage mid new Nothing)
return (Map.insert mid ([], var) req)
, do Type.LdapMessage mid op _
<- readTQueue inq
res <- case op of
@ -232,7 +235,7 @@ dispatch Ldap { client } inq outq =
Type.CompareResponse {} -> done mid op req
Type.ExtendedResponse {} -> probablyDisconnect mid op req
Type.IntermediateResponse {} -> saveUp mid op req
return (res, counter)
return res
])
where
saveUp mid op res =

View File

@ -0,0 +1,39 @@
-- | <https://tools.ietf.org/html/rfc4511#section-4.11 Abandon> operation.
--
-- This operation comes in two flavours:
--
-- * asynchronous, 'IO' based ('abandonAsync')
--
-- * asynchronous, 'STM' based ('abandonAsyncSTM')
--
-- Of those, the first one ('abandonAsync') is probably the most useful for the typical use case.
--
-- Synchronous variants are unavailable because the Directory does not
-- respond to @AbandonRequest@s.
module Ldap.Client.Abandon
( abandonAsync
, abandonAsyncSTM
) where
import Control.Monad (void)
import Control.Monad.STM (STM, atomically)
import qualified Ldap.Asn1.Type as Type
import Ldap.Client.Internal
-- | Perform the Abandon operation asynchronously.
abandonAsync :: Ldap -> Async a -> IO ()
abandonAsync l =
atomically . abandonAsyncSTM l
-- | Perform the Abandon operation asynchronously.
abandonAsyncSTM :: Ldap -> Async a -> STM ()
abandonAsyncSTM l =
void . sendRequest l die . abandonRequest
where
die = error "Ldap.Client.Abandon: do not wait for the response to UnbindRequest"
abandonRequest :: Async a -> Request
abandonRequest (Async i _) =
Type.AbandonRequest i
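-- A minimal usage sketch (illustrative only; it assumes a Directory on
-- localhost:389 and the 'with', 'searchAsync', 'Plain', 'Dn', 'Attr', and
-- 'Present' names exported from Ldap.Client; the base DN and attribute are
-- made up):
--
-- > import Control.Monad (void)
-- > import qualified Ldap.Client as Ldap
-- > import qualified Ldap.Client.Abandon as Abandon
-- >
-- > example :: IO ()
-- > example =
-- >   void . Ldap.with (Ldap.Plain "localhost") 389 $ \l -> do
-- >     -- start a potentially long-running search without blocking ...
-- >     a <- Ldap.searchAsync l (Ldap.Dn "o=localhost") mempty
-- >            (Ldap.Present (Ldap.Attr "cn")) []
-- >     -- ... then tell the Directory to stop working on it
-- >     Abandon.abandonAsync l a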

View File

@ -0,0 +1,408 @@
-- | This module contains conversions from LDAP types to ASN.1.
module Ldap.Client.Asn1.ToAsn1
( ToAsn1(toAsn1)
) where
import Data.Bool (Bool(False))
import Data.Foldable (foldMap)
import Data.Eq (Eq((==)))
import Data.List.NonEmpty (NonEmpty)
import Data.Monoid (Monoid(mempty), (<>))
import qualified Data.Text.Encoding as Text
import Prelude (fromIntegral)
import Ldap.Asn1.Type
import Ldap.Asn1.ToAsn1
-- | Convert an LDAP type to ASN.1.
--
-- When it's relevant, instances include the part of the RFC describing the encoding.
class ToAsn1 a where
toAsn1 :: Mod -> a -> Ber
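-- The 'Mod' argument lets callers override the identifier octets: a plain
-- component is encoded under the type's default tag by passing 'mempty',
-- while tagged CHOICE alternatives and OPTIONAL components pass an override
-- built from the Ldap.Asn1.ToAsn1 combinators, e.g. (the operand names here
-- are placeholders)
--
-- > toAsn1 mempty attributeValue           -- default tag
-- > toAsn1 (context <> tag 0) newSuperior  -- [0] context-specific tag
--
-- as in the ModifyDNRequest and ExtendedRequest cases below.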
{- |
@
LDAPMessage ::= SEQUENCE {
messageID MessageID,
protocolOp CHOICE {
bindRequest BindRequest,
bindResponse BindResponse,
unbindRequest UnbindRequest,
searchRequest SearchRequest,
searchResEntry SearchResultEntry,
searchResDone SearchResultDone,
searchResRef SearchResultReference,
addRequest AddRequest,
addResponse AddResponse,
... },
controls [0] Controls OPTIONAL }
@
-}
instance ToAsn1 op => ToAsn1 (LdapMessage op) where
toAsn1 m (LdapMessage i op mc) =
sequence m
(toAsn1 mempty i <>
toAsn1 mempty op <>
foldMap (toAsn1 (context <> tag 0)) mc)
{- |
@
MessageID ::= INTEGER (0 .. maxInt)
@
-}
instance ToAsn1 Id where
toAsn1 m (Id i) = int32 m i
{- |
@
LDAPString ::= OCTET STRING -- UTF-8 encoded
@
-}
instance ToAsn1 LdapString where
toAsn1 m (LdapString s) = octetstring m (Text.encodeUtf8 s)
{- |
@
LDAPOID ::= OCTET STRING -- Constrained to \<numericoid\>
@
-}
instance ToAsn1 LdapOid where
toAsn1 m (LdapOid s) = octetstring m (Text.encodeUtf8 s)
{- |
@
LDAPDN ::= LDAPString -- Constrained to \<distinguishedName\>
@
-}
instance ToAsn1 LdapDn where
toAsn1 m (LdapDn s) = toAsn1 m s
{- |
@
RelativeLDAPDN ::= LDAPString -- Constrained to \<name-component\>
@
-}
instance ToAsn1 RelativeLdapDn where
toAsn1 m (RelativeLdapDn s) = toAsn1 m s
{- |
@
AttributeDescription ::= LDAPString
@
-}
instance ToAsn1 AttributeDescription where
toAsn1 m (AttributeDescription s) = toAsn1 m s
{- |
@
AttributeValue ::= OCTET STRING
@
-}
instance ToAsn1 AttributeValue where
toAsn1 m (AttributeValue s) = octetstring m s
{- |
@
AttributeValueAssertion ::= SEQUENCE {
attributeDesc AttributeDescription,
assertionValue AssertionValue }
@
-}
instance ToAsn1 AttributeValueAssertion where
toAsn1 m (AttributeValueAssertion d v) =
sequence m (toAsn1 mempty d <> toAsn1 mempty v)
{- |
@
AssertionValue ::= OCTET STRING
@
-}
instance ToAsn1 AssertionValue where
toAsn1 m (AssertionValue s) = octetstring m s
{- |
@
PartialAttribute ::= SEQUENCE {
type AttributeDescription,
vals SET OF value AttributeValue }
@
-}
instance ToAsn1 PartialAttribute where
toAsn1 m (PartialAttribute d xs) =
sequence m (toAsn1 mempty d <> set mempty (toAsn1 mempty xs))
{- |
@
Attribute ::= PartialAttribute(WITH COMPONENTS {
...,
vals (SIZE(1..MAX))})
@
-}
instance ToAsn1 Attribute where
toAsn1 m (Attribute d xs) =
sequence m (toAsn1 mempty d <> set mempty (toAsn1 mempty xs))
{- |
@
MatchingRuleId ::= LDAPString
@
-}
instance ToAsn1 MatchingRuleId where
toAsn1 m (MatchingRuleId s) = toAsn1 m s
{- |
@
Controls ::= SEQUENCE OF control Control
@
-}
instance ToAsn1 Controls where
toAsn1 m (Controls cs) = sequence m (toAsn1 mempty cs)
{- |
@
Control ::= SEQUENCE {
controlType LDAPOID,
criticality BOOLEAN DEFAULT FALSE,
controlValue OCTET STRING OPTIONAL }
@
-}
instance ToAsn1 Control where
toAsn1 m (Control t c v) =
sequence m
(toAsn1 mempty t <>
default_ False c (bool mempty c) <>
foldMap (octetstring mempty) v)
{- |
@
BindRequest ::= [APPLICATION 0] SEQUENCE {
version INTEGER (1 .. 127),
name LDAPDN,
authentication AuthenticationChoice }
@
@
UnbindRequest ::= [APPLICATION 2] NULL
@
@
SearchRequest ::= [APPLICATION 3] SEQUENCE {
baseObject LDAPDN,
scope ENUMERATED {
baseObject (0),
singleLevel (1),
wholeSubtree (2),
... },
derefAliases ENUMERATED {
neverDerefAliases (0),
derefInSearching (1),
derefFindingBaseObj (2),
derefAlways (3) },
sizeLimit INTEGER (0 .. maxInt),
timeLimit INTEGER (0 .. maxInt),
typesOnly BOOLEAN,
filter Filter,
attributes AttributeSelection }
@
@
ModifyRequest ::= [APPLICATION 6] SEQUENCE {
object LDAPDN,
changes SEQUENCE OF change SEQUENCE {
operation ENUMERATED {
add (0),
delete (1),
replace (2),
... },
modification PartialAttribute } }
@
@
AddRequest ::= [APPLICATION 8] SEQUENCE {
entry LDAPDN,
attributes AttributeList }
@
@
DelRequest ::= [APPLICATION 10] LDAPDN
@
@
ModifyDNRequest ::= [APPLICATION 12] SEQUENCE {
entry LDAPDN,
newrdn RelativeLDAPDN,
deleteoldrdn BOOLEAN,
newSuperior [0] LDAPDN OPTIONAL }
@
@
CompareRequest ::= [APPLICATION 14] SEQUENCE {
entry LDAPDN,
ava AttributeValueAssertion }
@
@
AbandonRequest ::= [APPLICATION 16] MessageID
@
@
ExtendedRequest ::= [APPLICATION 23] SEQUENCE {
requestName [0] LDAPOID,
requestValue [1] OCTET STRING OPTIONAL }
@
-}
instance ToAsn1 ProtocolClientOp where
toAsn1 _ (BindRequest v n a) =
sequence (application <> tag 0)
(int32 mempty (fromIntegral v) <>
toAsn1 mempty n <>
toAsn1 mempty a)
toAsn1 _ UnbindRequest =
null (application <> tag 2)
toAsn1 _ (SearchRequest bo s da sl tl to f a) =
sequence (application <> tag 3)
(toAsn1 mempty bo <>
enum mempty s' <>
enum mempty da' <>
int32 mempty sl <>
int32 mempty tl <>
bool mempty to <>
toAsn1 mempty f <>
toAsn1 mempty a)
where
s' = case s of
BaseObject -> 0
SingleLevel -> 1
WholeSubtree -> 2
da' = case da of
NeverDerefAliases -> 0
DerefInSearching -> 1
DerefFindingBaseObject -> 2
DerefAlways -> 3
toAsn1 _ (ModifyRequest dn xs) =
sequence (application <> tag 6)
(toAsn1 mempty dn <>
sequence mempty (foldMap (\(op, pa) -> sequence mempty (enum mempty (case op of
Add -> 0
Delete -> 1
Replace -> 2) <> toAsn1 mempty pa)) xs))
toAsn1 _ (AddRequest dn as) =
sequence (application <> tag 8) (toAsn1 mempty dn <> toAsn1 mempty as)
toAsn1 _ (DeleteRequest dn) =
toAsn1 (application <> tag 10) dn
toAsn1 _ (ModifyDnRequest dn rdn del new) =
sequence (application <> tag 12)
(toAsn1 mempty dn <>
toAsn1 mempty rdn <>
bool mempty del <>
foldMap (toAsn1 (context <> tag 0)) new)
toAsn1 _ (CompareRequest dn av) =
sequence (application <> tag 14) (toAsn1 mempty dn <> toAsn1 mempty av)
toAsn1 _ (AbandonRequest i) =
toAsn1 (application <> tag 16) i
toAsn1 _ (ExtendedRequest oid mv) =
sequence (application <> tag 23)
(toAsn1 (context <> tag 0) oid <>
foldMap (octetstring (context <> tag 1)) mv)
{- |
@
AuthenticationChoice ::= CHOICE {
simple [0] OCTET STRING,
... }
@
-}
instance ToAsn1 AuthenticationChoice where
toAsn1 _ (Simple s) = octetstring (context <> tag 0) s
{- |
@
AttributeSelection ::= SEQUENCE OF selector LDAPString
@
-}
instance ToAsn1 AttributeSelection where
toAsn1 m (AttributeSelection as) = sequence m (toAsn1 mempty as)
{- |
@
Filter ::= CHOICE {
and [0] SET SIZE (1..MAX) OF filter Filter,
or [1] SET SIZE (1..MAX) OF filter Filter,
not [2] Filter,
equalityMatch [3] AttributeValueAssertion,
substrings [4] SubstringFilter,
greaterOrEqual [5] AttributeValueAssertion,
lessOrEqual [6] AttributeValueAssertion,
present [7] AttributeDescription,
approxMatch [8] AttributeValueAssertion,
extensibleMatch [9] MatchingRuleAssertion,
... }
@
-}
instance ToAsn1 Filter where
toAsn1 _ f = case f of
And xs -> set (context <> tag 0) (toAsn1 mempty xs)
Or xs -> set (context <> tag 1) (toAsn1 mempty xs)
Not x -> tagged (context <> tag 2) (toAsn1 mempty x)
EqualityMatch x -> toAsn1 (context <> tag 3) x
Substrings x -> toAsn1 (context <> tag 4) x
GreaterOrEqual x -> toAsn1 (context <> tag 5) x
LessOrEqual x -> toAsn1 (context <> tag 6) x
Present x -> toAsn1 (context <> tag 7) x
ApproxMatch x -> toAsn1 (context <> tag 8) x
ExtensibleMatch x -> toAsn1 (context <> tag 9) x
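-- For instance (assuming the 'octetstring' helper from Ldap.Asn1.ToAsn1 emits
-- the primitive encoding), @Present (AttributeDescription (LdapString "cn"))@
-- becomes the context-specific tag 7 wrapping the bare attribute name, i.e.
-- the octets 0x87 0x02 0x63 0x6E.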
{- |
@
SubstringFilter ::= SEQUENCE {
type AttributeDescription,
substrings SEQUENCE SIZE (1..MAX) OF substring CHOICE {
initial [0] AssertionValue, -- can occur at most once
any [1] AssertionValue,
final [2] AssertionValue } -- can occur at most once
}
@
-}
instance ToAsn1 SubstringFilter where
toAsn1 m (SubstringFilter ad ss) =
sequence m
(toAsn1 mempty ad <>
sequence mempty (foldMap (\s -> case s of
Initial v -> toAsn1 (context <> tag 0) v
Any v -> toAsn1 (context <> tag 1) v
Final v -> toAsn1 (context <> tag 2) v) ss))
{- |
@
MatchingRuleAssertion ::= SEQUENCE {
matchingRule [1] MatchingRuleId OPTIONAL,
type [2] AttributeDescription OPTIONAL,
matchValue [3] AssertionValue,
dnAttributes [4] BOOLEAN DEFAULT FALSE }
@
-}
instance ToAsn1 MatchingRuleAssertion where
toAsn1 m (MatchingRuleAssertion mmr mad av b) = sequence m
(foldMap (toAsn1 (context <> tag 1)) mmr <>
foldMap (toAsn1 (context <> tag 2)) mad <>
toAsn1 (context <> tag 3) av <>
default_ False b (bool (context <> tag 4) b))
{- |
@
AttributeList ::= SEQUENCE OF attribute Attribute
@
-}
instance ToAsn1 AttributeList where
toAsn1 m (AttributeList xs) = sequence m (toAsn1 mempty xs)
instance ToAsn1 a => ToAsn1 [a] where
toAsn1 _ = foldMap (toAsn1 mempty)
instance ToAsn1 a => ToAsn1 (NonEmpty a) where
toAsn1 _ = foldMap (toAsn1 mempty)
default_ :: (Eq a, Monoid m) => a -> a -> m -> m
default_ a b c = if a == b then mempty else c
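-- Illustration: a component declared with a DEFAULT is omitted from the
-- encoding when it carries the default value, so in the 'Control' instance
-- above @default_ False c (bool mempty c)@ emits no octets for @c == False@
-- and a BOOLEAN for @c == True@; @dnAttributes@ in 'MatchingRuleAssertion'
-- is handled the same way.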

View File

@ -6,7 +6,7 @@ module Ldap.Client.Internal
, Ldap(..)
, ClientMessage(..)
, Type.ResultCode(..)
, Async
, Async(..)
, AttrList
-- * Waiting for Request Completion
, wait
@ -29,6 +29,7 @@ module Ldap.Client.Internal
import Control.Concurrent.STM (STM, atomically)
import Control.Concurrent.STM.TMVar (TMVar, newEmptyTMVar, readTMVar)
import Control.Concurrent.STM.TQueue (TQueue, writeTQueue)
import Control.Concurrent.STM.TVar (TVar, modifyTVar, readTVar)
import Control.Exception (Exception, throwIO)
import Control.Monad (void)
import Data.ByteString (ByteString)
@ -51,18 +52,19 @@ data Host =
-- | A token. All functions that interact with the Directory require one.
data Ldap = Ldap
{ client :: TQueue ClientMessage
, counter :: TVar Type.Id
} deriving (Eq)
data ClientMessage = New Request (TMVar (NonEmpty Type.ProtocolServerOp))
data ClientMessage = New Type.Id Request (TMVar (NonEmpty Type.ProtocolServerOp))
type Request = Type.ProtocolClientOp
type InMessage = Type.ProtocolServerOp
type Response = NonEmpty InMessage
-- | Asynchronous LDAP operation. Use 'wait' or 'waitSTM' to wait for its completion.
data Async a = Async (STM (Either ResponseError a))
data Async a = Async Type.Id (STM (Either ResponseError a))
instance Functor Async where
fmap f (Async stm) = Async (fmap (fmap f) stm)
fmap f (Async mid stm) = Async mid (fmap (fmap f) stm)
-- | Unique identifier of an LDAP entry.
newtype Dn = Dn Text
@ -103,16 +105,22 @@ wait = atomically . waitSTM
-- should commit. After that, applying 'waitSTM' to the corresponding 'Async'
-- starts to make sense.
waitSTM :: Async a -> STM (Either ResponseError a)
waitSTM (Async stm) = stm
waitSTM (Async _ stm) = stm
sendRequest :: Ldap -> (Response -> Either ResponseError a) -> Request -> STM (Async a)
sendRequest l p msg =
do var <- newEmptyTMVar
writeRequest l var msg
return (Async (fmap p (readTMVar var)))
mid <- newId l
writeRequest l (New mid msg var)
return (Async mid (fmap p (readTMVar var)))
writeRequest :: Ldap -> TMVar Response -> Request -> STM ()
writeRequest Ldap { client } var msg = writeTQueue client (New msg var)
newId :: Ldap -> STM Type.Id
newId Ldap { counter } =
do modifyTVar counter (\(Type.Id mid) -> Type.Id (mid + 1))
readTVar counter
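-- Note: the counter starts at (Type.Id 0) and 'newId' increments before
-- reading, so the first request sent over the connection gets MessageID 1;
-- MessageID 0 is reserved by RFC 4511 for unsolicited notifications.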
writeRequest :: Ldap -> ClientMessage -> STM ()
writeRequest Ldap { client } = writeTQueue client
raise :: Exception e => Either e a -> IO a
raise = either throwIO return
@ -138,4 +146,4 @@ unbindAsyncSTM :: Ldap -> STM ()
unbindAsyncSTM l =
void (sendRequest l die Type.UnbindRequest)
where
die = error "Ldap.Client: do not wait for the response to UnbindRequest"
die = error "Ldap.Client.Internal: do not wait for the response to UnbindRequest"

7
test/Doctests.hs Normal file
View File

@ -0,0 +1,7 @@
module Main (main) where
import Test.DocTest (doctest)
main :: IO ()
main = doctest ["src//Ldap/Asn1/ToAsn1.hs"]

View File

@ -16,7 +16,7 @@ spec = do
res `shouldBe` True
res `shouldBe` Right ()
it "compares and looses" $ do
it "compares and loses" $ do
res <- locally $ \l -> do
res <- Ldap.compare l charmander (Attr "type") "flying"
res `shouldBe` False

View File

@ -155,7 +155,7 @@ spec = do
it "extensible filter" $ do
res <- locally $ \l -> do
res <- go l ((Just (Attr "type"), Nothing, True) ::= "flying")
res <- go l ((Just (Attr "type"), Nothing, False) ::= "flying")
dns res `shouldMatchList`
[ butterfree
, charizard

View File

@ -1,3 +1,4 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module SpecHelper
( locally
@ -22,8 +23,15 @@ module SpecHelper
, oddish
) where
#if __GLASGOW_HASKELL__ < 710
import Control.Applicative ((<$))
#endif
import Control.Monad (forever)
import Control.Concurrent (forkIO)
import Control.Exception (bracket)
import System.Environment (getEnvironment)
import System.IO (hGetLine)
import System.IO.Error (tryIOError)
import System.Process (runInteractiveProcess, terminateProcess, waitForProcess)
import Ldap.Client as Ldap
@ -31,12 +39,14 @@ import Ldap.Client as Ldap
locally :: (Ldap -> IO a) -> IO (Either LdapError a)
locally f =
bracket (do (_, out, _, h) <- runInteractiveProcess "./test/ldap.js" [] Nothing
(Just [ ("PORT", show port)
, ("SSL_CERT", "./ssl/cert.pem")
, ("SSL_KEY", "./ssl/key.pem")
])
bracket (do env <- getEnvironment
(_, out, _, h) <- runInteractiveProcess "./test/ldap.js" [] Nothing
(Just (("PORT", show port) :
("SSL_CERT", "./ssl/cert.pem") :
("SSL_KEY", "./ssl/key.pem") :
env))
hGetLine out
forkIO (() <$ tryIOError (forever (hGetLine out >>= putStrLn)))
return h)
(\h -> do terminateProcess h
waitForProcess h)

View File

@ -1,8 +1,20 @@
#!/usr/bin/env js
#!/usr/bin/env node
var fs = require('fs');
var ldapjs = require('ldapjs');
// Stub unimplemented functionality.
ldapjs.ExtensibleFilter.prototype.matches = ldapjs.EqualityFilter.prototype.matches;
ldapjs.ApproximateFilter.prototype.matches = ldapjs.EqualityFilter.prototype.matches;
// Remove superfluous spaces from DNs.
var wrappee = ldapjs.DN.prototype.format;
ldapjs.DN.prototype.format = function(options) {
options = options || this._format;
options['skipSpace'] = true;
return (wrappee.bind(this))(options);
};
var port = process.env.PORT;
var certificate = fs.readFileSync(process.env.SSL_CERT, "utf-8");
var key = fs.readFileSync(process.env.SSL_KEY, "utf-8");
@ -81,8 +93,9 @@ function authorize(req, res, next) {
server.search('o=localhost', [authorize], function(req, res, next) {
for (var i = 0; i < pokemon.length; i++) {
if (req.filter.matches(pokemon[i].attributes))
if (req.filter.matches(pokemon[i].attributes)) {
res.send(pokemon[i]);
}
};
res.end();
@ -163,7 +176,7 @@ server.modifyDN('o=localhost', [], function(req, res, next) {
if (req.dn.toString() === pokemon[i].dn) {
req.dn.rdns[0] = req.newRdn.rdns[0];
pokemon[i].dn = req.dn.toString();
pokemon[i].attributes.cn = req.newRdn.rdns[0].cn;
pokemon[i].attributes.cn = req.newRdn.rdns[0].attrs.cn.value;
}
}