diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index cee924a..a43ee45 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -28,8 +28,12 @@ fail-fast: false matrix: # Latest stable version, update at will - os: [ macOS-10.15, ubuntu-18.04, windows-2019 ] - dc: [ dmd-latest, ldc-latest, dmd-master, ldc-master ] + os: [ macOS-11, ubuntu-18.04, windows-2019 ] + dc: + - dmd-latest + - ldc-latest + - dmd-master + - ldc-master runs-on: ${{ matrix.os }} steps: @@ -79,3 +83,4 @@ dub run --compiler=${{ env.DC }} --single test\issue2051_running_unittests_from_dub_single_file_packages_fails.d cd test dub --single run-unittest.d + shell: pwsh diff --git a/.gitignore b/.gitignore index e0ee768..47dcf7d 100644 --- a/.gitignore +++ b/.gitignore @@ -18,23 +18,6 @@ /bin/dub-* # Ignore files or directories created by the test suite. -/test/test.log -/test/custom-source-main-bug487/custom-source-main-bug487 -/test/3-copyFiles/bin/ -/test/ignore-hidden-1/ignore-hidden-1 -/test/ignore-hidden-2/ignore-hidden-2 -/test/expected-import-path-output -/test/expected-string-import-path-output -/test/expected-describe-data-1-list-output -/test/expected-describe-data-2-dmd-output -/test/expected-issue616-output -/test/describe-project/dummy.dat -/test/describe-project/dummy-dep1.dat -/test/*/main/main -/test/*/*test-library -/test/*/*test-application -/test/*/exec-simple -/test/issue1474/ext/fortytwo.d *.exe *.log @@ -48,3 +31,11 @@ # Ignore generated files for examples /examples/generated-sources/generated-sources /examples/generated-sources/source/test.d + +dub_test_root.sh.*/ + +test-cov-ctfe-test.lst +test-unittest-cov-ctfe-source-mod.lst +test-unittest-cov-ctfe-.dub-code-test-test-library-unittest-cov-ctfe-*-dub_test_root.lst + +example.yaml diff --git a/build-files.txt b/build-files.txt index 0624ecb..59a0b51 100644 --- a/build-files.txt +++ b/build-files.txt @@ -1,34 +1,27 @@ source/app.d +source/configy/Attributes.d +source/configy/Exceptions.d +source/configy/FieldRef.d +source/configy/Read.d +source/configy/Utils.d source/dub/commandline.d -source/dub/dependency.d -source/dub/dependencyresolver.d -source/dub/description.d -source/dub/dub.d -source/dub/init.d -source/dub/packagemanager.d -source/dub/packagesupplier.d -source/dub/packagesuppliers/package.d -source/dub/packagesuppliers/fallback.d -source/dub/packagesuppliers/filesystem.d -source/dub/packagesuppliers/packagesupplier.d -source/dub/packagesuppliers/maven.d -source/dub/packagesuppliers/registry.d -source/dub/package_.d -source/dub/platform.d -source/dub/project.d -source/dub/semver.d source/dub/compilers/buildsettings.d source/dub/compilers/compiler.d source/dub/compilers/dmd.d source/dub/compilers/gdc.d source/dub/compilers/ldc.d source/dub/compilers/utils.d +source/dub/dependency.d +source/dub/dependencyresolver.d +source/dub/description.d +source/dub/dub.d source/dub/generators/build.d source/dub/generators/cmake.d source/dub/generators/generator.d source/dub/generators/sublimetext.d source/dub/generators/targetdescription.d source/dub/generators/visuald.d +source/dub/init.d source/dub/internal/git.d source/dub/internal/libInputVisitor.d source/dub/internal/sdlang/ast.d @@ -42,13 +35,55 @@ source/dub/internal/undead/xml.d source/dub/internal/utils.d source/dub/internal/vibecompat/core/file.d -source/dub/internal/vibecompat/core/log.d source/dub/internal/vibecompat/data/json.d source/dub/internal/vibecompat/data/serialization.d source/dub/internal/vibecompat/data/utils.d 
source/dub/internal/vibecompat/inet/path.d source/dub/internal/vibecompat/inet/url.d +source/dub/internal/colorize/colors.d +source/dub/internal/colorize/cwrite.d +source/dub/internal/colorize/package.d +source/dub/internal/colorize/winterm.d +source/dub/internal/logging.d +source/dub/package_.d +source/dub/packagemanager.d +source/dub/packagesupplier.d +source/dub/packagesuppliers/fallback.d +source/dub/packagesuppliers/filesystem.d +source/dub/packagesuppliers/maven.d +source/dub/packagesuppliers/package.d +source/dub/packagesuppliers/packagesupplier.d +source/dub/packagesuppliers/registry.d +source/dub/platform.d +source/dub/project.d source/dub/recipe/io.d source/dub/recipe/json.d source/dub/recipe/packagerecipe.d +source/dub/recipe/selection.d source/dub/recipe/sdl.d +source/dub/semver.d +source/dub/version_.d +source/dyaml/composer.d +source/dyaml/constructor.d +source/dyaml/dumper.d +source/dyaml/emitter.d +source/dyaml/encoding.d +source/dyaml/escapes.d +source/dyaml/event.d +source/dyaml/exception.d +source/dyaml/linebreak.d +source/dyaml/loader.d +source/dyaml/node.d +source/dyaml/package.d +source/dyaml/parser.d +source/dyaml/queue.d +source/dyaml/reader.d +source/dyaml/representer.d +source/dyaml/resolver.d +source/dyaml/scanner.d +source/dyaml/serializer.d +source/dyaml/stdsumtype.d +source/dyaml/style.d +source/dyaml/tagdirective.d +source/dyaml/token.d +source/tinyendian.d diff --git a/changelog/build_unittest.dd b/changelog/build_unittest.dd new file mode 100644 index 0000000..2574c29 --- /dev/null +++ b/changelog/build_unittest.dd @@ -0,0 +1,8 @@ +Building the special test runner configuration + +`dub build --config=unittest --build=unittest[-cov]` can now be used to +mimic building the test runner executable of `dub test [--coverage]`. +Note that this doesn't require an existing `unittest` configuration. + +`dub describe --config=unittest` allows deriving the path to the +executable. diff --git a/changelog/colored_output.dd b/changelog/colored_output.dd new file mode 100644 index 0000000..8102feb --- /dev/null +++ b/changelog/colored_output.dd @@ -0,0 +1,14 @@ +Output will now be colorized + +Dub output has been improved to be more human readable, +which means the most important information in the output +will now be colorized / bold. + +As is usual with CLI tools, this behavior is automatically +turned on when the output is a TTY. To force the old output +in the presence of a TTY, use `--color=off`. To force colored +output in the absence of a TTY, use `--color=on`. + +The `--color` flag, if set to `on` or `off`, is automatically +forwarded to the compiler. This is especially useful for CI +pipelines to ensure human-readable output. diff --git a/changelog/configy_stricter_parsing.dd b/changelog/configy_stricter_parsing.dd new file mode 100644 index 0000000..0cac88e --- /dev/null +++ b/changelog/configy_stricter_parsing.dd @@ -0,0 +1,9 @@ +dub will now warn on unrecognized entries in settings or selections files + +Previously, dub was silently accepting anything it didn't recognize +in `[dub.]settings.json` and `dub.selections.json`. While the original +intent was to make forward-compatibility easy, it proved detrimental +as typos would just mean the user setting was ignored. + +From this release, dub will now warn about any unrecognized entry in its configuration files +or in `dub.selections.json`. After 10 releases, those warnings will turn into errors.
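For illustration, here is a minimal sketch of the strict-parsing behavior described in the configy_stricter_parsing.dd entry above, written against the configy API introduced later in this patch (`parseConfigString` and `StrictMode` from source/configy/Read.d). The `Settings` struct and the typoed key are hypothetical, not dub's actual configuration type:

```d
import configy.Read; // publicly imports configy.Attributes (@Optional, ...)

struct Settings
{
    // Marked @Optional because the default (null) equals string.init,
    // so the parser would otherwise treat the field as required.
    @Optional string defaultCompiler;
}

void main ()
{
    // "defaultCompilre" is a typo: with StrictMode.Warn a warning pointing
    // at the unknown key is printed and parsing continues; with the default
    // StrictMode.Error an UnknownKeyConfigException is thrown instead.
    auto settings = parseConfigString!Settings(
        "defaultCompilre: ldc2\n", "settings.json", StrictMode.Warn);
    assert(settings.defaultCompiler is null); // the typoed key was ignored
}
```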
diff --git a/changelog/cov_ctfe.dd b/changelog/cov_ctfe.dd new file mode 100644 index 0000000..a6ccb63 --- /dev/null +++ b/changelog/cov_ctfe.dd @@ -0,0 +1,4 @@ +Two new build types, `cov-ctfe` and `unittest-cov-ctfe`, have been added. + +These extend the existing build types `cov` and `unittest-cov` respectively by +appending `-cov=ctfe` to the set of flags passed to the compiler. diff --git a/changelog/dpath.dd b/changelog/dpath.dd new file mode 100644 index 0000000..098bf00 --- /dev/null +++ b/changelog/dpath.dd @@ -0,0 +1,34 @@ +DUB settings & packages directory placement overhauled + +You can now configure where DUB places its downloaded packages and where the user configuration is stored through environment variables or through the dub configuration. You need to use an environment variable or the system-wide dub configuration to specify where the user configuration is stored. + +By default DUB stores packages and settings in +- Windows: `%APPDATA%/dub/settings.json` + `%LOCALAPPDATA%/dub/packages/` +- Posix: `$HOME/.dub/{packages/,settings.json}` + +Now, if the `DUB_HOME` environment variable is set, it instead stores the packages (and other config) in +- `$DUB_HOME/{packages/,settings.json}` + +Alternatively, if `DUB_HOME` isn't set, but `DPATH` is set, the following path is used: +- `$DPATH/dub/{packages/,settings.json}` + +The `DPATH` environment variable is intended to be used by all D tooling that performs user-space installation. It can be used to avoid cluttering the home folder. + +In addition to environment variables, it is possible to configure the package placement path and the settings.json path through DUB's settings.json file. To configure where the user-editable settings.json is placed, you need to adjust the system-wide dub configuration. + +In the settings.json you can set the following fields: + +```json +{ + "dubHome": "/path/to/dub" // sets both package store and config location +} +``` + +Additionally, these config paths will have environment variables using the `$VARIABLE` syntax resolved. + +The following list describes which path is going to be picked, from top to bottom, stopping whenever one is found (a small illustrative sketch follows the changelog entries below): + +- `$DUB_HOME` environment variable +- `$DPATH` environment variable +- system-wide settings.json: `"dubHome"` property (only for userSettings) +- most specific settings.json: `"dubHome"` property (only for localRepository) diff --git a/changelog/exitcodes.dd b/changelog/exitcodes.dd new file mode 100644 index 0000000..0ecdb3e --- /dev/null +++ b/changelog/exitcodes.dd @@ -0,0 +1,11 @@ +DUB command exit codes have been made more consistent + +Some dub commands have been adjusted to return exit code 2 instead of exit code 1. Exit code 1 is now always used for usage errors, while exit code 2 is the more generic "an error occurred" or "package failed to load". + +The following commands are affected: + +- dub clean +- dub add +- dub search +- dub convert + diff --git a/changelog/install_uninstall.dd b/changelog/install_uninstall.dd new file mode 100644 index 0000000..445c64b --- /dev/null +++ b/changelog/install_uninstall.dd @@ -0,0 +1,5 @@ +'install' and 'uninstall' commands have been removed + +Those commands were long-deprecated aliases to `fetch` and `remove`, +respectively, and usage of them triggered a warning. +They are no longer listed as commands in the help output and dub will no longer recognize them.
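This is the sketch referenced in the dpath.dd entry above: a small illustration of the lookup order, not dub's actual implementation. It glosses over the userSettings/localRepository distinction, and the two parameters are hypothetical stand-ins for the parsed `"dubHome"` entries of the system-wide and most specific settings.json:

```d
import std.process : environment;

/// Hypothetical helper: returns the directory holding packages/ and
/// settings.json, following the precedence described in the changelog.
string resolveDubHome (string systemDubHome, string localDubHome)
{
    const dubHome = environment.get("DUB_HOME");
    if (dubHome.length)
        return dubHome;
    const dpath = environment.get("DPATH");
    if (dpath.length)
        return dpath ~ "/dub";
    if (systemDubHome.length)
        return systemDubHome;
    if (localDubHome.length)
        return localDubHome;
    // Legacy default (Posix shown; Windows uses %APPDATA% / %LOCALAPPDATA%)
    return environment.get("HOME") ~ "/.dub";
}
```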
diff --git a/changelog/make-copies-writable.dd b/changelog/make-copies-writable.dd new file mode 100644 index 0000000..ce5edef --- /dev/null +++ b/changelog/make-copies-writable.dd @@ -0,0 +1,8 @@ +When `copyFiles` is used to copy read-only files, it now makes the copy writable. + +Previously, if the target file already existed due to a prior run of Dub, `copyFiles` would produce an access +denied error because the read-only target could not be overwritten. Note that if you were affected by this behaviour, +you will need to remove those files by hand once to eliminate these errors. + +It is common for version control systems to mark binary files read-only in the working copy, to prevent concurrent +edits of files in unmergeable formats. (A small illustrative sketch of the fix follows the changelog entries below.) diff --git a/changelog/overrides_deprecated.dd b/changelog/overrides_deprecated.dd new file mode 100644 index 0000000..eaafbd0 --- /dev/null +++ b/changelog/overrides_deprecated.dd @@ -0,0 +1,9 @@ +The override system is deprecated + +Dub had an "override" system, allowing a specific version or version range +to be overridden by a specific package. +This override system was developed with a purely version-based approach in mind, +however since its inception, more ways to specify dependencies have been added, +making the override approach redundant and less flexible than other approaches. +From this release, dub will warn you if it finds an override file, +or when using the `dub add-override` / `dub remove-override` commands. diff --git a/changelog/subpackage_short_syntax.dd b/changelog/subpackage_short_syntax.dd new file mode 100644 index 0000000..cf50a10 --- /dev/null +++ b/changelog/subpackage_short_syntax.dd @@ -0,0 +1,4 @@ +The shortcut syntax for "dub run" is now also available for sub packages. + +Invoking dub as "dub :subpackage" is now equivalent to "dub run :subpackage", +analogous to just "dub" being equivalent to "dub run". diff --git a/changelog/subpackage_upgrade.dd b/changelog/subpackage_upgrade.dd new file mode 100644 index 0000000..c9a28ce --- /dev/null +++ b/changelog/subpackage_upgrade.dd @@ -0,0 +1,5 @@ +Upgrading all sub packages at once + +A new "-s" switch allows "dub upgrade" to upgrade all sub packages together with the +base package. This aims to provide a better workflow for fully reproducible +builds and tests.
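As referenced in the make-copies-writable.dd entry above, the essence of the fix can be sketched as follows (an illustration under stated assumptions, not dub's actual code; `copyFileWritable` is a hypothetical helper): copy the file, then clear the read-only attribute on the destination so that a later run can overwrite it.

```d
import std.file : copy, getAttributes, setAttributes;

/// Hypothetical helper mirroring the described copyFiles behavior.
void copyFileWritable (string src, string dst)
{
    copy(src, dst);
    version (Posix)
    {
        import core.sys.posix.sys.stat : S_IWUSR;
        // add the owner-write bit to the copied file's mode
        dst.setAttributes(dst.getAttributes | S_IWUSR);
    }
    else version (Windows)
    {
        import core.sys.windows.winnt : FILE_ATTRIBUTE_READONLY;
        // clear the read-only flag on the copy
        dst.setAttributes(dst.getAttributes & ~FILE_ATTRIBUTE_READONLY);
    }
}
```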
diff --git a/dub.sdl b/dub.sdl index a6bb5d5..04a2b11 100644 --- a/dub.sdl +++ b/dub.sdl @@ -12,6 +12,9 @@ mainSourceFile "source/app.d" libs "curl" versions "DubUseCurl" "DubApplication" + // Uncomment to get rich output about the file parsing and json <-> YAML + // integrity checks + //debugVersions "ConfigFillerDebug" } configuration "library" { diff --git a/dub.selections.json b/dub.selections.json index e00205d..28ff597 100644 --- a/dub.selections.json +++ b/dub.selections.json @@ -1,19 +1,19 @@ { "fileVersion": 1, "versions": { - "botan": "1.12.18", + "botan": "1.12.19", "botan-math": "1.0.3", - "diet-ng": "1.7.3", - "eventcore": "0.9.9", + "diet-ng": "1.8.1", + "eventcore": "0.9.20", "libasync": "0.8.6", "libev": "5.0.0+4.04", "libevent": "2.0.2+2.0.16", "memutils": "1.0.4", "mir-linux-kernel": "1.0.1", - "openssl": "1.1.6+1.0.1g", + "openssl": "3.2.2", "stdx-allocator": "2.77.5", - "taggedalgebraic": "0.11.17", - "vibe-core": "1.10.1", - "vibe-d": "0.9.2" + "taggedalgebraic": "0.11.22", + "vibe-core": "1.22.4", + "vibe-d": "0.9.5" } } diff --git a/scripts/ci/travis.sh b/scripts/ci/travis.sh index 067d848..a811e75 100755 --- a/scripts/ci/travis.sh +++ b/scripts/ci/travis.sh @@ -3,13 +3,13 @@ set -v -e -o pipefail vibe_ver=$(jq -r '.versions | .["vibe-d"]' < dub.selections.json) -dub fetch vibe-d --version=$vibe_ver # get optional dependency +dub fetch vibe-d@$vibe_ver # get optional dependency dub test --compiler=${DC} -c library-nonet export DMD="$(command -v $DMD)" if [ "$FRONTEND" \> 2.087.z ]; then - ./build.d -preview=dip1000 -w -g -debug + ./build.d -preview=dip1000 -preview=in -w -g -debug fi function clean() { diff --git a/scripts/fish-completion/dub.fish b/scripts/fish-completion/dub.fish index c8cd222..bda55ed 100644 --- a/scripts/fish-completion/dub.fish +++ b/scripts/fish-completion/dub.fish @@ -49,7 +49,7 @@ complete -c dub -n "contains '$cmd' (commandline -poc)" -s a -l arch -r -d "Force architecture" complete -c dub -n "contains '$cmd' (commandline -poc)" -s d -l debug -r -d "Debug identifier" complete -c dub -n "contains '$cmd' (commandline -poc)" -l nodeps -d "No dependency check" - complete -c dub -n "contains '$cmd' (commandline -poc)" -s b -l build -u -x -d "Build type" -a "debug plain release release-debug release-nobounds unittest profile profile-gc docs ddox cov unittest-cov syntax" + complete -c dub -n "contains '$cmd' (commandline -poc)" -s b -l build -u -x -d "Build type" -a "debug plain release release-debug release-nobounds unittest profile profile-gc docs ddox cov cov-ctfe unittest-cov unittest-cov-ctfe syntax" complete -c dub -n "contains '$cmd' (commandline -poc)" -l build-mode -x -d "How compiler & linker are invoked" -a "separate allAtOnce singleFile" complete -c dub -n "contains '$cmd' (commandline -poc)" -l compiler -x -d "Compiler binary" -a "dmd gdc ldc gdmd ldmd" end diff --git a/scripts/man/gen_man.d b/scripts/man/gen_man.d index e1b2089..aff9118 100755 --- a/scripts/man/gen_man.d +++ b/scripts/man/gen_man.d @@ -30,6 +30,7 @@ { import std.datetime; SysTime date; + string[] relatedSubCommands; static Config init(){ import std.process : environment; @@ -62,7 +63,8 @@ Copyright (c) 1999-%s by The D Language Foundation .SH "ONLINE DOCUMENTATION" .UR http://code.dlang.org/docs/commandline -.UE http://code.dlang.org/docs/commandline +http://code.dlang.org/docs/commandline +.UE .SH "SEE ALSO" %s`; manFile.writefln(manFooter, config.date.year, seeAlso); @@ -116,26 +118,28 @@ void writeArgs(CommandArgs args, ref File manFile) { - alias write = (m) => 
manFile.write(m); + alias write = (m) => manFile.write(m.replace(`-`, `\-`)); foreach (arg; args.recognizedArgs) { auto names = arg.names.split("|"); assert(names.length == 1 || names.length == 2); string sarg = names[0].length == 1 ? names[0] : null; string larg = names[0].length > 1 ? names[0] : names.length > 1 ? names[1] : null; - write(".IP "); + manFile.writeln(".PP"); if (sarg !is null) { - write("-%s".format(sarg)); + write("-%s".format(sarg).bold); if (larg !is null) write(", "); } if (larg !is null) { - write("--%s".format(larg)); + write("--%s".format(larg).bold); if (!arg.defaultValue.peek!bool) write("=VALUE"); } manFile.writeln; + manFile.writeln(".RS 4"); manFile.writeln(arg.helpText.join("\n")); + manFile.writeln(".RE"); } } @@ -149,11 +153,25 @@ auto manFile = File(config.cwd.buildPath(fileName), "w"); auto manName = format("DUB-%s", command.name).toUpper; manFile.writeHeader(manName, config); - static immutable seeAlso = ["dmd(1)", "dub(1)"].map!bold.joiner(", ").to!string; + + string[] extraRelated; + foreach (arg; args.recognizedArgs) { + if (arg.names.canFind("rdmd")) + extraRelated ~= "rdmd(1)"; + } + if (command.name == "dustmite") + extraRelated ~= "dustmite(1)"; + + const seeAlso = ["dub(1)"] + .chain(config.relatedSubCommands.map!(s => s.format!"dub-%s(1)")) + .chain(extraRelated) + .map!bold + .joiner(", ") + .to!string; scope(exit) manFile.writeFooter(seeAlso, config); alias writeln = (m) => manFile.writeln(m); - writeln(`dub \- Package and build management system for D`); + manFile.writefln(`dub-%s \- %s`, command.name, command.description); writeln("SYNOPSIS".header); writeln("dub %s".format(command.name).bold); @@ -168,6 +186,25 @@ writeln(command.helpText.joiner("\n\n")); writeln("OPTIONS".header); args.writeArgs(manFile); + + static immutable exitStatus = +`.SH EXIT STATUS +.TP +.BR 0 +DUB succeeded +.TP +.BR 1 +usage errors, unknown command line flags +.TP +.BR 2 +package not found, package failed to load, miscellaneous error`; + static immutable exitStatusDustmite = +`.SH EXIT STATUS +Forwards the exit code from ` ~ `dustmite(1)`.bold; + if (command.name == "dustmite") + manFile.writeln(exitStatusDustmite); + else + manFile.writeln(exitStatus); } void main() @@ -183,8 +220,30 @@ args.writeMainManFile(commands, "dub.1", config); } + string[][] relatedSubCommands = [ + ["run", "build", "test"], + ["test", "dustmite", "lint"], + ["describe", "generate"], + ["add", "fetch"], + ["init", "add", "convert"], + ["add-path", "remove-path"], + ["add-local", "remove-local"], + ["list", "search"], + ["add-override", "remove-override", "list-overrides"], + ["clean-caches", "clean", "remove"], + ]; + // options for each specific command foreach (cmd; commands.map!(a => a.commands).joiner) { + string[] related; + foreach (relatedList; relatedSubCommands) { + if (relatedList.canFind(cmd.name)) + related ~= relatedList; + } + related = related.sort!"a<b".uniq.array; + related = related.remove!(c => c == cmd.name); + config.relatedSubCommands = related; + cmd.writeManFile(config); } } diff --git a/scripts/zsh-completion/_dub b/scripts/zsh-completion/_dub index 75fefa5..21457f0 100644 --- a/scripts/zsh-completion/_dub +++ b/scripts/zsh-completion/_dub @@ -147,7 +147,7 @@ $@ \ '::package:_dub_add' \ '(* : -)'{-h,--help}'[Display general or command specific help and exit]' \ - '(-b --build)'{-b,--build=}'[Specifies the type of build to perform]:build type:("debug (default)" plain release release-debug release-nobounds unittest profile profile-gc docs ddox cov unittest-cov syntax)' \ + '(-b --build)'{-b,--build=}'[Specifies 
the type of build to perform]:build type:("debug (default)" plain release release-debug release-nobounds unittest profile profile-gc docs ddox cov cov-ctfe unittest-cov unittest-cov-ctfe syntax)' \ '(-c --config)'{-c,--config=}'[Builds the specified configuration]:package configuration: ' \ '*--override-config=[ Uses the specified configuration for a certain dependency]:dependency/config: ' \ '--compiler=[Specifies the compiler binary to use (can be a path)]:compiler:(dmd gdc ldc gdmd ldmd)' \ diff --git a/source/configy/Attributes.d b/source/configy/Attributes.d new file mode 100644 index 0000000..c8f090f --- /dev/null +++ b/source/configy/Attributes.d @@ -0,0 +1,315 @@ +/******************************************************************************* + + Define UDAs that can be applied to a configuration struct + + This module is standalone (a leaf module) to allow importing the UDAs + without importing the whole configuration parsing code. + + Copyright: + Copyright (c) 2019-2022 BOSAGORA Foundation + All rights reserved. + + License: + MIT License. See LICENSE for details. + +*******************************************************************************/ + +module configy.Attributes; + +import std.traits; + +/******************************************************************************* + + An optional parameter with an initial value of `T.init` + + The config parser automatically recognizes non-default initializers, + so that the following: + ``` + public struct Config + { + public string greeting = "Welcome home"; + } + ``` + will not error out if `greeting` is not defined in the config file. + However, this relies on the initializer of the field (`greeting`) being + different from the type initializer (`string.init` is `null`). + In some cases, the default value is also the desired initializer, e.g.: + ``` + public struct Config + { + /// Maximum number of connections. 0 means unlimited. + public uint connections_limit = 0; + } + ``` + In this case, one can add `@Optional` to the field to inform the parser. + +*******************************************************************************/ + +public struct Optional {} + +/******************************************************************************* + + Inform the config filler that this sequence is to be read as a mapping + + On some occasions, one might want to read a mapping as an array. + One reason to do so may be to provide a better experience to the user, + e.g. letting them type: + ``` + interfaces: + eth0: + ip: "192.168.0.1" + private: true + wlan0: + ip: "1.2.3.4" + ``` + Instead of the slightly more verbose: + ``` + interfaces: + - name: eth0 + ip: "192.168.0.1" + private: true + - name: wlan0 + ip: "1.2.3.4" + ``` + + The former would have to be expressed as an associative array. + However, one major drawback of associative arrays is that they can't have + an initializer, which makes them cumbersome to use in the context of the + config filler. To remedy this issue, one may use `@Key("name")` + on a field (here, `interfaces`) so that the mapping is flattened + to an array. If `name` is `null`, the key will be discarded. + +*******************************************************************************/ + +public struct Key +{ + /// + public string name; +} + +/******************************************************************************* + + Look up the provided name in the YAML node, instead of the field name. + + By default, the config filler will look up the field name of a mapping in + the YAML node. 
If this is not desired, an explicit `Name` attribute can + be given. This is especially useful for names which are keywords. + + ``` + public struct Config + { + public @Name("delete") bool remove; + } + ``` + +*******************************************************************************/ + +public struct Name +{ + /// + public string name; + + /// + public bool startsWith; +} + +/// Shorthand syntax +public Name StartsWith(string name) @safe pure nothrow @nogc +{ + return Name(name, true); +} + +/******************************************************************************* + + A field which carries information about whether it was set or not + + Some configurations may need to know which fields were set explicitly while + keeping defaults. An example of this is a `struct` where at least one field + needs to be set, such as the following: + ``` + public struct ProtoDuration + { + public @Optional long weeks; + public @Optional long days; + public @Optional long hours; + public @Optional long minutes; + public long seconds = 42; + public @Optional long msecs; + public @Optional long usecs; + public @Optional long hnsecs; + public @Optional long nsecs; + } + ``` + In this case, it would be impossible to know if any field was explicitly + provided. Hence, the struct should be written as: + ``` + public struct ProtoDuration + { + public SetInfo!long weeks; + public SetInfo!long days; + public SetInfo!long hours; + public SetInfo!long minutes; + public SetInfo!long seconds = 42; + public SetInfo!long msecs; + public SetInfo!long usecs; + public SetInfo!long hnsecs; + public SetInfo!long nsecs; + } + ``` + Note that `SetInfo` implies `Optional`, and supports default values. + +*******************************************************************************/ + +public struct SetInfo (T) +{ + /*************************************************************************** + + Allow initialization as a field + + This sets the field as having been set, so that: + ``` + struct Config { SetInfo!Duration timeout; } + + Config myConf = { timeout: 10.minutes } + ``` + will behave as if set explicitly. If this behavior is not wanted, + pass `false` as second argument: + ``` + Config myConf = { timeout: SetInfo!Duration(10.minutes, false) } + ``` + + ***************************************************************************/ + + public this (T initVal, bool isSet = true) @safe pure nothrow @nogc + { + this.value = initVal; + this.set = isSet; + } + + /// Underlying data + public T value; + + /// + alias value this; + + /// Whether this field was set or not + public bool set; +} + +/******************************************************************************* + + Provides a means to convert a field from a `Node` to a complex type + + When filling the config, it might be useful to store types which are + not only simple `string` and integer, such as `URL`, `BigInt`, or any other + library type not directly under the user's control. + + To allow reading those values from the config file, a `Converter` may + be used. The converter will tell the `ConfigFiller` how to convert from + `Node` to the desired type `T`. + + If the type is under the user's control, one can also add a constructor + accepting a single string, or define the `fromString` method, both of which + are tried if no `Converter` is found. + + For types not under the user's control, there might be different ways + to parse the same type within the same struct, or neither the ctor nor + the `fromString` method may be defined under that name. 
+ The example below uses `parse` in place of `fromString`. + + ``` + /// Complex structure representing the age of a person based on its birthday + public struct Age + { + /// + public uint birth_year; + /// + public uint birth_month; + /// + public uint birth_day; + + /// Note that this will be picked up automatically if named `fromString` + /// but this struct might be a library type. + public static Age parse (string value) { /+ Magic +/ } + } + + public struct Person + { + /// + @Converter!Age((Node value) => Age.parse(value.as!string)) + public Age age; + } + ``` + + Note that some fields may also be of multiple YAML types, such as DUB's + `dependencies`, which is either a simple string (`"vibe-d": "~>1.0"`), + or in its complex form (`"vibe-d": { "version": "~>1.0" }`). + For those use cases, a `Converter` is the best approach. + + To avoid repeating the field type, a convenience function is provided: + ``` + public struct Age + { + public uint birth_year; + public uint birth_month; + public uint birth_day; + public static Age parse (string value) { /+ Magic +/ } + } + + public struct Person + { + /// Here `converter` will deduce the type from the delegate argument, + /// and return an instance of `Converter`. Mind the case. + @converter((Node value) => Age.parse(value.as!string)) + public Age age; + } + ``` + +*******************************************************************************/ + +public struct Converter (T) +{ + /// + public alias ConverterFunc = T function (scope ConfigParser!T context); + + /// + public ConverterFunc converter; +} + +/// Ditto +public auto converter (FT) (FT func) +{ + static assert(isFunctionPointer!FT, + "Error: Argument to `converter` should be a function pointer, not: " + ~ FT.stringof); + + alias RType = ReturnType!FT; + static assert(!is(RType == void), + "Error: Converter needs to be of the return type of the field, not `void`"); + return Converter!RType(func); +} + +public interface ConfigParser (T) +{ + import dyaml.node; + import configy.FieldRef : StructFieldRef; + import configy.Read : Context, parseField; + + /// Returns: the node being processed + public inout(Node) node () inout @safe pure nothrow @nogc; + + /// Returns: current location we are parsing + public string path () const @safe pure nothrow @nogc; + + /// + public final auto parseAs (OtherType) + (auto ref OtherType defaultValue = OtherType.init) + { + alias TypeFieldRef = StructFieldRef!OtherType; + return this.node().parseField!(TypeFieldRef)( + this.path(), defaultValue, this.context()); + } + + /// Internal use only + protected const(Context) context () const @safe pure nothrow @nogc; +} diff --git a/source/configy/DubTest.d b/source/configy/DubTest.d new file mode 100644 index 0000000..8a4f6f8 --- /dev/null +++ b/source/configy/DubTest.d @@ -0,0 +1,83 @@ +/******************************************************************************* + + Contains tests for dub-specific extensions + + Whenever integrating changes from upstream configy, most conflicts tend + to be on `configy.Test`, and as the structure is very similar, + the default diff algorithms are useless. Having a separate module simplifies + this greatly. + + License: + MIT License. See LICENSE for details. 
+ +*******************************************************************************/ + +module configy.DubTest; + +import configy.Attributes; +import configy.Read; + +import dyaml.node; + +/// Test name pattern matching +unittest +{ + static struct Config + { + @StartsWith("names") + string[][string] names_; + } + + auto c = parseConfigString!Config("names-x86:\n - John\n - Luca\nnames:\n - Marie", "/dev/null"); + assert(c.names_[null] == [ "Marie" ]); + assert(c.names_["x86"] == [ "John", "Luca" ]); +} + +/// Test our `fromYAML` extension +unittest +{ + static struct PackageDef + { + string name; + @Optional string target; + int build = 42; + } + + static struct Package + { + string path; + PackageDef def; + + public static Package fromYAML (scope ConfigParser!Package parser) + { + if (parser.node.nodeID == NodeID.mapping) + return Package(null, parser.parseAs!PackageDef); + else + return Package(parser.parseAs!string); + } + } + + static struct Config + { + string name; + Package[] deps; + } + + auto c = parseConfigString!Config( +` +name: myPkg +deps: + - /foo/bar + - name: foo + target: bar + build: 24 + - name: fur + - /one/last/path +`, "/dev/null"); + assert(c.name == "myPkg"); + assert(c.deps.length == 4); + assert(c.deps[0] == Package("/foo/bar")); + assert(c.deps[1] == Package(null, PackageDef("foo", "bar", 24))); + assert(c.deps[2] == Package(null, PackageDef("fur", null, 42))); + assert(c.deps[3] == Package("/one/last/path")); +} diff --git a/source/configy/Exceptions.d b/source/configy/Exceptions.d new file mode 100644 index 0000000..5e1f98c --- /dev/null +++ b/source/configy/Exceptions.d @@ -0,0 +1,382 @@ +/******************************************************************************* + + Definitions for Exceptions used by the config module. + + Copyright: + Copyright (c) 2019-2022 BOSAGORA Foundation + All rights reserved. + + License: + MIT License. See LICENSE for details. + +*******************************************************************************/ + +module configy.Exceptions; + +import configy.Utils; + +import dyaml.exception; +import dyaml.node; + +import std.algorithm : filter, map; +import std.format; +import std.string : soundexer; + +/******************************************************************************* + + Base exception type thrown by the config parser + + Whenever dealing with Exceptions thrown by the config parser, catching + this type allows optionally formatting with colors: + ``` + try + { + auto conf = parseConfigFile!Config(cmdln); + // ... + } + catch (ConfigException exc) + { + writeln("Parsing the config file failed:"); + writefln(isOutputATTY() ? 
"%S" : "%s", exc); + } + ``` + +*******************************************************************************/ + +public abstract class ConfigException : Exception +{ + /// Position at which the error happened + public Mark yamlPosition; + + /// The path at which the key resides + public string path; + + /// If non-empty, the key under 'path' which triggered the error + /// If empty, the key should be considered part of 'path' + public string key; + + /// Constructor + public this (string path, string key, Mark position, + string file = __FILE__, size_t line = __LINE__) + @safe pure nothrow @nogc + { + super(null, file, line); + this.path = path; + this.key = key; + this.yamlPosition = position; + } + + /// Ditto + public this (string path, Mark position, + string file = __FILE__, size_t line = __LINE__) + @safe pure nothrow @nogc + { + this(path, null, position, file, line); + } + + /*************************************************************************** + + Overrides `Throwable.toString` and its sink overload + + It is quite likely that errors from this module may be printed directly + to the end user, who might not have technical knowledge. + + This format the error in a nicer format (e.g. with colors), + and will additionally provide a stack-trace if the `ConfigFillerDebug` + `debug` version was provided. + + Format_chars: + The default format char ("%s") will print a regular message. + If an uppercase 's' is used ("%S"), colors will be used. + + Params: + sink = The sink to send the piece-meal string to + spec = See https://dlang.org/phobos/std_format_spec.html + + ***************************************************************************/ + + public override string toString () scope + { + // Need to be overriden otherwise the overload is shadowed + return super.toString(); + } + + /// Ditto + public override void toString (scope void delegate(in char[]) sink) const scope + @trusted + { + // This breaks the type system, as it blindly trusts a delegate + // However, the type system lacks a way to sanely build an utility + // which accepts a delegate with different qualifiers, so this is the + // less evil approach. 
+ this.toString(cast(SinkType) sink, FormatSpec!char("%s")); + } + + /// Ditto + public void toString (scope SinkType sink, in FormatSpec!char spec) + const scope @safe + { + import core.internal.string : unsignedToTempString; + + const useColors = spec.spec == 'S'; + char[20] buffer = void; + + if (useColors) sink(Yellow); + sink(this.yamlPosition.name); + if (useColors) sink(Reset); + + sink("("); + if (useColors) sink(Cyan); + sink(unsignedToTempString(this.yamlPosition.line, buffer)); + if (useColors) sink(Reset); + sink(":"); + if (useColors) sink(Cyan); + sink(unsignedToTempString(this.yamlPosition.column, buffer)); + if (useColors) sink(Reset); + sink("): "); + + if (this.path.length || this.key.length) + { + if (useColors) sink(Yellow); + sink(this.path); + if (this.path.length && this.key.length) + sink("."); + sink(this.key); + if (useColors) sink(Reset); + sink(": "); + } + + this.formatMessage(sink, spec); + + debug (ConfigFillerDebug) + { + sink("\n\tError originated from: "); + sink(this.file); + sink("("); + sink(unsignedToTempString(line, buffer)); + sink(")"); + + if (!this.info) + return; + + () @trusted nothrow + { + try + { + sink("\n----------------"); + foreach (t; info) + { + sink("\n"); sink(t); + } + } + // ignore more errors + catch (Throwable) {} + }(); + } + } + + /// Hook called by `toString` to simplify coloring + protected abstract void formatMessage ( + scope SinkType sink, in FormatSpec!char spec) + const scope @safe; +} + +/// A configuration exception that is only a single message +package final class ConfigExceptionImpl : ConfigException +{ + public this (string msg, Mark position, + string file = __FILE__, size_t line = __LINE__) + @safe pure nothrow @nogc + { + this(msg, null, null, position, file, line); + } + + public this (string msg, string path, string key, Mark position, + string file = __FILE__, size_t line = __LINE__) + @safe pure nothrow @nogc + { + super(path, key, position, file, line); + this.msg = msg; + } + + protected override void formatMessage ( + scope SinkType sink, in FormatSpec!char spec) + const scope @safe + { + sink(this.msg); + } +} + +/// Exception thrown when the type of the YAML node does not match the D type +package final class TypeConfigException : ConfigException +{ + /// The actual (in the YAML document) type of the node + public string actual; + + /// The expected (as specified in the D type) type + public string expected; + + /// Constructor + public this (Node node, string expected, string path, string key = null, + string file = __FILE__, size_t line = __LINE__) + @safe nothrow + { + this(node.nodeTypeString(), expected, path, key, node.startMark(), + file, line); + } + + /// Ditto + public this (string actual, string expected, string path, string key, + Mark position, string file = __FILE__, size_t line = __LINE__) + @safe pure nothrow @nogc + { + super(path, key, position, file, line); + this.actual = actual; + this.expected = expected; + } + + /// Format the message with or without colors + protected override void formatMessage ( + scope SinkType sink, in FormatSpec!char spec) + const scope @safe + { + const useColors = spec.spec == 'S'; + + const fmt = "Expected to be of type %s, but is a %s"; + + if (useColors) + formattedWrite(sink, fmt, this.expected.paint(Green), this.actual.paint(Red)); + else + formattedWrite(sink, fmt, this.expected, this.actual); + } +} + +/// Similar to a `TypeConfigException`, but specific to `Duration` +package final class DurationTypeConfigException : ConfigException +{ + /// The 
list of valid fields + public immutable string[] DurationSuffixes = [ + "weeks", "days", "hours", "minutes", "seconds", + "msecs", "usecs", "hnsecs", "nsecs", + ]; + + /// Actual type of the node + public string actual; + + /// Constructor + public this (Node node, string path, string file = __FILE__, size_t line = __LINE__) + @safe nothrow + { + super(path, null, node.startMark(), file, line); + this.actual = node.nodeTypeString(); + } + + /// Format the message with or without colors + protected override void formatMessage ( + scope SinkType sink, in FormatSpec!char spec) + const scope @safe + { + const useColors = spec.spec == 'S'; + + const fmt = "Field is of type %s, but expected a mapping with at least one of: %-(%s, %)"; + if (useColors) + formattedWrite(sink, fmt, this.actual.paint(Red), + this.DurationSuffixes.map!(s => s.paint(Green))); + else + formattedWrite(sink, fmt, this.actual, this.DurationSuffixes); + } +} + +/// Exception thrown when an unknown key is found in strict mode +public class UnknownKeyConfigException : ConfigException +{ + /// The list of valid field names + public immutable string[] fieldNames; + + /// Constructor + public this (string path, string key, immutable string[] fieldNames, + Mark position, string file = __FILE__, size_t line = __LINE__) + @safe pure nothrow @nogc + { + super(path, key, position, file, line); + this.fieldNames = fieldNames; + } + + /// Format the message with or without colors + protected override void formatMessage ( + scope SinkType sink, in FormatSpec!char spec) + const scope @safe + { + const useColors = spec.spec == 'S'; + + // Try to find a close match, as the error is likely a typo + // This is especially important when the config file has a large + // number of fields, where the message is otherwise near-useless. + const origSound = soundexer(this.key); + auto matches = this.fieldNames.filter!(f => f.soundexer == origSound); + const hasMatch = !matches.save.empty; + + if (hasMatch) + { + const fmt = "Key is not a valid member of this section. Did you mean: %-(%s, %)"; + if (useColors) + formattedWrite(sink, fmt, matches.map!(f => f.paint(Green))); + else + formattedWrite(sink, fmt, matches); + } + else + { + // No match, just print everything + const fmt = "Key is not a valid member of this section. 
There are %s valid keys: %-(%s, %)"; + if (useColors) + formattedWrite(sink, fmt, this.fieldNames.length.paint(Yellow), + this.fieldNames.map!(f => f.paint(Green))); + else + formattedWrite(sink, fmt, this.fieldNames.length, this.fieldNames); + } + } +} + +/// Exception thrown when a required key is missing +public class MissingKeyException : ConfigException +{ + /// Constructor + public this (string path, string key, Mark position, + string file = __FILE__, size_t line = __LINE__) + @safe pure nothrow @nogc + { + super(path, key, position, file, line); + } + + /// Format the message with or without colors + protected override void formatMessage ( + scope SinkType sink, in FormatSpec!char spec) + const scope @safe + { + sink("Required key was not found in configuration or command line arguments"); + } +} + +/// Wrap a user-thrown Exception that happened in a Converter/ctor/fromString +public class ConstructionException : ConfigException +{ + /// Constructor + public this (Exception next, string path, Mark position, + string file = __FILE__, size_t line = __LINE__) + @safe pure nothrow @nogc + { + super(path, position, file, line); + this.next = next; + } + + /// Format the message with or without colors + protected override void formatMessage ( + scope SinkType sink, in FormatSpec!char spec) + const scope @trusted + { + if (auto dyn = cast(ConfigException) this.next) + dyn.toString(sink, spec); + else + sink(this.next.message); + } +} diff --git a/source/configy/FieldRef.d b/source/configy/FieldRef.d new file mode 100644 index 0000000..96f91b1 --- /dev/null +++ b/source/configy/FieldRef.d @@ -0,0 +1,201 @@ +/******************************************************************************* + + Implement a template to keep track of field references + + Passing field references by `alias` template parameter creates many problems, + and is extremely cumbersome to work with. Instead, we pass an instance of + a `FieldRef` around, which also contains structured information. + + Copyright: + Copyright (c) 2019-2022 BOSAGORA Foundation + All rights reserved. + + License: + MIT License. See LICENSE for details. + +*******************************************************************************/ + +module configy.FieldRef; + +// Renamed imports as the names exposed by `FieldRef` shadow the imported ones. +import configy.Attributes : CAName = Name, CAOptional = Optional, SetInfo; + +import std.meta; +import std.traits; + +/******************************************************************************* + + A reference to a field in a `struct` + + The compiler sometimes rejects passing fields by `alias`, or complains about + missing `this` (meaning it tries to evaluate the value). Sometimes, it also + discards the UDAs. + + To prevent this from happening, we always pass around a `FieldRef`, + which wraps the parent struct type (`T`), the name of the field + as `FieldName`, and other information. + + To avoid any issue, eponymous usage is also avoided, hence the reference + needs to be accessed using `Ref`. 
+ +*******************************************************************************/ + +package template FieldRef (alias T, string name, bool forceOptional = false) +{ + /// The reference to the field + public alias Ref = __traits(getMember, T, name); + + /// Type of the field + public alias Type = typeof(Ref); + + /// The name of the field in the struct itself + public alias FieldName = name; + + /// The name used in the configuration field (taking `@Name` into account) + static if (hasUDA!(Ref, CAName)) + { + static assert (getUDAs!(Ref, CAName).length == 1, + "Field `" ~ fullyQualifiedName!(Ref) ~ + "` cannot have more than one `Name` attribute"); + + public immutable Name = getUDAs!(Ref, CAName)[0].name; + + public immutable Pattern = getUDAs!(Ref, CAName)[0].startsWith; + } + else + { + public immutable Name = FieldName; + public immutable Pattern = false; + } + + /// Default value of the field (may or may not be `Type.init`) + public enum Default = __traits(getMember, T.init, name); + + /// Evaluates to `true` if this field is to be considered optional + /// (does not need to be present in the YAML document) + public enum Optional = forceOptional || + hasUDA!(Ref, CAOptional) || + is(immutable(Type) == immutable(bool)) || + is(Type : SetInfo!FT, FT) || + (Default != Type.init); +} + +unittest +{ + import configy.Attributes : Name; + + static struct Config1 + { + int integer2 = 42; + @Name("notStr2") + @(42) string str2; + } + + static struct Config2 + { + Config1 c1dup = { 42, "Hello World" }; + string message = "Something"; + } + + static struct Config3 + { + Config1 c1; + int integer; + string str; + Config2 c2 = { c1dup: { integer2: 69 } }; + } + + static assert(is(FieldRef!(Config3, "c2").Type == Config2)); + static assert(FieldRef!(Config3, "c2").Default != Config2.init); + static assert(FieldRef!(Config2, "message").Default == Config2.init.message); + alias NFR1 = FieldRef!(Config3, "c2"); + alias NFR2 = FieldRef!(NFR1.Ref, "c1dup"); + alias NFR3 = FieldRef!(NFR2.Ref, "integer2"); + alias NFR4 = FieldRef!(NFR2.Ref, "str2"); + static assert(hasUDA!(NFR4.Ref, int)); + + static assert(FieldRefTuple!(Config3)[1].Name == "integer"); + static assert(FieldRefTuple!(FieldRefTuple!(Config3)[0].Type)[1].Name == "notStr2"); +} + +/// A pseudo `FieldRef` used for structs which are not fields (top-level) +package template StructFieldRef (ST, string DefaultName = null) +{ + /// + public enum Ref = ST.init; + + /// + public alias Type = ST; + + /// + public enum Default = ST.init; + + /// + public enum Optional = false; + + /// Some places reference their parent's Name / FieldName + public enum Name = DefaultName; + /// Ditto + public enum FieldName = DefaultName; +} + +/// A pseudo `FieldRef` for nested types (e.g. arrays / associative arrays) +package template NestedFieldRef (ElemT, alias FR) +{ + /// + public enum Ref = ElemT.init; + /// + public alias Type = ElemT; + /// + public enum Name = FR.Name; + /// + public enum FieldName = FR.FieldName; + /// Elements or keys are never optional + public enum Optional = false; + +} + +/// Get a tuple of `FieldRef` from a `struct` +package template FieldRefTuple (T) +{ + static assert(is(T == struct), + "Argument " ~ T.stringof ~ " to `FieldRefTuple` should be a `struct`"); + + /// + static if (__traits(getAliasThis, T).length == 0) + public alias FieldRefTuple = staticMap!(Pred, FieldNameTuple!T); + else + { + /// Tuple of strings of aliased fields + /// As of DMD v2.100.0, only a single alias this is supported in D. 
+ private immutable AliasedFieldNames = __traits(getAliasThis, T); + static assert(AliasedFieldNames.length == 1, "Multiple `alias this` are not supported"); + + // Ignore alias to functions (if it's a property we can't do anything) + static if (isSomeFunction!(__traits(getMember, T, AliasedFieldNames))) + public alias FieldRefTuple = staticMap!(Pred, FieldNameTuple!T); + else + { + /// "Base" field names minus aliased ones + private immutable BaseFields = Erase!(AliasedFieldNames, FieldNameTuple!T); + static assert(BaseFields.length == FieldNameTuple!(T).length - 1); + + public alias FieldRefTuple = AliasSeq!( + staticMap!(Pred, BaseFields), + FieldRefTuple!(typeof(__traits(getMember, T, AliasedFieldNames)))); + } + } + + private alias Pred (string name) = FieldRef!(T, name); +} + +/// Returns: An alias sequence of field names, taking UDAs (`@Name` et al) into account +package alias FieldsName (T) = staticMap!(FieldRefToName, FieldRefTuple!T); + +/// Helper template for `staticMap` used for strict mode +private enum FieldRefToName (alias FR) = FR.Name; + +/// Dub extension +package enum IsPattern (alias FR) = FR.Pattern; +/// Dub extension +package alias Patterns (T) = staticMap!(FieldRefToName, Filter!(IsPattern, FieldRefTuple!T)); diff --git a/source/configy/Read.d b/source/configy/Read.d new file mode 100644 index 0000000..1e30d53 --- /dev/null +++ b/source/configy/Read.d @@ -0,0 +1,1075 @@ +/******************************************************************************* + + Utilities to fill a struct representing the configuration with the content + of a YAML document. + + The main function of this module is `parseConfig`. Convenience functions + `parseConfigString` and `parseConfigFile` are also available. + + The type parameter to those three functions must be a struct and is used + to drive the processing of the YAML node. When an error is encountered, + an `Exception` will be thrown, with a descriptive message. + The rules by which the struct is filled are designed to be + as intuitive as possible, and are described below. + + Optional_Fields: + One of the major conveniences offered by this utility is its handling + of optional fields. A field is detected as optional if it has + an initializer that is different from its type `init` value, + for example `string field = "Something";` is an optional field, + but `int count = 0;` is not. + To mark a field as optional even with its default value, + use the `Optional` UDA: `@Optional int count = 0;`. + + Converter: + Because config structs may contain complex types such as + a Phobos type, a user-defined `Amount`, or Vibe.d's `URL`, + one may need to apply a converter to a struct's field. + Converters are functions that take a YAML `Node` as argument + and return a type that is implicitly convertible to the field type + (usually just the field type). They offer the most power to users, + as they can inspect the YAML structure, but should be used as a last resort. + + Composite_Types: + Processing starts from a `struct` at the top level, and recurses into + every field individually. If a field is itself a struct, + the filler will attempt the following, in order: + - If the field has no value and is not optional, an Exception will + be thrown with an error message detailing where the issue happened. + - If the field has no value and is optional, the default value will + be used. + - If the field has a value, the filler will first check for a converter + and use it if present. 
+ - If the type has a `static` method named `fromString` whose sole argument + is a `string`, it will be used. + - If the type has a constructor whose sole argument is a `string`, + it will be used. + - Finally, the filler will attempt to deserialize all struct members + one by one and pass them to the default constructor, if there is any. + - If none of the above succeeded, a `static assert` will trigger. + + Alias_this: + If a `struct` contains an `alias this`, the field that is aliased will be + ignored; instead, the config parser will parse nested fields as if they + were part of the enclosing structure. This allows re-using a single `struct` + in multiple places without having to resort to a `mixin template`. + Having an initializer will make all fields in the aliased struct optional. + The aliased field cannot have attributes other than `@Optional`, + which will then apply to all fields it exposes. + + Duration_parsing: + If the config field is of type `core.time.Duration`, special parsing rules + will apply. There are two possible forms in which a Duration field may + be expressed. In the first form, the YAML node should be a mapping, + and it will be checked for fields matching the supported units + in `core.time`: `weeks`, `days`, `hours`, `minutes`, `seconds`, `msecs`, + `usecs`, `hnsecs`, `nsecs`. The strict parsing option will be respected. + The values of the fields will then be added together, so the following + YAML usages are equivalent: + --- + // sleepFor: + // hours: 8 + // minutes: 30 + --- + and: + --- + // sleepFor: + // minutes: 510 + --- + Provided that the definition of the field is: + --- + public Duration sleepFor; + --- + + In the second form, the field should have a suffix composed of an + underscore ('_'), followed by a unit name as defined in `core.time`. + This can be either the field name directly, or a name override. + The latter is recommended to avoid confusion when using the field in code. + In this form, the YAML node is expected to be a scalar. + So the previous example, using this form, would be expressed as: + --- + sleepFor_minutes: 510 + --- + and the field definition should be one of those two: + --- + public @Name("sleepFor_minutes") Duration sleepFor; /// Prefer this + public Duration sleepFor_minutes; /// This works too + --- + + Those forms are mutually exclusive, so a field with a unit suffix + will error out if a mapping is used. This prevents surprises and ensures + that the error message, if any, is consistent across user input. + + To disable or change this behavior, one may use a `Converter` instead. + + Strict_Parsing: + When strict parsing is enabled, the config filler will also validate + that the YAML nodes do not contain entries which are not present in the + mapping (struct) being processed. + This can be useful to catch typos or outdated configuration options. + + Post_Validation: + Some configurations will require validation across multiple sections. + For example, two sections may be mutually exclusive as a whole, + or may have fields which are mutually exclusive with another section's + field(s). This kind of dependence is hard to account for declaratively, + and does not affect parsing. For this reason, the preferred way to + handle those cases is to define a `validate` member method on the + affected config struct(s), which will be called once + parsing for that mapping is completed. + If an error is detected, this method should throw an Exception. 
+ + Enabled_or_disabled_field: + While most complex logic validation should be handled post-parsing, + some sections may be optional by default, but, if provided, have + required fields. To support this use case, if a field with the name + `enabled` is present in a struct, the parser will first process it. + If it is `false`, the parser will not attempt to process the struct + further, and the other fields will have their default value. + Likewise, if a field named `disabled` exists, the struct will not + be processed if it is set to `true`. + + Copyright: + Copyright (c) 2019-2022 BOSAGORA Foundation + All rights reserved. + + License: + MIT License. See LICENSE for details. + +*******************************************************************************/ + +module configy.Read; + +public import configy.Attributes; +public import configy.Exceptions : ConfigException; +import configy.Exceptions; +import configy.FieldRef; +import configy.Utils; + +import dyaml.exception; +import dyaml.node; +import dyaml.loader; + +import std.algorithm; +import std.conv; +import std.datetime; +import std.format; +import std.getopt; +import std.meta; +import std.range; +import std.traits; +import std.typecons : Nullable, nullable, tuple; + +static import core.time; + +// Dub-specific adjustments for output +import dub.internal.logging; + +/// Command-line arguments +public struct CLIArgs +{ + /// Path to the config file + public string config_path = "config.yaml"; + + /// Overrides for config options + public string[][string] overrides; + + /// Helper to add items to `overrides` + public void overridesHandler (string, string value) + { + import std.string; + const idx = value.indexOf('='); + if (idx < 0) return; + string k = value[0 .. idx], v = value[idx + 1 .. $]; + if (auto val = k in this.overrides) + (*val) ~= v; + else + this.overrides[k] = [ v ]; + } + + /*************************************************************************** + + Parses the base command line arguments + + This can be composed with the program's own arguments. + For example, consider a program which wants to expose a `--version` + switch; the definition could look like this: + --- + public struct ProgramCLIArgs + { + public CLIArgs base; // This struct + + public alias base this; // For convenience + + public bool version_; // Program-specific part + } + --- + Then, an application-specific configuration routine would be: + --- + public GetoptResult parse (ref ProgramCLIArgs clargs, ref string[] args) + { + auto r = clargs.base.parse(args); + if (r.helpWanted) return r; + return getopt( + args, + "version", "Print the application version", &clargs.version_); + } + --- + + Params: + args = The command line args to parse (parsed options will be removed) + passThrough = Whether to enable `config.passThrough` and + `config.keepEndOfOptions`. `true` by default, to allow + composability. If your program doesn't have other + arguments, pass `false`. + + Returns: + The result of calling `getopt` + + ***************************************************************************/ + + public GetoptResult parse (ref string[] args, bool passThrough = true) + { + return getopt( + args, + // `caseInsensitive` is the default, but we need something + // with the same type for the ternary + passThrough ? config.keepEndOfOptions : config.caseInsensitive, + // Also the default, same reasoning + passThrough ? config.passThrough : config.noPassThrough, + "config|c", + "Path to the config file. 
Defaults to: " ~ this.config_path, + &this.config_path, + + "override|O", + "Override a config file value\n" ~ + "Example: -O foo.bar=true -o dns=1.1.1.1 -o dns=2.2.2.2\n" ~ + "Array values are additive, other items are set to the last override", + &this.overridesHandler, + ); + } +} + +/******************************************************************************* + + Attempt to read and process the config file at `path`, print any error + + This 'simple' overload of the more detailed `parseConfigFile` will attempt + to read the file at `path`, and return a `Nullable` instance of it. + If an error happens, either because the file isn't readable or + the configuration has an issue, a message will be printed to `stderr`, + with colors if the output is a TTY, and a `null` instance will be returned. + + The calling code can hence just read a config file via: + ``` + int main () + { + auto configN = parseConfigFileSimple!Config("config.yaml"); + if (configN.isNull()) return 1; // Error path + auto config = configN.get(); + // Rest of the program ... + } + ``` + An overload accepting `CLIArgs args` also exists. + + Params: + path = Path of the file to read from + args = Command line arguments on which `parse` has been called + strict = Whether the parsing should reject unknown keys in the + document, warn, or ignore them (default: `StrictMode.Error`) + + Returns: + An initialized `Config` instance if reading/parsing was successful; + a `null` instance otherwise. + +*******************************************************************************/ + +public Nullable!T parseConfigFileSimple (T) (string path, StrictMode strict = StrictMode.Error) +{ + return parseConfigFileSimple!(T)(CLIArgs(path), strict); +} + + +/// Ditto +public Nullable!T parseConfigFileSimple (T) (in CLIArgs args, StrictMode strict = StrictMode.Error) +{ + try + { + Node root = Loader.fromFile(args.config_path).load(); + return nullable(parseConfig!T(args, root, strict)); + } + catch (ConfigException exc) + { + exc.printException(); + return typeof(return).init; + } + catch (Exception exc) + { + // Other Exception type may be thrown by D-YAML, + // they won't include rich information. + logWarn("%s", exc.message()); + return typeof(return).init; + } +} + +/******************************************************************************* + + Print an Exception, potentially with colors on + + Trusted because of `stderr` usage. + +*******************************************************************************/ + +private void printException (scope ConfigException exc) @trusted +{ + import dub.internal.logging; + + if (hasColors) + logWarn("%S", exc); + else + logWarn("%s", exc.message()); +} + +/******************************************************************************* + + Parses the config file or string and returns a `Config` instance. + + Params: + cmdln = command-line arguments (containing the path to the config) + path = When parsing a string, the path corresponding to it + strict = Whether the parsing should reject unknown keys in the + document, warn, or ignore them (default: `StrictMode.Error`) + + Throws: + `Exception` if parsing the config file failed. 
+
+    Returns:
+        `Config` instance
+
+*******************************************************************************/
+
+public T parseConfigFile (T) (in CLIArgs cmdln, StrictMode strict = StrictMode.Error)
+{
+    Node root = Loader.fromFile(cmdln.config_path).load();
+    return parseConfig!T(cmdln, root, strict);
+}
+
+/// ditto
+public T parseConfigString (T) (string data, string path, StrictMode strict = StrictMode.Error)
+{
+    CLIArgs cmdln = { config_path: path };
+    auto loader = Loader.fromString(data);
+    loader.name = path;
+    Node root = loader.load();
+    return parseConfig!T(cmdln, root, strict);
+}
+
+/*******************************************************************************
+
+    Process the content of the YAML document described by `node` into an
+    instance of the struct `T`.
+
+    See the module description for a complete overview of this function.
+
+    Params:
+        T = Type of the config struct to fill
+        cmdln = Command line arguments
+        node = The root node matching `T`
+        strict = Action to take when encountering unknown keys in the document
+
+    Returns:
+        An instance of `T` filled with the content of `node`
+
+    Throws:
+        If the content of `node` cannot satisfy the requirements set by `T`,
+        or if `node` contains extra fields and `strict` is `StrictMode.Error`.
+
+*******************************************************************************/
+
+public T parseConfig (T) (
+    in CLIArgs cmdln, Node node, StrictMode strict = StrictMode.Error)
+{
+    static assert(is(T == struct), "`" ~ __FUNCTION__ ~
+                  "` should only be called with a `struct` type as argument, not: `" ~
+                  fullyQualifiedName!T ~ "`");
+
+    final switch (node.nodeID)
+    {
+    case NodeID.mapping:
+        dbgWrite("Parsing config '%s', strict: %s",
+                 fullyQualifiedName!T,
+                 strict == StrictMode.Warn ?
+                   strict.paint(Yellow) : strict.paintIf(!!strict, Green, Red));
+        return node.parseMapping!(StructFieldRef!T)(
+            null, T.init, const(Context)(cmdln, strict), null);
+    case NodeID.sequence:
+    case NodeID.scalar:
+    case NodeID.invalid:
+        throw new TypeConfigException(node, "mapping (object)", "document root");
+    }
+}
+
+/*******************************************************************************
+
+    The behavior to have when encountering a field in YAML not present
+    in the config definition.
+
+*******************************************************************************/
+
+public enum StrictMode
+{
+    /// Issue an error by throwing an `UnknownKeyConfigException`
+    Error = 0,
+    /// Write a message to `stderr`, but continue processing the file
+    Warn = 1,
+    /// Be silent and do nothing
+    Ignore = 2,
+}
+
+/// Used to pass around configuration
+package struct Context
+{
+    ///
+    private CLIArgs cmdln;
+
+    ///
+    private StrictMode strict;
+}
+
+/*******************************************************************************
+
+    Parse a mapping from `node` into an instance of `T`
+
+    Params:
+        TLFR = Top level field reference for this mapping
+        node = The YAML node object matching the struct being read
+        path = The runtime path to this mapping, used for nested types
+        defaultValue = The default value to use for `T`, which can be different
+                       from `T.init` when recursing into fields with initializers.
+ ctx = A context where properties that need to be conserved during + recursion are stored + fieldDefaults = Default value for some fields, used for `Key` recursion + +*******************************************************************************/ +private TLFR.Type parseMapping (alias TLFR) + (Node node, string path, auto ref TLFR.Type defaultValue, + in Context ctx, in Node[string] fieldDefaults) +{ + static assert(is(TLFR.Type == struct), "`parseMapping` called with wrong type (should be a `struct`)"); + assert(node.nodeID == NodeID.mapping, "Internal error: parseMapping shouldn't have been called"); + + dbgWrite("%s: `parseMapping` called for '%s' (node entries: %s)", + TLFR.Type.stringof.paint(Cyan), path.paint(Cyan), + node.length.paintIf(!!node.length, Green, Red)); + + static foreach (FR; FieldRefTuple!(TLFR.Type)) + { + static if (FR.Name != FR.FieldName && hasMember!(TLFR.Type, FR.Name) && + !is(typeof(mixin("TLFR.Type.", FR.Name)) == function)) + static assert (FieldRef!(TLFR.Type, FR.Name).Name != FR.Name, + "Field `" ~ FR.FieldName ~ "` `@Name` attribute shadows field `" ~ + FR.Name ~ "` in `" ~ TLFR.Type.stringof ~ "`: Add a `@Name` attribute to `" ~ + FR.Name ~ "` or change that of `" ~ FR.FieldName ~ "`"); + } + + if (ctx.strict != StrictMode.Ignore) + { + /// First, check that all the sections found in the mapping are present in the type + /// If not, the user might have made a typo. + immutable string[] fieldNames = [ FieldsName!(TLFR.Type) ]; + immutable string[] patterns = [ Patterns!(TLFR.Type) ]; + FIELD: foreach (const ref Node key, const ref Node value; node) + { + const k = key.as!string; + if (!fieldNames.canFind(k)) + { + foreach (p; patterns) + if (k.startsWith(p)) + // Require length because `0` would match `canFind` + // and we don't want to allow `$PATTERN-` + if (k[p.length .. $].length > 1 && k[p.length] == '-') + continue FIELD; + + if (ctx.strict == StrictMode.Warn) + { + scope exc = new UnknownKeyConfigException( + path, key.as!string, fieldNames, key.startMark()); + exc.printException(); + } + else + throw new UnknownKeyConfigException( + path, key.as!string, fieldNames, key.startMark()); + } + } + } + + const enabledState = node.isMappingEnabled!(TLFR.Type)(defaultValue); + + if (enabledState.field != EnabledState.Field.None) + dbgWrite("%s: Mapping is enabled: %s", TLFR.Type.stringof.paint(Cyan), (!!enabledState).paintBool()); + + auto convertField (alias FR) () + { + static if (FR.Name != FR.FieldName) + dbgWrite("Field name `%s` will use YAML field `%s`", + FR.FieldName.paint(Yellow), FR.Name.paint(Green)); + // Using exact type here matters: we could get a qualified type + // (e.g. `immutable(string)`) if the field is qualified, + // which causes problems. + FR.Type default_ = __traits(getMember, defaultValue, FR.FieldName); + + // If this struct is disabled, do not attempt to parse anything besides + // the `enabled` / `disabled` field. 
+        if (!enabledState)
+        {
+            // Even this is too noisy
+            version (none)
+                dbgWrite("%s: %s field of disabled struct, default: %s",
+                         path.paint(Cyan), "Ignoring".paint(Yellow), default_);
+
+            static if (FR.Name == "enabled")
+                return false;
+            else static if (FR.Name == "disabled")
+                return true;
+            else
+                return default_;
+        }
+
+        if (auto ptr = FR.FieldName in fieldDefaults)
+        {
+            dbgWrite("Found %s (%s.%s) in `fieldDefaults`",
+                     FR.Name.paint(Cyan), path.paint(Cyan), FR.FieldName.paint(Cyan));
+
+            if (ctx.strict && FR.FieldName in node)
+                throw new ConfigExceptionImpl("'Key' field is specified twice", path, FR.FieldName, node.startMark());
+            return (*ptr).parseField!(FR)(path.addPath(FR.FieldName), default_, ctx)
+                .dbgWriteRet("Using value '%s' from fieldDefaults for field '%s'",
+                             FR.FieldName.paint(Cyan));
+        }
+
+        // This, `FR.Pattern`, and the field in `@Name` are special support for `dub`
+        static if (FR.Pattern)
+        {
+            static if (is(FR.Type : V[K], K, V))
+            {
+                alias AAFieldRef = NestedFieldRef!(V, FR);
+                static assert(is(K : string), "Key type should be string-like");
+            }
+            else
+                static assert(0, "Cannot have pattern on non-AA field");
+
+            AAFieldRef.Type[string] result;
+            foreach (pair; node.mapping)
+            {
+                const key = pair.key.as!string;
+                if (!key.startsWith(FR.Name))
+                    continue;
+                string suffix = key[FR.Name.length .. $];
+                if (suffix.length)
+                {
+                    if (suffix[0] == '-') suffix = suffix[1 .. $];
+                    else continue;
+                }
+
+                result[suffix] = pair.value.parseField!(AAFieldRef)(
+                    path.addPath(key), default_.get(key, AAFieldRef.Type.init), ctx);
+            }
+            bool hack = true;
+            if (hack) return result;
+        }
+
+        if (auto ptr = FR.Name in node)
+        {
+            dbgWrite("%s: YAML field is %s in node%s",
+                     FR.Name.paint(Cyan), "present".paint(Green),
+                     (FR.Name == FR.FieldName ? "" : " (note that field name is overridden)").paint(Yellow));
+            return (*ptr).parseField!(FR)(path.addPath(FR.Name), default_, ctx)
+                .dbgWriteRet("Using value '%s' from YAML document for field '%s'",
+                             FR.FieldName.paint(Cyan));
+        }
+
+        dbgWrite("%s: Field is %s from node%s",
+                 FR.Name.paint(Cyan), "missing".paint(Red),
+                 (FR.Name == FR.FieldName ? "" : " (note that field name is overridden)").paint(Yellow));
+
+        // A field is considered optional if it has an initializer that is different
+        // from its default value, or if it has the `Optional` UDA.
+        // In that case, just return this value.
+        static if (FR.Optional)
+            return default_
+                .dbgWriteRet("Using default value '%s' for optional field '%s'", FR.FieldName.paint(Cyan));
+
+        // The field is not present, but it could be because it is an optional section.
+        // For example, the section could be defined as:
+        // ---
+        // struct RequestLimit { size_t reqs = 100; }
+        // struct Config { RequestLimit limits; }
+        // ---
+        // In this case we need to recurse into `RequestLimit` to check if any
+        // of its fields is required.
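+        // For example, a YAML document which does not mention `limits` at all
+        // should still yield `Config(RequestLimit(100))`, since every field of
+        // `RequestLimit` has a usable default: recursing with an empty mapping
+        // (below) is what checks whether that holds.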
+        else static if (mightBeOptional!FR)
+        {
+            const npath = path.addPath(FR.Name);
+            string[string] aa;
+            return Node(aa).parseMapping!(FR)(npath, default_, ctx, null);
+        }
+        else
+            throw new MissingKeyException(path, FR.Name, node.startMark());
+    }
+
+    FR.Type convert (alias FR) ()
+    {
+        static if (__traits(getAliasThis, TLFR.Type).length == 1 &&
+                   __traits(getAliasThis, TLFR.Type)[0] == FR.FieldName)
+        {
+            static assert(FR.Name == FR.FieldName,
+                          "Field `" ~ fullyQualifiedName!(FR.Ref) ~
+                          "` is the target of an `alias this` and cannot have a `@Name` attribute");
+            static assert(!hasConverter!(FR.Ref),
+                          "Field `" ~ fullyQualifiedName!(FR.Ref) ~
+                          "` is the target of an `alias this` and cannot have a `@Converter` attribute");
+
+            alias convertW(string FieldName) = convert!(FieldRef!(FR.Type, FieldName, FR.Optional));
+            return FR.Type(staticMap!(convertW, FieldNameTuple!(FR.Type)));
+        }
+        else
+            return convertField!(FR)();
+    }
+
+    debug (ConfigFillerDebug)
+    {
+        indent++;
+        scope (exit) indent--;
+    }
+
+    TLFR.Type doValidation (TLFR.Type result)
+    {
+        static if (is(typeof(result.validate())))
+        {
+            if (enabledState)
+            {
+                dbgWrite("%s: Calling `%s` method",
+                         TLFR.Type.stringof.paint(Cyan), "validate()".paint(Green));
+                result.validate();
+            }
+            else
+            {
+                dbgWrite("%s: Ignoring `%s` method on disabled mapping",
+                         TLFR.Type.stringof.paint(Cyan), "validate()".paint(Green));
+            }
+        }
+        else if (enabledState)
+            dbgWrite("%s: No `%s` method found",
+                     TLFR.Type.stringof.paint(Cyan), "validate()".paint(Yellow));
+
+        return result;
+    }
+
+    // This might trigger things like "`this` is not accessible".
+    // In this case, the user most likely needs to provide a converter.
+    alias convertWrapper(string FieldName) = convert!(FieldRef!(TLFR.Type, FieldName));
+    return doValidation(TLFR.Type(staticMap!(convertWrapper, FieldNameTuple!(TLFR.Type))));
+}
+
+/*******************************************************************************
+
+    Parse a field, trying to match up the compile-time expectation with
+    the run-time value of the Node (`nodeID`).
+
+    This is the central point which does "type conversion", from the YAML node
+    to the field type. Whenever adding support for a new type, things should
+    happen here.
+
+    Because a `struct` can be filled from either a mapping or a scalar,
+    this function will first try the converter / fromString / string ctor
+    methods before defaulting to fieldwise construction.
+
+    Note that optional fields are checked before recursion happens,
+    so this method does not perform that check.
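+
+    As an example, a `struct` providing a `fromString` method will be parsed
+    from a scalar; the type below is illustrative, not part of this module:
+    ---
+    static struct Host
+    {
+        string value;
+        static Host fromString (string s) @safe pure { return Host(s); }
+    }
+    // A YAML entry such as `host: example.org` would call `Host.fromString`
+    ---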
+
+*******************************************************************************/
+
+package FR.Type parseField (alias FR)
+    (Node node, string path, auto ref FR.Type defaultValue, in Context ctx)
+{
+    if (node.nodeID == NodeID.invalid)
+        throw new TypeConfigException(node, "valid", path);
+
+    // If we reached this, it means the field is set, so just recurse
+    // to peel the type
+    static if (is(FR.Type : SetInfo!FT, FT))
+        return FR.Type(
+            parseField!(FieldRef!(FR.Type, "value"))(node, path, defaultValue, ctx),
+            true);
+
+    else static if (hasConverter!(FR.Ref))
+        return wrapException(node.viaConverter!(FR)(path, ctx), path, node.startMark());
+
+    else static if (hasFromYAML!(FR.Type))
+    {
+        scope impl = new ConfigParserImpl!(FR.Type)(node, path, ctx);
+        return wrapException(FR.Type.fromYAML(impl), path, node.startMark());
+    }
+
+    else static if (hasFromString!(FR.Type))
+        return wrapException(FR.Type.fromString(node.as!string), path, node.startMark());
+
+    else static if (hasStringCtor!(FR.Type))
+        return wrapException(FR.Type(node.as!string), path, node.startMark());
+
+    else static if (is(immutable(FR.Type) == immutable(core.time.Duration)))
+    {
+        if (node.nodeID != NodeID.mapping)
+            throw new DurationTypeConfigException(node, path);
+        return node.parseMapping!(StructFieldRef!DurationMapping)(
+            path, DurationMapping.make(defaultValue), ctx, null).opCast!Duration;
+    }
+
+    else static if (is(FR.Type == struct))
+    {
+        if (node.nodeID != NodeID.mapping)
+            throw new TypeConfigException(node, "mapping (object)", path);
+        return node.parseMapping!(FR)(path, defaultValue, ctx, null);
+    }
+
+    // Handle strings early as they match the sequence rule too
+    else static if (isSomeString!(FR.Type))
+        // Use `string` type explicitly because `Variant` thinks
+        // `immutable(char)[]` (aka `string`) and `immutable(char[])`
+        // (aka `immutable(string)`) are not compatible.
+        return node.parseScalar!(string)(path);
+    // Enums too, as their base type might be an array (including strings)
+    else static if (is(FR.Type == enum))
+        return node.parseScalar!(FR.Type)(path);
+
+    else static if (is(FR.Type : E[K], E, K))
+    {
+        if (node.nodeID != NodeID.mapping)
+            throw new TypeConfigException(node, "mapping (associative array)", path);
+
+        // Note: As of June 2022 (DMD v2.100.0), associative arrays cannot
+        // have initializers, hence their UX for config is less optimal.
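+        // Because of this, each entry below is parsed with `E.init` as its
+        // default rather than a user-provided initializer.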
+        return node.mapping().map!(
+                (Node.Pair pair) {
+                    return tuple(
+                        pair.key.get!K,
+                        pair.value.parseField!(NestedFieldRef!(E, FR))(
+                            format("%s[%s]", path, pair.key.as!string), E.init, ctx));
+                }).assocArray();
+
+    }
+    else static if (is(FR.Type : E[], E))
+    {
+        static if (hasUDA!(FR.Ref, Key))
+        {
+            static assert(getUDAs!(FR.Ref, Key).length == 1,
+                          "`" ~ fullyQualifiedName!(FR.Ref) ~
+                          "` field shouldn't have more than one `Key` attribute");
+            static assert(is(E == struct),
+                          "Field `" ~ fullyQualifiedName!(FR.Ref) ~
+                          "` has a `Key` attribute, but is a sequence of `" ~
+                          fullyQualifiedName!E ~ "`, not a sequence of `struct`");
+
+            string key = getUDAs!(FR.Ref, Key)[0].name;
+
+            if (node.nodeID != NodeID.mapping && node.nodeID != NodeID.sequence)
+                throw new TypeConfigException(node, "mapping (object) or sequence", path);
+
+            if (node.nodeID == NodeID.mapping) return node.mapping().map!(
+                (Node.Pair pair) {
+                    if (pair.value.nodeID != NodeID.mapping)
+                        throw new TypeConfigException(
+                            "sequence of " ~ pair.value.nodeTypeString(),
+                            "sequence of mapping (array of objects)",
+                            path, null, node.startMark());
+
+                    return pair.value.parseMapping!(StructFieldRef!E)(
+                        path.addPath(pair.key.as!string),
+                        E.init, ctx, key.length ? [ key: pair.key ] : null);
+                }).array();
+        }
+        if (node.nodeID != NodeID.sequence)
+            throw new TypeConfigException(node, "sequence (array)", path);
+
+        // We pass `E.init` as default value as it is not going to be used:
+        // Either there is something in the YAML document, and that will be
+        // converted, or `sequence` will not iterate.
+        return node.sequence.enumerate.map!(
+            kv => kv.value.parseField!(NestedFieldRef!(E, FR))(
+                format("%s[%s]", path, kv.index), E.init, ctx))
+            .array();
+    }
+    else
+    {
+        static assert (!is(FR.Type == union),
+                       "`union` is not supported. Use a converter instead");
+        return node.parseScalar!(FR.Type)(path);
+    }
+}
+
+/// Parse a node as a scalar
+private T parseScalar (T) (Node node, string path)
+{
+    if (node.nodeID != NodeID.scalar)
+        throw new TypeConfigException(node, "scalar (value)", path);
+
+    static if (is(T == enum))
+        return node.as!string.to!(T);
+    else
+        return node.as!(T);
+}
+
+/*******************************************************************************
+
+    Wrap a potentially throwing user-provided expression in a `ConfigException`
+
+    The user-provided hooks may throw (e.g. `fromString` / the constructor),
+    and the error may or may not be clear. We can't do anything about a bad
+    message but we can wrap the thrown exception in a `ConfigException`
+    to provide the location in the yaml file where the error happened.
+
+    Params:
+        exp = The expression that may throw
+        path = Path within the config file of the field
+        position = Position of the node in the YAML file
+        file = Call site file (otherwise the message would point to this function)
+        line = Call site line (see `file` reasoning)
+
+    Returns:
+        The result of `exp` evaluation.
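+
+    For reference, this is how `parseField` wraps the user-provided hooks:
+    ---
+    return wrapException(FR.Type.fromString(node.as!string), path, node.startMark());
+    ---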
+ +*******************************************************************************/ + +private T wrapException (T) (lazy T exp, string path, Mark position, + string file = __FILE__, size_t line = __LINE__) +{ + try + return exp; + catch (ConfigException exc) + throw exc; + catch (Exception exc) + throw new ConstructionException(exc, path, position, file, line); +} + +/// Allows us to reuse parseMapping and strict parsing +private struct DurationMapping +{ + public SetInfo!long weeks; + public SetInfo!long days; + public SetInfo!long hours; + public SetInfo!long minutes; + public SetInfo!long seconds; + public SetInfo!long msecs; + public SetInfo!long usecs; + public SetInfo!long hnsecs; + public SetInfo!long nsecs; + + private static DurationMapping make (Duration def) @safe pure nothrow @nogc + { + typeof(return) result; + auto fullSplit = def.split(); + result.weeks = SetInfo!long(fullSplit.weeks, fullSplit.weeks != 0); + result.days = SetInfo!long(fullSplit.days, fullSplit.days != 0); + result.hours = SetInfo!long(fullSplit.hours, fullSplit.hours != 0); + result.minutes = SetInfo!long(fullSplit.minutes, fullSplit.minutes != 0); + result.seconds = SetInfo!long(fullSplit.seconds, fullSplit.seconds != 0); + result.msecs = SetInfo!long(fullSplit.msecs, fullSplit.msecs != 0); + result.usecs = SetInfo!long(fullSplit.usecs, fullSplit.usecs != 0); + result.hnsecs = SetInfo!long(fullSplit.hnsecs, fullSplit.hnsecs != 0); + // nsecs is ignored by split as it's not representable in `Duration` + return result; + } + + /// + public void validate () const @safe + { + // That check should never fail, as the YAML parser would error out, + // but better be safe than sorry. + foreach (field; this.tupleof) + if (field.set) + return; + + throw new Exception( + "Expected at least one of the components (weeks, days, hours, " ~ + "minutes, seconds, msecs, usecs, hnsecs, nsecs) to be set"); + } + + /// Allow conversion to a `Duration` + public Duration opCast (T : Duration) () const scope @safe pure nothrow @nogc + { + return core.time.weeks(this.weeks) + core.time.days(this.days) + + core.time.hours(this.hours) + core.time.minutes(this.minutes) + + core.time.seconds(this.seconds) + core.time.msecs(this.msecs) + + core.time.usecs(this.usecs) + core.time.hnsecs(this.hnsecs) + + core.time.nsecs(this.nsecs); + } +} + +/// Evaluates to `true` if we should recurse into the struct via `parseMapping` +private enum mightBeOptional (alias FR) = is(FR.Type == struct) && + !is(immutable(FR.Type) == immutable(core.time.Duration)) && + !hasConverter!(FR.Ref) && !hasFromString!(FR.Type) && + !hasStringCtor!(FR.Type) && !hasFromYAML!(FR.Type); + +/// Convenience template to check for the presence of converter(s) +private enum hasConverter (alias Field) = hasUDA!(Field, Converter); + +/// Provided a field reference `FR` which is known to have at least one converter, +/// perform basic checks and return the value after applying the converter. 
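+/// For reference, a field using a converter could be declared like this
+/// (the field name is illustrative):
+/// ---
+/// @Converter!int((scope ConfigParser!int p) => p.node.as!string.to!int)
+/// int value;
+/// ---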
+private auto viaConverter (alias FR) (Node node, string path, in Context context)
+{
+    enum Converters = getUDAs!(FR.Ref, Converter);
+    static assert (Converters.length,
+                   "Internal error: `viaConverter` called on field `" ~
+                   FR.FieldName ~ "` with no converter");
+
+    static assert(Converters.length == 1,
+                  "Field `" ~ FR.FieldName ~ "` cannot have more than one `Converter`");
+
+    scope impl = new ConfigParserImpl!(FR.Type)(node, path, context);
+    return Converters[0].converter(impl);
+}
+
+private final class ConfigParserImpl (T) : ConfigParser!T
+{
+    private Node node_;
+    private string path_;
+    private const(Context) context_;
+
+    /// Ctor
+    public this (Node n, string p, const Context c) scope @safe pure nothrow @nogc
+    {
+        this.node_ = n;
+        this.path_ = p;
+        this.context_ = c;
+    }
+
+    public final override inout(Node) node () inout @safe pure nothrow @nogc
+    {
+        return this.node_;
+    }
+
+    public final override string path () const @safe pure nothrow @nogc
+    {
+        return this.path_;
+    }
+
+    protected final override const(Context) context () const @safe pure nothrow @nogc
+    {
+        return this.context_;
+    }
+}
+
+/// Helper predicate
+private template NameIs (string searching)
+{
+    enum bool Pred (alias FR) = (searching == FR.Name);
+}
+
+/// Returns whether or not the type has an `enabled` / `disabled` field,
+/// and its value. If it does not, returns `true`.
+private EnabledState isMappingEnabled (M) (Node node, auto ref M default_)
+{
+    import std.meta : Filter;
+
+    alias EMT = Filter!(NameIs!("enabled").Pred, FieldRefTuple!M);
+    alias DMT = Filter!(NameIs!("disabled").Pred, FieldRefTuple!M);
+
+    static if (EMT.length)
+    {
+        static assert (DMT.length == 0,
+                       "`enabled` field `" ~ EMT[0].FieldName ~
+                       "` conflicts with `disabled` field `" ~ DMT[0].FieldName ~ "`");
+
+        if (auto ptr = "enabled" in node)
+            return EnabledState(EnabledState.Field.Enabled, (*ptr).as!bool);
+        return EnabledState(EnabledState.Field.Enabled, __traits(getMember, default_, EMT[0].FieldName));
+    }
+    else static if (DMT.length)
+    {
+        if (auto ptr = "disabled" in node)
+            return EnabledState(EnabledState.Field.Disabled, (*ptr).as!bool);
+        return EnabledState(EnabledState.Field.Disabled, __traits(getMember, default_, DMT[0].FieldName));
+    }
+    else
+    {
+        return EnabledState(EnabledState.Field.None);
+    }
+}
+
+/// Return value of `isMappingEnabled`
+private struct EnabledState
+{
+    /// Used to determine which field controls a mapping enabled state
+    private enum Field
+    {
+        /// No such field, the mapping is considered enabled
+        None,
+        /// The field is named 'enabled'
+        Enabled,
+        /// The field is named 'disabled'
+        Disabled,
+    }
+
+    /// Check if the mapping is considered enabled
+    public bool opCast () const scope @safe pure @nogc nothrow
+    {
+        return this.field == Field.None ||
+            (this.field == Field.Enabled && this.fieldValue) ||
+            (this.field == Field.Disabled && !this.fieldValue);
+    }
+
+    /// Type of field found
+    private Field field;
+
+    /// Value of the field, interpretation depends on `field`
+    private bool fieldValue;
+}
+
+/// Evaluates to `true` if `T` is a `struct` with a default ctor
+private enum hasFieldwiseCtor (T) = (is(T == struct) && is(typeof(() => T(T.init.tupleof))));
+
+/// Evaluates to `true` if `T` has a static method that is designed to work with this library
+private enum hasFromYAML (T) = is(typeof(T.fromYAML(ConfigParser!(T).init)) : T);
+
+/// Evaluates to `true` if `T` has a static method that accepts a `string` and returns a `T`
+private enum hasFromString (T) =
is(typeof(T.fromString(string.init)) : T); + +/// Evaluates to `true` if `T` is a `struct` which accepts a single string as argument +private enum hasStringCtor (T) = (is(T == struct) && is(typeof(T.__ctor)) && + Parameters!(T.__ctor).length == 1 && + is(typeof(() => T(string.init)))); + +unittest +{ + static struct Simple + { + int value; + string otherValue; + } + + static assert( hasFieldwiseCtor!Simple); + static assert(!hasStringCtor!Simple); + + static struct PubKey + { + ubyte[] data; + + this (string hex) @safe pure nothrow @nogc{} + } + + static assert(!hasFieldwiseCtor!PubKey); + static assert( hasStringCtor!PubKey); + + static assert(!hasFieldwiseCtor!string); + static assert(!hasFieldwiseCtor!int); + static assert(!hasStringCtor!string); + static assert(!hasStringCtor!int); +} + +/// Convenience function to extend a YAML path +private string addPath (string opath, string newPart) +in(newPart.length) +do { + return opath.length ? format("%s.%s", opath, newPart) : newPart; +} diff --git a/source/configy/Test.d b/source/configy/Test.d new file mode 100644 index 0000000..a8a36a6 --- /dev/null +++ b/source/configy/Test.d @@ -0,0 +1,694 @@ +/******************************************************************************* + Contains all the tests for this library. + + Copyright: + Copyright (c) 2019-2022 BOSAGORA Foundation + All rights reserved. + + License: + MIT License. See LICENSE for details. + +*******************************************************************************/ + +module configy.Test; + +import configy.Attributes; +import configy.Exceptions; +import configy.Read; +import configy.Utils; + +import dyaml.node; + +import std.format; + +import core.time; + +/// Basic usage tests +unittest +{ + static struct Address + { + string address; + string city; + bool accessible; + } + + static struct Nested + { + Address address; + } + + static struct Config + { + bool enabled = true; + + string name = "Jessie"; + int age = 42; + double ratio = 24.42; + + Address address = { address: "Yeoksam-dong", city: "Seoul", accessible: true }; + + Nested nested = { address: { address: "Gangnam-gu", city: "Also Seoul", accessible: false } }; + } + + auto c1 = parseConfigString!Config("enabled: false", "/dev/null"); + assert(!c1.enabled); + assert(c1.name == "Jessie"); + assert(c1.age == 42); + assert(c1.ratio == 24.42); + + assert(c1.address.address == "Yeoksam-dong"); + assert(c1.address.city == "Seoul"); + assert(c1.address.accessible); + + assert(c1.nested.address.address == "Gangnam-gu"); + assert(c1.nested.address.city == "Also Seoul"); + assert(!c1.nested.address.accessible); +} + +// Tests for SetInfo +unittest +{ + static struct Address + { + string address; + string city; + bool accessible; + } + + static struct Config + { + SetInfo!int value; + SetInfo!int answer = 42; + SetInfo!string name = SetInfo!string("Lorene", false); + + SetInfo!Address address; + } + + auto c1 = parseConfigString!Config("value: 24", "/dev/null"); + assert(c1.value == 24); + assert(c1.value.set); + + assert(c1.answer.set); + assert(c1.answer == 42); + + assert(!c1.name.set); + assert(c1.name == "Lorene"); + + assert(!c1.address.set); + + auto c2 = parseConfigString!Config(` +name: Lorene +address: + address: Somewhere + city: Over the rainbow +`, "/dev/null"); + + assert(!c2.value.set); + assert(c2.name == "Lorene"); + assert(c2.name.set); + assert(c2.address.set); + assert(c2.address.address == "Somewhere"); + assert(c2.address.city == "Over the rainbow"); +} + +unittest +{ + static struct Nested { 
core.time.Duration timeout; } + static struct Config { Nested node; } + + try + { + auto result = parseConfigString!Config("node:\n timeout:", "/dev/null"); + assert(0); + } + catch (Exception exc) + { + assert(exc.toString() == "/dev/null(1:10): node.timeout: Field is of type scalar, " ~ + "but expected a mapping with at least one of: weeks, days, hours, minutes, " ~ + "seconds, msecs, usecs, hnsecs, nsecs"); + } + + { + auto result = parseConfigString!Nested("timeout:\n days: 10\n minutes: 100\n hours: 3\n", "/dev/null"); + assert(result.timeout == 10.days + 4.hours + 40.minutes); + } +} + +unittest +{ + static struct Config { string required; } + try + auto result = parseConfigString!Config("value: 24", "/dev/null"); + catch (ConfigException e) + { + assert(format("%s", e) == + "/dev/null(0:0): value: Key is not a valid member of this section. There are 1 valid keys: required"); + assert(format("%S", e) == + format("%s/dev/null%s(%s0%s:%s0%s): %svalue%s: Key is not a valid member of this section. " ~ + "There are %s1%s valid keys: %srequired%s", Yellow, Reset, Cyan, Reset, Cyan, Reset, + Yellow, Reset, Yellow, Reset, Green, Reset)); + } +} + +// Test for various type errors +unittest +{ + static struct Mapping + { + string value; + } + + static struct Config + { + @Optional Mapping map; + @Optional Mapping[] array; + int scalar; + } + + try + { + auto result = parseConfigString!Config("map: Hello World", "/dev/null"); + assert(0); + } + catch (ConfigException exc) + { + assert(exc.toString() == "/dev/null(0:5): map: Expected to be of type mapping (object), but is a scalar"); + } + + try + { + auto result = parseConfigString!Config("map:\n - Hello\n - World", "/dev/null"); + assert(0); + } + catch (ConfigException exc) + { + assert(exc.toString() == "/dev/null(1:2): map: Expected to be of type mapping (object), but is a sequence"); + } + + try + { + auto result = parseConfigString!Config("scalar:\n - Hello\n - World", "/dev/null"); + assert(0); + } + catch (ConfigException exc) + { + assert(exc.toString() == "/dev/null(1:2): scalar: Expected to be of type scalar (value), but is a sequence"); + } + + try + { + auto result = parseConfigString!Config("scalar:\n hello:\n World", "/dev/null"); + assert(0); + } + catch (ConfigException exc) + { + assert(exc.toString() == "/dev/null(1:2): scalar: Expected to be of type scalar (value), but is a mapping"); + } +} + +// Test for strict mode +unittest +{ + static struct Config + { + string value; + string valhu; + string halvue; + } + + try + { + auto result = parseConfigString!Config("valeu: This is a typo", "/dev/null"); + assert(0); + } + catch (ConfigException exc) + { + assert(exc.toString() == "/dev/null(0:0): valeu: Key is not a valid member of this section. 
Did you mean: value, valhu");
+    }
+}
+
+// Test for required key
+unittest
+{
+    static struct Nested
+    {
+        string required;
+        string optional = "Default";
+    }
+
+    static struct Config
+    {
+        Nested inner;
+    }
+
+    try
+    {
+        auto result = parseConfigString!Config("inner:\n  optional: Not the default value", "/dev/null");
+        assert(0);
+    }
+    catch (ConfigException exc)
+    {
+        assert(exc.toString() == "/dev/null(1:2): inner.required: Required key was not found in configuration or command line arguments");
+    }
+}
+
+// Testing 'validate()' on nested structures
+unittest
+{
+    __gshared int validateCalls0 = 0;
+    __gshared int validateCalls1 = 1;
+    __gshared int validateCalls2 = 2;
+
+    static struct SecondLayer
+    {
+        string value = "default";
+
+        public void validate () const
+        {
+            validateCalls2++;
+        }
+    }
+
+    static struct FirstLayer
+    {
+        bool enabled = true;
+        SecondLayer ltwo;
+
+        public void validate () const
+        {
+            validateCalls1++;
+        }
+    }
+
+    static struct Config
+    {
+        FirstLayer lone;
+
+        public void validate () const
+        {
+            validateCalls0++;
+        }
+    }
+
+    auto r1 = parseConfigString!Config("lone:\n  ltwo:\n    value: Something\n", "/dev/null");
+
+    assert(r1.lone.ltwo.value == "Something");
+    // `validateCalls` are given different values to avoid false positives
+    // if they are set to 0 / mixed up
+    assert(validateCalls0 == 1);
+    assert(validateCalls1 == 2);
+    assert(validateCalls2 == 3);
+
+    auto r2 = parseConfigString!Config("lone:\n  enabled: false\n", "/dev/null");
+    assert(validateCalls0 == 2); // + 1
+    assert(validateCalls1 == 2); // Others are disabled
+    assert(validateCalls2 == 3);
+}
+
+// Test the throwing ctor / fromString
+unittest
+{
+    static struct ThrowingFromString
+    {
+        public static ThrowingFromString fromString (scope const(char)[] value)
+            @safe pure
+        {
+            throw new Exception("Some meaningful error message");
+        }
+
+        public int value;
+    }
+
+    static struct ThrowingCtor
+    {
+        public this (scope const(char)[] value)
+            @safe pure
+        {
+            throw new Exception("Something went wrong... Obviously");
+        }
+
+        public int value;
+    }
+
+    static struct InnerConfig
+    {
+        public int value;
+        @Optional ThrowingCtor ctor;
+        @Optional ThrowingFromString fromString;
+
+        @Converter!int(
+            (scope ConfigParser!int parser) {
+                // We have to trick DMD a bit so that it infers an `int` return
+                // type but doesn't emit a "Statement is not reachable" warning
+                if (parser.node is Node.init || parser.node !is Node.init )
+                    throw new Exception("You shall not pass");
+                return 42;
+            })
+        @Optional int converter;
+    }
+
+    static struct Config
+    {
+        public InnerConfig config;
+    }
+
+    try
+    {
+        auto result = parseConfigString!Config("config:\n  value: 42\n  ctor: 42", "/dev/null");
+        assert(0);
+    }
+    catch (ConfigException exc)
+    {
+        assert(exc.toString() == "/dev/null(2:8): config.ctor: Something went wrong... 
Obviously"); + } + + try + { + auto result = parseConfigString!Config("config:\n value: 42\n fromString: 42", "/dev/null"); + assert(0); + } + catch (ConfigException exc) + { + assert(exc.toString() == "/dev/null(2:14): config.fromString: Some meaningful error message"); + } + + try + { + auto result = parseConfigString!Config("config:\n value: 42\n converter: 42", "/dev/null"); + assert(0); + } + catch (ConfigException exc) + { + assert(exc.toString() == "/dev/null(2:13): config.converter: You shall not pass"); + } + + // We also need to test with arrays, to ensure they are correctly called + static struct InnerArrayConfig + { + @Optional int value; + @Optional ThrowingCtor ctor; + @Optional ThrowingFromString fromString; + } + + static struct ArrayConfig + { + public InnerArrayConfig[] configs; + } + + try + { + auto result = parseConfigString!ArrayConfig("configs:\n - ctor: something", "/dev/null"); + assert(0); + } + catch (ConfigException exc) + { + assert(exc.toString() == "/dev/null(1:10): configs[0].ctor: Something went wrong... Obviously"); + } + + try + { + auto result = parseConfigString!ArrayConfig( + "configs:\n - value: 42\n - fromString: something", "/dev/null"); + assert(0); + } + catch (ConfigException exc) + { + assert(exc.toString() == "/dev/null(2:16): configs[1].fromString: Some meaningful error message"); + } +} + +// Test duplicate fields detection +unittest +{ + static struct Config + { + @Name("shadow") int value; + @Name("value") int shadow; + } + + auto result = parseConfigString!Config("shadow: 42\nvalue: 84\n", "/dev/null"); + assert(result.value == 42); + assert(result.shadow == 84); + + static struct BadConfig + { + int value; + @Name("value") int something; + } + + // Cannot test the error message, so this is as good as it gets + static assert(!is(typeof(() { + auto r = parseConfigString!BadConfig("shadow: 42\nvalue: 84\n", "/dev/null"); + }))); +} + +// Test a renamed `enabled` / `disabled` +unittest +{ + static struct ConfigA + { + @Name("enabled") bool shouldIStay; + int value; + } + + static struct ConfigB + { + @Name("disabled") bool orShouldIGo; + int value; + } + + { + auto c = parseConfigString!ConfigA("enabled: true\nvalue: 42", "/dev/null"); + assert(c.shouldIStay == true); + assert(c.value == 42); + } + + { + auto c = parseConfigString!ConfigB("disabled: false\nvalue: 42", "/dev/null"); + assert(c.orShouldIGo == false); + assert(c.value == 42); + } +} + +// Test for 'mightBeOptional' & missing key +unittest +{ + static struct RequestLimit { size_t reqs = 100; } + static struct Nested { @Name("jay") int value; } + static struct Config { @Name("chris") Nested value; RequestLimit limits; } + + auto r = parseConfigString!Config("chris:\n jay: 42", "/dev/null"); + assert(r.limits.reqs == 100); + + try + { + auto _ = parseConfigString!Config("limits:\n reqs: 42", "/dev/null"); + } + catch (ConfigException exc) + { + assert(exc.toString() == "(0:0): chris.jay: Required key was not found in configuration or command line arguments"); + } +} + +// Support for associative arrays +unittest +{ + static struct Nested + { + int[string] answers; + } + + static struct Parent + { + Nested[string] questions; + string[int] names; + } + + auto c = parseConfigString!Parent( +`names: + 42: "Forty two" + 97: "Quatre vingt dix sept" +questions: + first: + answers: + # Need to use quotes here otherwise it gets interpreted as + # true / false, perhaps a dyaml issue ? 
+ 'yes': 42 + 'no': 24 + second: + answers: + maybe: 69 + whynot: 20 +`, "/dev/null"); + + assert(c.names == [42: "Forty two", 97: "Quatre vingt dix sept"]); + assert(c.questions.length == 2); + assert(c.questions["first"] == Nested(["yes": 42, "no": 24])); + assert(c.questions["second"] == Nested(["maybe": 69, "whynot": 20])); +} + +unittest +{ + static struct FlattenMe + { + int value; + string name; + } + + static struct Config + { + FlattenMe flat = FlattenMe(24, "Four twenty"); + alias flat this; + + FlattenMe not_flat; + } + + auto c = parseConfigString!Config( + "value: 42\nname: John\nnot_flat:\n value: 69\n name: Henry", + "/dev/null"); + assert(c.flat.value == 42); + assert(c.flat.name == "John"); + assert(c.not_flat.value == 69); + assert(c.not_flat.name == "Henry"); + + auto c2 = parseConfigString!Config( + "not_flat:\n value: 69\n name: Henry", "/dev/null"); + assert(c2.flat.value == 24); + assert(c2.flat.name == "Four twenty"); + + static struct OptConfig + { + @Optional FlattenMe flat; + alias flat this; + + int value; + } + auto c3 = parseConfigString!OptConfig("value: 69\n", "/dev/null"); + assert(c3.value == 69); +} + +unittest +{ + static struct Config + { + @Name("names") + string[] names_; + + size_t names () const scope @safe pure nothrow @nogc + { + return this.names_.length; + } + } + + auto c = parseConfigString!Config("names:\n - John\n - Luca\n", "/dev/null"); + assert(c.names_ == [ "John", "Luca" ]); + assert(c.names == 2); +} + +unittest +{ + static struct BuildTemplate + { + string targetName; + string platform; + } + static struct BuildConfig + { + BuildTemplate config; + alias config this; + } + static struct Config + { + string name; + + @Optional BuildConfig config; + alias config this; + } + + auto c = parseConfigString!Config("name: dummy\n", "/dev/null"); + assert(c.name == "dummy"); + + auto c2 = parseConfigString!Config("name: dummy\nplatform: windows\n", "/dev/null"); + assert(c2.name == "dummy"); + assert(c2.config.platform == "windows"); +} + +// Make sure unions don't compile +unittest +{ + static union MyUnion + { + string value; + int number; + } + + static struct Config + { + MyUnion hello; + } + + static assert(!is(typeof(parseConfigString!Config("hello: world\n", "/dev/null")))); + static assert(!is(typeof(parseConfigString!MyUnion("hello: world\n", "/dev/null")))); +} + +// Test the `@Key` attribute +unittest +{ + static struct Interface + { + string name; + string static_ip; + } + + static struct Config + { + string profile; + + @Key("name") + immutable(Interface)[] ifaces = [ + Interface("lo", "127.0.0.1"), + ]; + } + + auto c = parseConfigString!Config(`profile: default +ifaces: + eth0: + static_ip: "192.168.1.42" + lo: + static_ip: "127.0.0.42" +`, "/dev/null"); + assert(c.ifaces.length == 2); + assert(c.ifaces == [ Interface("eth0", "192.168.1.42"), Interface("lo", "127.0.0.42")]); +} + +// Nested ConstructionException +unittest +{ + static struct WillFail + { + string name; + this (string value) @safe pure + { + throw new Exception("Parsing failed!"); + } + } + + static struct Container + { + WillFail[] array; + } + + static struct Config + { + Container data; + } + + try auto c = parseConfigString!Config(`data: + array: + - Not + - Working +`, "/dev/null"); + catch (Exception exc) + assert(exc.toString() == `/dev/null(2:6): data.array[0]: Parsing failed!`); +} diff --git a/source/configy/Utils.d b/source/configy/Utils.d new file mode 100644 index 0000000..f2ce79d --- /dev/null +++ b/source/configy/Utils.d @@ -0,0 +1,124 @@ 
+/*******************************************************************************
+
+    Utilities used internally by the config parser.
+
+    Compile this library with `-debug=ConfigFillerDebug` to get verbose output.
+    This can be achieved with `debugVersions` in dub, or by depending on the
+    `debug` configuration provided by `dub.json`.
+
+    Copyright:
+        Copyright (c) 2019-2022 BOSAGORA Foundation
+        All rights reserved.
+
+    License:
+        MIT License. See LICENSE for details.
+
+*******************************************************************************/
+
+module configy.Utils;
+
+import std.format;
+
+/// Type of sink used by `toString`
+package alias SinkType = void delegate (in char[]) @safe;
+
+/*******************************************************************************
+
+    Debugging utility for config filler
+
+    Since this module does a lot of meta-programming, some things can easily
+    go wrong. For example, a condition being false might happen because it is
+    genuinely false or because the condition is buggy.
+
+    To make it easier to figure out whether a config is properly parsed,
+    a little utility (config-dumper) exists, which will provide a verbose
+    output of what the config filler does. To do this, `config-dumper` is
+    compiled with the below `debug` version.
+
+*******************************************************************************/
+
+debug (ConfigFillerDebug)
+{
+    /// A thin wrapper around `stderr.writefln` with indentation
+    package void dbgWrite (Args...) (string fmt, Args args)
+    {
+        import std.stdio;
+        stderr.write(IndentChars[0 .. indent >= IndentChars.length ? $ : indent]);
+        stderr.writefln(fmt, args);
+    }
+
+    /// Log a value that is to be returned
+    /// The value will be the first argument and painted yellow
+    package T dbgWriteRet (T, Args...) (auto ref T return_, string fmt, Args args)
+    {
+        dbgWrite(fmt, return_.paint(Yellow), args);
+        return return_;
+    }
+
+    /// The current indentation
+    package size_t indent;
+
+    /// Helper for indentation (who needs more than 16 levels of indent?)
+    private immutable IndentChars = "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t";
+}
+else
+{
+    /// No-op
+    package void dbgWrite (Args...) (string fmt, lazy Args args) {}
+
+    /// Ditto
+    package T dbgWriteRet (T, Args...) (auto ref T return_, string fmt, lazy Args args)
+    {
+        return return_;
+    }
+}
+
+/// Thin wrapper to simplify colorization
+package struct Colored (T)
+{
+    /// Color used
+    private string color;
+
+    /// Value to print
+    private T value;
+
+    /// Hook for `formattedWrite`
+    public void toString (scope SinkType sink)
+    {
+        static if (is(typeof(T.init.length) : size_t))
+            if (this.value.length == 0) return;
+
+        formattedWrite(sink, "%s%s%s", this.color, this.value, Reset);
+    }
+}
+
+/// Ditto
+package Colored!T paint (T) (T arg, string color)
+{
+    return Colored!T(color, arg);
+}
+
+/// Paint `arg` in color `ifTrue` if `cond` evaluates to `true`, use color `ifFalse` otherwise
+package Colored!T paintIf (T) (T arg, bool cond, string ifTrue, string ifFalse)
+{
+    return Colored!T(cond ? ifTrue : ifFalse, arg);
+}
+
+/// Paint a boolean in green if `true`, red otherwise, unless `reverse` is set to `true`,
+/// in which case the colors are swapped
+package Colored!bool paintBool (bool value, bool reverse = false)
+{
+    return value.paintIf(reverse ^ value, Green, Red);
+}
+
+/// Reset the foreground color used
+package immutable Reset = "\u001b[0m";
+/// Set the foreground color to red, used for `false`, missing, errors, etc...
+package immutable Red = "\u001b[31m";
+/// Set the foreground color to yellow, used for warnings and other things
+/// that should draw attention but do not pose an immediate issue
+package immutable Yellow = "\u001b[33m";
+/// Set the foreground color to green, used for `true`, present, etc...
+package immutable Green = "\u001b[32m";
+/// Set the foreground color to cyan, used for field names / paths
+package immutable Cyan = "\u001b[36m";
diff --git a/source/dub/commandline.d b/source/dub/commandline.d
index d7be6fe..752437b 100644
--- a/source/dub/commandline.d
+++ b/source/dub/commandline.d
@@ -12,9 +12,9 @@
 import dub.dub;
 import dub.generators.generator;
 import dub.internal.vibecompat.core.file;
-import dub.internal.vibecompat.core.log;
 import dub.internal.vibecompat.data.json;
 import dub.internal.vibecompat.inet.path;
+import dub.internal.logging;
 import dub.package_;
 import dub.packagemanager;
 import dub.packagesuppliers;
@@ -32,6 +32,7 @@
 import std.process;
 import std.stdio;
 import std.string;
+import std.sumtype;
 import std.typecons : Tuple, tuple;
 import std.variant;
 import std.path: setExtension;
@@ -58,10 +59,8 @@
 	),
 	CommandGroup("Package management",
 		new FetchCommand,
-		new InstallCommand,
 		new AddCommand,
 		new RemoveCommand,
-		new UninstallCommand,
 		new UpgradeCommand,
 		new AddPathCommand,
 		new RemovePathCommand,
@@ -94,7 +93,7 @@
 		string[] remaining;
 	}
 
-	if (args.length >= 1 && !args[0].startsWith("-")) {
+	if (args.length >= 1 && !args[0].startsWith("-") && !args[0].canFind(":")) {
 		return Result(args[0], args[1 .. $]);
 	}
 
@@ -118,6 +117,10 @@
 	/// It returns an empty string when it starts with `-`
 	assert(extractCommandNameArgument(["-test"]).value == "");
+
+	// Sub package names are ignored as command names
+	assert(extractCommandNameArgument(["foo:bar"]).value == "");
+	assert(extractCommandNameArgument([":foo"]).value == "");
 }
 
 /** Handles the Command Line options and commands.
@@ -163,6 +166,27 @@
 	{
 		options.root_path = options.root_path.expandTilde.absolutePath.buildNormalizedPath;
 	}
+
+	final switch (options.color_mode) with (options.color)
+	{
+	case automatic:
+		// Use default determined in internal.logging.initLogging().
+		break;
+	case on:
+		foreach (ref grp; commandGroups)
+			foreach (ref cmd; grp.commands)
+				if (auto pc = cast(PackageBuildCommand)cmd)
+					pc.baseSettings.buildSettings.options |= BuildOption.color;
+		setLoggingColorsEnabled(true); // enable colors, no matter what
+		break;
+	case off:
+		foreach (ref grp; commandGroups)
+			foreach (ref cmd; grp.commands)
+				if (auto pc = cast(PackageBuildCommand)cmd)
+					pc.baseSettings.buildSettings.options &= ~BuildOption.color;
+		setLoggingColorsEnabled(false); // disable colors, no matter what
+		break;
+	}
 }
 
 /** Get an instance of the requested command.
@@ -262,7 +286,7 @@
 	// make the CWD package available so that for example sub packages can reference their
 	// parent package.
-	try dub.packageManager.getOrLoadPackage(NativePath(options.root_path));
+	try dub.packageManager.getOrLoadPackage(NativePath(options.root_path), NativePath.init, false, StrictMode.Warn);
 	catch (Exception e) { logDiagnostic("No valid package found in current working directory: %s", e.msg); }
 
 	return dub;
@@ -275,7 +299,7 @@
 	handler.commandGroups = getCommands();
 
 	assert(handler.commandNames == ["init", "run", "build", "test", "lint", "generate",
-		"describe", "clean", "dustmite", "fetch", "install", "add", "remove", "uninstall",
+		"describe", "clean", "dustmite", "fetch", "add", "remove",
 		"upgrade", "add-path", "remove-path", "add-local", "remove-local", "list", "search",
 		"add-override", "remove-override", "list-overrides", "clean-caches", "convert"]);
 }
@@ -391,6 +415,15 @@
 */
 int runDubCommandLine(string[] args)
 {
+	static string[] toSinglePackageArgs (string args0, string file, string[] trailing)
+	{
+		return [args0, "run", "-q", "--temp-build", "--single", file, "--"] ~ trailing;
+	}
+
+	// Initialize the logging module; detect whether stdout/stderr are a TTY
+	// so that colors can be disabled if the output isn't a console
+	initLogging();
+
 	logDiagnostic("DUB version %s", getDUBVersion());
 
 	version(Windows){
@@ -403,34 +436,49 @@
 	auto handler = CommandLineHandler(getCommands());
 	auto commandNames = handler.commandNames();
 
-	// special stdin syntax
-	if (args.length >= 2 && args[1] == "-")
+	// Special syntaxes need to be handled before regular argument parsing
+	if (args.length >= 2)
 	{
-		auto path = getTempFile("app", ".d");
-		stdin.byChunk(4096).joiner.toFile(path.toNativeString());
-		args = args[0] ~ [path.toNativeString()] ~ args[2..$];
-	}
-
-	// Shebang syntax support for files without .d extension
-	if (args.length >= 2 && !args[1].endsWith(".d") && !args[1].startsWith("-") && !commandNames.canFind(args[1])) {
-		if (exists(args[1])) {
+		// Read input source code from stdin
+		if (args[1] == "-")
+		{
 			auto path = getTempFile("app", ".d");
-			copy(args[1], path.toNativeString());
-			args[1] = path.toNativeString();
-		} else if (exists(args[1].setExtension(".d"))) {
-			args[1] = args[1].setExtension(".d");
+			stdin.byChunk(4096).joiner.toFile(path.toNativeString());
+			args = toSinglePackageArgs(args[0], path.toNativeString(), args[2 .. $]);
 		}
-	}
 
-	// special single-file package shebang syntax
-	if (args.length >= 2 && args[1].endsWith(".d")) {
-		args = args[0] ~ ["run", "-q", "--temp-build", "--single", args[1], "--"] ~ args[2 ..$];
+		// Dub has a shebang syntax to be able to use it as a script, e.g.
+		// #!/usr/bin/env dub
+		// With this approach, we need to support the file having
+		// either the `.d` extension, or none at all.
+		// We also need to make sure arguments passed to the script
+		// are passed to the program, not `dub`, e.g.:
+		// ./my_dub_script foo bar
+		// Gives us `args = [ "dub", "./my_dub_script", "foo", "bar" ]`,
+		// which we need to interpret as:
+		// `args = [ "dub", "./my_dub_script", "--", "foo", "bar" ]`
+		else if (args[1].endsWith(".d"))
+			args = toSinglePackageArgs(args[0], args[1], args[2 .. $]);
+
+		// Here we have a problem: What if the script name is a command name?
+		// We have to assume it isn't, and to reduce the risk of false positives
+		// we only consider the case where the file name is the first argument,
+		// as the shell invocation cannot be controlled.
+ else if (!commandNames.canFind(args[1]) && !args[1].startsWith("-")) { + if (exists(args[1])) { + auto path = getTempFile("app", ".d"); + copy(args[1], path.toNativeString()); + args = toSinglePackageArgs(args[0], path.toNativeString(), args[2 .. $]); + } else if (exists(args[1].setExtension(".d"))) { + args = toSinglePackageArgs(args[0], args[1].setExtension(".d"), args[2 .. $]); + } + } } auto common_args = new CommandArgs(args[1..$]); try handler.prepareOptions(common_args); - catch (Throwable e) { + catch (Exception e) { logError("Error processing arguments: %s", e.msg); logDiagnostic("Full exception: %s", e.toString().sanitize); logInfo("Run 'dub help' for usage information."); @@ -453,7 +501,7 @@ try { cmd = handler.prepareCommand(command_name_argument.value, command_args); - } catch (Throwable e) { + } catch (Exception e) { logError("Error processing arguments: %s", e.msg); logDiagnostic("Full exception: %s", e.toString().sanitize); logInfo("Run 'dub help' for usage information."); @@ -479,8 +527,8 @@ auto remaining_args = command_args.extractRemainingArgs(); if (remaining_args.any!(a => a.startsWith("-"))) { - logError("Unknown command line flags: %s", remaining_args.filter!(a => a.startsWith("-")).array.join(" ")); - logError(`Type "dub %s -h" to get a list of all supported flags.`, cmd.name); + logError("Unknown command line flags: %s", remaining_args.filter!(a => a.startsWith("-")).array.join(" ").color(Mode.bold)); + logInfo(`Type "%s" to get a list of all supported flags.`, text("dub ", cmd.name, " -h").color(Mode.bold)); return 1; } @@ -494,12 +542,20 @@ // execute the command try return cmd.execute(dub, remaining_args, command_args.appArgs); catch (UsageException e) { + // usage exceptions get thrown before any logging, so we are + // making the errors more narrow to better fit on small screens. + tagWidth.push(5); logError("%s", e.msg); logDebug("Full exception: %s", e.toString().sanitize); - logInfo(`Run "dub %s -h" for more information about the "%s" command.`, cmd.name, cmd.name); + logInfo(`Run "%s" for more information about the "%s" command.`, + text("dub ", cmd.name, " -h").color(Mode.bold), cmd.name.color(Mode.bold)); return 1; } - catch (Throwable e) { + catch (Exception e) { + // most exceptions get thrown before logging, so same thing here as + // above. However this might be subject to change if it results in + // weird behavior anywhere. + tagWidth.push(5); logError("%s", e.msg); logDebug("Full exception: %s", e.toString().sanitize); return 2; @@ -514,6 +570,8 @@ bool help, annotate, bare; string[] registry_urls; string root_path; + enum color { automatic, on, off } // Lower case "color" in support of invalid option error formatting. + color color_mode = color.automatic; SkipPackageSuppliers skipRegistry = SkipPackageSuppliers.none; PlacementLocation placementLocation = PlacementLocation.user; @@ -541,6 +599,13 @@ args.getopt("q|quiet", &quiet, ["Only print warnings and errors"]); args.getopt("verror", &verror, ["Only print errors"]); args.getopt("vquiet", &vquiet, ["Print no messages"]); + args.getopt("color", &color_mode, [ + "Configure colored output. 
Accepted values:", + " automatic: Colored output on console/terminal,", + " unless NO_COLOR is set and non-empty (default)", + " on: Force colors enabled", + " off: Force colors disabled" + ]); args.getopt("cache", &placementLocation, ["Puts any fetched packages in the specified location [local|system|user]."]); version_ = args.hasAppVersion; @@ -613,7 +678,7 @@ { foreach (ref arg; m_recognizedArgs) if (names == arg.names) { - assert(help_text is null); + assert(help_text is null, format!("Duplicated argument '%s' must not change helptext, consider to remove the duplication")(names)); *var = arg.value.get!T; return; } @@ -742,22 +807,16 @@ private bool loadCwdPackage(Dub dub, bool warn_missing_package) { - bool found; - foreach (f; packageInfoFiles) - if (existsFile(dub.rootPath ~ f.filename)) - { - found = true; - break; - } + auto filePath = Package.findPackageFile(dub.rootPath); - if (!found) { + if (filePath.empty) { if (warn_missing_package) { - logInfo(""); - logInfo("No package manifest (dub.json or dub.sdl) was found in"); - logInfo(dub.rootPath.toNativeString()); - logInfo("Please run DUB from the root directory of an existing package, or run"); - logInfo("\"dub init --help\" to get information on creating a new package."); - logInfo(""); + logInfoNoTag(""); + logInfoNoTag("No package manifest (dub.json or dub.sdl) was found in"); + logInfoNoTag(dub.rootPath.toNativeString()); + logInfoNoTag("Please run DUB from the root directory of an existing package, or run"); + logInfoNoTag("\"%s\" to get information on creating a new package.", "dub init --help".color(Mode.bold)); + logInfoNoTag(""); } return false; } @@ -881,7 +940,7 @@ fmt = rawfmt.to!PackageFormat; break; } catch (Exception) { - logError("Invalid format, \""~rawfmt~"\", enter either \"sdl\" or \"json\"."); + logError(`Invalid format '%s', enter either 'sdl' or 'json'.`, rawfmt); } } auto author = p.authors.join(", "); @@ -891,7 +950,7 @@ auto nameRegex = regex(`^[a-z0-9\-_]+$`); string triedName = input("Name", p.name); if (triedName.matchFirst(nameRegex).empty) { - logError("Invalid name, \""~triedName~"\", names should consist only of lowercase alphanumeric characters, - and _."); + logError(`Invalid name '%s', names should consist only of lowercase alphanumeric characters, dashes ('-') and underscores ('_').`, triedName); } else { p.name = triedName; break; @@ -928,31 +987,24 @@ abstract class PackageBuildCommand : Command { protected { - string m_buildType; - BuildMode m_buildMode; - string m_buildConfig; string m_compilerName; string m_arch; string[] m_debugVersions; string[] m_overrideConfigs; - Compiler m_compiler; - BuildPlatform m_buildPlatform; - BuildSettings m_buildSettings; + GeneratorSettings baseSettings; string m_defaultConfig; bool m_nodeps; bool m_forceRemove = false; - bool m_single; - bool m_filterVersions = false; } override void prepare(scope CommandArgs args) { - args.getopt("b|build", &m_buildType, [ + args.getopt("b|build", &this.baseSettings.buildType, [ "Specifies the type of build to perform. Note that setting the DFLAGS environment variable will override the build type with custom flags.", "Possible names:", " "~builtinBuildTypes.join(", ")~" and custom types" ]); - args.getopt("c|config", &m_buildConfig, [ + args.getopt("c|config", &this.baseSettings.config, [ "Builds the specified configuration. 
Configurations can be defined in dub.json" ]); args.getopt("override-config", &m_overrideConfigs, [ @@ -973,17 +1025,17 @@ args.getopt("nodeps", &m_nodeps, [ "Do not resolve missing dependencies before building" ]); - args.getopt("build-mode", &m_buildMode, [ + args.getopt("build-mode", &this.baseSettings.buildMode, [ "Specifies the way the compiler and linker are invoked. Valid values:", " separate (default), allAtOnce, singleFile" ]); - args.getopt("single", &m_single, [ + args.getopt("single", &this.baseSettings.single, [ "Treats the package name as a filename. The file must contain a package recipe comment." ]); args.getopt("force-remove", &m_forceRemove, [ "Deprecated option that does nothing." ]); - args.getopt("filter-versions", &m_filterVersions, [ + args.getopt("filter-versions", &this.baseSettings.filterVersions, [ "[Experimental] Filter version identifiers and debug version identifiers to improve build cache efficiency." ]); } @@ -998,43 +1050,45 @@ { if (!m_compilerName.length) m_compilerName = dub.defaultCompiler; if (!m_arch.length) m_arch = dub.defaultArchitecture; - if (dub.defaultLowMemory) m_buildSettings.options |= BuildOption.lowmem; - if (dub.defaultEnvironments) m_buildSettings.addEnvironments(dub.defaultEnvironments); - if (dub.defaultBuildEnvironments) m_buildSettings.addBuildEnvironments(dub.defaultBuildEnvironments); - if (dub.defaultRunEnvironments) m_buildSettings.addRunEnvironments(dub.defaultRunEnvironments); - if (dub.defaultPreGenerateEnvironments) m_buildSettings.addPreGenerateEnvironments(dub.defaultPreGenerateEnvironments); - if (dub.defaultPostGenerateEnvironments) m_buildSettings.addPostGenerateEnvironments(dub.defaultPostGenerateEnvironments); - if (dub.defaultPreBuildEnvironments) m_buildSettings.addPreBuildEnvironments(dub.defaultPreBuildEnvironments); - if (dub.defaultPostBuildEnvironments) m_buildSettings.addPostBuildEnvironments(dub.defaultPostBuildEnvironments); - if (dub.defaultPreRunEnvironments) m_buildSettings.addPreRunEnvironments(dub.defaultPreRunEnvironments); - if (dub.defaultPostRunEnvironments) m_buildSettings.addPostRunEnvironments(dub.defaultPostRunEnvironments); - m_compiler = getCompiler(m_compilerName); - m_buildPlatform = m_compiler.determinePlatform(m_buildSettings, m_compilerName, m_arch); - m_buildSettings.addDebugVersions(m_debugVersions); + if (dub.defaultLowMemory) this.baseSettings.buildSettings.options |= BuildOption.lowmem; + if (dub.defaultEnvironments) this.baseSettings.buildSettings.addEnvironments(dub.defaultEnvironments); + if (dub.defaultBuildEnvironments) this.baseSettings.buildSettings.addBuildEnvironments(dub.defaultBuildEnvironments); + if (dub.defaultRunEnvironments) this.baseSettings.buildSettings.addRunEnvironments(dub.defaultRunEnvironments); + if (dub.defaultPreGenerateEnvironments) this.baseSettings.buildSettings.addPreGenerateEnvironments(dub.defaultPreGenerateEnvironments); + if (dub.defaultPostGenerateEnvironments) this.baseSettings.buildSettings.addPostGenerateEnvironments(dub.defaultPostGenerateEnvironments); + if (dub.defaultPreBuildEnvironments) this.baseSettings.buildSettings.addPreBuildEnvironments(dub.defaultPreBuildEnvironments); + if (dub.defaultPostBuildEnvironments) this.baseSettings.buildSettings.addPostBuildEnvironments(dub.defaultPostBuildEnvironments); + if (dub.defaultPreRunEnvironments) this.baseSettings.buildSettings.addPreRunEnvironments(dub.defaultPreRunEnvironments); + if (dub.defaultPostRunEnvironments) 
this.baseSettings.buildSettings.addPostRunEnvironments(dub.defaultPostRunEnvironments); + this.baseSettings.compiler = getCompiler(m_compilerName); + this.baseSettings.platform = this.baseSettings.compiler.determinePlatform(this.baseSettings.buildSettings, m_compilerName, m_arch); + this.baseSettings.buildSettings.addDebugVersions(m_debugVersions); m_defaultConfig = null; enforce (loadSpecificPackage(dub, package_name, ver), "Failed to load package."); - if (m_buildConfig.length != 0 && !dub.configurations.canFind(m_buildConfig)) + if (this.baseSettings.config.length != 0 && + !dub.configurations.canFind(this.baseSettings.config) && + this.baseSettings.config != "unittest") { - string msg = "Unknown build configuration: "~m_buildConfig; + string msg = "Unknown build configuration: " ~ this.baseSettings.config; enum distance = 3; - auto match = dub.configurations.getClosestMatch(m_buildConfig, distance); + auto match = dub.configurations.getClosestMatch(this.baseSettings.config, distance); if (match !is null) msg ~= ". Did you mean '" ~ match ~ "'?"; enforce(0, msg); } - if (m_buildType.length == 0) { - if (environment.get("DFLAGS") !is null) m_buildType = "$DFLAGS"; - else m_buildType = default_build_type; + if (this.baseSettings.buildType.length == 0) { + if (environment.get("DFLAGS") !is null) this.baseSettings.buildType = "$DFLAGS"; + else this.baseSettings.buildType = default_build_type; } if (!m_nodeps) { // retrieve missing packages - dub.project.reinit(); if (!dub.project.hasAllDependencies) { logDiagnostic("Checking for missing dependencies."); - if (m_single) dub.upgrade(UpgradeOptions.select | UpgradeOptions.noSaveSelections); + if (this.baseSettings.single) + dub.upgrade(UpgradeOptions.select | UpgradeOptions.noSaveSelections); else dub.upgrade(UpgradeOptions.select); } } @@ -1050,7 +1104,7 @@ private bool loadSpecificPackage(Dub dub, string package_name, string ver) { - if (m_single) { + if (this.baseSettings.single) { enforce(package_name.length, "Missing file name of single-file package."); dub.loadSingleFilePackage(package_name); return true; @@ -1070,9 +1124,8 @@ enforce(package_name.length, "No valid root package found - aborting."); - auto pack = ver == "" - ? dub.packageManager.getLatestPackage(package_name) - : dub.packageManager.getBestPackage(package_name, ver); + const vers = ver.length ? VersionRange.fromString(ver) : VersionRange.Any; + auto pack = dub.packageManager.getBestPackage(package_name, vers); enforce(pack, format!"Failed to find a package named '%s%s' locally."(package_name, ver == "" ? "" : ("@" ~ ver) @@ -1086,12 +1139,6 @@ class GenerateCommand : PackageBuildCommand { protected { string m_generator; - bool m_rdmd = false; - bool m_tempBuild = false; - bool m_run = false; - bool m_force = false; - bool m_combined = false; - bool m_parallel = false; bool m_printPlatform, m_printBuilds, m_printConfigs; } @@ -1116,7 +1163,7 @@ { super.prepare(args); - args.getopt("combined", &m_combined, [ + args.getopt("combined", &this.baseSettings.combined, [ "Tries to build the whole project in a single compiler run." ]); @@ -1129,7 +1176,7 @@ args.getopt("print-platform", &m_printPlatform, [ "Prints the identifiers for the current build platform as used for the build fields in dub.json" ]); - args.getopt("parallel", &m_parallel, [ + args.getopt("parallel", &this.baseSettings.parallelBuild, [ "Runs multiple compiler instances in parallel, if possible." 
]); } @@ -1155,7 +1202,7 @@ logInfo(""); } - m_defaultConfig = dub.project.getDefaultConfiguration(m_buildPlatform); + m_defaultConfig = dub.project.getDefaultConfiguration(this.baseSettings.platform); if (m_printConfigs) { logInfo("Available configurations:"); foreach (tp; dub.configurations) @@ -1163,26 +1210,14 @@ logInfo(""); } - GeneratorSettings gensettings; - gensettings.platform = m_buildPlatform; - gensettings.config = m_buildConfig.length ? m_buildConfig : m_defaultConfig; - gensettings.buildType = m_buildType; - gensettings.buildMode = m_buildMode; - gensettings.compiler = m_compiler; - gensettings.buildSettings = m_buildSettings; - gensettings.combined = m_combined; - gensettings.filterVersions = m_filterVersions; - gensettings.run = m_run; + GeneratorSettings gensettings = this.baseSettings; + if (!gensettings.config.length) + gensettings.config = m_defaultConfig; gensettings.runArgs = app_args; - gensettings.force = m_force; - gensettings.rdmd = m_rdmd; - gensettings.tempBuild = m_tempBuild; - gensettings.parallelBuild = m_parallel; - gensettings.single = m_single; logDiagnostic("Generating using %s", m_generator); dub.generateProject(m_generator, gensettings); - if (m_buildType == "ddox") dub.runDdox(gensettings.run, app_args); + if (this.baseSettings.buildType == "ddox") dub.runDdox(gensettings.run, app_args); return 0; } } @@ -1204,11 +1239,15 @@ override void prepare(scope CommandArgs args) { - args.getopt("rdmd", &m_rdmd, [ + args.getopt("temp-build", &this.baseSettings.tempBuild, [ + "Builds the project in the temp folder if possible." + ]); + + args.getopt("rdmd", &this.baseSettings.rdmd, [ "Use rdmd instead of directly invoking the compiler" ]); - args.getopt("f|force", &m_force, [ + args.getopt("f|force", &this.baseSettings.force, [ "Forces a recompilation even if the target is up to date" ]); args.getopt("y|yes", &m_yes, [ @@ -1224,7 +1263,7 @@ override int execute(Dub dub, string[] free_args, string[] app_args) { // single package files don't need to be downloaded, they are on the disk. - if (free_args.length < 1 || m_single) + if (free_args.length < 1 || this.baseSettings.single) return super.execute(dub, free_args, app_args); if (!m_nonInteractive) @@ -1255,17 +1294,18 @@ } } - Dependency dep; + VersionRange dep; if (packageParts.version_.length > 0) { // the user provided a version manually - dep = Dependency(packageParts.version_); + dep = VersionRange.fromString(packageParts.version_); + } else if (packageParts.name.startsWith(":")) { + // Subpackages are always assumed to be present + return 0; + } else if (dub.packageManager.getBestPackage(packageParts.name)) { + // found locally + return 0; } else { - if (packageParts.name.startsWith(":") || - dub.packageManager.getFirstPackage(packageParts.name)) - // found locally - return 0; - // search for the package and filter versions for exact matches auto basePackageName = getBasePackageName(packageParts.name); auto search = dub.searchPackages(basePackageName) @@ -1286,7 +1326,7 @@ const answer = m_yes ? true : input("Do you want to fetch '%s' now?".format(packageParts.name)); if (!answer) return 0; - dep = Dependency(p.version_); + dep = VersionRange.fromString(p.version_); } dub.fetch(packageParts.name, dep, dub.defaultPlacementLocation, FetchOptions.none); @@ -1308,12 +1348,8 @@ override void prepare(scope CommandArgs args) { - args.getopt("temp-build", &m_tempBuild, [ - "Builds the project in the temp folder if possible." 
- ]); - super.prepare(args); - m_run = true; + this.baseSettings.run = true; } override int execute(Dub dub, string[] free_args, string[] app_args) @@ -1325,9 +1361,6 @@ class TestCommand : PackageBuildCommand { private { string m_mainFile; - bool m_combined = false; - bool m_parallel = false; - bool m_force = false; } this() @safe pure nothrow @@ -1359,23 +1392,34 @@ override void prepare(scope CommandArgs args) { + args.getopt("temp-build", &this.baseSettings.tempBuild, [ + "Builds the project in the temp folder if possible." + ]); + args.getopt("main-file", &m_mainFile, [ "Specifies a custom file containing the main() function to use for running the tests." ]); - args.getopt("combined", &m_combined, [ + args.getopt("combined", &this.baseSettings.combined, [ "Tries to build the whole project in a single compiler run." ]); - args.getopt("parallel", &m_parallel, [ + args.getopt("parallel", &this.baseSettings.parallelBuild, [ "Runs multiple compiler instances in parallel, if possible." ]); - args.getopt("f|force", &m_force, [ + args.getopt("f|force", &this.baseSettings.force, [ "Forces a recompilation even if the target is up to date" ]); + bool coverage = false; args.getopt("coverage", &coverage, [ "Enables code coverage statistics to be generated." ]); - if (coverage) m_buildType = "unittest-cov"; + if (coverage) this.baseSettings.buildType = "unittest-cov"; + + bool coverageCTFE = false; + args.getopt("coverage-ctfe", &coverageCTFE, [ + "Enables code coverage (including CTFE) statistics to be generated." + ]); + if (coverageCTFE) this.baseSettings.buildType = "unittest-cov-ctfe"; super.prepare(args); } @@ -1388,22 +1432,12 @@ setupVersionPackage(dub, str_package_info, "unittest"); - GeneratorSettings settings; - settings.platform = m_buildPlatform; - settings.compiler = getCompiler(m_buildPlatform.compilerBinary); - settings.buildType = m_buildType; - settings.buildMode = m_buildMode; - settings.buildSettings = m_buildSettings; - settings.combined = m_combined; - settings.filterVersions = m_filterVersions; - settings.parallelBuild = m_parallel; - settings.force = m_force; - settings.tempBuild = m_single; + GeneratorSettings settings = this.baseSettings; + settings.compiler = getCompiler(this.baseSettings.platform.compilerBinary); settings.run = true; settings.runArgs = app_args; - settings.single = m_single; - dub.testProject(settings, m_buildConfig, NativePath(m_mainFile)); + dub.testProject(settings, this.baseSettings.config, NativePath(m_mainFile)); return 0; } } @@ -1465,8 +1499,8 @@ "Import paths" ]); - args.getopt("config", &m_config, [ - "Use the given configuration file." + args.getopt("dscanner-config", &m_config, [ + "Use the given d-scanner configuration file." ]); super.prepare(args); @@ -1592,17 +1626,20 @@ if (free_args.length >= 1) str_package_info = free_args[0]; setupVersionPackage(dub, str_package_info); - m_defaultConfig = dub.project.getDefaultConfiguration(m_buildPlatform); + m_defaultConfig = dub.project.getDefaultConfiguration(this.baseSettings.platform); - auto config = m_buildConfig.length ? 
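For reference, the coverage plumbing added to `TestCommand` above maps CLI flags to build types, which in turn select compiler flags via the option tables extended later in this diff (dmd and ldc gain `-cov=ctfe`; gdc gets no `coverageCTFE` entry):

    // dub test --coverage       -> buildType "unittest-cov"      -> -cov
    // dub test --coverage-ctfe  -> buildType "unittest-cov-ctfe" -> -cov=ctfe
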
@@ -1592,17 +1626,20 @@
 		if (free_args.length >= 1) str_package_info = free_args[0];
 		setupVersionPackage(dub, str_package_info);
 
-		m_defaultConfig = dub.project.getDefaultConfiguration(m_buildPlatform);
+		m_defaultConfig = dub.project.getDefaultConfiguration(this.baseSettings.platform);
 
-		auto config = m_buildConfig.length ? m_buildConfig : m_defaultConfig;
+		GeneratorSettings settings = this.baseSettings;
+		if (!settings.config.length)
+			settings.config = m_defaultConfig;
+		// Ignore other options
+		settings.buildSettings.options = this.baseSettings.buildSettings.options & BuildOption.lowmem;
 
-		GeneratorSettings settings;
-		settings.platform = m_buildPlatform;
-		settings.config = config;
-		settings.buildType = m_buildType;
-		settings.compiler = m_compiler;
-		settings.filterVersions = m_filterVersions;
-		settings.buildSettings.options |= m_buildSettings.options & BuildOption.lowmem;
+		// With a requested `unittest` config, switch to the special test runner
+		// config (which doesn't require an existing `unittest` configuration).
+		if (this.baseSettings.config == "unittest") {
+			const test_config = dub.project.addTestRunnerConfiguration(settings, !dub.dryRun);
+			if (test_config) settings.config = test_config;
+		}
 
 		if (m_importPaths) { m_data = ["import-paths"]; m_dataList = true; }
 		else if (m_stringImportPaths) { m_data = ["string-import-paths"]; m_dataList = true; }
@@ -1671,7 +1708,7 @@
 			}
 		}
 
-		if (any_error) return 1;
+		if (any_error) return 2;
 	} else {
 		dub.cleanPackage(dub.rootPath);
 	}
@@ -1708,12 +1745,12 @@
 		enforceUsage(free_args.length != 0, "Expected one or more arguments.");
 		enforceUsage(app_args.length == 0, "Unexpected application arguments.");
 
-		if (!loadCwdPackage(dub, true)) return 1;
+		if (!loadCwdPackage(dub, true)) return 2;
 		auto recipe = dub.project.rootPackage.rawRecipe.clone;
 
 		foreach (depspec; free_args) {
 			if (!addDependency(dub, recipe, depspec))
-				return 1;
+				return 2;
 		}
 		writePackageRecipe(dub.project.rootPackage.recipePath, recipe);
 
@@ -1724,6 +1761,7 @@
 class UpgradeCommand : Command {
 	private {
 		bool m_prerelease = false;
+		bool m_includeSubPackages = false;
 		bool m_forceRemove = false;
 		bool m_missingOnly = false;
 		bool m_verify = false;
@@ -1749,6 +1787,9 @@
 		args.getopt("prerelease", &m_prerelease, [
 			"Uses the latest pre-release version, even if release versions are available"
 		]);
+		args.getopt("s|sub-packages", &m_includeSubPackages, [
+			"Also upgrades dependencies of all directory-based sub packages"
+		]);
 		args.getopt("verify", &m_verify, [
 			"Updates the project and performs a build. If successful, rewrites the selected versions file."
 		]);
@@ -1769,12 +1810,47 @@
 		enforceUsage(app_args.length == 0, "Unexpected application arguments.");
 		enforceUsage(!m_verify, "--verify is not yet implemented.");
 		enforce(loadCwdPackage(dub, true), "Failed to load package.");
-		logInfo("Upgrading project in %s", dub.projectPath.toNativeString());
+		logInfo("Upgrading", Color.cyan, "project in %s", dub.projectPath.toNativeString().color(Mode.bold));
 		auto options = UpgradeOptions.upgrade|UpgradeOptions.select;
 		if (m_missingOnly) options &= ~UpgradeOptions.upgrade;
 		if (m_prerelease) options |= UpgradeOptions.preRelease;
 		if (m_dryRun) options |= UpgradeOptions.dryRun;
 		dub.upgrade(options, free_args);
+
+		auto spacks = dub.project.rootPackage
+			.subPackages
+			.filter!(sp => sp.path.length);
+
+		if (m_includeSubPackages) {
+			bool any_error = false;
+
+			// Go through each path-based sub package, load it as a new instance
+			// and perform an upgrade as if the upgrade had been run from within
+			// the sub package folder. Note that we have to use separate Dub
+			// instances, because the upgrade always works on the root package
+			// of a project, which in this case are the individual sub packages.
+			foreach (sp; spacks) {
+				try {
+					auto fullpath = (dub.projectPath ~ sp.path).toNativeString();
+					logInfo("Upgrading", Color.cyan, "sub package in %s", fullpath);
+					auto sdub = new Dub(fullpath, dub.packageSuppliers, SkipPackageSuppliers.all);
+					sdub.defaultPlacementLocation = dub.defaultPlacementLocation;
+					sdub.loadPackage();
+					sdub.upgrade(options, free_args);
+				} catch (Exception e) {
+					logError("Failed to upgrade sub package at %s: %s",
+						sp.path, e.msg);
+					any_error = true;
+				}
+			}
+
+			if (any_error) return 1;
+		} else if (!spacks.empty) {
+			foreach (sp; spacks)
+				logInfo("Not upgrading sub package in %s", sp.path);
+			logInfo("\nNote: specify -s to also upgrade sub packages.");
+		}
+
 		return 0;
 	}
 }
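Since the loop above drives each sub package through its own `Dub` instance, the same flow can be reproduced from library code. A rough sketch under the assumptions of this patch (paths are hypothetical, error handling elided):

    foreach (path; ["client", "server"]) // e.g. subPackages entries that have a path
    {
        auto sdub = new Dub((dub.projectPath ~ path).toNativeString(),
            dub.packageSuppliers, SkipPackageSuppliers.all);
        sdub.loadPackage();
        sdub.upgrade(UpgradeOptions.upgrade | UpgradeOptions.select);
    }
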
" + ); } catch(Exception e){ logInfo("Getting a release version failed: %s", e.msg); logInfo("Retry with ~master..."); - dub.fetch(name, Dependency("~master"), location, fetchOpts); + dub.fetch(name, VersionRange.fromString("~master"), location, fetchOpts); } } return 0; } } -class InstallCommand : FetchCommand { - this() @safe pure nothrow - { - this.name = "install"; - this.hidden = true; - } - override void prepare(scope CommandArgs args) { super.prepare(args); } - override int execute(Dub dub, string[] free_args, string[] app_args) - { - warnRenamed("install", "fetch"); - return super.execute(dub, free_args, app_args); - } -} - class RemoveCommand : FetchRemoveCommand { private { bool m_nonInteractive; @@ -1950,21 +2011,6 @@ } } -class UninstallCommand : RemoveCommand { - this() @safe pure nothrow - { - this.name = "uninstall"; - this.hidden = true; - } - override void prepare(scope CommandArgs args) { super.prepare(args); } - override int execute(Dub dub, string[] free_args, string[] app_args) - { - warnRenamed("uninstall", "remove"); - return super.execute(dub, free_args, app_args); - } -} - - /******************************************************************************/ /* ADD/REMOVE PATH/LOCAL */ /******************************************************************************/ @@ -2079,7 +2125,7 @@ "Prints a list of all or selected local packages. This includes all cached "~ "packages (user or system wide), all packages in the package search paths "~ "(\"dub add-path\") and all manually registered packages (\"dub add-local\"). "~ - "If package specified output filtered by package spec." + "If a package (and optionally a version spec) is specified, only matching packages are shown." ]; } override void prepare(scope CommandArgs args) {} @@ -2090,10 +2136,10 @@ const pname = pinfo.name; const pvlim = Dependency(pinfo.version_ == "" ? "*" : pinfo.version_); enforceUsage(app_args.length == 0, "The list command supports no application arguments."); - logInfo("Packages present in the system and known to dub:"); + logInfoNoTag("Packages present in the system and known to dub:"); foreach (p; dub.packageManager.getPackageIterator()) { if ((pname == "" || pname == p.name) && pvlim.matches(p.version_)) - logInfo(" %s %s: %s", p.name, p.version_, p.path.toNativeString()); + logInfoNoTag(" %s %s: %s", p.name.color(Mode.bold), p.version_, p.path.toNativeString()); } logInfo(""); return 0; @@ -2104,10 +2150,10 @@ this() @safe pure nothrow { this.name = "search"; - this.argumentsPattern = ""; + this.argumentsPattern = ""; this.description = "Search for available packages."; this.helpText = [ - "Search all specified DUB registries for packages matching query." + "Search all specified providers for matching packages." 
]; } override void prepare(scope CommandArgs args) {} @@ -2118,7 +2164,7 @@ if (res.empty) { logError("No matches found."); - return 1; + return 2; } auto justify = res .map!((descNmatches) => descNmatches[1]) @@ -2126,11 +2172,14 @@ .map!(m => m.name.length + m.version_.length) .reduce!max + " ()".length; justify += (~justify & 3) + 1; // round to next multiple of 4 + int colorDifference = cast(int)"a".color(Mode.bold).length - 1; + justify += colorDifference; foreach (desc, matches; res) { - logInfo("==== %s ====", desc); + logInfoNoTag("==== %s ====", desc); foreach (m; matches) - logInfo("%s%s", leftJustify(m.name ~ " (" ~ m.version_ ~ ")", justify), m.description); + logInfoNoTag(" %s%s", leftJustify(m.name.color(Mode.bold) + ~ " (" ~ m.version_ ~ ")", justify), m.description); } return 0; } @@ -2146,13 +2195,19 @@ bool m_system = false; } + static immutable string DeprecationMessage = + "This command is deprecated. Use path based dependency, custom cache path, " ~ + "or edit `dub.selections.json` to achieve the same results."; + + this() @safe pure nothrow { this.name = "add-override"; this.argumentsPattern = " "; this.description = "Adds a new package override."; - this.helpText = [ - ]; + + this.hidden = true; + this.helpText = [ DeprecationMessage ]; } override void prepare(scope CommandArgs args) @@ -2164,20 +2219,21 @@ override int execute(Dub dub, string[] free_args, string[] app_args) { + logWarn(DeprecationMessage); enforceUsage(app_args.length == 0, "Unexpected application arguments."); enforceUsage(free_args.length == 3, "Expected three arguments, not "~free_args.length.to!string); - auto scope_ = m_system ? LocalPackageType.system : LocalPackageType.user; + auto scope_ = m_system ? PlacementLocation.system : PlacementLocation.user; auto pack = free_args[0]; - auto ver = Dependency(free_args[1]); + auto source = VersionRange.fromString(free_args[1]); if (existsFile(NativePath(free_args[2]))) { auto target = NativePath(free_args[2]); if (!target.absolute) target = NativePath(getcwd()) ~ target; - dub.packageManager.addOverride(scope_, pack, ver, target); - logInfo("Added override %s %s => %s", pack, ver, target); + dub.packageManager.addOverride_(scope_, pack, source, target); + logInfo("Added override %s %s => %s", pack, source, target); } else { auto target = Version(free_args[2]); - dub.packageManager.addOverride(scope_, pack, ver, target); - logInfo("Added override %s %s => %s", pack, ver, target); + dub.packageManager.addOverride_(scope_, pack, source, target); + logInfo("Added override %s %s => %s", pack, source, target); } return 0; } @@ -2193,8 +2249,9 @@ this.name = "remove-override"; this.argumentsPattern = " "; this.description = "Removes an existing package override."; - this.helpText = [ - ]; + + this.hidden = true; + this.helpText = [ AddOverrideCommand.DeprecationMessage ]; } override void prepare(scope CommandArgs args) @@ -2206,10 +2263,12 @@ override int execute(Dub dub, string[] free_args, string[] app_args) { + logWarn(AddOverrideCommand.DeprecationMessage); enforceUsage(app_args.length == 0, "Unexpected application arguments."); enforceUsage(free_args.length == 2, "Expected two arguments, not "~free_args.length.to!string); - auto scope_ = m_system ? LocalPackageType.system : LocalPackageType.user; - dub.packageManager.removeOverride(scope_, free_args[0], Dependency(free_args[1])); + auto scope_ = m_system ? 
PlacementLocation.system : PlacementLocation.user; + auto source = VersionRange.fromString(free_args[1]); + dub.packageManager.removeOverride_(scope_, free_args[0], source); return 0; } } @@ -2220,24 +2279,25 @@ this.name = "list-overrides"; this.argumentsPattern = ""; this.description = "Prints a list of all local package overrides"; - this.helpText = [ - "Prints a list of all overridden packages added via \"dub add-override\"." - ]; + + this.hidden = true; + this.helpText = [ AddOverrideCommand.DeprecationMessage ]; } override void prepare(scope CommandArgs args) {} override int execute(Dub dub, string[] free_args, string[] app_args) { - void printList(in PackageOverride[] overrides, string caption) + logWarn(AddOverrideCommand.DeprecationMessage); + + void printList(in PackageOverride_[] overrides, string caption) { if (overrides.length == 0) return; - logInfo("# %s", caption); - foreach (ovr; overrides) { - if (!ovr.targetPath.empty) logInfo("%s %s => %s", ovr.package_, ovr.version_, ovr.targetPath); - else logInfo("%s %s => %s", ovr.package_, ovr.version_, ovr.targetVersion); - } + logInfoNoTag("# %s", caption); + foreach (ovr; overrides) + ovr.target.match!( + t => logInfoNoTag("%s %s => %s", ovr.package_.color(Mode.bold), ovr.version_, t)); } - printList(dub.packageManager.getOverrides(LocalPackageType.user), "User wide overrides"); - printList(dub.packageManager.getOverrides(LocalPackageType.system), "System wide overrides"); + printList(dub.packageManager.getOverrides_(PlacementLocation.user), "User wide overrides"); + printList(dub.packageManager.getOverrides_(PlacementLocation.system), "System wide overrides"); return 0; } } @@ -2278,7 +2338,6 @@ string m_linkerRegex; string m_programRegex; string m_testPackage; - bool m_combined; bool m_noRedirect; string m_strategy; uint m_jobCount; // zero means not specified @@ -2309,7 +2368,7 @@ args.getopt("program-status", &m_programStatusCode, ["The expected status code of the built executable"]); args.getopt("program-regex", &m_programRegex, ["A regular expression used to match against the program output"]); args.getopt("test-package", &m_testPackage, ["Perform a test run - usually only used internally"]); - args.getopt("combined", &m_combined, ["Builds multiple packages with one compiler run"]); + args.getopt("combined", &this.baseSettings.combined, ["Builds multiple packages with one compiler run"]); args.getopt("no-redirect", &m_noRedirect, ["Don't redirect stdout/stderr streams of the test command"]); args.getopt("strategy", &m_strategy, ["Set strategy (careful/lookback/pingpong/indepth/inbreadth)"]); args.getopt("j", &m_jobCount, ["Set number of look-ahead processes"]); @@ -2331,16 +2390,11 @@ dub = new Dub(NativePath(getcwd())); setupPackage(dub, m_testPackage); - m_defaultConfig = dub.project.getDefaultConfiguration(m_buildPlatform); + m_defaultConfig = dub.project.getDefaultConfiguration(this.baseSettings.platform); - GeneratorSettings gensettings; - gensettings.platform = m_buildPlatform; - gensettings.config = m_buildConfig.length ? 
m_buildConfig : m_defaultConfig; - gensettings.buildType = m_buildType; - gensettings.compiler = m_compiler; - gensettings.buildSettings = m_buildSettings; - gensettings.combined = m_combined; - gensettings.filterVersions = m_filterVersions; + GeneratorSettings gensettings = this.baseSettings; + if (!gensettings.config.length) + gensettings.config = m_defaultConfig; gensettings.run = m_programStatusCode != int.min || m_programRegex.length; gensettings.runArgs = app_args; gensettings.force = true; @@ -2349,11 +2403,11 @@ gensettings.runCallback = check(m_programStatusCode, m_programRegex); try dub.generateProject("build", gensettings); catch (DustmiteMismatchException) { - logInfo("Dustmite test doesn't match."); + logInfoNoTag("Dustmite test doesn't match."); return 3; } catch (DustmiteMatchException) { - logInfo("Dustmite test matches."); + logInfoNoTag("Dustmite test matches."); return 0; } } else { @@ -2366,8 +2420,8 @@ setupPackage(dub, null); auto prj = dub.project; - if (m_buildConfig.empty) - m_buildConfig = prj.getDefaultConfiguration(m_buildPlatform); + if (this.baseSettings.config.empty) + this.baseSettings.config = prj.getDefaultConfiguration(this.baseSettings.platform); void copyFolderRec(NativePath folder, NativePath dstfolder) { @@ -2388,10 +2442,13 @@ } static void fixPathDependency(string pack, ref Dependency dep) { - if (!dep.path.empty) { - auto mainpack = getBasePackageName(pack); - dep.path = NativePath("../") ~ mainpack; - } + dep.visit!( + (NativePath path) { + auto mainpack = getBasePackageName(pack); + dep = Dependency(NativePath("../") ~ mainpack); + }, + (any) { /* Nothing to do */ }, + ); } void fixPathDependencies(ref PackageRecipe recipe, NativePath base_path) @@ -2418,7 +2475,7 @@ if (pack.name in visited) continue; visited[pack.name] = true; auto dst_path = path ~ pack.name; - logInfo("Copy package '%s' to destination folder...", pack.name); + logInfo("Prepare", Color.light_blue, "Copy package %s to destination folder...", pack.name.color(Mode.bold)); copyFolderRec(pack.path, dst_path); // adjust all path based dependencies @@ -2428,10 +2485,10 @@ pack.storeInfo(dst_path); } - logInfo("Executing dustmite..."); + logInfo("Starting", Color.light_green, "Executing dustmite..."); auto testcmd = appender!string(); testcmd.formattedWrite("%s dustmite --test-package=%s --build=%s --config=%s", - thisExePath, prj.name, m_buildType, m_buildConfig); + thisExePath, prj.name, this.baseSettings.buildType, this.baseSettings.config); if (m_compilerName.length) testcmd.formattedWrite(" \"--compiler=%s\"", m_compilerName); if (m_arch.length) testcmd.formattedWrite(" --arch=%s", m_arch); @@ -2441,7 +2498,7 @@ if (m_linkerRegex.length) testcmd.formattedWrite(" \"--linker-regex=%s\"", m_linkerRegex); if (m_programStatusCode != int.min) testcmd.formattedWrite(" --program-status=%s", m_programStatusCode); if (m_programRegex.length) testcmd.formattedWrite(" \"--program-regex=%s\"", m_programRegex); - if (m_combined) testcmd ~= " --combined"; + if (this.baseSettings.combined) testcmd ~= " --combined"; // --vquiet swallows dustmite's output ... 
if (!m_noRedirect) testcmd ~= " --vquiet"; @@ -2537,7 +2594,7 @@ enforceUsage(app_args.length == 0, "Unexpected application arguments."); enforceUsage(free_args.length == 0, "Unexpected arguments: "~free_args.join(" ")); enforceUsage(m_format.length > 0, "Missing target format file extension (--format=...)."); - if (!loadCwdPackage(dub, true)) return 1; + if (!loadCwdPackage(dub, true)) return 2; dub.convertRecipe(m_format, m_stdout); return 0; } @@ -2714,11 +2771,6 @@ } } -private void warnRenamed(string prev, string curr) -{ - logWarn("The '%s' Command was renamed to '%s'. Please update your scripts.", prev, curr); -} - private bool addDependency(Dub dub, ref PackageRecipe recipe, string depspec) { Dependency dep; @@ -2738,7 +2790,7 @@ } } recipe.buildSettings.dependencies[depname] = dep; - logInfo("Adding dependency %s %s", depname, dep.versionSpec); + logInfo("Adding dependency %s %s", depname, dep.toString()); return true; } diff --git a/source/dub/compilers/buildsettings.d b/source/dub/compilers/buildsettings.d index 212c22a..c9d6b7d 100644 --- a/source/dub/compilers/buildsettings.d +++ b/source/dub/compilers/buildsettings.d @@ -9,6 +9,8 @@ import dub.internal.vibecompat.inet.path; +import configy.Attributes; + import std.array : array; import std.algorithm : filter, any; import std.path : globMatch; @@ -57,8 +59,8 @@ string[string] postBuildEnvironments; string[string] preRunEnvironments; string[string] postRunEnvironments; - @byName BuildRequirements requirements; - @byName BuildOptions options; + @byName Flags!BuildRequirement requirements; + @byName Flags!BuildOption options; BuildSettings dup() const { @@ -82,6 +84,12 @@ return ret; } + /** + * Merges $(LREF bs) onto `this` BuildSettings instance. This is called for + * sourceLibrary dependencies when they are included in the build to be + * merged into the root package build settings as well as configuring + * targets for different build types such as release or unittest-cov. + */ void add(in BuildSettings bs) { addDFlags(bs.dflags); @@ -89,6 +97,7 @@ addLibs(bs.libs); addLinkerFiles(bs.linkerFiles); addSourceFiles(bs.sourceFiles); + addInjectSourceFiles(bs.injectSourceFiles); addCopyFiles(bs.copyFiles); addExtraDependencyFiles(bs.extraDependencyFiles); addVersions(bs.versions); @@ -105,12 +114,24 @@ addPostBuildCommands(bs.postBuildCommands); addPreRunCommands(bs.preRunCommands); addPostRunCommands(bs.postRunCommands); + addEnvironments(bs.environments); + addBuildEnvironments(bs.buildEnvironments); + addRunEnvironments(bs.runEnvironments); + addPreGenerateEnvironments(bs.preGenerateEnvironments); + addPostGenerateEnvironments(bs.postGenerateEnvironments); + addPreBuildEnvironments(bs.preBuildEnvironments); + addPostBuildEnvironments(bs.postBuildEnvironments); + addPreRunEnvironments(bs.preRunEnvironments); + addPostRunEnvironments(bs.postRunEnvironments); + addRequirements(bs.requirements); + addOptions(bs.options); } void addDFlags(in string[] value...) { dflags = chain(dflags, value.dup).uniq.array; } void prependDFlags(in string[] value...) { prepend(dflags, value); } void removeDFlags(in string[] value...) { remove(dflags, value); } void addLFlags(in string[] value...) { lflags ~= value; } + void prependLFlags(in string[] value...) { prepend(lflags, value, false); } void addLibs(in string[] value...) { add(libs, value); } void addLinkerFiles(in string[] value...) { add(linkerFiles, value); } void addSourceFiles(in string[] value...) 
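One pattern worth calling out across the commandline.d hunks above: the exit codes are being regularized. Hedging, since only the cases visible in this diff are covered, the convention appears to be:

    // 0 - success
    // 1 - command line/usage errors (unknown flags, UsageException)
    // 2 - the invoked operation failed (missing package manifest, failed
    //     clean/add/convert, uncaught Exception during execution, no search hits)
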
diff --git a/source/dub/compilers/buildsettings.d b/source/dub/compilers/buildsettings.d
index 212c22a..c9d6b7d 100644
--- a/source/dub/compilers/buildsettings.d
+++ b/source/dub/compilers/buildsettings.d
@@ -9,6 +9,8 @@
 
 import dub.internal.vibecompat.inet.path;
 
+import configy.Attributes;
+
 import std.array : array;
 import std.algorithm : filter, any;
 import std.path : globMatch;
@@ -57,8 +59,8 @@
 	string[string] postBuildEnvironments;
 	string[string] preRunEnvironments;
 	string[string] postRunEnvironments;
-	@byName BuildRequirements requirements;
-	@byName BuildOptions options;
+	@byName Flags!BuildRequirement requirements;
+	@byName Flags!BuildOption options;
 
 	BuildSettings dup() const {
@@ -82,6 +84,12 @@
 		return ret;
 	}
 
+	/**
+	 * Merges $(LREF bs) onto `this` BuildSettings instance. This is called for
+	 * sourceLibrary dependencies when they are included in the build, so that
+	 * they are merged into the root package's build settings, as well as when
+	 * configuring targets for different build types such as release or unittest-cov.
+	 */
 	void add(in BuildSettings bs)
 	{
 		addDFlags(bs.dflags);
@@ -89,6 +97,7 @@
 		addLibs(bs.libs);
 		addLinkerFiles(bs.linkerFiles);
 		addSourceFiles(bs.sourceFiles);
+		addInjectSourceFiles(bs.injectSourceFiles);
 		addCopyFiles(bs.copyFiles);
 		addExtraDependencyFiles(bs.extraDependencyFiles);
 		addVersions(bs.versions);
@@ -105,12 +114,24 @@
 		addPostBuildCommands(bs.postBuildCommands);
 		addPreRunCommands(bs.preRunCommands);
 		addPostRunCommands(bs.postRunCommands);
+		addEnvironments(bs.environments);
+		addBuildEnvironments(bs.buildEnvironments);
+		addRunEnvironments(bs.runEnvironments);
+		addPreGenerateEnvironments(bs.preGenerateEnvironments);
+		addPostGenerateEnvironments(bs.postGenerateEnvironments);
+		addPreBuildEnvironments(bs.preBuildEnvironments);
+		addPostBuildEnvironments(bs.postBuildEnvironments);
+		addPreRunEnvironments(bs.preRunEnvironments);
+		addPostRunEnvironments(bs.postRunEnvironments);
+		addRequirements(bs.requirements);
+		addOptions(bs.options);
 	}
 
 	void addDFlags(in string[] value...) { dflags = chain(dflags, value.dup).uniq.array; }
 	void prependDFlags(in string[] value...) { prepend(dflags, value); }
 	void removeDFlags(in string[] value...) { remove(dflags, value); }
 	void addLFlags(in string[] value...) { lflags ~= value; }
+	void prependLFlags(in string[] value...) { prepend(lflags, value, false); }
 	void addLibs(in string[] value...) { add(libs, value); }
 	void addLinkerFiles(in string[] value...) { add(linkerFiles, value); }
 	void addSourceFiles(in string[] value...) { add(sourceFiles, value); }
@@ -153,11 +174,11 @@
 	void addPostRunEnvironments(in string[string] value) { add(postRunEnvironments, value); }
 	void updatePostRunEnvironments(in string[string] value) { update(postRunEnvironments, value); }
 	void addRequirements(in BuildRequirement[] value...) { foreach (v; value) this.requirements |= v; }
-	void addRequirements(in BuildRequirements value) { this.requirements |= value; }
+	void addRequirements(in Flags!BuildRequirement value) { this.requirements |= value; }
 	void addOptions(in BuildOption[] value...) { foreach (v; value) this.options |= v; }
-	void addOptions(in BuildOptions value) { this.options |= value; }
+	void addOptions(in Flags!BuildOption value) { this.options |= value; }
 	void removeOptions(in BuildOption[] value...) { foreach (v; value) this.options &= ~v; }
-	void removeOptions(in BuildOptions value) { this.options &= ~value; }
+	void removeOptions(in Flags!BuildOption value) { this.options &= ~value; }
 
 private:
 	static auto filterDuplicates(T)(ref string[] arr, in T vals, bool noDuplicates = true)
@@ -342,15 +363,6 @@
 	noDefaultFlags = 1<<9, /// Do not issue any of the default build flags (e.g. -debug, -w, -property etc.) - use only for development purposes
 }
 
-struct BuildRequirements {
-	import dub.internal.vibecompat.data.serialization : ignore;
-
-	@ignore BitFlags!BuildRequirement values;
-	this(BuildRequirement req) { values = req; }
-
-	alias values this;
-}
-
 enum BuildOption {
 	none = 0, /// Use compiler defaults
 	debugMode = 1<<0, /// Compile in debug mode (enables contracts, -debug)
@@ -378,22 +390,76 @@
 	pic = 1<<22, /// Generate position independent code
 	betterC = 1<<23, /// Compile in betterC mode (-betterC)
 	lowmem = 1<<24, /// Compile in lowmem mode (-lowmem)
+	coverageCTFE = 1<<25, /// Enable code coverage analysis including at compile-time (-cov=ctfe)
+	color = 1<<26, /// Colorize output (-color)
 
 	// for internal usage
-	_docs = 1<<25, // Write ddoc to docs
-	_ddox = 1<<26 // Compile docs.json
+	_docs = 1<<27, // Write ddoc to docs
+	_ddox = 1<<28, // Compile docs.json
 }
 
-struct BuildOptions {
-	import dub.internal.vibecompat.data.serialization : ignore;
+struct Flags (T) {
+	import dub.internal.vibecompat.data.serialization : ignore;
 
-	@ignore BitFlags!BuildOption values;
-	this(BuildOption opt) { values = opt; }
-	this(BitFlags!BuildOption v) { values = v; }
+	@ignore BitFlags!T values;
 
-	alias values this;
+	public this(T opt) @safe pure nothrow @nogc
+	{
+		this.values = opt;
+	}
 
+	public this(BitFlags!T v) @safe pure nothrow @nogc
+	{
+		this.values = v;
+	}
+
+	alias values this;
+
+	/**
+	 * Reads a list of flags from a JSON/YAML document and converts them
+	 * to our internal representation.
+	 *
+	 * Flags inside of dub code are stored as a `BitFlags`,
+	 * but they are specified in the recipe using an array of their names.
+	 * This routine handles the conversion from `string[]` to `BitFlags!T`.
+	 */
+	public static Flags!T fromYAML (scope ConfigParser!(Flags!T) p)
+	{
+		import dyaml.node;
+		import std.exception;
+		import std.conv;
+
+		enforce(p.node.nodeID == NodeID.sequence, "Should be a sequence");
+		typeof(return) res;
+		foreach (str; p.node.sequence)
+			res |= str.as!string.to!T;
+		return res;
+	}
+}
+
+unittest
+{
+	import configy.Read;
+
+	static struct Config
+	{
+		Flags!BuildRequirement flags;
+	}
+
+	auto c = parseConfigString!Config(`
+{
+	"flags": [ "allowWarnings", "noDefaultFlags", "disallowInlining" ]
+}
+`, __FILE__);
+	assert(c.flags.allowWarnings);
+	c.flags.allowWarnings = false;
+	assert(c.flags.noDefaultFlags);
+	c.flags.noDefaultFlags = false;
+	assert(c.flags.disallowInlining);
+	c.flags.disallowInlining = false;
+	assert(c.flags == c.flags.init);
+}
+
 /**
 	All build options that will be inherited upwards in the dependency graph
@@ -404,11 +470,18 @@
 		$(LI The option enabled meta information in dependent projects that are useful for the dependee (e.g. debug information))
 	)
*/
-enum BuildOptions inheritedBuildOptions = BuildOption.debugMode | BuildOption.releaseMode
-	| BuildOption.coverage | BuildOption.debugInfo | BuildOption.debugInfoC
+enum Flags!BuildOption inheritedBuildOptions =
+	BuildOption.debugMode | BuildOption.releaseMode
+	| BuildOption.coverage | BuildOption.coverageCTFE | BuildOption.debugInfo | BuildOption.debugInfoC
 	| BuildOption.alwaysStackFrame | BuildOption.stackStomping | BuildOption.inline
 	| BuildOption.noBoundsCheck | BuildOption.profile | BuildOption.ignoreUnknownPragmas
 	| BuildOption.syntaxOnly | BuildOption.warnings | BuildOption.warningsAsErrors
 	| BuildOption.ignoreDeprecations | BuildOption.deprecationWarnings
 	| BuildOption.deprecationErrors | BuildOption.property | BuildOption.profileGC
 	| BuildOption.pic;
+
+deprecated("Use `Flags!BuildOption` instead")
+public alias BuildOptions = Flags!BuildOption;
+
+deprecated("Use `Flags!BuildRequirement` instead")
+public alias BuildRequirements = Flags!BuildRequirement;
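Because the old names survive as deprecated aliases, downstream code keeps compiling while it migrates. A minimal sketch of the new spelling (assuming dub is consumed as a library):

    unittest
    {
        import dub.compilers.buildsettings : BuildOption, Flags;

        // Previously: BuildOptions opts = BuildOptions(BuildOption.debugMode);
        Flags!BuildOption opts = Flags!BuildOption(BuildOption.debugMode);
        opts |= BuildOption.coverage; // BitFlags semantics via `alias values this`
        assert(opts.debugMode && opts.coverage);
    }
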
diff --git a/source/dub/compilers/compiler.d b/source/dub/compilers/compiler.d
index 950f29a..a8dacd0 100644
--- a/source/dub/compilers/compiler.d
+++ b/source/dub/compilers/compiler.d
@@ -11,9 +11,10 @@
 deprecated("Please `import dub.dependency : Dependency` instead")
 public import dub.dependency : Dependency;
 public import dub.platform : BuildPlatform, matchesSpecification;
-import dub.internal.vibecompat.core.log;
 import dub.internal.vibecompat.inet.path;
 
+import dub.internal.logging;
+
 import std.algorithm;
 import std.array;
 import std.exception;
@@ -113,7 +114,7 @@
 	void invokeLinker(in BuildSettings settings, in BuildPlatform platform, string[] objects, void delegate(int, string) output_callback);
 
 	/// Convert linker flags to compiler format
-	string[] lflagsToDFlags(in string[] lflags) const;
+	string[] lflagsToDFlags(const string[] lflags) const;
 
 	/// Determines compiler version
 	string determineVersion(string compiler_binary, string verboseOutput);
diff --git a/source/dub/compilers/dmd.d b/source/dub/compilers/dmd.d
index 65ee2f4..998d631 100644
--- a/source/dub/compilers/dmd.d
+++ b/source/dub/compilers/dmd.d
@@ -10,8 +10,8 @@
 import dub.compilers.compiler;
 import dub.compilers.utils;
 import dub.internal.utils;
-import dub.internal.vibecompat.core.log;
 import dub.internal.vibecompat.inet.path;
+import dub.internal.logging;
 
 import std.algorithm;
 import std.array;
@@ -43,6 +43,7 @@
 	tuple(BuildOption.debugMode, ["-debug"]),
 	tuple(BuildOption.releaseMode, ["-release"]),
 	tuple(BuildOption.coverage, ["-cov"]),
+	tuple(BuildOption.coverageCTFE, ["-cov=ctfe"]),
 	tuple(BuildOption.debugInfo, ["-g"]),
 	tuple(BuildOption.debugInfoC, ["-g"]),
 	tuple(BuildOption.alwaysStackFrame, ["-gs"]),
@@ -64,6 +65,7 @@
 	tuple(BuildOption.profileGC, ["-profile=gc"]),
 	tuple(BuildOption.betterC, ["-betterC"]),
 	tuple(BuildOption.lowmem, ["-lowmem"]),
+	tuple(BuildOption.color, ["-color"]),
 
 	tuple(BuildOption._docs, ["-Dddocs"]),
 	tuple(BuildOption._ddox, ["-Xfdocs.json", "-Df__dummy.html"]),
@@ -176,25 +178,25 @@
 		BuildSettings settings;
 		auto compiler = new DMDCompiler;
 		auto bp = compiler.determinePlatform(settings, "dmd", "x86");
-		assert(bp.platform.canFind("windows"));
+		assert(bp.isWindows());
 		assert(bp.architecture.canFind("x86"));
 		assert(bp.architecture.canFind("x86_omf"));
 		assert(!bp.architecture.canFind("x86_mscoff"));
 		settings = BuildSettings.init;
 		bp = compiler.determinePlatform(settings, "dmd", "x86_omf");
-		assert(bp.platform.canFind("windows"));
+		assert(bp.isWindows());
 		assert(bp.architecture.canFind("x86"));
 		assert(bp.architecture.canFind("x86_omf"));
 		assert(!bp.architecture.canFind("x86_mscoff"));
 		settings = BuildSettings.init;
 		bp = compiler.determinePlatform(settings, "dmd", "x86_mscoff");
-		assert(bp.platform.canFind("windows"));
+		assert(bp.isWindows());
 		assert(bp.architecture.canFind("x86"));
 		assert(!bp.architecture.canFind("x86_omf"));
 		assert(bp.architecture.canFind("x86_mscoff"));
 		settings = BuildSettings.init;
 		bp = compiler.determinePlatform(settings, "dmd", "x86_64");
-		assert(bp.platform.canFind("windows"));
+		assert(bp.isWindows());
 		assert(bp.architecture.canFind("x86_64"));
 		assert(!bp.architecture.canFind("x86"));
 		assert(!bp.architecture.canFind("x86_omf"));
@@ -254,7 +256,7 @@
 
 		if (!(fields & BuildSetting.libs)) {
 			resolveLibs(settings, platform);
-			if (platform.platform.canFind("windows"))
+			if (platform.isWindows())
 				settings.addSourceFiles(settings.libs.map!(l => l~".lib")().array());
 			else
 				settings.addLFlags(settings.libs.map!(l => "-l"~l)().array());
@@ -305,22 +307,22 @@
 			case TargetType.none: return null;
 			case TargetType.sourceLibrary: return null;
 			case TargetType.executable:
-				if (platform.platform.canFind("windows"))
+				if (platform.isWindows())
 					return settings.targetName ~ ".exe";
 				else return settings.targetName.idup;
 			case TargetType.library:
 			case TargetType.staticLibrary:
-				if (platform.platform.canFind("windows"))
+				if (platform.isWindows())
 					return settings.targetName ~ ".lib";
 				else return "lib" ~ settings.targetName ~ ".a";
 			case TargetType.dynamicLibrary:
-				if (platform.platform.canFind("windows"))
+				if (platform.isWindows())
 					return settings.targetName ~ ".dll";
 				else if (platform.platform.canFind("darwin"))
 					return "lib" ~ settings.targetName ~ ".dylib";
 				else return "lib" ~ settings.targetName ~ ".so";
 			case TargetType.object:
-				if (platform.platform.canFind("windows"))
+				if (platform.isWindows())
 					return settings.targetName ~ ".obj";
 				else return settings.targetName ~ ".o";
 		}
@@ -328,6 +330,8 @@
 
 	void setTarget(ref BuildSettings settings, in BuildPlatform platform, string tpath = null) const
 	{
+		const targetFileName = getTargetFileName(settings, platform);
+
 		final switch (settings.targetType) {
 			case TargetType.autodetect: assert(false, "Invalid target type: autodetect");
 			case TargetType.none: assert(false, "Invalid target type: none");
@@ -338,10 +342,11 @@
 				settings.addDFlags("-lib");
 				break;
 			case TargetType.dynamicLibrary:
-				if (platform.compiler != "dmd" || platform.platform.canFind("windows") || platform.platform.canFind("osx"))
+				if (platform.compiler != "dmd" || platform.isWindows() || platform.platform.canFind("osx"))
 					settings.addDFlags("-shared");
 				else
 					settings.prependDFlags("-shared", "-defaultlib=libphobos2.so");
+				addDynamicLibName(settings, platform, targetFileName);
 				break;
 			case TargetType.object:
 				settings.addDFlags("-c");
@@ -349,7 +354,7 @@
 		}
 
 		if (tpath is null)
-			tpath = (NativePath(settings.targetPath) ~ getTargetFileName(settings, platform)).toNativeString();
+			tpath = (NativePath(settings.targetPath) ~ targetFileName).toNativeString();
 		settings.addDFlags("-of"~tpath);
 	}
@@ -397,7 +402,7 @@
 		invokeTool([platform.compilerBinary, "@"~res_file.toNativeString()], output_callback, env);
 	}
 
-	string[] lflagsToDFlags(in string[] lflags) const
+	string[] lflagsToDFlags(const string[] lflags) const
 	{
 		return map!(f => "-L"~f)(lflags.filter!(f => f != "")()).array();
 	}
@@ -410,7 +415,7 @@
 	static bool isLinkerDFlag(string arg)
 	{
 		switch (arg) {
-			case "-g", "-gc", "-m32", "-m64", "-shared", "-lib", "-m32mscoff", "-betterC":
+			case "-g", "-gc", "-m32", "-m64", "-shared", "-lib", "-m32omf", "-m32mscoff", "-betterC":
 				return true;
 			default:
 				return arg.startsWith("-L")
diff --git a/source/dub/compilers/gdc.d b/source/dub/compilers/gdc.d
index 04ed9dd..422a513 100644
--- a/source/dub/compilers/gdc.d
+++ b/source/dub/compilers/gdc.d
@@ -10,8 +10,8 @@
 import dub.compilers.compiler;
 import dub.compilers.utils;
 import dub.internal.utils;
-import dub.internal.vibecompat.core.log;
 import dub.internal.vibecompat.inet.path;
+import dub.internal.logging;
 
 import std.algorithm;
 import std.array;
@@ -46,6 +46,7 @@
 	tuple(BuildOption.property, ["-fproperty"]),
 	//tuple(BuildOption.profileGC, ["-?"]),
 	tuple(BuildOption.betterC, ["-fno-druntime"]),
+	tuple(BuildOption.color, ["-fdiagnostics-color=always"]),
 
 	tuple(BuildOption._docs, ["-fdoc-dir=docs"]),
 	tuple(BuildOption._ddox, ["-Xfdocs.json", "-fdoc-file=__dummy.html"]),
@@ -160,20 +161,20 @@
 			case TargetType.none: return null;
 			case TargetType.sourceLibrary: return null;
 			case TargetType.executable:
-				if (platform.platform.canFind("windows"))
+				if (platform.isWindows())
 					return settings.targetName ~ ".exe";
 				else return settings.targetName.idup;
 			case TargetType.library:
 			case TargetType.staticLibrary:
 				return "lib" ~ settings.targetName ~ ".a";
 			case TargetType.dynamicLibrary:
-				if (platform.platform.canFind("windows"))
+				if (platform.isWindows())
 					return settings.targetName ~ ".dll";
 				else if (platform.platform.canFind("darwin"))
 					return "lib" ~ settings.targetName ~ ".dylib";
 				else return "lib" ~ settings.targetName ~ ".so";
 			case TargetType.object:
-				if (platform.platform.canFind("windows"))
+				if (platform.isWindows())
 					return settings.targetName ~ ".obj";
 				else return settings.targetName ~ ".o";
 		}
@@ -181,6 +182,8 @@
 
 	void setTarget(ref BuildSettings settings, in BuildPlatform platform, string tpath = null) const
 	{
+		const targetFileName = getTargetFileName(settings, platform);
+
 		final switch (settings.targetType) {
 			case TargetType.autodetect: assert(false, "Invalid target type: autodetect");
 			case TargetType.none: assert(false, "Invalid target type: none");
@@ -193,11 +196,12 @@
 				break;
 			case TargetType.dynamicLibrary:
 				settings.addDFlags("-shared", "-fPIC");
+				addDynamicLibName(settings, platform, targetFileName);
 				break;
 		}
 
 		if (tpath is null)
-			tpath = (NativePath(settings.targetPath) ~ getTargetFileName(settings, platform)).toNativeString();
+			tpath = (NativePath(settings.targetPath) ~ targetFileName).toNativeString();
 		settings.addDFlags("-o", tpath);
 	}
@@ -236,7 +240,7 @@
 		invokeTool(args, output_callback, env);
 	}
 
-	string[] lflagsToDFlags(in string[] lflags) const
+	string[] lflagsToDFlags(const string[] lflags) const
 	{
 		string[] dflags;
 		foreach( f; lflags )
diff --git a/source/dub/compilers/ldc.d b/source/dub/compilers/ldc.d
index 1ab5019..76c42e2 100644
--- a/source/dub/compilers/ldc.d
+++ b/source/dub/compilers/ldc.d
@@ -10,8 +10,8 @@
 import dub.compilers.compiler;
 import dub.compilers.utils;
 import dub.internal.utils;
-import dub.internal.vibecompat.core.log;
 import dub.internal.vibecompat.inet.path;
+import dub.internal.logging;
 
 import std.algorithm;
 import std.array;
@@ -25,6 +25,7 @@
 	tuple(BuildOption.debugMode, ["-d-debug"]),
 	tuple(BuildOption.releaseMode, ["-release"]),
 	tuple(BuildOption.coverage, ["-cov"]),
+	tuple(BuildOption.coverageCTFE, ["-cov=ctfe"]),
 	tuple(BuildOption.debugInfo, ["-g"]),
 	tuple(BuildOption.debugInfoC, ["-gc"]),
 	tuple(BuildOption.alwaysStackFrame, ["-disable-fp-elim"]),
@@ -46,6 +47,7 @@
 	//tuple(BuildOption.profileGC, ["-?"]),
 	tuple(BuildOption.betterC, ["-betterC"]),
 	tuple(BuildOption.lowmem, ["-lowmem"]),
+	tuple(BuildOption.color, ["-enable-color"]),
 
 	tuple(BuildOption._docs, ["-Dd=docs"]),
 	tuple(BuildOption._ddox, ["-Xf=docs.json", "-Dd=__dummy_docs"]),
@@ -149,8 +151,19 @@
 			settings.lflags = null;
 		}
 
-		if (settings.options & BuildOption.pic)
-			settings.addDFlags("-relocation-model=pic");
+		if (settings.options & BuildOption.pic) {
+			if (platform.isWindows()) {
+				/* This has nothing to do with PIC, but as the PIC option is exclusively
+				 * set internally for code that ends up in a dynamic library, explicitly
+				 * specify what `-shared` defaults to (`-shared` can't be used when
+				 * compiling only, without linking).
+				 * *Pre*pending the flags enables the user to override them.
+				 */
+				settings.prependDFlags("-fvisibility=public", "-dllimport=all");
+			} else {
+				settings.addDFlags("-relocation-model=pic");
+			}
+		}
 
 		assert(fields & BuildSetting.dflags);
 		assert(fields & BuildSetting.copyFiles);
@@ -207,6 +220,8 @@
 
 	void setTarget(ref BuildSettings settings, in BuildPlatform platform, string tpath = null) const
 	{
+		const targetFileName = getTargetFileName(settings, platform);
+
 		final switch (settings.targetType) {
 			case TargetType.autodetect: assert(false, "Invalid target type: autodetect");
 			case TargetType.none: assert(false, "Invalid target type: none");
@@ -218,6 +233,7 @@
 				break;
 			case TargetType.dynamicLibrary:
 				settings.addDFlags("-shared");
+				addDynamicLibName(settings, platform, targetFileName);
 				break;
 			case TargetType.object:
 				settings.addDFlags("-c");
@@ -225,7 +241,7 @@
 		}
 
 		if (tpath is null)
-			tpath = (NativePath(settings.targetPath) ~ getTargetFileName(settings, platform)).toNativeString();
+			tpath = (NativePath(settings.targetPath) ~ targetFileName).toNativeString();
 		settings.addDFlags("-of"~tpath);
 	}
@@ -267,7 +283,7 @@
 		invokeTool([platform.compilerBinary, "@"~res_file.toNativeString()], output_callback, env);
 	}
 
-	string[] lflagsToDFlags(in string[] lflags) const
+	string[] lflagsToDFlags(const string[] lflags) const
 	{
 		return map!(f => "-L"~f)(lflags.filter!(f => f != "")()).array();
 	}
diff --git a/source/dub/compilers/utils.d b/source/dub/compilers/utils.d
index 171f487..20de0de 100644
--- a/source/dub/compilers/utils.d
+++ b/source/dub/compilers/utils.d
@@ -9,10 +9,10 @@
 
 import dub.compilers.buildsettings;
 import dub.platform : BuildPlatform, archCheck, compilerCheck, platformCheck;
-import dub.internal.vibecompat.core.log;
 import dub.internal.vibecompat.inet.path;
-import std.algorithm : canFind, endsWith, filter;
+import dub.internal.logging;
+
+import std.algorithm : canFind, endsWith, filter;
 
 /**
 	Alters the build options to comply with the specified build requirements.
@@ -47,9 +47,9 @@
 			default:
 				return false;
 			case ".lib", ".obj", ".res", ".def":
-				return platform.platform.canFind("windows");
+				return platform.isWindows();
 			case ".a", ".o", ".so", ".dylib":
-				return !platform.platform.canFind("windows");
+				return !platform.isWindows();
 		}
 	}
 
@@ -74,6 +74,25 @@
 
 /**
+	Adds a default DT_SONAME (ELF) / 'install name' (Mach-O) when linking a dynamic library.
+	This makes dependees reference their dynamic-lib deps by filename only (DT_NEEDED etc.)
+	instead of by the path used in the dependee linker cmdline, and enables loading the
+	deps from the dependee's output directory - either by setting the LD_LIBRARY_PATH
+	environment variable, or baking an rpath into the executable.
+*/
+package void addDynamicLibName(ref BuildSettings settings, in BuildPlatform platform, string fileName)
+{
+	if (!platform.isWindows()) {
+		// *pre*pend to allow the user to override it
+		if (platform.platform.canFind("darwin"))
+			settings.prependLFlags("-install_name", "@rpath/" ~ fileName);
+		else
+			settings.prependLFlags("-soname", fileName);
+	}
+}
+
+
+/**
 	Replaces each referenced import library by the appropriate linker flags.
 
 	This function tries to invoke "pkg-config" if possible and falls back to
@@ -89,7 +108,7 @@
 	if (settings.targetType == TargetType.library || settings.targetType == TargetType.staticLibrary) {
 		logDiagnostic("Ignoring all import libraries for static library build.");
 		settings.libs = null;
-		if (platform.platform.canFind("windows"))
+		if (platform.isWindows())
 			settings.sourceFiles = settings.sourceFiles.filter!(f => !f.endsWith(".lib")).array;
 	}
 
@@ -143,11 +162,11 @@
 	equivalent.
 
 	Certain compiler flags should, instead of using compiler-specific syntax,
-	be specified as build options (`BuildOptions`) or built requirements
+	be specified as build options (`BuildOption`) or build requirements
 	(`BuildRequirements`). This function will output warning messages to
 	assist the user in making the best choice.
*/
-void warnOnSpecialCompilerFlags(string[] compiler_flags, BuildOptions options, string package_name, string config_name)
+void warnOnSpecialCompilerFlags(string[] compiler_flags, Flags!BuildOption options, string package_name, string config_name)
 {
 	import std.algorithm : any, endsWith, startsWith;
 	import std.range : empty;
@@ -170,6 +189,7 @@
 		{["-D"], "Call dub with --build=docs or --build=ddox"},
 		{["-X"], "Call dub with --build=ddox"},
 		{["-cov"], "Call dub with --build=cov or --build=unittest-cov"},
+		{["-cov=ctfe"], "Call dub with --build=cov-ctfe or --build=unittest-cov-ctfe"},
 		{["-profile"], "Call dub with --build=profile"},
 		{["-version="], `Use "versions" to specify version constants in a compiler independent way`},
 		{["-debug="], `Use "debugVersions" to specify version constants in a compiler independent way`},
@@ -186,6 +206,7 @@
 		{[BuildOption.debugMode], "Call DUB with --build=debug"},
 		{[BuildOption.releaseMode], "Call DUB with --build=release"},
 		{[BuildOption.coverage], "Call DUB with --build=cov or --build=unittest-cov"},
+		{[BuildOption.coverageCTFE], "Call DUB with --build=cov-ctfe or --build=unittest-cov-ctfe"},
 		{[BuildOption.debugInfo], "Call DUB with --build=debug"},
 		{[BuildOption.inline], "Call DUB with --build=release"},
 		{[BuildOption.noBoundsCheck], "Call DUB with --build=release-nobounds"},
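To make the effect of `addDynamicLibName` concrete, here is what it prepends for a hypothetical dynamic library target named `mylib` (file names follow `getTargetFileName` above; values are illustrative):

    // ELF (Linux etc.): lflags becomes ["-soname", "libmylib.so", <user lflags...>]
    // Mach-O (macOS):   lflags becomes ["-install_name", "@rpath/libmylib.dylib", <user lflags...>]
    // Windows:          unchanged (the function is a no-op there)

Dependees then record only the file name (e.g. DT_NEEDED) and resolve it at run time via an rpath or LD_LIBRARY_PATH, rather than the path used on the linker command line.
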
+ */ + this(Repository repository) @safe + { + this.m_value = repository; + } /** Constructs a new dependency specification from a string. See the `versionSpec` property for a description of the accepted contents of that string. */ - this(string spec) + this(string spec) @safe { - this.versionSpec = spec; + this(VersionRange.fromString(spec)); } /** Constructs a new dependency specification that matches a specific version. */ - this(const Version ver) + this(const Version ver) @safe { - m_inclusiveA = m_inclusiveB = true; - m_versA = ver; - m_versB = ver; + this(VersionRange(ver, ver)); } - /** Constructs a new dependency specification that matches a specific - path. - */ - this(NativePath path) + /// Constructs a new dependency specification from a range of acceptable versions + this (VersionRange rng) @safe { - this(ANY_IDENT); - m_path = path; + this.m_value = rng; } - /** Constructs a new dependency specification that matches a specific - Git reference. - */ - this(Repository repository, string spec) { - this.versionSpec = spec; - this.repository = repository; + deprecated("Instantiate the `Repository` struct with the string directly") + this(Repository repository, string spec) @safe + { + assert(repository.m_ref is null); + repository.m_ref = spec; + this(repository); } /// If set, overrides any version based dependency selection. - @property void path(NativePath value) { m_path = value; } - /// ditto - @property NativePath path() const { return m_path; } - - /// If set, overrides any version based dependency selection. - @property void repository(Repository value) + deprecated("Construct a new `Dependency` object instead") + @property void path(NativePath value) @trusted { - m_repository = value; + this.m_value = value; + } + /// ditto + @property NativePath path() const @safe + { + return this.m_value.match!( + (const NativePath p) => p, + ( any ) => NativePath.init, + ); } - /// ditto - @property Repository repository() const + /// If set, overrides any version based dependency selection. + deprecated("Construct a new `Dependency` object instead") + @property void repository(Repository value) @trusted { - return m_repository; + this.m_value = value; + } + /// ditto + @property Repository repository() const @safe + { + return this.m_value.match!( + (const Repository p) => p, + ( any ) => Repository.init, + ); } /// Determines if the dependency is required or optional. - @property bool optional() const { return m_optional; } + @property bool optional() const scope @safe pure nothrow @nogc + { + return m_optional; + } /// ditto - @property void optional(bool optional) { m_optional = optional; } + @property void optional(bool optional) scope @safe pure nothrow @nogc + { + m_optional = optional; + } /// Determines if an optional dependency should be chosen by default. - @property bool default_() const { return m_default; } + @property bool default_() const scope @safe pure nothrow @nogc + { + return m_default; + } /// ditto - @property void default_(bool value) { m_default = value; } + @property void default_(bool value) scope @safe pure nothrow @nogc + { + m_default = value; + } /// Returns true $(I iff) the version range only matches a specific version. - @property bool isExactVersion() const { return m_versA == m_versB; } - - /// Determines whether it is a Git dependency.
- @property bool isSCM() const { return !repository.empty; } + @property bool isExactVersion() const scope @safe + { + return this.m_value.match!( + (NativePath v) => false, + (Repository v) => false, + (VersionRange v) => v.isExactVersion(), + ); + } /// Returns the exact version matched by the version range. - @property Version version_() const { - enforce(m_versA == m_versB, "Dependency "~this.versionSpec~" is no exact version."); - return m_versA; + @property Version version_() const @safe { + auto range = this.m_value.match!( + (NativePath p) => assert(0), + (Repository r) => assert(0), + (VersionRange v) => v, + ); + enforce(range.isExactVersion(), + "Dependency "~range.toString()~" is no exact version."); + return range.m_versA; } - /** Sets/gets the matching version range as a specification string. - - The acceptable forms for this string are as follows: - - $(UL - $(LI `"1.0.0"` - a single version in SemVer format) - $(LI `"==1.0.0"` - alternative single version notation) - $(LI `">1.0.0"` - version range with a single bound) - $(LI `">1.0.0 <2.0.0"` - version range with two bounds) - $(LI `"~>1.0.0"` - a fuzzy version range) - $(LI `"~>1.0"` - a fuzzy version range with partial version) - $(LI `"^1.0.0"` - semver compatible version range (same version if 0.x.y, ==major >=minor.patch if x.y.z)) - $(LI `"^1.0"` - same as ^1.0.0) - $(LI `"~master"` - a branch name) - $(LI `"*" - match any version (see also `any`)) - ) - - Apart from "$(LT)" and "$(GT)", "$(GT)=" and "$(LT)=" are also valid - comparators. - - */ - @property void versionSpec(string ves) + /// Sets/gets the matching version range as a specification string. + deprecated("Create a new `Dependency` instead and provide a `VersionRange`") + @property void versionSpec(string ves) @trusted { - static import std.string; - - enforce(ves.length > 0); - - if (ves == ANY_IDENT) { - // Any version is good. - ves = ">=0.0.0"; - } - - if (ves.startsWith("~>")) { - // Shortcut: "~>x.y.z" variant. Last non-zero number will indicate - // the base for this, i.e. ">=x.y.z <x.(y+1).z" - m_inclusiveA = true; - m_inclusiveB = false; - ves = ves[2..$]; - m_versA = Version(expandVersion(ves)); - m_versB = Version(bumpVersion(ves) ~ "-0"); - } else if (ves.startsWith("^")) { - // Shortcut: "^x.y.z" variant ("semver compatible"), i.e. - // ">=x.y.z <(x+1).0.0-0" (or exactly x.y.z for 0.x.y versions); - // ^x.y is equivalent to ^x.y.0.
- m_inclusiveA = true; - m_inclusiveB = false; - ves = ves[1..$].expandVersion; - m_versA = Version(ves); - m_versB = Version(bumpIncompatibleVersion(ves) ~ "-0"); - } else if (ves[0] == Version.branchPrefix || ves.isGitHash) { - m_inclusiveA = true; - m_inclusiveB = true; - m_versA = m_versB = Version(ves); - } else if (std.string.indexOf("><=", ves[0]) == -1) { - m_inclusiveA = true; - m_inclusiveB = true; - m_versA = m_versB = Version(ves); - } else { - auto cmpa = skipComp(ves); - size_t idx2 = std.string.indexOf(ves, " "); - if (idx2 == -1) { - if (cmpa == "<=" || cmpa == "<") { - m_versA = Version.minRelease; - m_inclusiveA = true; - m_versB = Version(ves); - m_inclusiveB = cmpa == "<="; - } else if (cmpa == ">=" || cmpa == ">") { - m_versA = Version(ves); - m_inclusiveA = cmpa == ">="; - m_versB = Version.maxRelease; - m_inclusiveB = true; - } else { - // Converts "==" to ">=a&&<=a", which makes merging easier - m_versA = m_versB = Version(ves); - m_inclusiveA = m_inclusiveB = true; - } - } else { - enforce(cmpa == ">" || cmpa == ">=", "First comparison operator expected to be either > or >=, not "~cmpa); - assert(ves[idx2] == ' '); - m_versA = Version(ves[0..idx2]); - m_inclusiveA = cmpa == ">="; - string v2 = ves[idx2+1..$]; - auto cmpb = skipComp(v2); - enforce(cmpb == "<" || cmpb == "<=", "Second comparison operator expected to be either < or <=, not "~cmpb); - m_versB = Version(v2); - m_inclusiveB = cmpb == "<="; - - enforce(!m_versA.isBranch && !m_versB.isBranch, format("Cannot compare branches: %s", ves)); - enforce(m_versA <= m_versB, "First version must not be greater than the second one."); - } - } + this.m_value = VersionRange.fromString(ves); } + /// ditto - @property string versionSpec() - const { - static import std.string; - - string r; - - if (this == invalid) return "invalid"; - if (m_versA == m_versB && m_inclusiveA && m_inclusiveB) { - // Special "==" case - if (m_versA == Version.masterBranch) return "~master"; - else return m_versA.toString(); - } - - // "~>", "^" case - if (m_inclusiveA && !m_inclusiveB && !m_versA.isBranch) { - auto vs = m_versA.toString(); - auto i1 = std.string.indexOf(vs, '-'), i2 = std.string.indexOf(vs, '+'); - auto i12 = i1 >= 0 ? i2 >= 0 ? i1 < i2 ? i1 : i2 : i1 : i2; - auto va = i12 >= 0 ? vs[0 .. i12] : vs; - auto parts = va.splitter('.').array; - assert(parts.length == 3, "Version string with a digit group count != 3: "~va); - - foreach (i; 0 .. 3) { - auto vp = parts[0 .. i+1].join("."); - auto ve = Version(expandVersion(vp)); - auto veb = Version(bumpVersion(vp) ~ "-0"); - if (ve == m_versA && veb == m_versB) return "~>" ~ vp; - - auto veb2 = Version(bumpIncompatibleVersion(expandVersion(vp)) ~ "-0"); - if (ve == m_versA && veb2 == m_versB) return "^" ~ vp; - } - } - - if (m_versA != Version.minRelease) r = (m_inclusiveA ? ">=" : ">") ~ m_versA.toString(); - if (m_versB != Version.maxRelease) r ~= (r.length==0 ? "" : " ") ~ (m_inclusiveB ? "<=" : "<") ~ m_versB.toString(); - if (m_versA == Version.minRelease && m_versB == Version.maxRelease) r = ">=0.0.0"; - return r; + deprecated("Use `Dependency.visit` and match `VersionRange` instead") + @property string versionSpec() const @safe { + return this.m_value.match!( + (const NativePath p) => ANY_IDENT, + (const Repository r) => r.m_ref, + (const VersionRange p) => p.toString(), + ); } /** Returns a modified dependency that gets mapped to a given path. @@ -276,38 +208,39 @@ based. Otherwise, the given `path` will be prefixed to the existing path.
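A hedged usage sketch of the remapping described above (relying on standard `NativePath` segment-wise concatenation and equality):

unittest {
    // Relative path dependencies get the given prefix prepended...
    auto dep = Dependency(NativePath("deps/mylib"));
    assert(dep.mapToPath(NativePath("pkg")).path == NativePath("pkg/deps/mylib"));
    // ...while absolute (or empty) paths are returned unchanged.
    auto abs = Dependency(NativePath("/opt/mylib"));
    assert(abs.mapToPath(NativePath("pkg")) == abs);
}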
*/ - Dependency mapToPath(NativePath path) - const @trusted { // NOTE Path is @system in vibe.d 0.7.x and in the compatibility layer - if (m_path.empty || m_path.absolute) return this; - else { - Dependency ret = this; - ret.path = path ~ ret.path; - return ret; - } + Dependency mapToPath(NativePath path) const @trusted { + // NOTE Path is @system in vibe.d 0.7.x and in the compatibility layer + return this.m_value.match!( + (NativePath v) { + if (v.empty || v.absolute) return this; + return Dependency(path ~ v); + }, + (Repository v) => this, + (VersionRange v) => this, + ); } /** Returns a human-readable string representation of the dependency specification. */ - string toString()() - const { - string ret; + string toString() const scope @trusted { + // Trusted because `SumType.match` doesn't seem to support `scope` - if (!repository.empty) { - ret ~= repository.toString~"#"; - } - ret ~= versionSpec; - if (optional) { - if (default_) ret ~= " (optional, default)"; - else ret ~= " (optional)"; + string Stringifier (T, string pre = null) (const T v) + { + const bool extra = this.optional || this.default_; + return format("%s%s%s%-(%s, %)%s", + pre, v, + extra ? " (" : "", + BooleanOptions[!this.optional .. 1 + this.default_], + extra ? ")" : ""); } - // NOTE Path is @system in vibe.d 0.7.x and in the compatibility layer - () @trusted { - if (!path.empty) ret ~= " @"~path.toNativeString(); - } (); - - return ret; + return this.m_value.match!( + Stringifier!Repository, + Stringifier!(NativePath, "@"), + Stringifier!VersionRange + ); } /** Returns a JSON representation of the dependency specification. @@ -316,20 +249,44 @@ string (`versionSpec`), while more complex specifications will be represented as a JSON object with optional "version", "path", "optional" and "default" fields. + + Params: + selections = We are serializing `dub.selections.json`, don't write out + `optional` and `default`. 
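To make the `selections` switch concrete, a small sketch of the two output shapes (hedged; it simply mirrors the branches implemented below):

unittest {
    auto d = Dependency(VersionRange.fromString(">=1.0.0"));
    d.optional = true;
    // Recipe form: an object carrying the boolean flags...
    assert(d.toJson()["optional"] == Json(true));
    // ...selections form: a bare version string, with the flags omitted.
    assert(d.toJson(true) == Json(">=1.0.0"));
}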
*/ - Json toJson() - const @trusted { // NOTE Path and Json is @system in vibe.d 0.7.x and in the compatibility layer - Json json; - if( path.empty && repository.empty && !optional ){ - json = Json(this.versionSpec); - } else { - json = Json.emptyObject; - json["version"] = this.versionSpec; - if (!path.empty) json["path"] = path.toString(); - if (!repository.empty) json["repository"] = repository.toString; - if (optional) json["optional"] = true; - if (default_) json["default"] = true; + Json toJson(bool selections = false) const @safe + { + // NOTE Path and Json is @system in vibe.d 0.7.x and in the compatibility layer + static void initJson(ref Json j, bool opt, bool def, bool s = selections) + { + j = Json.emptyObject; + if (!s && opt) j["optional"] = true; + if (!s && def) j["default"] = true; } + + Json json; + this.m_value.match!( + (const NativePath v) @trusted { + initJson(json, optional, default_); + json["path"] = v.toString(); + }, + + (const Repository v) @trusted { + initJson(json, optional, default_); + json["repository"] = v.toString(); + json["version"] = v.m_ref; + }, + + (const VersionRange v) @trusted { + if (!selections && (optional || default_)) + { + initJson(json, optional, default_); + json["version"] = v.toString(); + } + else + json = Json(v.toString()); + }, + ); return json; } @@ -342,7 +299,7 @@ } @trusted unittest { - Dependency dependency = Dependency(Repository("git+http://localhost"), "1.0.0"); + Dependency dependency = Dependency(Repository("git+http://localhost", "1.0.0")); Json expected = Json([ "repository": Json("git+http://localhost"), "version": Json("1.0.0") @@ -350,6 +307,12 @@ assert(dependency.toJson() == expected, "Failed: " ~ dependency.toJson().toPrettyString()); } + @trusted unittest { + Dependency d = Dependency(NativePath("dir")); + Json expected = Json([ "path": Json("dir") ]); + assert(d.toJson() == expected, "Failed: " ~ d.toJson().toPrettyString()); + } + /** Constructs a new `Dependency` from its JSON representation. See `toJson` for a description of the JSON format. @@ -362,14 +325,13 @@ if (auto pv = "version" in verspec) logDiagnostic("Ignoring version specification (%s) for path based dependency %s", pv.get!string, pp.get!string); - dep = Dependency.any; - dep.path = NativePath(verspec["path"].get!string); + dep = Dependency(NativePath(verspec["path"].get!string)); } else if (auto repository = "repository" in verspec) { enforce("version" in verspec, "No version field specified!"); enforce(repository.length > 0, "No repository field specified!"); - dep = Dependency(Repository(repository.get!string), - verspec["version"].get!string); + dep = Dependency(Repository( + repository.get!string, verspec["version"].get!string)); } else { enforce("version" in verspec, "No version field specified!"); auto ver = verspec["version"].get!string; @@ -396,15 +358,10 @@ "path": "path/to/package" } `)); - Dependency d = Dependency.any; // supposed to ignore the version spec + Dependency d = NativePath("path/to/package"); // supposed to ignore the version spec d.optional = true; d.default_ = true; - d.path = NativePath("path/to/package"); assert(d == parsed); - // optional and path not checked by opEquals. - assert(d.optional == parsed.optional); - assert(d.default_ == parsed.default_); - assert(d.path == parsed.path); } /** Compares dependency specifications. @@ -412,92 +369,62 @@ These methods are suitable for equality comparisons, as well as for using `Dependency` as a key in hash or tree maps. 
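A brief illustration of these semantics (version ranges order by their bounds; differing boolean flags break equality even for identical ranges):

unittest {
    assert(Dependency(">=1.0.0") == Dependency(">=1.0.0"));
    assert(Dependency(">=1.0.0") < Dependency(">=2.0.0"));
    auto opt = Dependency(">=1.0.0");
    opt.optional = true;
    assert(opt != Dependency(">=1.0.0"));
}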
*/ - bool opEquals(const Dependency o) - const { - // TODO(mdondorff): Check if not comparing the path is correct for all clients. - return o.m_inclusiveA == m_inclusiveA && o.m_inclusiveB == m_inclusiveB - && o.m_versA == m_versA && o.m_versB == m_versB - && o.m_optional == m_optional && o.m_default == m_default; + bool opEquals(in Dependency o) const scope @safe { + if (o.m_optional != this.m_optional) return false; + if (o.m_default != this.m_default) return false; + return this.m_value == o.m_value; } /// ditto - int opCmp(const Dependency o) - const { - if (m_inclusiveA != o.m_inclusiveA) return m_inclusiveA < o.m_inclusiveA ? -1 : 1; - if (m_inclusiveB != o.m_inclusiveB) return m_inclusiveB < o.m_inclusiveB ? -1 : 1; - if (m_versA != o.m_versA) return m_versA < o.m_versA ? -1 : 1; - if (m_versB != o.m_versB) return m_versB < o.m_versB ? -1 : 1; + int opCmp(in Dependency o) const @safe { + alias ResultMatch = match!( + (VersionRange r1, VersionRange r2) => r1.opCmp(r2), + (_1, _2) => 0, + ); + if (auto result = ResultMatch(this.m_value, o.m_value)) + return result; if (m_optional != o.m_optional) return m_optional ? -1 : 1; return 0; } - /// ditto - size_t toHash() - const nothrow @trusted { - try { - size_t hash = 0; - hash = m_inclusiveA.hashOf(hash); - hash = m_versA.toString().hashOf(hash); - hash = m_inclusiveB.hashOf(hash); - hash = m_versB.toString().hashOf(hash); - hash = m_optional.hashOf(hash); - hash = m_default.hashOf(hash); - return hash; - } catch (Exception) assert(false); - } - /** Determines if this dependency specification is valid. A specification is valid if it can match at least one version. */ - bool valid() const { - if (this.isSCM) return true; - return m_versA <= m_versB && doCmp(m_inclusiveA && m_inclusiveB, m_versA, m_versB); + bool valid() const @safe { + return this.m_value.match!( + (NativePath v) => true, + (Repository v) => true, + (VersionRange v) => v.isValid(), + ); } /** Determines if this dependency specification matches arbitrary versions. This is true in particular for the `any` constant. */ - bool matchesAny() - const { - return m_inclusiveA && m_inclusiveB - && m_versA.toString() == "0.0.0" - && m_versB == Version.maxRelease; - } - - unittest { - assert(Dependency("*").matchesAny); - assert(!Dependency(">0.0.0").matchesAny); - assert(!Dependency(">=1.0.0").matchesAny); - assert(!Dependency("<1.0.0").matchesAny); + deprecated("Use `VersionRange.matchesAny` directly") + bool matchesAny() const scope @safe { + return this.m_value.match!( + (NativePath v) => true, + (Repository v) => true, + (VersionRange v) => v.matchesAny(), + ); } /** Tests if the specification matches a specific version. 
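For example (per the implementation that follows; path- and repository-based dependencies match any version):

unittest {
    auto dep = Dependency(">=1.2.0 <2.0.0");
    assert(dep.matches(Version("1.5.0")));
    assert(!dep.matches(Version("2.0.0"))); // the upper bound is exclusive here
    assert(Dependency(NativePath("some/path")).matches(Version("0.0.1")));
}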
*/ - bool matches(string vers, VersionMatchMode mode = VersionMatchMode.standard) const { return matches(Version(vers), mode); } + bool matches(string vers, VersionMatchMode mode = VersionMatchMode.standard) const @safe + { + return matches(Version(vers), mode); + } /// ditto - bool matches(const(Version) v, VersionMatchMode mode = VersionMatchMode.standard) const { return matches(v, mode); } - /// ditto - bool matches(ref const(Version) v, VersionMatchMode mode = VersionMatchMode.standard) const { - if (this.matchesAny) return true; - if (this.isSCM) return true; - //logDebug(" try match: %s with: %s", v, this); - // Master only matches master - if(m_versA.isBranch) { - enforce(m_versA == m_versB); - return m_versA == v; - } - if(v.isBranch || m_versA.isBranch) - return m_versA == v; - if( !doCmp(m_inclusiveA, m_versA, v) ) - return false; - if( !doCmp(m_inclusiveB, v, m_versB) ) - return false; - if (this.isExactVersion && mode == VersionMatchMode.strict - && this.version_.toString != v.toString) - return false; - return true; + bool matches(in Version v, VersionMatchMode mode = VersionMatchMode.standard) const @safe { + return this.m_value.match!( + (NativePath i) => true, + (Repository i) => true, + (VersionRange i) => i.matchesAny() || i.matches(v, mode), + ); } /** Merges two dependency specifications. @@ -506,72 +433,61 @@ of versions matched by the individual specifications. Note that this result can be invalid (i.e. not match any version). */ - Dependency merge(ref const(Dependency) o) - const { - if (this.isSCM) { - if (!o.isSCM) return this; - if (this.m_versA == o.m_versA) return this; - return invalid; - } - if (o.isSCM) return o; + Dependency merge(ref const(Dependency) o) const @trusted { + alias Merger = match!( + (const NativePath a, const NativePath b) => a == b ? this : invalid, + (const NativePath a, any ) => o, + ( any , const NativePath b) => this, - if (this.matchesAny) return o; - if (o.matchesAny) return this; - if (m_versA.isBranch != o.m_versA.isBranch) return invalid; - if (m_versB.isBranch != o.m_versB.isBranch) return invalid; - if (m_versA.isBranch) return m_versA == o.m_versA ? this : invalid; - // NOTE Path is @system in vibe.d 0.7.x and in the compatibility layer - if (() @trusted { return this.path != o.path; } ()) return invalid; + (const Repository a, const Repository b) => a.m_ref == b.m_ref ? this : invalid, + (const Repository a, any ) => this, + ( any , const Repository b) => o, - int acmp = m_versA.opCmp(o.m_versA); - int bcmp = m_versB.opCmp(o.m_versB); + (const VersionRange a, const VersionRange b) { + if (a.matchesAny()) return o; + if (b.matchesAny()) return this; - Dependency d = this; - d.m_inclusiveA = !m_inclusiveA && acmp >= 0 ? false : o.m_inclusiveA; - d.m_versA = acmp > 0 ? m_versA : o.m_versA; - d.m_inclusiveB = !m_inclusiveB && bcmp <= 0 ? false : o.m_inclusiveB; - d.m_versB = bcmp < 0 ? m_versB : o.m_versB; - d.m_optional = m_optional && o.m_optional; - if (!d.valid) return invalid; + VersionRange copy = a; + copy.merge(b); + if (!copy.isValid()) return invalid; + return Dependency(copy); + } + ); - return d; - } - - private static bool isDigit(char ch) { return ch >= '0' && ch <= '9'; } - private static string skipComp(ref string c) { - size_t idx = 0; - while (idx < c.length && !isDigit(c[idx]) && c[idx] != Version.branchPrefix) idx++; - enforce(idx < c.length, "Expected version number in version spec: "~c); - string cmp = idx==c.length-1||idx==0? 
">=" : c[0..idx]; - c = c[idx..$]; - switch(cmp) { - default: enforce(false, "No/Unknown comparison specified: '"~cmp~"'"); return ">="; - case ">=": goto case; case ">": goto case; - case "<=": goto case; case "<": goto case; - case "==": return cmp; - } - } - - private static bool doCmp(bool inclusive, ref const Version a, ref const Version b) { - return inclusive ? a <= b : a < b; + Dependency ret = Merger(this.m_value, o.m_value); + ret.m_optional = m_optional && o.m_optional; + return ret; } } +/// Allow direct access to the underlying dependency +public auto visit (Handlers...) (const auto ref Dependency dep) +{ + return dep.m_value.match!(Handlers); +} + +//// Ditto +public auto visit (Handlers...) (auto ref Dependency dep) +{ + return dep.m_value.match!(Handlers); +} + + unittest { Dependency a = Dependency(">=1.1.0"), b = Dependency(">=1.3.0"); - assert (a.merge(b).valid() && a.merge(b).versionSpec == ">=1.3.0", a.merge(b).toString()); + assert (a.merge(b).valid() && a.merge(b).toString() == ">=1.3.0", a.merge(b).toString()); assertThrown(Dependency("<=2.0.0 >=1.0.0")); assertThrown(Dependency(">=2.0.0 <=1.0.0")); a = Dependency(">=1.0.0 <=5.0.0"); b = Dependency(">=2.0.0"); - assert (a.merge(b).valid() && a.merge(b).versionSpec == ">=2.0.0 <=5.0.0", a.merge(b).toString()); + assert (a.merge(b).valid() && a.merge(b).toString() == ">=2.0.0 <=5.0.0", a.merge(b).toString()); assertThrown(a = Dependency(">1.0.0 ==5.0.0"), "Construction is invalid"); a = Dependency(">1.0.0"); b = Dependency("<2.0.0"); assert (a.merge(b).valid(), a.merge(b).toString()); - assert (a.merge(b).versionSpec == ">1.0.0 <2.0.0", a.merge(b).toString()); + assert (a.merge(b).toString() == ">1.0.0 <2.0.0", a.merge(b).toString()); a = Dependency(">2.0.0"); b = Dependency("<1.0.0"); assert (!(a.merge(b)).valid(), a.merge(b).toString()); @@ -727,13 +643,13 @@ } unittest { - assert(Dependency("~>1.0.4").versionSpec == "~>1.0.4"); - assert(Dependency("~>1.4").versionSpec == "~>1.4"); - assert(Dependency("~>2").versionSpec == "~>2"); - assert(Dependency("~>1.0.4+1.2.3").versionSpec == "~>1.0.4"); - assert(Dependency("^0.1.2").versionSpec == "^0.1.2"); - assert(Dependency("^1.2.3").versionSpec == "^1.2.3"); - assert(Dependency("^1.2").versionSpec == "~>1.2"); // equivalent; prefer ~> + assert(VersionRange.fromString("~>1.0.4").toString() == "~>1.0.4"); + assert(VersionRange.fromString("~>1.4").toString() == "~>1.4"); + assert(VersionRange.fromString("~>2").toString() == "~>2"); + assert(VersionRange.fromString("~>1.0.4+1.2.3").toString() == "~>1.0.4"); + assert(VersionRange.fromString("^0.1.2").toString() == "^0.1.2"); + assert(VersionRange.fromString("^1.2.3").toString() == "^1.2.3"); + assert(VersionRange.fromString("^1.2").toString() == "~>1.2"); // equivalent; prefer ~> } /** @@ -742,6 +658,7 @@ struct Repository { private string m_remote; + private string m_ref; private Kind m_kind; @@ -753,21 +670,31 @@ /** Params: remote = Repository remote. + ref_ = Reference to use (SHA1, tag, branch name...) */ - this(string remote) + this(string remote, string ref_) { - if (remote.startsWith("git+")) - { - m_remote = remote["git+".length .. $]; - m_kind = Kind.git; - } - else - { - throw new Exception("Unsupported repository type"); - } + enforce(remote.startsWith("git+"), "Unsupported repository type (supports: git+URL)"); + + m_remote = remote["git+".length .. 
$]; + m_kind = Kind.git; + m_ref = ref_; + assert(m_remote.length); + assert(m_ref.length); } - string toString() nothrow pure @safe + /// Ditto + deprecated("Use the constructor accepting a second parameter named `ref_`") + this(string remote) + { + enforce(remote.startsWith("git+"), "Unsupported repository type (supports: git+URL)"); + + m_remote = remote["git+".length .. $]; + m_kind = Kind.git; + assert(m_remote.length); + } + + string toString() const nothrow pure @safe { if (empty) return null; string kindRepresentation; @@ -784,7 +711,7 @@ Returns: Repository URL or path. */ - @property string remote() @nogc nothrow pure @safe + @property string remote() const @nogc nothrow pure @safe in { assert(m_remote !is null); } do { @@ -793,9 +720,21 @@ /** Returns: + The reference (commit hash, branch name, tag) we are targeting + */ + @property string ref_() const @nogc nothrow pure @safe + in { assert(m_remote !is null); } + in { assert(m_ref !is null); } + do + { + return m_ref; + } + + /** + Returns: Repository type. */ - @property Kind kind() @nogc nothrow pure @safe + @property Kind kind() const @nogc nothrow pure @safe { return m_kind; } @@ -819,10 +758,8 @@ Semantic Versioning Specification v2.0.0 at http://semver.org/). */ struct Version { -@safe: private { static immutable MAX_VERS = "99999.0.0"; - static immutable UNKNOWN_VERS = "unknown"; static immutable masterString = "~master"; enum branchPrefix = '~'; string m_version; @@ -831,14 +768,13 @@ static immutable Version minRelease = Version("0.0.0"); static immutable Version maxRelease = Version(MAX_VERS); static immutable Version masterBranch = Version(masterString); - static immutable Version unknown = Version(UNKNOWN_VERS); /** Constructs a new `Version` from its string representation. */ - this(string vers) + this(string vers) @safe pure { enforce(vers.length > 1, "Version strings must not be empty."); - if (vers[0] != branchPrefix && !vers.isGitHash && vers.ptr !is UNKNOWN_VERS.ptr) + if (vers[0] != branchPrefix) enforce(vers.isValidVersion(), "Invalid SemVer format: " ~ vers); m_version = vers; } @@ -848,42 +784,43 @@ This method is equivalent to calling the constructor and is used as an endpoint for the serialization framework. */ - static Version fromString(string vers) { return Version(vers); } + static Version fromString(string vers) @safe pure { return Version(vers); } - bool opEquals(const Version oth) const { return opCmp(oth) == 0; } - - /// Tests if this represents a hash instead of a version. - @property bool isSCM() const { return m_version.isGitHash; } + bool opEquals(in Version oth) const scope @safe pure + { + return opCmp(oth) == 0; + } /// Tests if this represents a branch instead of a version. - @property bool isBranch() const { return m_version.length > 0 && m_version[0] == branchPrefix; } + @property bool isBranch() const scope @safe pure nothrow @nogc + { + return m_version.length > 0 && m_version[0] == branchPrefix; + } /// Tests if this represents the master branch "~master". - @property bool isMaster() const { return m_version == masterString; } + @property bool isMaster() const scope @safe pure nothrow @nogc + { + return m_version == masterString; + } /** Tests if this represents a pre-release version. Note that branches are always considered pre-release versions. 
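As a quick illustration of the property defined just below (hedged; uses only constructors shown in this diff):

unittest {
    assert(Version("1.0.0-rc.1").isPreRelease);
    assert(Version("~feature-branch").isPreRelease); // branches always count
    assert(!Version("1.0.0").isPreRelease);
}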
*/ - @property bool isPreRelease() const { - if (isBranch || isSCM) return true; + @property bool isPreRelease() const scope @safe pure nothrow @nogc + { + if (isBranch) return true; return isPreReleaseVersion(m_version); } - /// Tests if this represents the special unknown version constant. - @property bool isUnknown() const { return m_version == UNKNOWN_VERS; } /** Tests two versions for equality, according to the selected match mode. */ - bool matches(Version other, VersionMatchMode mode = VersionMatchMode.standard) - const { - if (this != other) - return false; - - if (mode == VersionMatchMode.strict && this.toString() != other.toString()) - return false; - - return true; + bool matches(in Version other, VersionMatchMode mode = VersionMatchMode.standard) + const scope @safe pure + { + if (mode == VersionMatchMode.strict) + return this.toString() == other.toString(); + return this == other; } /** Compares two versions/branches for precedence. @@ -893,18 +830,8 @@ compared using SemVer semantics, while branches are compared lexicographically. */ - int opCmp(ref const Version other) - const { - if (isUnknown || other.isUnknown) { - throw new Exception("Can't compare unknown versions! (this: %s, other: %s)".format(this, other)); - } - - if (isSCM || other.isSCM) { - if (!isSCM) return -1; - if (!other.isSCM) return 1; - return m_version == other.m_version ? 0 : 1; - } - + int opCmp(in Version other) const scope @safe pure + { if (isBranch || other.isBranch) { if(m_version == other.m_version) return 0; if (!isBranch) return 1; @@ -916,11 +843,251 @@ return compareVersions(m_version, other.m_version); } - /// ditto - int opCmp(const Version other) const { return opCmp(other); } /// Returns the string representation of the version/branch. - string toString() const { return m_version; } + string toString() const return scope @safe pure nothrow @nogc + { + return m_version; + } +} + +/** + * A range of versions that are acceptable + * + * While not directly described in SemVer v2.0.0, a common set + * of range operators has appeared among package managers. + * We mostly follow NPM's: https://semver.npmjs.com/ + * + * Hence the acceptable forms for this string are as follows: + * + * $(UL + * $(LI `"1.0.0"` - a single version in SemVer format) + * $(LI `"==1.0.0"` - alternative single version notation) + * $(LI `">1.0.0"` - version range with a single bound) + * $(LI `">1.0.0 <2.0.0"` - version range with two bounds) + * $(LI `"~>1.0.0"` - a fuzzy version range) + * $(LI `"~>1.0"` - a fuzzy version range with partial version) + * $(LI `"^1.0.0"` - semver compatible version range (same version if 0.x.y, ==major >=minor.patch if x.y.z)) + * $(LI `"^1.0"` - same as ^1.0.0) + * $(LI `"~master"` - a branch name) + * $(LI `"*"` - match any version (see also `VersionRange.Any`)) + * ) + * + * Apart from "$(LT)" and "$(GT)", "$(GT)=" and "$(LT)=" are also valid + * comparators. + */ +public struct VersionRange +{ + private Version m_versA; + private Version m_versB; + private bool m_inclusiveA = true; // lower bound is inclusive (">=") if true, strict (">") if false + private bool m_inclusiveB = true; // upper bound is inclusive ("<=") if true, strict ("<") if false + + /// Matches any version + public static immutable Any = VersionRange(Version.minRelease, Version.maxRelease); + /// Doesn't match any version + public static immutable Invalid = VersionRange(Version.maxRelease, Version.minRelease); + + /// + public int opCmp (in VersionRange o) const scope @safe + { + if (m_inclusiveA != o.m_inclusiveA) return m_inclusiveA < o.m_inclusiveA ?
-1 : 1; + if (m_inclusiveB != o.m_inclusiveB) return m_inclusiveB < o.m_inclusiveB ? -1 : 1; + if (m_versA != o.m_versA) return m_versA < o.m_versA ? -1 : 1; + if (m_versB != o.m_versB) return m_versB < o.m_versB ? -1 : 1; + return 0; + } + + public bool matches (in Version v, VersionMatchMode mode = VersionMatchMode.standard) + const scope @safe + { + if (m_versA.isBranch) { + enforce(this.isExactVersion()); + return m_versA == v; + } + + if (v.isBranch) + return m_versA == v; + + if (m_versA == m_versB) + return this.m_versA.matches(v, mode); + + return doCmp(m_inclusiveA, m_versA, v) && + doCmp(m_inclusiveB, v, m_versB); + } + + /// Modify in place + public void merge (const VersionRange o) @safe + { + int acmp = m_versA.opCmp(o.m_versA); + int bcmp = m_versB.opCmp(o.m_versB); + + this.m_inclusiveA = !m_inclusiveA && acmp >= 0 ? false : o.m_inclusiveA; + this.m_versA = acmp > 0 ? m_versA : o.m_versA; + this.m_inclusiveB = !m_inclusiveB && bcmp <= 0 ? false : o.m_inclusiveB; + this.m_versB = bcmp < 0 ? m_versB : o.m_versB; + } + + /// Returns true $(I iff) the version range only matches a specific version. + @property bool isExactVersion() const scope @safe + { + return this.m_versA == this.m_versB; + } + + /// Determines if this dependency specification matches arbitrary versions. + /// This is true in particular for the `any` constant. + public bool matchesAny() const scope @safe + { + return this.m_inclusiveA && this.m_inclusiveB + && this.m_versA == Version.minRelease + && this.m_versB == Version.maxRelease; + } + + unittest { + assert(VersionRange.fromString("*").matchesAny); + assert(!VersionRange.fromString(">0.0.0").matchesAny); + assert(!VersionRange.fromString(">=1.0.0").matchesAny); + assert(!VersionRange.fromString("<1.0.0").matchesAny); + } + + public static VersionRange fromString (string ves) @safe + { + static import std.string; + + enforce(ves.length > 0); + + if (ves == Dependency.ANY_IDENT) { + // Any version is good. + ves = ">=0.0.0"; + } + + if (ves.startsWith("~>")) { + // Shortcut: "~>x.y.z" variant. Last non-zero number will indicate + // the base for this, i.e. ">=x.y.z <x.(y+1).z" + ves = ves[2..$]; + return VersionRange( + Version(expandVersion(ves)), Version(bumpVersion(ves) ~ "-0"), + true, false); + } + + if (ves.startsWith("^")) { + // Shortcut: "^x.y.z" variant ("semver compatible"), i.e. + // ">=x.y.z <(x+1).0.0-0" (or exactly x.y.z for 0.x.y versions); + // ^x.y is equivalent to ^x.y.0.
+ ves = ves[1..$].expandVersion; + return VersionRange( + Version(ves), Version(bumpIncompatibleVersion(ves) ~ "-0"), + true, false); + } + + if (ves[0] == Version.branchPrefix) { + auto ver = Version(ves); + return VersionRange(ver, ver, true, true); + } + + if (std.string.indexOf("><=", ves[0]) == -1) { + auto ver = Version(ves); + return VersionRange(ver, ver, true, true); + } + + auto cmpa = skipComp(ves); + size_t idx2 = std.string.indexOf(ves, " "); + if (idx2 == -1) { + if (cmpa == "<=" || cmpa == "<") + return VersionRange(Version.minRelease, Version(ves), true, (cmpa == "<=")); + + if (cmpa == ">=" || cmpa == ">") + return VersionRange(Version(ves), Version.maxRelease, (cmpa == ">="), true); + + // Converts "==" to ">=a&&<=a", which makes merging easier + return VersionRange(Version(ves), Version(ves), true, true); + } + + enforce(cmpa == ">" || cmpa == ">=", + "First comparison operator expected to be either > or >=, not " ~ cmpa); + assert(ves[idx2] == ' '); + VersionRange ret; + ret.m_versA = Version(ves[0..idx2]); + ret.m_inclusiveA = cmpa == ">="; + string v2 = ves[idx2+1..$]; + auto cmpb = skipComp(v2); + enforce(cmpb == "<" || cmpb == "<=", + "Second comparison operator expected to be either < or <=, not " ~ cmpb); + ret.m_versB = Version(v2); + ret.m_inclusiveB = cmpb == "<="; + + enforce(!ret.m_versA.isBranch && !ret.m_versB.isBranch, + format("Cannot compare branches: %s", ves)); + enforce(ret.m_versA <= ret.m_versB, + "First version must not be greater than the second one."); + + return ret; + } + + /// Returns a string representation of this range + string toString() const @safe { + static import std.string; + + string r; + + if (this == Invalid) return "invalid"; + if (this.isExactVersion() && m_inclusiveA && m_inclusiveB) { + // Special "==" case + if (m_versA == Version.masterBranch) return "~master"; + else return m_versA.toString(); + } + + // "~>", "^" case + if (m_inclusiveA && !m_inclusiveB && !m_versA.isBranch) { + auto vs = m_versA.toString(); + auto i1 = std.string.indexOf(vs, '-'), i2 = std.string.indexOf(vs, '+'); + auto i12 = i1 >= 0 ? i2 >= 0 ? i1 < i2 ? i1 : i2 : i1 : i2; + auto va = i12 >= 0 ? vs[0 .. i12] : vs; + auto parts = va.splitter('.').array; + assert(parts.length == 3, "Version string with a digit group count != 3: "~va); + + foreach (i; 0 .. 3) { + auto vp = parts[0 .. i+1].join("."); + auto ve = Version(expandVersion(vp)); + auto veb = Version(bumpVersion(vp) ~ "-0"); + if (ve == m_versA && veb == m_versB) return "~>" ~ vp; + + auto veb2 = Version(bumpIncompatibleVersion(expandVersion(vp)) ~ "-0"); + if (ve == m_versA && veb2 == m_versB) return "^" ~ vp; + } + } + + if (m_versA != Version.minRelease) r = (m_inclusiveA ? ">=" : ">") ~ m_versA.toString(); + if (m_versB != Version.maxRelease) r ~= (r.length==0 ? "" : " ") ~ (m_inclusiveB ? "<=" : "<") ~ m_versB.toString(); + if (this.matchesAny()) r = ">=0.0.0"; + return r; + } + + public bool isValid() const @safe { + return m_versA <= m_versB && doCmp(m_inclusiveA && m_inclusiveB, m_versA, m_versB); + } + + private static bool doCmp(bool inclusive, in Version a, in Version b) + @safe + { + return inclusive ? a <= b : a < b; + } + + private static bool isDigit(char ch) @safe { return ch >= '0' && ch <= '9'; } + private static string skipComp(ref string c) @safe { + size_t idx = 0; + while (idx < c.length && !isDigit(c[idx]) && c[idx] != Version.branchPrefix) idx++; + enforce(idx < c.length, "Expected version number in version spec: "~c); + string cmp = idx==c.length-1||idx==0?
">=" : c[0..idx]; + c = c[idx..$]; + switch(cmp) { + default: enforce(false, "No/Unknown comparison specified: '"~cmp~"'"); return ">="; + case ">=": goto case; case ">": goto case; + case "<=": goto case; case "<": goto case; + case "==": return cmp; + } + } } enum VersionMatchMode { @@ -982,18 +1149,8 @@ for(int j=i-1; j>=0; --j) assert(versions[j] < versions[i], "Failed: " ~ versions[j].toString() ~ "<" ~ versions[i].toString()); - a = Version.unknown; - b = Version.minRelease; - assertThrown(a == b, "Failed: compared " ~ a.toString() ~ " with " ~ b.toString() ~ ""); - - a = Version.unknown; - b = Version.unknown; - assertThrown(a == b, "Failed: UNKNOWN == UNKNOWN"); - assert(Version("1.0.0+a") == Version("1.0.0+b")); - assert(Version("73535568b79a0b124bc1653002637a830ce0fcb8").isSCM); - assert(Version("1.0.0").matches(Version("1.0.0+foo"))); assert(Version("1.0.0").matches(Version("1.0.0+foo"), VersionMatchMode.standard)); assert(!Version("1.0.0").matches(Version("1.0.0+foo"), VersionMatchMode.strict)); diff --git a/source/dub/dependencyresolver.d b/source/dub/dependencyresolver.d index 8ec82cc..5a1b704 100644 --- a/source/dub/dependencyresolver.d +++ b/source/dub/dependencyresolver.d @@ -8,7 +8,7 @@ module dub.dependencyresolver; import dub.dependency; -import dub.internal.vibecompat.core.log; +import dub.internal.logging; import std.algorithm : all, canFind, filter, map, sort; import std.array : appender, array, join; @@ -29,6 +29,32 @@ can be defined in terms of a version range. */ class DependencyResolver(CONFIGS, CONFIG) { + /// Maximum number of loop rounds to do + protected ulong loop_limit; + + /** + * Construct an instance of this class + * + * Params: + * limit = Maximum number of loop rounds to do + */ + public this (ulong limit) inout scope @safe pure nothrow @nogc + { + this.loop_limit = limit; + } + + /// Compatibility overload + deprecated("Use the overload that accepts a `ulong limit` argument") + public this () scope @safe + { + // Leave the possibility to opt-out from the loop limit + import std.process : environment; + if (environment.get("DUB_NO_RESOLVE_LIMIT") !is null) + this(ulong.max); + else + this(1_000_000); + } + /** Encapsulates a list of outgoing edges in the dependency graph. A value of this type represents a single dependency with multiple @@ -75,17 +101,13 @@ CONFIG[string] resolve(TreeNode root, bool throw_on_failure = true) { - // Leave the possibility to opt-out from the loop limit - import std.process : environment; - bool no_loop_limit = environment.get("DUB_NO_RESOLVE_LIMIT") !is null; - auto rootbase = root.pack.basePackageName; // build up the dependency graph, eliminating as many configurations/ // versions as possible ResolveContext context; context.configs[rootbase] = [ResolveConfig(root.config, true)]; - long loop_counter = no_loop_limit ? long.max : 1_000_000; + ulong loop_counter = this.loop_limit; constrain(root, context, loop_counter); // remove any non-default optional dependencies @@ -148,7 +170,7 @@ /** Starting with a single node, fills `context` with a minimized set of configurations that form valid solutions. */ - private void constrain(TreeNode n, ref ResolveContext context, ref long max_iterations) + private void constrain(TreeNode n, ref ResolveContext context, ref ulong max_iterations) { auto base = n.pack.basePackageName; assert(base in context.configs); @@ -204,7 +226,7 @@ propagate. 
*/ private void constrainDependencies(TreeNode n, TreeNodes[] dependencies, size_t depidx, - ref ResolveContext context, ref long max_iterations) + ref ResolveContext context, ref ulong max_iterations) { if (depidx >= dependencies.length) return; @@ -358,7 +380,7 @@ static class TestResolver : DependencyResolver!(IntConfigs, IntConfig) { private TreeNodes[][string] m_children; - this(TreeNodes[][string] children) { m_children = children; } + this(TreeNodes[][string] children) { super(ulong.max); m_children = children; } protected override IntConfig[] getAllConfigs(string pack) { auto ret = appender!(IntConfig[]); foreach (p; m_children.byKey) { diff --git a/source/dub/dub.d b/source/dub/dub.d index 65840c9..1a41cc3 100644 --- a/source/dub/dub.d +++ b/source/dub/dub.d @@ -12,9 +12,9 @@ import dub.dependencyresolver; import dub.internal.utils; import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.url; +import dub.internal.logging; import dub.package_; import dub.packagemanager; import dub.packagesuppliers; @@ -24,13 +24,13 @@ import std.algorithm; import std.array : array, replace; -import std.conv : to; +import std.conv : text, to; +import std.encoding : sanitize; import std.exception : enforce; import std.file; import std.process : environment; import std.range : assumeSorted, empty; import std.string; -import std.encoding : sanitize; // Set output path and options for coverage reports version (DigitalMars) version (D_Coverage) @@ -126,23 +126,9 @@ PackageSupplier[] m_packageSuppliers; NativePath m_rootPath; SpecialDirs m_dirs; - DubConfig m_config; - NativePath m_projectPath; + UserConfiguration m_config; Project m_project; - NativePath m_overrideSearchPath; string m_defaultCompiler; - string m_defaultArchitecture; - bool m_defaultLowMemory; - string[string] m_defaultEnvironments; - string[string] m_defaultBuildEnvironments; - string[string] m_defaultRunEnvironments; - string[string] m_defaultPreGenerateEnvironments; - string[string] m_defaultPostGenerateEnvironments; - string[string] m_defaultPreBuildEnvironments; - string[string] m_defaultPostBuildEnvironments; - string[string] m_defaultPreRunEnvironments; - string[string] m_defaultPostRunEnvironments; - } /** The default placement location of fetched packages. @@ -173,30 +159,132 @@ m_rootPath = NativePath(root_path); if (!m_rootPath.absolute) m_rootPath = NativePath(getcwd()) ~ m_rootPath; - init(m_rootPath); + init(); - m_packageSuppliers = getPackageSuppliers(additional_package_suppliers, skip_registry); + m_packageSuppliers = this.computePkgSuppliers(additional_package_suppliers, + skip_registry, environment.get("DUB_REGISTRY", null)); m_packageManager = new PackageManager(m_rootPath, m_dirs.localRepository, m_dirs.systemSettings); auto ccps = m_config.customCachePaths; if (ccps.length) m_packageManager.customCachePaths = ccps; - updatePackageSearchPath(); + // TODO: Move this environment read out of the ctor + if (auto p = environment.get("DUBPATH")) { + version(Windows) enum pathsep = ";"; + else enum pathsep = ":"; + NativePath[] paths = p.split(pathsep) + .map!(p => NativePath(p))().array(); + m_packageManager.searchPath = paths; + } + } + + /** Initializes the instance with a single package search path, without + loading a package. + + This constructor corresponds to the "--bare" option of the command line + interface. 
Use + */ + this(NativePath override_path) + { + init(); + m_packageManager = new PackageManager(override_path); + } + + private void init() + { + this.m_dirs = SpecialDirs.make(); + this.loadConfig(); + this.determineDefaultCompiler(); + } + + /** + * Load user configuration for this instance + * + * This can be overloaded in child classes to prevent library / unittest + * dub from doing any kind of file IO. + */ + protected void loadConfig() + { + import configy.Read; + + void readSettingsFile (NativePath path_) + { + // TODO: Remove `StrictMode.Warn` after v1.40 release + // The default is to error, but as the previous parser wasn't + // complaining, we should first warn the user. + const path = path_.toNativeString(); + if (path.exists) { + auto newConf = parseConfigFileSimple!UserConfiguration(path, StrictMode.Warn); + if (!newConf.isNull()) + this.m_config = this.m_config.merge(newConf.get()); + } + } + + const dubFolderPath = NativePath(thisExePath).parentPath; + + // override default userSettings + localRepository if a $DPATH or + // $DUB_HOME environment variable is set. + bool overrideDubHomeFromEnv; + { + string dubHome = environment.get("DUB_HOME"); + if (!dubHome.length) { + auto dpath = environment.get("DPATH"); + if (dpath.length) + dubHome = (NativePath(dpath) ~ "dub/").toNativeString(); + + } + if (dubHome.length) { + overrideDubHomeFromEnv = true; + + m_dirs.userSettings = NativePath(dubHome); + m_dirs.localRepository = m_dirs.userSettings; + } + } + + readSettingsFile(m_dirs.systemSettings ~ "settings.json"); + readSettingsFile(dubFolderPath ~ "../etc/dub/settings.json"); + version (Posix) { + if (dubFolderPath.absolute && dubFolderPath.startsWith(NativePath("usr"))) + readSettingsFile(NativePath("/etc/dub/settings.json")); + } + + // Override user + local package path from system / binary settings + // Then continues loading local settings from these folders. (keeping + // global /etc/dub/settings.json settings intact) + // + // Don't use it if either $DPATH or $DUB_HOME are set, as environment + // variables usually take precedence over configuration. + if (!overrideDubHomeFromEnv && this.m_config.dubHome.set) { + m_dirs.userSettings = NativePath(this.m_config.dubHome.expandEnvironmentVariables); + } + + // load user config: + readSettingsFile(m_dirs.userSettings ~ "settings.json"); + + // load per-package config: + if (!this.m_rootPath.empty) + readSettingsFile(this.m_rootPath ~ "dub.settings.json"); + + // same as userSettings above, but taking into account the + // config loaded from user settings and per-package config as well. 
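// Editor's note (summary of the lookup order implemented above; the paths are
// those passed to readSettingsFile, each file found being merged into the
// accumulated configuration):
//   1. <systemSettings>/settings.json
//   2. <dub executable dir>/../etc/dub/settings.json
//      (plus /etc/dub/settings.json on Posix when dub is installed under /usr)
//   3. <userSettings>/settings.json   (after applying any dubHome override)
//   4. <package root>/dub.settings.json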
+ if (!overrideDubHomeFromEnv && this.m_config.dubHome.set) { + m_dirs.localRepository = NativePath(this.m_config.dubHome.expandEnvironmentVariables); + } } unittest { scope (exit) environment.remove("DUB_REGISTRY"); - auto dub = new Dub(".", null, SkipPackageSuppliers.configured); + auto dub = new TestDub(".", null, SkipPackageSuppliers.configured); assert(dub.m_packageSuppliers.length == 0); environment["DUB_REGISTRY"] = "http://example.com/"; - dub = new Dub(".", null, SkipPackageSuppliers.configured); + dub = new TestDub(".", null, SkipPackageSuppliers.configured); assert(dub.m_packageSuppliers.length == 1); environment["DUB_REGISTRY"] = "http://example.com/;http://foo.com/"; - dub = new Dub(".", null, SkipPackageSuppliers.configured); + dub = new TestDub(".", null, SkipPackageSuppliers.configured); assert(dub.m_packageSuppliers.length == 2); - dub = new Dub(".", [new RegistryPackageSupplier(URL("http://bar.com/"))], SkipPackageSuppliers.configured); + dub = new TestDub(".", [new RegistryPackageSupplier(URL("http://bar.com/"))], SkipPackageSuppliers.configured); assert(dub.m_packageSuppliers.length == 3); } @@ -209,13 +297,24 @@ skip_registry = Can be used to skip using the configured package suppliers, as well as the default suppliers. */ + deprecated("This is an implementation detail. " ~ + "Use `packageSuppliers` to get the computed list of package " ~ + "suppliers once a `Dub` instance has been constructed.") public PackageSupplier[] getPackageSuppliers(PackageSupplier[] additional_package_suppliers, SkipPackageSuppliers skip_registry) { + return this.computePkgSuppliers(additional_package_suppliers, skip_registry, environment.get("DUB_REGISTRY", null)); + } + + /// Ditto + private PackageSupplier[] computePkgSuppliers( + PackageSupplier[] additional_package_suppliers, SkipPackageSuppliers skip_registry, + string dub_registry_var) + { PackageSupplier[] ps = additional_package_suppliers; if (skip_registry < SkipPackageSuppliers.all) { - ps ~= environment.get("DUB_REGISTRY", null) + ps ~= dub_registry_var .splitter(";") .map!(url => getRegistryPackageSupplier(url)) .array; @@ -223,7 +322,7 @@ if (skip_registry < SkipPackageSuppliers.configured) { - ps ~= m_config.registryURLs + ps ~= m_config.registryUrls .map!(url => getRegistryPackageSupplier(url)) .array; } @@ -235,6 +334,9 @@ } /// ditto + deprecated("This is an implementation detail. 
" ~ + "Use `packageSuppliers` to get the computed list of package " ~ + "suppliers once a `Dub` instance has been constructed.") public PackageSupplier[] getPackageSuppliers(PackageSupplier[] additional_package_suppliers) { return getPackageSuppliers(additional_package_suppliers, m_config.skipRegistry); @@ -242,83 +344,17 @@ unittest { - scope (exit) environment.remove("DUB_REGISTRY"); - auto dub = new Dub(".", null, SkipPackageSuppliers.none); + auto dub = new TestDub(); - dub.m_config = new DubConfig(Json(["skipRegistry": Json("none")]), null); - assert(dub.getPackageSuppliers(null).length == 1); + assert(dub.computePkgSuppliers(null, SkipPackageSuppliers.none, null).length == 1); + assert(dub.computePkgSuppliers(null, SkipPackageSuppliers.configured, null).length == 0); + assert(dub.computePkgSuppliers(null, SkipPackageSuppliers.standard, null).length == 0); - dub.m_config = new DubConfig(Json(["skipRegistry": Json("configured")]), null); - assert(dub.getPackageSuppliers(null).length == 0); - - dub.m_config = new DubConfig(Json(["skipRegistry": Json("standard")]), null); - assert(dub.getPackageSuppliers(null).length == 0); - - environment["DUB_REGISTRY"] = "http://example.com/"; - assert(dub.getPackageSuppliers(null).length == 1); + assert(dub.computePkgSuppliers(null, SkipPackageSuppliers.standard, "http://example.com/") + .length == 1); } - /** Initializes the instance with a single package search path, without - loading a package. - - This constructor corresponds to the "--bare" option of the command line - interface. Use - */ - this(NativePath override_path) - { - init(NativePath()); - m_overrideSearchPath = override_path; - m_packageManager = new PackageManager(override_path); - } - - private void init(NativePath root_path) - { - import std.file : tempDir; - version(Windows) { - m_dirs.systemSettings = NativePath(environment.get("ProgramData")) ~ "dub/"; - immutable appDataDir = environment.get("APPDATA"); - m_dirs.userSettings = NativePath(appDataDir) ~ "dub/"; - m_dirs.localRepository = NativePath(environment.get("LOCALAPPDATA", appDataDir)) ~ "dub"; - } else version(Posix){ - m_dirs.systemSettings = NativePath("/var/lib/dub/"); - m_dirs.userSettings = NativePath(environment.get("HOME")) ~ ".dub/"; - if (!m_dirs.userSettings.absolute) - m_dirs.userSettings = NativePath(getcwd()) ~ m_dirs.userSettings; - m_dirs.localRepository = m_dirs.userSettings; - } - - m_dirs.temp = NativePath(tempDir); - - m_config = new DubConfig(jsonFromFile(m_dirs.systemSettings ~ "settings.json", true), m_config); - - auto dubFolderPath = NativePath(thisExePath).parentPath; - m_config = new DubConfig(jsonFromFile(dubFolderPath ~ "../etc/dub/settings.json", true), m_config); - version (Posix) { - if (dubFolderPath.absolute && dubFolderPath.startsWith(NativePath("usr"))) { - m_config = new DubConfig(jsonFromFile(NativePath("/etc/dub/settings.json"), true), m_config); - } - } - - m_config = new DubConfig(jsonFromFile(m_dirs.userSettings ~ "settings.json", true), m_config); - - if (!root_path.empty) - m_config = new DubConfig(jsonFromFile(root_path ~ "dub.settings.json", true), m_config); - - determineDefaultCompiler(); - - m_defaultArchitecture = m_config.defaultArchitecture; - m_defaultLowMemory = m_config.defaultLowMemory; - m_defaultEnvironments = m_config.defaultEnvironments; - m_defaultBuildEnvironments = m_config.defaultBuildEnvironments; - m_defaultRunEnvironments = m_config.defaultRunEnvironments; - m_defaultPreGenerateEnvironments = m_config.defaultPreGenerateEnvironments; - 
m_defaultPostGenerateEnvironments = m_config.defaultPostGenerateEnvironments; - m_defaultPreBuildEnvironments = m_config.defaultPreBuildEnvironments; - m_defaultPostBuildEnvironments = m_config.defaultPostBuildEnvironments; - m_defaultPreRunEnvironments = m_config.defaultPreRunEnvironments; - m_defaultPostRunEnvironments = m_config.defaultPostRunEnvironments; - } - + @property bool dryRun() const { return m_dryRun; } @property void dryRun(bool v) { m_dryRun = v; } /** Returns the root path (usually the current working directory). @@ -335,7 +371,7 @@ /// application. @property string projectName() const { return m_project.name; } - @property NativePath projectPath() const { return m_projectPath; } + @property NativePath projectPath() const { return this.m_project.rootPackage.path; } @property string[] configurations() const { return m_project.configurations; } @@ -343,6 +379,8 @@ @property inout(Project) project() inout { return m_project; } + @property inout(PackageSupplier)[] packageSuppliers() inout { return m_packageSuppliers; } + /** Returns the default compiler binary to use for building D code. If set, the "defaultCompiler" field of the DUB user or system @@ -359,24 +397,24 @@ If set, the "defaultArchitecture" field of the DUB user or system configuration file will be used. Otherwise null will be returned. */ - @property string defaultArchitecture() const { return m_defaultArchitecture; } + @property string defaultArchitecture() const { return this.m_config.defaultArchitecture; } /** Returns the default low memory option to use for building D code. If set, the "defaultLowMemory" field of the DUB user or system configuration file will be used. Otherwise false will be returned. */ - @property bool defaultLowMemory() const { return m_defaultLowMemory; } + @property bool defaultLowMemory() const { return this.m_config.defaultLowMemory; } - @property const(string[string]) defaultEnvironments() const { return m_defaultEnvironments; } - @property const(string[string]) defaultBuildEnvironments() const { return m_defaultBuildEnvironments; } - @property const(string[string]) defaultRunEnvironments() const { return m_defaultRunEnvironments; } - @property const(string[string]) defaultPreGenerateEnvironments() const { return m_defaultPreGenerateEnvironments; } - @property const(string[string]) defaultPostGenerateEnvironments() const { return m_defaultPostGenerateEnvironments; } - @property const(string[string]) defaultPreBuildEnvironments() const { return m_defaultPreBuildEnvironments; } - @property const(string[string]) defaultPostBuildEnvironments() const { return m_defaultPostBuildEnvironments; } - @property const(string[string]) defaultPreRunEnvironments() const { return m_defaultPreRunEnvironments; } - @property const(string[string]) defaultPostRunEnvironments() const { return m_defaultPostRunEnvironments; } + @property const(string[string]) defaultEnvironments() const { return this.m_config.defaultEnvironments; } + @property const(string[string]) defaultBuildEnvironments() const { return this.m_config.defaultBuildEnvironments; } + @property const(string[string]) defaultRunEnvironments() const { return this.m_config.defaultRunEnvironments; } + @property const(string[string]) defaultPreGenerateEnvironments() const { return this.m_config.defaultPreGenerateEnvironments; } + @property const(string[string]) defaultPostGenerateEnvironments() const { return this.m_config.defaultPostGenerateEnvironments; } + @property const(string[string]) defaultPreBuildEnvironments() const { return 
this.m_config.defaultPreBuildEnvironments; } + @property const(string[string]) defaultPostBuildEnvironments() const { return this.m_config.defaultPostBuildEnvironments; } + @property const(string[string]) defaultPreRunEnvironments() const { return this.m_config.defaultPreRunEnvironments; } + @property const(string[string]) defaultPostRunEnvironments() const { return this.m_config.defaultPostRunEnvironments; } /** Loads the package that resides within the configured `rootPath`. */ @@ -388,16 +426,12 @@ /// Loads the package from the specified path as the main project package. void loadPackage(NativePath path) { - m_projectPath = path; - updatePackageSearchPath(); - m_project = new Project(m_packageManager, m_projectPath); + m_project = new Project(m_packageManager, path); } /// Loads a specific package as the main project package (can be a sub package) void loadPackage(Package pack) { - m_projectPath = pack.path; - updatePackageSearchPath(); m_project = new Project(m_packageManager, pack); } @@ -468,7 +502,6 @@ auto recipe_default_package_name = path.toString.baseName.stripExtension.strip; auto recipe = parsePackageRecipe(recipe_content, recipe_filename, null, recipe_default_package_name); - import dub.internal.vibecompat.core.log; logInfo("parsePackageRecipe %s", recipe_filename); enforce(recipe.buildSettings.sourceFiles.length == 0, "Single-file packages are not allowed to specify source files."); enforce(recipe.buildSettings.sourcePaths.length == 0, "Single-file packages are not allowed to specify source paths."); enforce(recipe.buildSettings.importPaths.length == 0, "Single-file packages are not allowed to specify import paths."); @@ -488,20 +521,12 @@ loadSingleFilePackage(NativePath(path)); } - deprecated("Instantiate a Dub instance with the single-argument constructor: `new Dub(path)`") - void overrideSearchPath(NativePath path) - { - if (!path.absolute) path = NativePath(getcwd()) ~ path; - m_overrideSearchPath = path; - updatePackageSearchPath(); - } - /** Gets the default configuration for a particular build platform. This forwards to `Project.getDefaultConfiguration` and requires a project to be loaded. */ - string getDefaultConfiguration(BuildPlatform platform, bool allow_non_library_configs = true) const { return m_project.getDefaultConfiguration(platform, allow_non_library_configs); } + string getDefaultConfiguration(in BuildPlatform platform, bool allow_non_library_configs = true) const { return m_project.getDefaultConfiguration(platform, allow_non_library_configs); } /** Attempts to upgrade the dependency selection of the loaded project. 
@@ -525,7 +550,7 @@ try if (m_packageManager.getOrLoadPackage(path)) continue; catch (Exception e) { logDebug("Failed to load path based selection: %s", e.toString().sanitize); } } else if (!dep.repository.empty) { - if (m_packageManager.loadSCMPackage(getBasePackageName(p), dep)) + if (m_packageManager.loadSCMPackage(getBasePackageName(p), dep.repository)) continue; } else { if (m_packageManager.getPackage(p, dep.version_)) continue; @@ -546,11 +571,9 @@ } } - Dependency[string] versions; - auto resolver = new DependencyVersionResolver(this, options); - foreach (p; packages_to_upgrade) - resolver.addPackageToUpgrade(p); - versions = resolver.resolve(m_project.rootPackage, m_project.selections); + auto resolver = new DependencyVersionResolver( + this, options, m_project.rootPackage, m_project.selections); + Dependency[string] versions = resolver.resolve(packages_to_upgrade); if (options & UpgradeOptions.dryRun) { bool any = false; @@ -563,24 +586,24 @@ if (basename == rootbasename) continue; if (!m_project.selections.hasSelectedVersion(basename)) { - logInfo("Package %s would be selected with version %s.", - basename, ver); + logInfo("Upgrade", Color.cyan, + "Package %s would be selected with version %s", basename, ver); any = true; continue; } auto sver = m_project.selections.getSelectedVersion(basename); if (!sver.path.empty || !sver.repository.empty) continue; if (ver.version_ <= sver.version_) continue; - logInfo("Package %s would be upgraded from %s to %s.", - basename, sver, ver); + logInfo("Upgrade", Color.cyan, + "%s would be upgraded from %s to %s.", + basename.color(Mode.bold), sver, ver); any = true; } - if (any) logInfo("Use \"dub upgrade\" to perform those changes."); + if (any) logInfo("Use \"%s\" to perform those changes", "dub upgrade".color(Mode.bold)); return; } - foreach (p; versions.byKey) { - auto ver = versions[p]; // Workaround for DMD 2.070.0 AA issue (crashes in aaApply2 if iterating by key+value) + foreach (p, ver; versions) { assert(!p.canFind(":"), "Resolved packages contain a sub package!?: "~p); Package pack; if (!ver.path.empty) { @@ -590,7 +613,7 @@ continue; } } else if (!ver.repository.empty) { - pack = m_packageManager.loadSCMPackage(p, ver); + pack = m_packageManager.loadSCMPackage(p, ver.repository); } else { assert(ver.isExactVersion, "Resolved dependency is neither path, nor repository, nor exact version based!?"); pack = m_packageManager.getPackage(p, ver.version_); @@ -606,10 +629,10 @@ FetchOptions fetchOpts; fetchOpts |= (options & UpgradeOptions.preRelease) != 0 ? FetchOptions.usePrerelease : FetchOptions.none; - if (!pack) fetch(p, ver, defaultPlacementLocation, fetchOpts, "getting selected version"); + if (!pack) fetch(p, ver.version_, defaultPlacementLocation, fetchOpts, "getting selected version"); if ((options & UpgradeOptions.select) && p != m_project.rootPackage.name) { if (!ver.repository.empty) { - m_project.selections.selectVersionWithRepository(p, ver.repository, ver.versionSpec); + m_project.selections.selectVersion(p, ver.repository); } else if (ver.path.empty) { m_project.selections.selectVersion(p, ver.version_); } else { @@ -642,152 +665,32 @@ */ void generateProject(string ide, GeneratorSettings settings) { + // With a requested `unittest` config, switch to the special test runner + // config (which doesn't require an existing `unittest` configuration). 
+		if (settings.config == "unittest") {
+			const test_config = m_project.addTestRunnerConfiguration(settings, !m_dryRun);
+			if (test_config) settings.config = test_config;
+		}
+
 		auto generator = createProjectGenerator(ide, m_project);
 		if (m_dryRun) return; // TODO: pass m_dryRun to the generator
 		generator.generate(settings);
 	}

-	/** Executes tests on the current project.
+	/** Generates project files using the special test runner (`dub test`) configuration.

-		Throws an exception, if unittests failed.
+		Any existing project files will be overwritten.
 	*/
 	void testProject(GeneratorSettings settings, string config, NativePath custom_main_file)
 	{
 		if (!custom_main_file.empty && !custom_main_file.absolute) custom_main_file = getWorkingDirectory() ~ custom_main_file;

-		if (config.length == 0) {
-			// if a custom main file was given, favor the first library configuration, so that it can be applied
-			if (!custom_main_file.empty) config = m_project.getDefaultConfiguration(settings.platform, false);
-			// else look for a "unittest" configuration
-			if (!config.length && m_project.rootPackage.configurations.canFind("unittest")) config = "unittest";
-			// if not found, fall back to the first "library" configuration
-			if (!config.length) config = m_project.getDefaultConfiguration(settings.platform, false);
-			// if still nothing found, use the first executable configuration
-			if (!config.length) config = m_project.getDefaultConfiguration(settings.platform, true);
-		}
+		const test_config = m_project.addTestRunnerConfiguration(settings, !m_dryRun, config, custom_main_file);
+		if (!test_config) return; // target type "none"
+
+		settings.config = test_config;

 		auto generator = createProjectGenerator("build", m_project);
-
-		auto test_config = format("%s-test-%s", m_project.rootPackage.name.replace(".", "-").replace(":", "-"), config);
-
-		BuildSettings lbuildsettings = settings.buildSettings;
-		m_project.addBuildSettings(lbuildsettings, settings, config, null, true);
-
-		if (lbuildsettings.targetType == TargetType.none) {
-			logInfo(`Configuration '%s' has target type "none". Skipping test.`, config);
-			return;
-		}
-
-		if (lbuildsettings.targetType == TargetType.executable && config == "unittest") {
-			logInfo("Running custom 'unittest' configuration.", config);
-			if (!custom_main_file.empty) logWarn("Ignoring custom main file.");
-			settings.config = config;
-		} else if (lbuildsettings.sourceFiles.empty) {
-			logInfo(`No source files found in configuration '%s'.
Falling back to "dub -b unittest".`, config); - if (!custom_main_file.empty) logWarn("Ignoring custom main file."); - settings.config = m_project.getDefaultConfiguration(settings.platform); - } else { - import std.algorithm : remove; - - logInfo(`Generating test runner configuration '%s' for '%s' (%s).`, test_config, config, lbuildsettings.targetType); - - BuildSettingsTemplate tcinfo = m_project.rootPackage.recipe.getConfiguration(config).buildSettings; - tcinfo.targetType = TargetType.executable; - tcinfo.targetName = test_config; - - auto mainfil = tcinfo.mainSourceFile; - if (!mainfil.length) mainfil = m_project.rootPackage.recipe.buildSettings.mainSourceFile; - - string custommodname; - if (!custom_main_file.empty) { - import std.path; - tcinfo.sourceFiles[""] ~= custom_main_file.relativeTo(m_project.rootPackage.path).toNativeString(); - tcinfo.importPaths[""] ~= custom_main_file.parentPath.toNativeString(); - custommodname = custom_main_file.head.name.baseName(".d"); - } - - // prepare the list of tested modules - - string[] import_modules; - if (settings.single) - lbuildsettings.importPaths ~= NativePath(mainfil).parentPath.toNativeString; - bool firstTimePackage = true; - foreach (file; lbuildsettings.sourceFiles) { - if (file.endsWith(".d")) { - auto fname = NativePath(file).head.name; - NativePath msf = NativePath(mainfil); - if (msf.absolute) - msf = msf.relativeTo(m_project.rootPackage.path); - if (!settings.single && NativePath(file).relativeTo(m_project.rootPackage.path) == msf) { - logWarn("Excluding main source file %s from test.", mainfil); - tcinfo.excludedSourceFiles[""] ~= mainfil; - continue; - } - if (fname == "package.d") { - if (firstTimePackage) { - firstTimePackage = false; - logDiagnostic("Excluding package.d file from test due to https://issues.dlang.org/show_bug.cgi?id=11847"); - } - continue; - } - import_modules ~= dub.internal.utils.determineModuleName(lbuildsettings, NativePath(file), m_project.rootPackage.path); - } - } - - NativePath mainfile; - if (settings.tempBuild) - mainfile = getTempFile("dub_test_root", ".d"); - else { - import dub.generators.build : computeBuildName; - mainfile = m_project.rootPackage.path ~ format(".dub/code/%s_dub_test_root.d", computeBuildName(test_config, settings, import_modules)); - } - - mkdirRecurse(mainfile.parentPath.toNativeString()); - - bool regenerateMainFile = settings.force || !existsFile(mainfile); - auto escapedMainFile = mainfile.toNativeString().replace("$", "$$"); - // generate main file - tcinfo.sourceFiles[""] ~= escapedMainFile; - tcinfo.mainSourceFile = escapedMainFile; - - if (!m_dryRun && regenerateMainFile) { - auto fil = openFile(mainfile, FileMode.createTrunc); - scope(exit) fil.close(); - fil.write("module dub_test_root;\n"); - fil.write("import std.typetuple;\n"); - foreach (mod; import_modules) fil.write(format("static import %s;\n", mod)); - fil.write("alias allModules = TypeTuple!("); - foreach (i, mod; import_modules) { - if (i > 0) fil.write(", "); - fil.write(mod); - } - fil.write(");\n"); - if (custommodname.length) { - fil.write(format("import %s;\n", custommodname)); - } else { - fil.write(q{ - import std.stdio; - import core.runtime; - - void main() { writeln("All unit tests have been run successfully."); } - shared static this() { - version (Have_tested) { - import tested; - import core.runtime; - import std.exception; - Runtime.moduleUnitTester = () => true; - enforce(runUnitTests!allModules(new ConsoleTestResultWriter), "Unit tests failed."); - } - } - }); - } - } - 
m_project.rootPackage.recipe.configurations ~= ConfigurationInfo(test_config, tcinfo); - m_project = new Project(m_packageManager, m_project.rootPackage); - - settings.config = test_config; - } - generator.generate(settings); } @@ -800,36 +703,17 @@ auto tool = "dscanner"; - auto tool_pack = m_packageManager.getBestPackage(tool, ">=0.0.0"); - if (!tool_pack) tool_pack = m_packageManager.getBestPackage(tool, "~master"); + auto tool_pack = m_packageManager.getBestPackage(tool); if (!tool_pack) { - logInfo("%s is not present, getting and storing it user wide", tool); - tool_pack = fetch(tool, Dependency(">=0.0.0"), defaultPlacementLocation, FetchOptions.none); + logInfo("Hint", Color.light_blue, "%s is not present, getting and storing it user wide", tool); + tool_pack = fetch(tool, VersionRange.Any, defaultPlacementLocation, FetchOptions.none); } auto dscanner_dub = new Dub(null, m_packageSuppliers); - dscanner_dub.loadPackage(tool_pack.path); + dscanner_dub.loadPackage(tool_pack); dscanner_dub.upgrade(UpgradeOptions.select); - auto compiler_binary = this.defaultCompiler; - - GeneratorSettings settings; - settings.config = "application"; - settings.compiler = getCompiler(compiler_binary); - settings.platform = settings.compiler.determinePlatform(settings.buildSettings, compiler_binary, m_defaultArchitecture); - settings.buildType = "debug"; - if (m_defaultLowMemory) settings.buildSettings.options |= BuildOption.lowmem; - if (m_defaultEnvironments) settings.buildSettings.addEnvironments(m_defaultEnvironments); - if (m_defaultBuildEnvironments) settings.buildSettings.addBuildEnvironments(m_defaultBuildEnvironments); - if (m_defaultRunEnvironments) settings.buildSettings.addRunEnvironments(m_defaultRunEnvironments); - if (m_defaultPreGenerateEnvironments) settings.buildSettings.addPreGenerateEnvironments(m_defaultPreGenerateEnvironments); - if (m_defaultPostGenerateEnvironments) settings.buildSettings.addPostGenerateEnvironments(m_defaultPostGenerateEnvironments); - if (m_defaultPreBuildEnvironments) settings.buildSettings.addPreBuildEnvironments(m_defaultPreBuildEnvironments); - if (m_defaultPostBuildEnvironments) settings.buildSettings.addPostBuildEnvironments(m_defaultPostBuildEnvironments); - if (m_defaultPreRunEnvironments) settings.buildSettings.addPreRunEnvironments(m_defaultPreRunEnvironments); - if (m_defaultPostRunEnvironments) settings.buildSettings.addPostRunEnvironments(m_defaultPostRunEnvironments); - settings.run = true; - + GeneratorSettings settings = this.makeAppSettings(); foreach (dependencyPackage; m_project.dependencies) { auto cfgs = m_project.getPackageConfigs(settings.platform, null, true); @@ -879,7 +763,7 @@ /// Cleans intermediate/cache files of the given package void cleanPackage(NativePath path) { - logInfo("Cleaning package at %s...", path.toNativeString()); + logInfo("Cleaning", Color.green, "package at %s", path.toNativeString().color(Mode.bold)); enforce(!Package.findPackageFile(path).empty, "No package found.", path.toNativeString()); // TODO: clear target files and copy files @@ -896,14 +780,31 @@ } /// Fetches the package matching the dependency and places it in the specified location. 
+ deprecated("Use the overload that accepts either a `Version` or a `VersionRange` as second argument") Package fetch(string packageId, const Dependency dep, PlacementLocation location, FetchOptions options, string reason = "") { + const vrange = dep.visit!( + (VersionRange range) => range, + function VersionRange (any) { throw new Exception("Cannot call `dub.fetch` with a " ~ typeof(any).stringof ~ " dependency"); } + ); + return this.fetch(packageId, vrange, location, options, reason); + } + + /// Ditto + Package fetch(string packageId, in Version vers, PlacementLocation location, FetchOptions options, string reason = "") + { + return this.fetch(packageId, VersionRange(vers, vers), location, options, reason); + } + + /// Ditto + Package fetch(string packageId, in VersionRange range, PlacementLocation location, FetchOptions options, string reason = "") + { auto basePackageName = getBasePackageName(packageId); Json pinfo; PackageSupplier supplier; foreach(ps; m_packageSuppliers){ try { - pinfo = ps.fetchPackageRecipe(basePackageName, dep, (options & FetchOptions.usePrerelease) != 0); + pinfo = ps.fetchPackageRecipe(basePackageName, Dependency(range), (options & FetchOptions.usePrerelease) != 0); if (pinfo.type == Json.Type.null_) continue; supplier = ps; @@ -913,83 +814,49 @@ logDebug("Full error: %s", e.toString().sanitize()); } } - enforce(pinfo.type != Json.Type.undefined, "No package "~packageId~" was found matching the dependency "~dep.toString()); - string ver = pinfo["version"].get!string; - - NativePath placement; - final switch (location) { - case PlacementLocation.local: placement = m_rootPath ~ ".dub/packages/"; break; - case PlacementLocation.user: placement = m_dirs.localRepository ~ "packages/"; break; - case PlacementLocation.system: placement = m_dirs.systemSettings ~ "packages/"; break; - } + enforce(pinfo.type != Json.Type.undefined, "No package "~packageId~" was found matching the dependency " ~ range.toString()); + Version ver = Version(pinfo["version"].get!string); // always upgrade branch based versions - TODO: actually check if there is a new commit available - Package existing; - try existing = m_packageManager.getPackage(packageId, ver, placement); - catch (Exception e) { - logWarn("Failed to load existing package %s: %s", ver, e.msg); - logDiagnostic("Full error: %s", e.toString().sanitize); - } - + Package existing = m_packageManager.getPackage(packageId, ver, location); if (options & FetchOptions.printOnly) { - if (existing && existing.version_ != Version(ver)) - logInfo("A new version for %s is available (%s -> %s). Run \"dub upgrade %s\" to switch.", - packageId, existing.version_, ver, packageId); + if (existing && existing.version_ != ver) + logInfo("A new version for %s is available (%s -> %s). 
Run \"%s\" to switch.", + packageId.color(Mode.bold), existing.version_, ver, + text("dub upgrade ", packageId).color(Mode.bold)); return null; } if (existing) { - if (!ver.startsWith("~") || !(options & FetchOptions.forceBranchUpgrade) || location == PlacementLocation.local) { + if (!ver.isBranch() || !(options & FetchOptions.forceBranchUpgrade) || location == PlacementLocation.local) { // TODO: support git working trees by performing a "git pull" instead of this - logDiagnostic("Package %s %s (%s) is already present with the latest version, skipping upgrade.", - packageId, ver, placement); + logDiagnostic("Package %s %s (in %s packages) is already present with the latest version, skipping upgrade.", + packageId, ver, location.toString); return existing; } else { - logInfo("Removing %s %s to prepare replacement with a new version.", packageId, ver); + logInfo("Removing", Color.yellow, "%s %s to prepare replacement with a new version", packageId.color(Mode.bold), ver); if (!m_dryRun) m_packageManager.remove(existing); } } - if (reason.length) logInfo("Fetching %s %s (%s)...", packageId, ver, reason); - else logInfo("Fetching %s %s...", packageId, ver); + if (reason.length) logInfo("Fetching", Color.yellow, "%s %s (%s)", packageId.color(Mode.bold), ver, reason); + else logInfo("Fetching", Color.yellow, "%s %s", packageId.color(Mode.bold), ver); if (m_dryRun) return null; logDebug("Acquiring package zip file"); - auto clean_package_version = ver[ver.startsWith("~") ? 1 : 0 .. $]; - clean_package_version = clean_package_version.replace("+", "_"); // + has special meaning for Optlink - if (!placement.existsFile()) - mkdirRecurse(placement.toNativeString()); - NativePath dstpath = placement ~ (basePackageName ~ "-" ~ clean_package_version); - if (!dstpath.existsFile()) - mkdirRecurse(dstpath.toNativeString()); - - // Support libraries typically used with git submodules like ae. - // Such libraries need to have ".." as import path but this can create - // import path leakage. - dstpath = dstpath ~ basePackageName; - - import std.datetime : seconds; - auto lock = lockFile(dstpath.toNativeString() ~ ".lock", 30.seconds); // possibly wait for other dub instance - if (dstpath.existsFile()) - { - m_packageManager.refresh(false); - return m_packageManager.getPackage(packageId, ver, dstpath); - } - // repeat download on corrupted zips, see #1336 foreach_reverse (i; 0..3) { import std.zip : ZipException; auto path = getTempFile(basePackageName, ".zip"); - supplier.fetchPackage(path, basePackageName, dep, (options & FetchOptions.usePrerelease) != 0); // Q: continue on fail? + supplier.fetchPackage(path, basePackageName, Dependency(range), (options & FetchOptions.usePrerelease) != 0); // Q: continue on fail? 
scope(exit) std.file.remove(path.toNativeString()); - logDiagnostic("Placing to %s...", placement.toNativeString()); + logDiagnostic("Placing to %s...", location.toString()); try { - m_packageManager.storeFetchedPackage(path, pinfo, dstpath); - return m_packageManager.getPackage(packageId, ver, dstpath); + return m_packageManager.store(path, location, basePackageName, ver); } catch (ZipException e) { logInfo("Failed to extract zip archive for %s %s...", packageId, ver); // rethrow the exception at the end of the loop @@ -1010,11 +877,12 @@ */ void remove(in Package pack) { - logInfo("Removing %s in %s", pack.name, pack.path.toNativeString()); + logInfo("Removing", Color.yellow, "%s (in %s)", pack.name.color(Mode.bold), pack.path.toNativeString()); if (!m_dryRun) m_packageManager.remove(pack); } /// Compatibility overload. Use the version without a `force_remove` argument instead. + deprecated("Use `remove(pack)` directly instead, the boolean has no effect") void remove(in Package pack, bool force_remove) { remove(pack); @@ -1072,7 +940,6 @@ foreach(pack; packages) { try { remove(pack); - logInfo("Removed %s, version %s.", package_id, pack.version_); } catch (Exception e) { logError("Failed to remove %s %s: %s", package_id, pack.version_, e.msg); logInfo("Continuing with other packages (if any)."); @@ -1115,6 +982,7 @@ } /// Compatibility overload. Use the version without a `force_remove` argument instead. + deprecated("Use the overload without force_remove instead") void remove(string package_id, string version_, PlacementLocation location, bool force_remove) { remove(package_id, version_, location); @@ -1136,7 +1004,7 @@ void addLocalPackage(string path, string ver, bool system) { if (m_dryRun) return; - m_packageManager.addLocalPackage(makeAbsolute(path), ver, system ? LocalPackageType.system : LocalPackageType.user); + m_packageManager.addLocalPackage(makeAbsolute(path), ver, system ? PlacementLocation.system : PlacementLocation.user); } /** Removes a directory from the list of locally known packages. @@ -1153,7 +1021,7 @@ void removeLocalPackage(string path, bool system) { if (m_dryRun) return; - m_packageManager.removeLocalPackage(makeAbsolute(path), system ? LocalPackageType.system : LocalPackageType.user); + m_packageManager.removeLocalPackage(makeAbsolute(path), system ? PlacementLocation.system : PlacementLocation.user); } /** Registers a local directory to search for packages to use for satisfying @@ -1169,7 +1037,7 @@ void addSearchPath(string path, bool system) { if (m_dryRun) return; - m_packageManager.addSearchPath(makeAbsolute(path), system ? LocalPackageType.system : LocalPackageType.user); + m_packageManager.addSearchPath(makeAbsolute(path), system ? PlacementLocation.system : PlacementLocation.user); } /** Unregisters a local directory search path. @@ -1184,7 +1052,7 @@ void removeSearchPath(string path, bool system) { if (m_dryRun) return; - m_packageManager.removeSearchPath(makeAbsolute(path), system ? LocalPackageType.system : LocalPackageType.user); + m_packageManager.removeSearchPath(makeAbsolute(path), system ? PlacementLocation.system : PlacementLocation.user); } /** Queries all package suppliers with the given query string. @@ -1193,6 +1061,9 @@ name of the package supplier and the second entry is the list of matched packages. 
+ Params: + query = the search term to match packages on + See_Also: `PackageSupplier.searchPackages` */ auto searchPackages(string query) @@ -1271,13 +1142,15 @@ if (!path.absolute) path = m_rootPath ~ path; path.normalize(); - string[string] depVers; + VersionRange[string] depVers; string[] notFound; // keep track of any failed packages in here foreach (dep; deps) { - Version ver; try { - ver = getLatestVersion(dep); - depVers[dep] = ver.isBranch ? ver.toString() : "~>" ~ ver.toString(); + Version ver = getLatestVersion(dep); + if (ver.isBranch()) + depVers[dep] = VersionRange(ver); + else + depVers[dep] = VersionRange.fromString("~>" ~ ver.toString()); } catch (Exception e) { notFound ~= dep; } @@ -1299,41 +1172,25 @@ } //Act smug to the user. - logInfo("Successfully created an empty project in '%s'.", path.toNativeString()); + logInfo("Success", Color.green, "created empty project in %s", path.toNativeString().color(Mode.bold)); } private void runCustomInitialization(NativePath path, string type, string[] runArgs) { string packageName = type; - auto template_pack = m_packageManager.getBestPackage(packageName, ">=0.0.0"); - if (!template_pack) template_pack = m_packageManager.getBestPackage(packageName, "~master"); + auto template_pack = m_packageManager.getBestPackage(packageName); if (!template_pack) { logInfo("%s is not present, getting and storing it user wide", packageName); - template_pack = fetch(packageName, Dependency(">=0.0.0"), defaultPlacementLocation, FetchOptions.none); + template_pack = fetch(packageName, VersionRange.Any, defaultPlacementLocation, FetchOptions.none); } Package initSubPackage = m_packageManager.getSubPackage(template_pack, "init-exec", false); auto template_dub = new Dub(null, m_packageSuppliers); template_dub.loadPackage(initSubPackage); - auto compiler_binary = this.defaultCompiler; - GeneratorSettings settings; - settings.config = "application"; - settings.compiler = getCompiler(compiler_binary); - settings.platform = settings.compiler.determinePlatform(settings.buildSettings, compiler_binary, m_defaultArchitecture); - settings.buildType = "debug"; - settings.run = true; + GeneratorSettings settings = this.makeAppSettings(); settings.runArgs = runArgs; - if (m_defaultLowMemory) settings.buildSettings.options |= BuildOption.lowmem; - if (m_defaultEnvironments) settings.buildSettings.addEnvironments(m_defaultEnvironments); - if (m_defaultBuildEnvironments) settings.buildSettings.addBuildEnvironments(m_defaultBuildEnvironments); - if (m_defaultRunEnvironments) settings.buildSettings.addRunEnvironments(m_defaultRunEnvironments); - if (m_defaultPreGenerateEnvironments) settings.buildSettings.addPreGenerateEnvironments(m_defaultPreGenerateEnvironments); - if (m_defaultPostGenerateEnvironments) settings.buildSettings.addPostGenerateEnvironments(m_defaultPostGenerateEnvironments); - if (m_defaultPreBuildEnvironments) settings.buildSettings.addPreBuildEnvironments(m_defaultPreBuildEnvironments); - if (m_defaultPostBuildEnvironments) settings.buildSettings.addPostBuildEnvironments(m_defaultPostBuildEnvironments); - if (m_defaultPreRunEnvironments) settings.buildSettings.addPreRunEnvironments(m_defaultPreRunEnvironments); - if (m_defaultPostRunEnvironments) settings.buildSettings.addPostRunEnvironments(m_defaultPostRunEnvironments); + initSubPackage.recipe.buildSettings.workingDirectory = path.toNativeString(); template_dub.generateProject("build", settings); } @@ -1360,7 +1217,9 @@ auto srcfile = m_project.rootPackage.recipePath; auto srcext = 
srcfile.head.name.extension;
 		if (srcext == "."~destination_file_ext) {
-			logInfo("Package format is already %s.", destination_file_ext);
+			// no logging before this point
+			tagWidth.push(5);
+			logError("Package format is already %s.", destination_file_ext);
 			return;
 		}
@@ -1386,35 +1245,17 @@
 		auto tool = m_project.rootPackage.recipe.ddoxTool;
 		if (tool.empty) tool = "ddox";

-		auto tool_pack = m_packageManager.getBestPackage(tool, ">=0.0.0");
-		if (!tool_pack) tool_pack = m_packageManager.getBestPackage(tool, "~master");
+		auto tool_pack = m_packageManager.getBestPackage(tool);
 		if (!tool_pack) {
 			logInfo("%s is not present, getting and storing it user wide", tool);
-			tool_pack = fetch(tool, Dependency(">=0.0.0"), defaultPlacementLocation, FetchOptions.none);
+			tool_pack = fetch(tool, VersionRange.Any, defaultPlacementLocation, FetchOptions.none);
 		}

 		auto ddox_dub = new Dub(null, m_packageSuppliers);
-		ddox_dub.loadPackage(tool_pack.path);
+		ddox_dub.loadPackage(tool_pack);
 		ddox_dub.upgrade(UpgradeOptions.select);

-		auto compiler_binary = this.defaultCompiler;
-
-		GeneratorSettings settings;
-		settings.config = "application";
-		settings.compiler = getCompiler(compiler_binary); // TODO: not using --compiler ???
-		settings.platform = settings.compiler.determinePlatform(settings.buildSettings, compiler_binary, m_defaultArchitecture);
-		settings.buildType = "debug";
-		if (m_defaultLowMemory) settings.buildSettings.options |= BuildOption.lowmem;
-		if (m_defaultEnvironments) settings.buildSettings.addEnvironments(m_defaultEnvironments);
-		if (m_defaultBuildEnvironments) settings.buildSettings.addBuildEnvironments(m_defaultBuildEnvironments);
-		if (m_defaultRunEnvironments) settings.buildSettings.addRunEnvironments(m_defaultRunEnvironments);
-		if (m_defaultPreGenerateEnvironments) settings.buildSettings.addPreGenerateEnvironments(m_defaultPreGenerateEnvironments);
-		if (m_defaultPostGenerateEnvironments) settings.buildSettings.addPostGenerateEnvironments(m_defaultPostGenerateEnvironments);
-		if (m_defaultPreBuildEnvironments) settings.buildSettings.addPreBuildEnvironments(m_defaultPreBuildEnvironments);
-		if (m_defaultPostBuildEnvironments) settings.buildSettings.addPostBuildEnvironments(m_defaultPostBuildEnvironments);
-		if (m_defaultPreRunEnvironments) settings.buildSettings.addPreRunEnvironments(m_defaultPreRunEnvironments);
-		if (m_defaultPostRunEnvironments) settings.buildSettings.addPostRunEnvironments(m_defaultPostRunEnvironments);
-		settings.run = true;
+		GeneratorSettings settings = this.makeAppSettings();

 		auto filterargs = m_project.rootPackage.recipe.ddoxFilterArgs.dup;
 		if (filterargs.empty) filterargs = ["--min-protection=Protected", "--only-documented"];
@@ -1441,23 +1282,40 @@
 		}
 	}

-	private void updatePackageSearchPath()
+	/// Make a `GeneratorSettings` suitable for generating tools (DDOX, DScanner, etc.)
+ private GeneratorSettings makeAppSettings () const { - // TODO: Remove once `overrideSearchPath` is removed - if (!m_overrideSearchPath.empty) { - m_packageManager._disableDefaultSearchPaths = true; - m_packageManager.searchPath = [m_overrideSearchPath]; - return; - } + GeneratorSettings settings; + auto compiler_binary = this.defaultCompiler; - auto p = environment.get("DUBPATH"); - NativePath[] paths; + settings.config = "application"; + settings.buildType = "debug"; + settings.compiler = getCompiler(compiler_binary); + settings.platform = settings.compiler.determinePlatform( + settings.buildSettings, compiler_binary, this.defaultArchitecture); + if (this.defaultLowMemory) + settings.buildSettings.options |= BuildOption.lowmem; + if (this.defaultEnvironments) + settings.buildSettings.addEnvironments(this.defaultEnvironments); + if (this.defaultBuildEnvironments) + settings.buildSettings.addBuildEnvironments(this.defaultBuildEnvironments); + if (this.defaultRunEnvironments) + settings.buildSettings.addRunEnvironments(this.defaultRunEnvironments); + if (this.defaultPreGenerateEnvironments) + settings.buildSettings.addPreGenerateEnvironments(this.defaultPreGenerateEnvironments); + if (this.defaultPostGenerateEnvironments) + settings.buildSettings.addPostGenerateEnvironments(this.defaultPostGenerateEnvironments); + if (this.defaultPreBuildEnvironments) + settings.buildSettings.addPreBuildEnvironments(this.defaultPreBuildEnvironments); + if (this.defaultPostBuildEnvironments) + settings.buildSettings.addPostBuildEnvironments(this.defaultPostBuildEnvironments); + if (this.defaultPreRunEnvironments) + settings.buildSettings.addPreRunEnvironments(this.defaultPreRunEnvironments); + if (this.defaultPostRunEnvironments) + settings.buildSettings.addPostRunEnvironments(this.defaultPostRunEnvironments); + settings.run = true; - version(Windows) enum pathsep = ";"; - else enum pathsep = ":"; - if (p.length) paths ~= p.split(pathsep).map!(p => NativePath(p))().array(); - m_packageManager._disableDefaultSearchPaths = false; - m_packageManager.searchPath = paths; + return settings; } private void determineDefaultCompiler() @@ -1528,7 +1386,7 @@ unittest { import std.path: buildPath, absolutePath; - auto dub = new Dub(".", null, SkipPackageSuppliers.configured); + auto dub = new TestDub(".", null, SkipPackageSuppliers.configured); immutable olddc = environment.get("DC", null); immutable oldpath = environment.get("PATH", null); immutable testdir = "test-determineDefaultCompiler"; @@ -1626,22 +1484,29 @@ } - this(Dub dub, UpgradeOptions options) + this(Dub dub, UpgradeOptions options, Package root, SelectedVersions selected_versions) { + assert(dub !is null); + assert(root !is null); + assert(selected_versions !is null); + + if (environment.get("DUB_NO_RESOLVE_LIMIT") !is null) + super(ulong.max); + else + super(1_000_000); + m_dub = dub; m_options = options; - } - - void addPackageToUpgrade(string name) - { - m_packagesToUpgrade[name] = true; - } - - Dependency[string] resolve(Package root, SelectedVersions selected_versions) - { m_rootPackage = root; m_selectedVersions = selected_versions; - return super.resolve(TreeNode(root.name, Dependency(root.version_)), (m_options & UpgradeOptions.printUpgradesOnly) == 0); + } + + Dependency[string] resolve(string[] filter) + { + foreach (name; filter) + m_packagesToUpgrade[name] = true; + return super.resolve(TreeNode(m_rootPackage.name, Dependency(m_rootPackage.version_)), + (m_options & UpgradeOptions.dryRun) == 0); } protected bool isFixedPackage(string pack) 
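// Illustrative note, not part of the patch: `DependencyVersionResolver` is now
// fully initialized at construction time and `resolve` takes the upgrade filter
// directly, so the call site in `upgrade` above boils down to:
//
//     auto resolver = new DependencyVersionResolver(
//         this, options, m_project.rootPackage, m_project.selections);
//     Dependency[string] versions = resolver.resolve(packages_to_upgrade);
//
// The forwarded base-class constructor call also introduces a cap on resolution
// steps: 1_000_000 by default, or effectively unlimited (ulong.max) when the
// DUB_NO_RESOLVE_LIMIT environment variable is set.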
@@ -1752,7 +1617,7 @@ altdeppath.endsWithSlash = true; if (!d.spec.path.empty && absdeppath != desireddeppath) - logWarn("Warning: Sub package %s, referenced by %s %s must be referenced using the path to its base package", + logWarn("Sub package %s, referenced by %s %s must be referenced using the path to its base package", subpack.name, pack.name, pack.version_); enforce(d.spec.path.empty || absdeppath == desireddeppath || absdeppath == altdeppath, @@ -1772,11 +1637,11 @@ Dependency dspec = d.spec.mapToPath(pack.path); // if not upgrading, use the selected version - if (!(m_options & UpgradeOptions.upgrade) && m_selectedVersions && m_selectedVersions.hasSelectedVersion(dbasename)) + if (!(m_options & UpgradeOptions.upgrade) && m_selectedVersions.hasSelectedVersion(dbasename)) dspec = m_selectedVersions.getSelectedVersion(dbasename); // keep selected optional dependencies and avoid non-selected optional-default dependencies by default - if (m_selectedVersions && !m_selectedVersions.bare) { + if (!m_selectedVersions.bare) { if (dt == DependencyType.optionalDefault && !m_selectedVersions.hasSelectedVersion(dbasename)) dt = DependencyType.optional; else if (dt == DependencyType.optional && m_selectedVersions.hasSelectedVersion(dbasename)) @@ -1839,23 +1704,23 @@ return m_rootPackage.basePackage; if (!dep.repository.empty) { - auto ret = m_dub.packageManager.loadSCMPackage(name, dep); + auto ret = m_dub.packageManager.loadSCMPackage(name, dep.repository); return ret !is null && dep.matches(ret.version_) ? ret : null; } else if (!dep.path.empty) { try { - auto ret = m_dub.packageManager.getOrLoadPackage(dep.path); - if (dep.matches(ret.version_)) return ret; + return m_dub.packageManager.getOrLoadPackage(dep.path); } catch (Exception e) { logDiagnostic("Failed to load path based dependency %s: %s", name, e.msg); logDebug("Full error: %s", e.toString().sanitize); return null; } } + const vers = dep.version_; if (auto ret = m_dub.m_packageManager.getBestPackage(name, dep)) return ret; - auto key = name ~ ":" ~ dep.version_.toString(); + auto key = name ~ ":" ~ vers.toString(); if (auto ret = key in m_remotePackages) return *ret; @@ -1873,15 +1738,15 @@ m_remotePackages[key] = ret; return ret; } catch (Exception e) { - logDiagnostic("Metadata for %s %s could not be downloaded from %s: %s", name, dep, ps.description, e.msg); + logDiagnostic("Metadata for %s %s could not be downloaded from %s: %s", name, vers, ps.description, e.msg); logDebug("Full error: %s", e.toString().sanitize); } } else { - logDiagnostic("Package %s not found in base package description (%s). Downloading whole package.", name, dep.version_.toString()); + logDiagnostic("Package %s not found in base package description (%s). Downloading whole package.", name, vers.toString()); try { FetchOptions fetchOpts; fetchOpts |= prerelease ? 
FetchOptions.usePrerelease : FetchOptions.none;
-					m_dub.fetch(rootpack, dep, m_dub.defaultPlacementLocation, fetchOpts, "need sub package description");
+					m_dub.fetch(rootpack, vers, m_dub.defaultPlacementLocation, fetchOpts, "need sub package description");
 					auto ret = m_dub.m_packageManager.getBestPackage(name, dep);
 					if (!ret) {
 						logWarn("Package %s %s doesn't have a sub package %s", rootpack, dep.version_, name);
@@ -1903,153 +1768,220 @@
 	}
 }

-private struct SpecialDirs {
-	NativePath temp;
-	NativePath userSettings;
-	NativePath systemSettings;
-	NativePath localRepository;
+/**
+ * An instance of Dub that does not rely on the environment
+ *
+ * This instance of dub should not read any environment variables,
+ * nor should it do any file IO, to make it usable and reliable in unittests.
+ * Currently it still reads environment variables, but it does not read the user configuration.
+ */
+package final class TestDub : Dub
+{
+	/// Forward to base constructor
+	public this (string root = ".", PackageSupplier[] extras = null,
+		SkipPackageSuppliers skip = SkipPackageSuppliers.none)
+	{
+		super(root, extras, skip);
+	}
+
+	/// Avoid loading user configuration
+	protected override void loadConfig() { /* No-op */ }
+}

-private class DubConfig {
-	private {
-		DubConfig m_parentConfig;
-		Json m_data;
-	}
+private struct SpecialDirs {
+	/// The path used to store temporary files and directories
+	NativePath temp;
+	/// The system-wide dub-specific folder
+	NativePath systemSettings;
+	/// The dub-specific folder in the user home directory
+	NativePath userSettings;
+	/**
+	 * Windows-only: the local, user-specific folder
+	 *
+	 * Unlike `userSettings`, this folder does not roam: in other words,
+	 * an account on a company network will not carry the contents of this
+	 * folder from one machine to another.
+	 * On Posix, this is equivalent to `userSettings`.
+	 *
+	 * See_Also: https://docs.microsoft.com/en-us/windows/win32/shell/knownfolderid
+	 */
+	NativePath localRepository;

-	this(Json data, DubConfig parent_config)
+	/// Returns: An instance of `SpecialDirs` initialized from the environment
+	public static SpecialDirs make () {
+		import std.file : tempDir;
+
+		SpecialDirs result;
+		result.temp = NativePath(tempDir);
+
+		version(Windows) {
+			result.systemSettings = NativePath(environment.get("ProgramData")) ~ "dub/";
+			immutable appDataDir = environment.get("APPDATA");
+			result.userSettings = NativePath(appDataDir) ~ "dub/";
+			// LOCALAPPDATA is not defined before Windows Vista
+			result.localRepository = NativePath(environment.get("LOCALAPPDATA", appDataDir)) ~ "dub";
+		} else version(Posix) {
+			result.systemSettings = NativePath("/var/lib/dub/");
+			result.userSettings = NativePath(environment.get("HOME")) ~ ".dub/";
+			if (!result.userSettings.absolute)
+				result.userSettings = NativePath(getcwd()) ~ result.userSettings;
+			result.localRepository = result.userSettings;
+		}
+		return result;
+	}
+}
+
+/**
+ * User-provided configuration
+ *
+ * All fields in this struct should be optional.
+ * Fields that are *not* optional should be mandatory from the POV
+ * of the application, not the POV of file parsing.
+ * For example, git's `user.name` and `user.email` are required to commit,
+ * but the error happens on the commit, not when the gitconfig is parsed.
+ *
+ * We have multiple configuration locations, and two kinds of fields:
+ * additive and non-additive. Additive fields take the union of the values
+ * found in all configuration files (e.g. `registryURLs`). Non-additive fields
+ * ignore values set in lower-priority configurations, although parsing
+ * must still succeed. Additive fields are marked as `@Optional`,
+ * non-additive are marked as `SetInfo`.
+ */
+private struct UserConfiguration {
+	import configy.Attributes;
+
+	@Optional string[] registryUrls;
+	@Optional NativePath[] customCachePaths;
+
+	SetInfo!(SkipPackageSuppliers) skipRegistry;
+	SetInfo!(string) defaultCompiler;
+	SetInfo!(string) defaultArchitecture;
+	SetInfo!(bool) defaultLowMemory;
+
+	SetInfo!(string[string]) defaultEnvironments;
+	SetInfo!(string[string]) defaultBuildEnvironments;
+	SetInfo!(string[string]) defaultRunEnvironments;
+	SetInfo!(string[string]) defaultPreGenerateEnvironments;
+	SetInfo!(string[string]) defaultPostGenerateEnvironments;
+	SetInfo!(string[string]) defaultPreBuildEnvironments;
+	SetInfo!(string[string]) defaultPostBuildEnvironments;
+	SetInfo!(string[string]) defaultPreRunEnvironments;
+	SetInfo!(string[string]) defaultPostRunEnvironments;
+	SetInfo!(string) dubHome;
+
+	/// Merge a lower-priority config (`this`) with a higher-priority config (`higher`)
+	public UserConfiguration merge(UserConfiguration higher)
+		return @safe pure nothrow
 	{
-		m_data = data;
-		m_parentConfig = parent_config;
+		import std.traits : hasUDA;
+		UserConfiguration result;
+
+		static foreach (idx, _; UserConfiguration.tupleof) {
+			static if (hasUDA!(UserConfiguration.tupleof[idx], Optional))
+				result.tupleof[idx] = higher.tupleof[idx] ~ this.tupleof[idx];
+			else static if (IsSetInfo!(typeof(this.tupleof[idx]))) {
+				if (higher.tupleof[idx].set)
+					result.tupleof[idx] = higher.tupleof[idx];
+				else
+					result.tupleof[idx] = this.tupleof[idx];
+			} else
+				static assert(false,
+					"Expect `@Optional` or `SetInfo` on: `" ~
+					__traits(identifier, this.tupleof[idx]) ~
+					"` of type : `" ~
+					typeof(this.tupleof[idx]).stringof ~ "`");
+		}
+
+		return result;
 	}

-	@property string[] registryURLs()
-	{
-		string[] ret;
-		if (auto pv = "registryUrls" in m_data)
-			ret = (*pv).deserializeJson!(string[]);
-		if (m_parentConfig) ret ~= m_parentConfig.registryURLs;
-		return ret;
-	}
+	/// Workaround for multiple declarations of `E` when `is()` is inlined in the `static foreach`
+	private template IsSetInfo(T) { enum bool IsSetInfo = is(T : SetInfo!E, E); }
+}

-	@property SkipPackageSuppliers skipRegistry()
-	{
-		if(auto pv = "skipRegistry" in m_data)
-			return to!SkipPackageSuppliers((*pv).get!string);
+unittest {
+	import configy.Read;

-		if (m_parentConfig)
-			return m_parentConfig.skipRegistry;
+	const str1 = `{
+	"registryUrls": [ "http://foo.bar\/optional\/escape" ],
+	"customCachePaths": [ "foo/bar", "foo/foo" ],

-		return SkipPackageSuppliers.none;
-	}
+	"skipRegistry": "all",
+	"defaultCompiler": "dmd",
+	"defaultArchitecture": "fooarch",
+	"defaultLowMemory": false,

-	@property NativePath[] customCachePaths()
-	{
-		import std.algorithm.iteration : map;
-		import std.array : array;
+	"defaultEnvironments": {
+		"VAR2": "settings.VAR2",
+		"VAR3": "settings.VAR3",
+		"VAR4": "settings.VAR4"
+	}
+}`;

-		NativePath[] ret;
-		if (auto pv = "customCachePaths" in m_data)
-			ret = (*pv).deserializeJson!(string[])
-				.map!(s => NativePath(s))
-				.array;
-		if (m_parentConfig)
-			ret ~= m_parentConfig.customCachePaths;
-		return ret;
-	}
+	const str2 = `{
+	"registryUrls": [ "http://bar.foo" ],
+	"customCachePaths": [ "bar/foo", "bar/bar" ],

-	@property string defaultCompiler()
-	const {
-		if (auto pv = "defaultCompiler" in m_data)
-			return pv.get!string;
-		if (m_parentConfig) return m_parentConfig.defaultCompiler;
-		return null;
-	}
+ "skipRegistry": "none", + "defaultCompiler": "ldc", + "defaultArchitecture": "bararch", + "defaultLowMemory": true, - @property string defaultArchitecture() - const { - if(auto pv = "defaultArchitecture" in m_data) - return (*pv).get!string; - if (m_parentConfig) return m_parentConfig.defaultArchitecture; - return null; - } + "defaultEnvironments": { + "VAR": "Hi", + } +}`; - @property bool defaultLowMemory() - const { - if(auto pv = "defaultLowMemory" in m_data) - return (*pv).get!bool; - if (m_parentConfig) return m_parentConfig.defaultLowMemory; - return false; - } + auto c1 = parseConfigString!UserConfiguration(str1, "/dev/null"); + assert(c1.registryUrls == [ "http://foo.bar/optional/escape" ]); + assert(c1.customCachePaths == [ NativePath("foo/bar"), NativePath("foo/foo") ]); + assert(c1.skipRegistry == SkipPackageSuppliers.all); + assert(c1.defaultCompiler == "dmd"); + assert(c1.defaultArchitecture == "fooarch"); + assert(c1.defaultLowMemory == false); + assert(c1.defaultEnvironments.length == 3); + assert(c1.defaultEnvironments["VAR2"] == "settings.VAR2"); + assert(c1.defaultEnvironments["VAR3"] == "settings.VAR3"); + assert(c1.defaultEnvironments["VAR4"] == "settings.VAR4"); - @property string[string] defaultEnvironments() - const { - if (auto pv = "defaultEnvironments" in m_data) - return deserializeJson!(string[string])(*cast(Json*)pv); - if (m_parentConfig) return m_parentConfig.defaultEnvironments; - return null; - } + auto c2 = parseConfigString!UserConfiguration(str2, "/dev/null"); + assert(c2.registryUrls == [ "http://bar.foo" ]); + assert(c2.customCachePaths == [ NativePath("bar/foo"), NativePath("bar/bar") ]); + assert(c2.skipRegistry == SkipPackageSuppliers.none); + assert(c2.defaultCompiler == "ldc"); + assert(c2.defaultArchitecture == "bararch"); + assert(c2.defaultLowMemory == true); + assert(c2.defaultEnvironments.length == 1); + assert(c2.defaultEnvironments["VAR"] == "Hi"); - @property string[string] defaultBuildEnvironments() - const { - if (auto pv = "defaultBuildEnvironments" in m_data) - return deserializeJson!(string[string])(*cast(Json*)pv); - if (m_parentConfig) return m_parentConfig.defaultBuildEnvironments; - return null; - } + auto m1 = c2.merge(c1); + // c1 takes priority, so its registryUrls is first + assert(m1.registryUrls == [ "http://foo.bar/optional/escape", "http://bar.foo" ]); + // Same with CCP + assert(m1.customCachePaths == [ + NativePath("foo/bar"), NativePath("foo/foo"), + NativePath("bar/foo"), NativePath("bar/bar"), + ]); - @property string[string] defaultRunEnvironments() - const { - if (auto pv = "defaultRunEnvironments" in m_data) - return deserializeJson!(string[string])(*cast(Json*)pv); - if (m_parentConfig) return m_parentConfig.defaultRunEnvironments; - return null; - } + // c1 fields only + assert(m1.skipRegistry == c1.skipRegistry); + assert(m1.defaultCompiler == c1.defaultCompiler); + assert(m1.defaultArchitecture == c1.defaultArchitecture); + assert(m1.defaultLowMemory == c1.defaultLowMemory); + assert(m1.defaultEnvironments == c1.defaultEnvironments); - @property string[string] defaultPreGenerateEnvironments() - const { - if (auto pv = "defaultPreGenerateEnvironments" in m_data) - return deserializeJson!(string[string])(*cast(Json*)pv); - if (m_parentConfig) return m_parentConfig.defaultPreGenerateEnvironments; - return null; - } + auto m2 = c1.merge(c2); + assert(m2.registryUrls == [ "http://bar.foo", "http://foo.bar/optional/escape" ]); + assert(m2.customCachePaths == [ + NativePath("bar/foo"), NativePath("bar/bar"), + 
NativePath("foo/bar"), NativePath("foo/foo"), + ]); + assert(m2.skipRegistry == c2.skipRegistry); + assert(m2.defaultCompiler == c2.defaultCompiler); + assert(m2.defaultArchitecture == c2.defaultArchitecture); + assert(m2.defaultLowMemory == c2.defaultLowMemory); + assert(m2.defaultEnvironments == c2.defaultEnvironments); - @property string[string] defaultPostGenerateEnvironments() - const { - if (auto pv = "defaultPostGenerateEnvironments" in m_data) - return deserializeJson!(string[string])(*cast(Json*)pv); - if (m_parentConfig) return m_parentConfig.defaultPostGenerateEnvironments; - return null; - } - - @property string[string] defaultPreBuildEnvironments() - const { - if (auto pv = "defaultPreBuildEnvironments" in m_data) - return deserializeJson!(string[string])(*cast(Json*)pv); - if (m_parentConfig) return m_parentConfig.defaultPreBuildEnvironments; - return null; - } - - @property string[string] defaultPostBuildEnvironments() - const { - if (auto pv = "defaultPostBuildEnvironments" in m_data) - return deserializeJson!(string[string])(*cast(Json*)pv); - if (m_parentConfig) return m_parentConfig.defaultPostBuildEnvironments; - return null; - } - - @property string[string] defaultPreRunEnvironments() - const { - if (auto pv = "defaultPreRunEnvironments" in m_data) - return deserializeJson!(string[string])(*cast(Json*)pv); - if (m_parentConfig) return m_parentConfig.defaultPreRunEnvironments; - return null; - } - - @property string[string] defaultPostRunEnvironments() - const { - if (auto pv = "defaultPostRunEnvironments" in m_data) - return deserializeJson!(string[string])(*cast(Json*)pv); - if (m_parentConfig) return m_parentConfig.defaultPostRunEnvironments; - return null; - } + auto m3 = UserConfiguration.init.merge(c1); + assert(m3 == c1); } diff --git a/source/dub/generators/build.d b/source/dub/generators/build.d index e089e39..2b267cb 100644 --- a/source/dub/generators/build.d +++ b/source/dub/generators/build.d @@ -12,8 +12,8 @@ import dub.generators.generator; import dub.internal.utils; import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.inet.path; +import dub.internal.logging; import dub.package_; import dub.packagemanager; import dub.project; @@ -29,20 +29,19 @@ string getObjSuffix(const scope ref BuildPlatform platform) { - return platform.platform.canFind("windows") ? ".obj" : ".o"; + return platform.isWindows() ? ".obj" : ".o"; } -string computeBuildName(string config, GeneratorSettings settings, const string[][] hashing...) +string computeBuildName(string config, in GeneratorSettings settings, const string[][] hashing...) { - import std.digest; - import std.digest.md; + import std.digest.sha : SHA256, toHexString; - MD5 hash; + SHA256 hash; hash.start(); void addHash(in string[] strings...) 
{ foreach (s; strings) { hash.put(cast(ubyte[])s); hash.put(0); } hash.put(0); } foreach(strings; hashing) addHash(strings); - auto hashstr = hash.finish().toHexString().idup; + const hashstr = hash.finish().toHexString(); return format("%s-%s-%s-%s-%s_v%s-%s", config, settings.buildType, settings.platform.platform.join("."), @@ -64,6 +63,7 @@ override void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets) { + import std.path : setExtension; scope (exit) cleanupTemporaries(); void checkPkgRequirements(const(Package) pkg) @@ -77,17 +77,24 @@ checkPkgRequirements(pkg); auto root_ti = targets[m_project.rootPackage.name]; + const rootTT = root_ti.buildSettings.targetType; - enforce(!(settings.rdmd && root_ti.buildSettings.targetType == TargetType.none), + enforce(!(settings.rdmd && rootTT == TargetType.none), "Building package with target type \"none\" with rdmd is not supported yet."); - logInfo("Performing \"%s\" build using %s for %-(%s, %).", - settings.buildType, settings.platform.compilerBinary, settings.platform.architecture); + logInfo("Starting", Color.light_green, + "Performing \"%s\" build using %s for %-(%s, %).", + settings.buildType.color(Color.magenta), settings.platform.compilerBinary, + settings.platform.architecture); bool any_cached = false; NativePath[string] target_paths; + NativePath[] dynamicLibDepsFilesToCopy; // to the root package output dir + const copyDynamicLibDepsLinkerFiles = rootTT == TargetType.dynamicLibrary || rootTT == TargetType.none; + const copyDynamicLibDepsRuntimeFiles = copyDynamicLibDepsLinkerFiles || rootTT == TargetType.executable; + bool[string] visited; void buildTargetRec(string target) { @@ -101,33 +108,82 @@ NativePath[] additional_dep_files; auto bs = ti.buildSettings.dup; + const tt = bs.targetType; foreach (ldep; ti.linkDependencies) { - if (bs.targetType != TargetType.staticLibrary && !(bs.options & BuildOption.syntaxOnly)) { - bs.addSourceFiles(target_paths[ldep].toNativeString()); - } else { + const ldepPath = target_paths[ldep].toNativeString(); + const doLink = tt != TargetType.staticLibrary && !(bs.options & BuildOption.syntaxOnly); + + if (doLink && isLinkerFile(settings.platform, ldepPath)) + bs.addSourceFiles(ldepPath); + else additional_dep_files ~= target_paths[ldep]; + + if (targets[ldep].buildSettings.targetType == TargetType.dynamicLibrary) { + // copy the .{dll,so,dylib} + if (copyDynamicLibDepsRuntimeFiles) + dynamicLibDepsFilesToCopy ~= NativePath(ldepPath); + + if (settings.platform.isWindows()) { + // copy the accompanying .pdb if found + if (copyDynamicLibDepsRuntimeFiles) { + const pdb = ldepPath.setExtension(".pdb"); + if (existsFile(pdb)) + dynamicLibDepsFilesToCopy ~= NativePath(pdb); + } + + const importLib = ldepPath.setExtension(".lib"); + if (existsFile(importLib)) { + // link dependee against the import lib + if (doLink) + bs.addSourceFiles(importLib); + // and copy + if (copyDynamicLibDepsLinkerFiles) + dynamicLibDepsFilesToCopy ~= NativePath(importLib); + } + + // copy the .exp file if found + const exp = ldepPath.setExtension(".exp"); + if (copyDynamicLibDepsLinkerFiles && existsFile(exp)) + dynamicLibDepsFilesToCopy ~= NativePath(exp); + } } } NativePath tpath; - if (bs.targetType != TargetType.none) + if (tt != TargetType.none) { if (buildTarget(settings, bs, ti.pack, ti.config, ti.packages, additional_dep_files, tpath)) any_cached = true; + } target_paths[target] = tpath; } // build all targets - if (settings.rdmd || root_ti.buildSettings.targetType == TargetType.staticLibrary) { + 
if (settings.rdmd || rootTT == TargetType.staticLibrary) { // RDMD always builds everything at once and static libraries don't need their // dependencies to be built NativePath tpath; buildTarget(settings, root_ti.buildSettings.dup, m_project.rootPackage, root_ti.config, root_ti.packages, null, tpath); - } else { - buildTargetRec(m_project.rootPackage.name); + return; + } - if (any_cached) { - logInfo("To force a rebuild of up-to-date targets, run again with --force."); + buildTargetRec(m_project.rootPackage.name); + + if (dynamicLibDepsFilesToCopy.length) { + const rootTargetPath = root_ti.buildSettings.targetPath; + + if (!existsFile(rootTargetPath)) + mkdirRecurse(rootTargetPath); + + foreach (src; dynamicLibDepsFilesToCopy) { + logDiagnostic("Copying target from %s to %s", src.toNativeString(), rootTargetPath); + hardLinkFile(src, NativePath(rootTargetPath) ~ src.head, true); } } + + if (any_cached) { + logInfo("Finished", Color.green, + "To force a rebuild of up-to-date targets, run again with --force" + ); + } } override void performPostGenerateActions(GeneratorSettings settings, in TargetInfo[string] targets) @@ -176,9 +232,9 @@ // run post-build commands if (!cached && buildsettings.postBuildCommands.length) { - logInfo("Running post-build commands..."); + logInfo("Post-build", Color.light_green, "Running commands"); runBuildCommands(CommandType.postBuild, buildsettings.postBuildCommands, pack, m_project, settings, buildsettings, - [["DUB_BUILD_PATH" : target_path.parentPath.toNativeString.absolutePath]]); + [["DUB_BUILD_PATH" : target_path is NativePath.init ? "" : target_path.parentPath.toNativeString.absolutePath]]); } return cached; @@ -197,7 +253,8 @@ else target_path = pack.path ~ format(".dub/build/%s/", build_id); if (!settings.force && isUpToDate(target_path, buildsettings, settings, pack, packages, additional_dep_files)) { - logInfo("%s %s: target for configuration \"%s\" is up to date.", pack.name, pack.version_, config); + logInfo("Up-to-date", Color.green, "%s %s: target for configuration [%s] is up to date.", + pack.name.color(Mode.bold), pack.version_, config.color(Color.blue)); logDiagnostic("Using existing build in %s.", target_path.toNativeString()); target_binary_path = target_path ~ settings.compiler.getTargetFileName(buildsettings, settings.platform); if (!settings.tempBuild) @@ -212,10 +269,10 @@ return false; } - logInfo("%s %s: building configuration \"%s\"...", pack.name, pack.version_, config); + logInfo("Building", Color.light_green, "%s %s: building configuration [%s]", pack.name.color(Mode.bold), pack.version_, config.color(Color.blue)); if( buildsettings.preBuildCommands.length ){ - logInfo("Running pre-build commands..."); + logInfo("Pre-build", Color.light_green, "Running commands"); runBuildCommands(CommandType.preBuild, buildsettings.preBuildCommands, pack, m_project, settings, buildsettings); } @@ -277,11 +334,11 @@ flags ~= mainsrc.relativeTo(cwd).toNativeString(); if (buildsettings.preBuildCommands.length){ - logInfo("Running pre-build commands..."); + logInfo("Pre-build", Color.light_green, "Running commands"); runCommands(buildsettings.preBuildCommands); } - logInfo("%s %s: building configuration \"%s\"...", pack.name, pack.version_, config); + logInfo("Building", Color.light_green, "%s %s [%s]", pack.name.color(Mode.bold), pack.version_, config.color(Color.blue)); logInfo("Running rdmd..."); logDiagnostic("rdmd %s", join(flags, " ")); @@ -308,7 +365,7 @@ f = fp.toNativeString(); } - logInfo("%s %s: building configuration \"%s\"...", 
pack.name, pack.version_, config); + logInfo("Building", Color.light_green, "%s %s [%s]", pack.name.color(Mode.bold), pack.version_, config.color(Color.blue)); // make all target/import paths relative string makeRelative(string path) { @@ -335,7 +392,7 @@ } if( buildsettings.preBuildCommands.length ){ - logInfo("Running pre-build commands..."); + logInfo("Pre-build", Color.light_green, "Running commands"); runBuildCommands(CommandType.preBuild, buildsettings.preBuildCommands, pack, m_project, settings, buildsettings); } @@ -359,7 +416,7 @@ buildsettings.importPaths, settings.platform.architecture, [ - (cast(uint)buildsettings.options).to!string, + (cast(uint)(buildsettings.options & ~BuildOption.color)).to!string, // exclude color option from id settings.platform.compilerBinary, settings.platform.compiler, settings.platform.compilerVersion, @@ -369,7 +426,7 @@ return computeBuildName(config, settings, hashing); } - private void copyTargetFile(NativePath build_path, BuildSettings buildsettings, GeneratorSettings settings) + private void copyTargetFile(in NativePath build_path, in BuildSettings buildsettings, in GeneratorSettings settings) { if (!existsFile(NativePath(buildsettings.targetPath))) mkdirRecurse(buildsettings.targetPath); @@ -378,15 +435,23 @@ settings.compiler.getTargetFileName(buildsettings, settings.platform) ]; - // Windows: add .pdb if found - const tt = buildsettings.targetType; - if ((tt == TargetType.executable || tt == TargetType.dynamicLibrary) && - settings.platform.platform.canFind("windows")) - { - import std.path : setExtension; - const pdbFilename = filenames[0].setExtension(".pdb"); - if (existsFile(build_path ~ pdbFilename)) - filenames ~= pdbFilename; + // Windows: add .pdb (for executables and DLLs) and/or import .lib & .exp (for DLLs) if found + if (settings.platform.isWindows()) { + void addIfFound(string extension) { + import std.path : setExtension; + const candidate = filenames[0].setExtension(extension); + if (existsFile(build_path ~ candidate)) + filenames ~= candidate; + } + + const tt = buildsettings.targetType; + if (tt == TargetType.executable || tt == TargetType.dynamicLibrary) + addIfFound(".pdb"); + + if (tt == TargetType.dynamicLibrary) { + addIfFound(".lib"); + addIfFound(".exp"); + } } foreach (filename; filenames) @@ -485,7 +550,7 @@ auto objs = new string[](srcs.walkLength); void compileSource(size_t i, string src) { - logInfo("Compiling %s...", src); + logInfo("Compiling", Color.light_green, "%s", src); const objPath = pathToObjName(settings.platform, src); objs[i] = compileUnit(src, objPath, buildsettings, settings); } @@ -496,7 +561,7 @@ foreach (i, src; srcs.array) compileSource(i, src); } - logInfo("Linking..."); + logInfo("Linking", Color.light_green, "%s", buildsettings.targetName.color(Mode.bold)); lbuildsettings.sourceFiles = is_static_library ? 
[] : lbuildsettings.sourceFiles.filter!(f => isLinkerFile(settings.platform, f)).array; settings.compiler.setTarget(lbuildsettings, settings.platform); settings.compiler.prepareBuildSettings(lbuildsettings, settings.platform, BuildSetting.commandLineSeparate|BuildSetting.sourceFiles); @@ -535,7 +600,11 @@ settings.compiler.invoke(buildsettings, settings.platform, settings.compileCallback); if (generate_binary) { - logInfo("Linking..."); + if (settings.tempBuild) { + logInfo("Linking", Color.light_green, "%s => %s", buildsettings.targetName.color(Mode.bold), buildsettings.getTargetPath(settings)); + } else { + logInfo("Linking", Color.light_green, "%s", buildsettings.targetName.color(Mode.bold)); + } settings.compiler.invokeLinker(lbuildsettings, settings.platform, [tempobj.toNativeString()], settings.linkCallback); } } @@ -552,7 +621,7 @@ } if (!exe_file_path.absolute) exe_file_path = cwd ~ exe_file_path; runPreRunCommands(m_project.rootPackage, m_project, settings, buildsettings); - logInfo("Running %s %s", exe_file_path.relativeTo(runcwd), run_args.join(" ")); + logInfo("Running", Color.green, "%s %s", exe_file_path.relativeTo(runcwd), run_args.join(" ")); string[string] env; foreach (aa; [buildsettings.environments, buildsettings.runEnvironments]) foreach (k, v; aa) @@ -579,7 +648,7 @@ in BuildSettings buildsettings) { if (buildsettings.preRunCommands.length) { - logInfo("Running pre-run commands..."); + logInfo("Pre-run", Color.light_green, "Running commands..."); runBuildCommands(CommandType.preRun, buildsettings.preRunCommands, pack, proj, settings, buildsettings); } } @@ -588,7 +657,7 @@ in BuildSettings buildsettings) { if (buildsettings.postRunCommands.length) { - logInfo("Running post-run commands..."); + logInfo("Post-run", Color.light_green, "Running commands..."); runBuildCommands(CommandType.postRun, buildsettings.postRunCommands, pack, proj, settings, buildsettings); } } diff --git a/source/dub/generators/cmake.d b/source/dub/generators/cmake.d index 595442d..b87a368 100644 --- a/source/dub/generators/cmake.d +++ b/source/dub/generators/cmake.d @@ -9,9 +9,9 @@ import dub.compilers.buildsettings; import dub.generators.generator; -import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.inet.path; +import dub.internal.logging; import dub.project; import std.algorithm: map, uniq; @@ -76,8 +76,8 @@ script.put("include(UseD)\n"); script.put( "add_d_conditions(VERSION %s DEBUG %s)\n".format( - info.buildSettings.versions.dup.join(" "), - info.buildSettings.debugVersions.dup.join(" "), + info.buildSettings.versions.join(" "), + info.buildSettings.debugVersions.join(" "), ) ); @@ -96,7 +96,7 @@ "target_link_libraries(%s %s %s)\n".format( name, (info.dependencies ~ info.linkDependencies).dup.stdsort.uniq.map!(s => sanitize(s)).join(" "), - info.buildSettings.libs.dup.join(" ") + info.buildSettings.libs.join(" ") ) ); script.put( @@ -114,13 +114,16 @@ file.close; script.shrinkTo(0); scripts.put(filename); + + logInfo("Generated", Color.green, "%s.cmake", name); } if(!cmakeListsPath.existsFile) { logWarn("You must use a fork of CMake which has D support for these scripts to function properly."); logWarn("It is available at https://github.com/trentforkert/cmake"); - logInfo("Generating default CMakeLists.txt"); + logDiagnostic("Generating default CMakeLists.txt"); + script.put("cmake_minimum_required(VERSION 3.0)\n"); script.put("project(%s D)\n".format(m_project.rootPackage.name)); @@ -131,6 +134,8 @@ file.write(script.data); 
file.close; + + logInfo("Generated", Color.green, "CMakeLists.txt (default)"); } } } diff --git a/source/dub/generators/generator.d b/source/dub/generators/generator.d index 38ea369..bd4d217 100644 --- a/source/dub/generators/generator.d +++ b/source/dub/generators/generator.d @@ -13,8 +13,8 @@ import dub.generators.sublimetext; import dub.generators.visuald; import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.inet.path; +import dub.internal.logging; import dub.package_; import dub.packagemanager; import dub.project; @@ -277,8 +277,8 @@ } } if (tt != TargetType.none && tt != TargetType.sourceLibrary && ti.buildSettings.sourceFiles.empty) { - logWarn(`Configuration '%s' of package %s contains no source files. Please add {"targetType": "none"} to its package description to avoid building it.`, - ti.config, ti.pack.name); + logWarn(`Configuration [%s] of package %s contains no source files. Please add %s to its package description to avoid building it.`, + ti.config.color(Color.blue), ti.pack.name.color(Mode.bold), `{"targetType": "none"}`.color(Mode.bold)); tt = TargetType.none; } return tt; @@ -381,9 +381,9 @@ // recurse collectDependencies(deppack, *depti, targets, level + 1); - // also recursively add all link dependencies of static libraries + // also recursively add all link dependencies of static *and* dynamic libraries // preserve topological sorting of dependencies for correct link order - if (depbs.targetType == TargetType.staticLibrary) + if (depbs.targetType == TargetType.staticLibrary || depbs.targetType == TargetType.dynamicLibrary) ti.linkDependencies = ti.linkDependencies.filter!(d => !depti.linkDependencies.canFind(d)).array ~ depti.linkDependencies; } @@ -599,9 +599,7 @@ } // apply both top level and configuration level forced dependency build settings - foreach (configured_dbs; [ - cast(const(BuildSettingsTemplate[string])) rootPackage.recipe.buildSettings.dependencyBuildSettings, - rootPackage.getBuildSettings(genSettings.config).dependencyBuildSettings]) + void applyDependencyBuildSettings (const RecipeDependency[string] configured_dbs) { BuildSettings[string] dependencyBuildSettings; foreach (key, value; configured_dbs) @@ -610,13 +608,15 @@ if (auto target = key in targets) { // get platform specific build settings and process dub variables (BuildSettingsTemplate => BuildSettings) - value.getPlatformSettings(buildSettings, genSettings.platform, target.pack.path); + value.settings.getPlatformSettings(buildSettings, genSettings.platform, target.pack.path); buildSettings.processVars(m_project, target.pack, buildSettings, genSettings, true); dependencyBuildSettings[key] = buildSettings; } } applyForcedSettings(*roottarget, targets, dependencyBuildSettings); } + applyDependencyBuildSettings(rootPackage.recipe.buildSettings.dependencies); + applyDependencyBuildSettings(rootPackage.getBuildSettings(genSettings.config).dependencies); // remove targets without output foreach (name; targets.keys) @@ -720,7 +720,7 @@ { child.addVersions(parent.versions); child.addDebugVersions(parent.debugVersions); - child.addOptions(BuildOptions(parent.options & inheritedBuildOptions)); + child.addOptions(Flags!BuildOption(parent.options & inheritedBuildOptions)); } private static void mergeFromDependency(const scope ref BuildSettings child, ref BuildSettings parent, const scope ref BuildPlatform platform) @@ -735,8 +735,8 @@ parent.addImportPaths(child.importPaths); parent.addStringImportPaths(child.stringImportPaths); 
parent.addInjectSourceFiles(child.injectSourceFiles); - // linking of static libraries is done by parent - if (child.targetType == TargetType.staticLibrary) { + // linker stuff propagates up from static *and* dynamic library deps + if (child.targetType == TargetType.staticLibrary || child.targetType == TargetType.dynamicLibrary) { parent.addSourceFiles(child.sourceFiles.filter!(f => isLinkerFile(platform, f)).array); parent.addLibs(child.libs); parent.addLFlags(child.lflags); @@ -891,7 +891,7 @@ in BuildSettings buildsettings) { if (buildsettings.preGenerateCommands.length && !isRecursiveInvocation(pack.name)) { - logInfo("Running pre-generate commands for %s...", pack.name); + logInfo("Pre-gen", Color.light_green, "Running commands for %s", pack.name); runBuildCommands(CommandType.preGenerate, buildsettings.preGenerateCommands, pack, proj, settings, buildsettings); } } @@ -903,7 +903,7 @@ in BuildSettings buildsettings, NativePath target_path, bool generate_binary) { if (buildsettings.postGenerateCommands.length && !isRecursiveInvocation(pack.name)) { - logInfo("Running post-generate commands for %s...", pack.name); + logInfo("Post-gen", Color.light_green, "Running commands for %s", pack.name); runBuildCommands(CommandType.postGenerate, buildsettings.postGenerateCommands, pack, proj, settings, buildsettings); } diff --git a/source/dub/generators/sublimetext.d b/source/dub/generators/sublimetext.d index aaf5cf0..d46fbd9 100644 --- a/source/dub/generators/sublimetext.d +++ b/source/dub/generators/sublimetext.d @@ -9,9 +9,9 @@ import dub.compilers.compiler; import dub.generators.generator; -import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.path; +import dub.internal.logging; import dub.packagemanager; import dub.project; @@ -45,7 +45,7 @@ write(projectPath, jsonString.data); - logInfo("Project '%s' generated.", projectPath); + logInfo("Generated", Color.green, "%s", projectPath); } } @@ -75,7 +75,9 @@ "profile", "profile-gc", "cov", + "cov-ctfe", "unittest-cov", + "unittest-cov-ctfe", "syntax" ]; diff --git a/source/dub/generators/visuald.d b/source/dub/generators/visuald.d index 7f45122..3e90d6a 100644 --- a/source/dub/generators/visuald.d +++ b/source/dub/generators/visuald.d @@ -11,7 +11,7 @@ import dub.generators.generator; import dub.internal.utils; import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.core.log; +import dub.internal.logging; import dub.package_; import dub.packagemanager; import dub.project; @@ -124,7 +124,7 @@ sln.put(ret.data); sln.flush(); - logInfo("Solution '%s' generated.", solutionFileName()); + logInfo("Generated", Color.green, "%s (solution)", solutionFileName()); } diff --git a/source/dub/init.d b/source/dub/init.d index 876b791..8571d3c 100644 --- a/source/dub/init.d +++ b/source/dub/init.d @@ -8,7 +8,7 @@ module dub.init; import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.core.log; +import dub.internal.logging; import dub.package_ : PackageFormat, packageInfoFiles, defaultPackageFilename; import dub.recipe.packagerecipe; import dub.dependency; @@ -38,7 +38,7 @@ package recipe and the file format used to store it prior to writing it to disk. 
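+ + As a quick illustration of the reworked signature below (dependencies are now passed as typed `VersionRange` values rather than raw strings), here is a minimal sketch; the package name, versions and paths are invented: + + Example: + --- + // invented example values + VersionRange[string] deps; + deps["vibe-d"] = VersionRange(Version("0.9.0"), Version("0.9.9")); + initPackage(NativePath("./myapp"), deps, "vibe.d", PackageFormat.json); + ---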
*/ -void initPackage(NativePath root_path, string[string] deps, string type, +void initPackage(NativePath root_path, VersionRange[string] deps, string type, PackageFormat format, scope RecipeCallback recipe_callback = null) { import std.conv : to; @@ -57,7 +57,6 @@ p.authors ~= username; p.license = "proprietary"; foreach (pack, v; deps) { - import std.ascii : isDigit; p.buildSettings.dependencies[pack] = Dependency(v); } @@ -193,6 +192,7 @@ %1$s.lib %1$s-test-* *.exe +*.pdb *.o *.obj *.lst diff --git a/source/dub/internal/colorize/colors.d b/source/dub/internal/colorize/colors.d new file mode 100644 index 0000000..5d67294 --- /dev/null +++ b/source/dub/internal/colorize/colors.d @@ -0,0 +1,118 @@ +/** + * Authors: Pedro Tacla Yamada + * Date: June 9, 2014 + * License: Licensed under the MIT license. See LICENSE for more information + * Version: 1.0.2 + */ +module dub.internal.colorize.colors; + +import std.string : format; + +private template color_type(int offset) +{ + enum type : int + { + init = 39 + offset, + + black = 30 + offset, + red = 31 + offset, + green = 32 + offset, + yellow = 33 + offset, + blue = 34 + offset, + magenta = 35 + offset, + cyan = 36 + offset, + white = 37 + offset, + + light_black = 90 + offset, + light_red = 91 + offset, + light_green = 92 + offset, + light_yellow = 93 + offset, + light_blue = 94 + offset, + light_magenta = 95 + offset, + light_cyan = 96 + offset, + light_white = 97 + offset + } +} + +alias color_type!0 .type fg; +alias color_type!10 .type bg; + +// Text modes +enum mode : int +{ + init = 0, + bold = 1, + underline = 4, + blink = 5, + swap = 7, + hide = 8 +} + +/** + * Wraps a string around color escape sequences. + * + * Params: + * str = The string to wrap with colors and modes + * c = The foreground color (see the fg enum type) + * b = The background color (see the bg enum type) + * m = The text mode (see the mode enum type) + * Example: + * --- + * writeln("This is blue".color(fg.blue)); + * writeln( + * color("This is red over green blinking", fg.blue, bg.green, mode.blink) + * ); + * --- + */ +string color( + const string str, + const fg c=fg.init, + const bg b=bg.init, + const mode m=mode.init +) pure +{ + return format("\033[%d;%d;%dm%s\033[0m", m, c, b, str); +} + +unittest +{ + import std.string : representation; + + string ret; + + ret = "This is yellow".color(fg.yellow); + assert(ret.representation == "\033[0;33;49mThis is yellow\033[0m".representation); + + ret = "This is light green".color(fg.light_green); + assert(ret.representation == "\033[0;92;49mThis is light green\033[0m".representation); + + ret = "This is light blue with red background".color(fg.light_blue, bg.red); + assert(ret.representation == "\033[0;94;41mThis is light blue with red background\033[0m".representation); + + ret = "This is red on blue blinking".color(fg.red, bg.blue, mode.blink); + assert(ret.representation == "\033[5;31;44mThis is red on blue blinking\033[0m".representation); +} + +string colorHelper(T)(const string str, const T t=T.init) pure + if(is(T : fg) || is(T : bg) || is(T : mode)) +{ + return format("\033[%dm%s\033[0m", t, str); +} + +alias background = colorHelper!bg; +alias foreground = colorHelper!fg; +alias style = colorHelper!mode; +alias color = colorHelper; + +unittest +{ + import std.string : representation; + + string ret; + + ret = "This is red on blue blinking" + .foreground(fg.red) + .background(bg.blue) + .style(mode.blink); + assert(ret.representation == "\033[5m\033[44m\033[31mThis is red on blue 
blinking\033[0m\033[0m\033[0m".representation); +} diff --git a/source/dub/internal/colorize/cwrite.d b/source/dub/internal/colorize/cwrite.d new file mode 100644 index 0000000..0f5d1f2 --- /dev/null +++ b/source/dub/internal/colorize/cwrite.d @@ -0,0 +1,74 @@ +/** + * Authors: ponce + * Date: July 28, 2014 + * License: Licensed under the MIT license. See LICENSE for more information + * Version: 1.0.2 + */ +module dub.internal.colorize.cwrite; + +import std.stdio : File, stdout; + +import dub.internal.colorize.winterm; + +/// Coloured write. +void cwrite(T...)(T args) if (!is(T[0] : File)) +{ + stdout.cwrite(args); +} + +/// Coloured writef. +void cwritef(Char, T...)(in Char[] fmt, T args) if (!is(T[0] : File)) +{ + stdout.cwritef(fmt, args); +} + +/// Coloured writefln. +void cwritefln(Char, T...)(in Char[] fmt, T args) +{ + stdout.cwritef(fmt ~ "\n", args); +} + +/// Coloured writeln. +void cwriteln(T...)(T args) +{ + // Most general instance + stdout.cwrite(args, '\n'); +} + +/// Coloured writef to a File. +void cwritef(Char, A...)(File f, in Char[] fmt, A args) +{ + import std.string : format; + auto s = format(fmt, args); + f.cwrite(s); +} + +/// Coloured write to a File. +void cwrite(S...)(File f, S args) +{ + import std.conv : to; + + string s = ""; + foreach(arg; args) + s ~= to!string(arg); + + version(Windows) + { + WinTermEmulation winterm; + winterm.initialize(); + foreach(dchar c ; s) + { + auto charAction = winterm.feed(c); + final switch(charAction) with (WinTermEmulation.CharAction) + { + case drop: break; + case write: f.write(c); break; + case flush: f.flush(); break; + } + } + } + else + { + f.write(s); + } +} diff --git a/source/dub/internal/colorize/package.d b/source/dub/internal/colorize/package.d new file mode 100644 index 0000000..b4ff384 --- /dev/null +++ b/source/dub/internal/colorize/package.d @@ -0,0 +1,10 @@ +/** + * Authors: ponce + * Date: July 28, 2014 + * License: Licensed under the MIT license. See LICENSE for more information + * Version: 1.0.2 + */ +module dub.internal.colorize; + +public import dub.internal.colorize.colors; +public import dub.internal.colorize.cwrite; diff --git a/source/dub/internal/colorize/winterm.d b/source/dub/internal/colorize/winterm.d new file mode 100644 index 0000000..32b7be2 --- /dev/null +++ b/source/dub/internal/colorize/winterm.d @@ -0,0 +1,191 @@ +/** + * Authors: ponce + * Date: July 28, 2014 + * License: Licensed under the MIT license. See LICENSE for more information + * Version: 1.0.2 + */ +module dub.internal.colorize.winterm; + +version(Windows) +{ + import core.sys.windows.windows; + + // Patch for DMD 2.065 compatibility + static if( __VERSION__ < 2066 ) private enum nogc = 1; + + // This is a state machine to enable terminal colors on Windows. + // Parses and interprets ANSI/VT100 Terminal Control Escape Sequences. + // Only supports colour sequences; it will output characters incorrectly on invalid input. + struct WinTermEmulation + { + public: + @nogc void initialize() nothrow + { + // saves console attributes + _console = GetStdHandle(STD_OUTPUT_HANDLE); + _savedInitialColor = (0 != GetConsoleScreenBufferInfo(_console, &consoleInfo)); + _state = State.initial; + } + + @nogc ~this() nothrow + { + // Restore initial text attributes on release + if (_savedInitialColor) + { + SetConsoleTextAttribute(_console, consoleInfo.wAttributes); + _savedInitialColor = false; + } + } + + enum CharAction + { + write, + drop, + flush + } + + // Eat one character and update color state accordingly. 
+ // Returns what to do with the fed character. + @nogc CharAction feed(dchar d) nothrow + { + final switch(_state) with (State) + { + case initial: + if (d == '\x1B') + { + _state = escaped; + return CharAction.flush; + } + break; + + case escaped: + if (d == '[') + { + _state = readingAttribute; + _parsedAttr = 0; + return CharAction.drop; + } + break; + + case readingAttribute: + if (d >= '0' && d <= '9') + { + _parsedAttr = _parsedAttr * 10 + (d - '0'); + return CharAction.drop; + } + else if (d == ';') + { + executeAttribute(_parsedAttr); + _parsedAttr = 0; + return CharAction.drop; + } + else if (d == 'm') + { + executeAttribute(_parsedAttr); + _state = State.initial; + return CharAction.drop; + } + break; + } + + return CharAction.write; + } + + private: + HANDLE _console; + bool _savedInitialColor; + CONSOLE_SCREEN_BUFFER_INFO consoleInfo; + State _state; + WORD _currentAttr; + int _parsedAttr; + + enum State + { + initial, + escaped, + readingAttribute + } + + @nogc void setForegroundColor(WORD fgFlags) nothrow + { + _currentAttr = _currentAttr & ~(FOREGROUND_BLUE | FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_INTENSITY); + _currentAttr = _currentAttr | fgFlags; + SetConsoleTextAttribute(_console, _currentAttr); + } + + @nogc void setBackgroundColor(WORD bgFlags) nothrow + { + _currentAttr = _currentAttr & ~(BACKGROUND_BLUE | BACKGROUND_GREEN | BACKGROUND_RED | BACKGROUND_INTENSITY); + _currentAttr = _currentAttr | bgFlags; + SetConsoleTextAttribute(_console, _currentAttr); + } + + // resets to the same foreground color that was set on initialize() + @nogc void resetForegroundColor() nothrow + { + if (!_savedInitialColor) + return; + + _currentAttr = _currentAttr & ~(FOREGROUND_BLUE | FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_INTENSITY); + _currentAttr = _currentAttr | (consoleInfo.wAttributes & (FOREGROUND_BLUE | FOREGROUND_GREEN | FOREGROUND_RED | FOREGROUND_INTENSITY)); + SetConsoleTextAttribute(_console, _currentAttr); + } + + // resets to the same background color that was set on initialize() + @nogc void resetBackgroundColor() nothrow + { + if (!_savedInitialColor) + return; + + _currentAttr = _currentAttr & ~(BACKGROUND_BLUE | BACKGROUND_GREEN | BACKGROUND_RED | BACKGROUND_INTENSITY); + _currentAttr = _currentAttr | (consoleInfo.wAttributes & (BACKGROUND_BLUE | BACKGROUND_GREEN | BACKGROUND_RED | BACKGROUND_INTENSITY)); + SetConsoleTextAttribute(_console, _currentAttr); + } + + @nogc void executeAttribute(int attr) nothrow + { + switch (attr) + { + case 0: + // reset all attributes + SetConsoleTextAttribute(_console, consoleInfo.wAttributes); + break; + + default: + if ( (30 <= attr && attr <= 37) || (90 <= attr && attr <= 97) ) + { + WORD color = 0; + if (90 <= attr && attr <= 97) + { + color = FOREGROUND_INTENSITY; + attr -= 60; + } + attr -= 30; + color |= (attr & 1 ? FOREGROUND_RED : 0) | (attr & 2 ? FOREGROUND_GREEN : 0) | (attr & 4 ? FOREGROUND_BLUE : 0); + setForegroundColor(color); + } + else if (attr == 39) // fg.init + { + resetForegroundColor(); + } + + if ( (40 <= attr && attr <= 47) || (100 <= attr && attr <= 107) ) + { + WORD color = 0; + if (100 <= attr && attr <= 107) + { + color = BACKGROUND_INTENSITY; + attr -= 60; + } + attr -= 40; + color |= (attr & 1 ? BACKGROUND_RED : 0) | (attr & 2 ? BACKGROUND_GREEN : 0) | (attr & 4 ? 
BACKGROUND_BLUE : 0); + setBackgroundColor(color); + } + else if (attr == 49) // bg.init + { + resetBackgroundColor(); + } + } + } + } +} diff --git a/source/dub/internal/git.d b/source/dub/internal/git.d index 24e3e7b..c8a371e 100644 --- a/source/dub/internal/git.d +++ b/source/dub/internal/git.d @@ -1,7 +1,7 @@ module dub.internal.git; import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.core.log; +import dub.internal.logging; import std.file; import std.string; diff --git a/source/dub/internal/logging.d b/source/dub/internal/logging.d new file mode 100644 index 0000000..3a01c45 --- /dev/null +++ b/source/dub/internal/logging.d @@ -0,0 +1,415 @@ +/** + Handles all the console output of the Dub package manager, by providing useful + methods for handling colored text. The module also disables colors when stdout + and stderr are not a TTY in order to avoid ASCII escape sequences in piped + output. The module can autodetect and configure itself in this regard by + calling initLogging() at the beginning of the program. Whether to color + text can also be set manually with setLoggingColorsEnabled(bool). + + The output for the log levels error, warn and info is formatted like this: + + "      <tag> <text>" + '----------' + fixed width + + the "tag" part can be colored (most often will be) and always has a fixed + width, which is defined as a const at the beginning of this module. + + The output for the log levels debug and diagnostic will be just the plain + string. + + There are some default tag string and color values for some logging levels: + - warn: "Warning", yellow bold + - error: "Error", red bold + + For the error and warn levels, the tag color is fixed to the ones listed + above. + + Also, the default tag string for the info level is "" (the empty string) and + the default color is white (usually it's manually set when calling logInfo + with the wanted tag string, but this allows just calling logInfo("text") without + having to worry about the tag if it's not needed). + + Usage: + After initializing the logging module with initLogging(), the functions + logDebug(..), logDiagnostic(..), logInfo(..), logWarn(..) and logError(..) + can be used to print log messages. Whether the messages are printed on stdout + or stderr depends on the log level (warning and error go to stderr). + The log(..) function can also be used. Check the signature and documentation + of the functions for more information. + + The minimum log level to print can be configured using setLogLevel(..), + and whether to color outputted text can be set with + setLoggingColorsEnabled(..) + + The color(str, color) function can be used to color text within a log + message, for instance like this: + + logInfo("Tag", Color.green, "My %s message", "colored".color(Color.red)) + + Copyright: © 2018 Giacomo De Lazzari + License: Subject to the terms of the MIT license, as written in the included LICENSE file. + Authors: Giacomo De Lazzari +*/ + +module dub.internal.logging; + +import std.stdio; +import std.array; +import std.format; +import std.string; + +import dub.internal.colorize : fg, mode; + +/** + An enum listing possible colors for terminal output, useful to set the color + of a tag. Re-exported from d-colorize in dub.internal.colorize. See the enum + definition there for a list of possible values. +*/ +public alias Color = fg; + +/** + An enum listing possible text "modes" for terminal output, useful to set the + text to bold, underline, blinking, etc... 
+ Re-exported from d-colorize in dub.internal.colorize. See the enum definition + there for a list of possible values. +*/ +public alias Mode = mode; + +/// Defines the current width of logging tags for justifying in chars. +/// Can be manipulated through push and pop. +struct TagWidth { + import core.atomic; + + private shared int value = 12; + private shared int index; + private shared int[16] stack; + + /// Gets the tag width in chars + public int get() { + return value; + } + + /// Changes the tag width for all following logging calls, until $(LREF pop) is called. + public void push(int width) { + int currentIndex = index; + index.atomicOp!"+="(1); + stack[currentIndex] = value; + assert(index < stack.length, "too many TagWidth.push without pop"); + value = width; + } + + /// Reverts the last $(LREF push) call. + public void pop() { + assert(index > 0); + value = stack[index.atomicOp!"-="(1)]; + } +} + +/// The global tag width instance used for logging. +public __gshared TagWidth tagWidth; + +/// Possible log levels supported +enum LogLevel { + debug_, + diagnostic, + info, + warn, + error, + none +} + +// The current minimum log level to be printed +private shared LogLevel _minLevel = LogLevel.info; + +/* + Whether to print text with colors or not, defaults to true but will be set + to false in initLogging() if stdout or stderr are not a TTY (which means the + output is probably being piped and we don't want ASCII escape chars in it) +*/ +private shared bool _printColors = true; + +/// Ditto +public bool hasColors () @trusted nothrow @nogc { return _printColors; } + +// isatty() is used in initLogging() to detect whether or not we are on a TTY +extern (C) int isatty(int); + +/** + This function must be called at the beginning of the program, before any + logging occurs. It will detect whether or not stdout/stderr are a console/TTY + and will consequently disable colored output if needed. Also, if a NO_COLOR + environment variable is defined, colors are disabled (https://no-color.org/). + + Forgetting to call the function will result in ASCII escape sequences in the + piped output, probably an undesirable thing. +*/ +void initLogging() +{ + import std.process : environment; + import core.stdc.stdio; + + _printColors = environment.get("NO_COLOR") == ""; + version (Windows) + { + version (CRuntime_DigitalMars) + { + if (!isatty(core.stdc.stdio.stdout._file) || + !isatty(core.stdc.stdio.stderr._file)) + _printColors = false; + } + else version (CRuntime_Microsoft) + { + if (!isatty(fileno(core.stdc.stdio.stdout)) || + !isatty(fileno(core.stdc.stdio.stderr))) + _printColors = false; + } + else + _printColors = false; + } + else version (Posix) + { + import core.sys.posix.unistd; + + if (!isatty(STDERR_FILENO) || !isatty(STDOUT_FILENO)) + _printColors = false; + } +} + +/// Sets the minimum log level to be printed +void setLogLevel(LogLevel level) nothrow +{ + _minLevel = level; +} + +/// Gets the minimum log level to be printed +LogLevel getLogLevel() +{ + return _minLevel; +} + +/// Set whether to print colors or not +void setLoggingColorsEnabled(bool enabled) +{ + _printColors = enabled; +} + +/** + Shorthand function to log a message with debug/diagnostic level; no tag string + or tag color is required (since there will be no tag). 
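+ + For instance (the logDiagnostic call mirrors one made by the build generator in this patch; the logDebug message and its arguments are invented): + + Example: + --- + logDebug("Found %s configurations", configs.length); // invented message + logDiagnostic("Using existing build in %s.", target_path.toNativeString()); // from the build generator + ---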
+ + Params: + fmt = See http://dlang.org/phobos/std_format.html#format-string +*/ +void logDebug(T...)(string fmt, lazy T args) nothrow +{ + log(LogLevel.debug_, false, "", Color.init, fmt, args); +} + +/// ditto +void logDiagnostic(T...)(string fmt, lazy T args) nothrow +{ + log(LogLevel.diagnostic, false, "", Color.init, fmt, args); +} + +/** + Shorthand function to log a message with info level, with custom tag string + and tag color. + + Params: + tag = The string the tag at the beginning of the line should contain + tagColor = The color the tag string should have + fmt = See http://dlang.org/phobos/std_format.html#format-string +*/ +void logInfo(T...)(string tag, Color tagColor, string fmt, lazy T args) nothrow +{ + log(LogLevel.info, false, tag, tagColor, fmt, args); +} + +/** + Shorthand function to log a message with info level; this version prints an + empty tag automatically (which is different from not having a tag - in this + case there will be an indentation of tagWidth chars on the left anyway). + + Params: + fmt = See http://dlang.org/phobos/std_format.html#format-string +*/ +void logInfo(T...)(string fmt, lazy T args) nothrow if (!is(T[0] : Color)) +{ + log(LogLevel.info, false, "", Color.init, fmt, args); +} + +/** + Shorthand function to log a message with info level; this version doesn't + print a tag at all, it effectively just prints the given string. + + Params: + fmt = See http://dlang.org/phobos/std_format.html#format-string +*/ +void logInfoNoTag(T...)(string fmt, lazy T args) nothrow if (!is(T[0] : Color)) +{ + log(LogLevel.info, true, "", Color.init, fmt, args); +} + +/** + Shorthand function to log a message with warning level, with custom tag string. + The tag color is fixed to yellow. + + Params: + tag = The string the tag at the beginning of the line should contain + fmt = See http://dlang.org/phobos/std_format.html#format-string +*/ +void logWarnTag(T...)(string tag, string fmt, lazy T args) nothrow +{ + log(LogLevel.warn, false, tag, Color.yellow, fmt, args); +} + +/** + Shorthand function to log a message with warning level, using the default + tag "Warning". The tag color is also fixed to yellow. + + Params: + fmt = See http://dlang.org/phobos/std_format.html#format-string +*/ +void logWarn(T...)(string fmt, lazy T args) nothrow +{ + log(LogLevel.warn, false, "Warning", Color.yellow, fmt, args); +} + +/** + Shorthand function to log a message with error level, with custom tag string. + The tag color is fixed to red. + + Params: + tag = The string the tag at the beginning of the line should contain + fmt = See http://dlang.org/phobos/std_format.html#format-string +*/ +void logErrorTag(T...)(string tag, string fmt, lazy T args) nothrow +{ + log(LogLevel.error, false, tag, Color.red, fmt, args); +} + +/** + Shorthand function to log a message with error level, using the default + tag "Error". The tag color is also fixed to red. 
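+ + A short sketch of the warn/error shorthands (both messages are invented for the example): + --- + logWarn("No valid root package found, falling back to defaults"); // invented message + logError("Failed to load package from %s", path.toNativeString()); // invented; path is a NativePath + ---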
+ + Params: + fmt = See http://dlang.org/phobos/std_format.html#format-string +*/ +void logError(T...)(string fmt, lazy T args) nothrow +{ + log(LogLevel.error, false, "Error", Color.red, fmt, args); +} + +/** + Log a message with the specified log level and with the specified tag string + and color. If the log level is debug or diagnostic, the tag is not printed, + thus the tag string and tag color will be ignored. If the log level is error + or warning, the tag will be in bold text. Also the tag can be disabled (for + any log level) by passing true as the second argument. + + Params: + level = The log level for the logged message + disableTag = Setting this to true disables the tag, no matter what + tag = The string the tag at the beginning of the line should contain + tagColor = The color the tag string should have + fmt = See http://dlang.org/phobos/std_format.html#format-string +*/ +void log(T...)( + LogLevel level, + bool disableTag, + string tag, + Color tagColor, + string fmt, + lazy T args +) nothrow +{ + if (level < _minLevel) + return; + + auto hasTag = true; + if (level <= LogLevel.diagnostic) + hasTag = false; + if (disableTag) + hasTag = false; + + auto boldTag = false; + if (level >= LogLevel.warn) + boldTag = true; + + try + { + string result = format(fmt, args); + + if (hasTag) + result = tag.rightJustify(tagWidth.get, ' ').color(tagColor, boldTag ? Mode.bold : Mode.init) ~ " " ~ result; + + import dub.internal.colorize : cwrite; + + File output = (level <= LogLevel.info) ? stdout : stderr; + + if (output.isOpen) + { + output.cwrite(result, "\n"); + output.flush(); + } + } + catch (Exception e) + { + debug assert(false, e.msg); + } +} + +/** + Colors the specified string with the specified color. The function is used to + print colored text within a log message. The function also checks whether + color output is enabled or disabled (when not outputting to a TTY) and, in the + latter case, just returns the plain string. This allows using it like so: + + logInfo("Tag", Color.green, "My %s log message", "colored".color(Color.red)); + + without worrying about whether colored output is enabled. + + Also a mode can be specified, such as bold/underline/etc... + + Params: + str = The string to color + color = The color to apply + mode = An optional mode, such as bold/underline/etc... +*/ +string color(const string str, const Color c, const Mode m = Mode.init) +{ + import dub.internal.colorize; + + if (_printColors) + return dub.internal.colorize.color(str, c, bg.init, m); + else + return str; +} + +/** + This function is the same as the above one, but just accepts a mode. + It's useful, for instance, when outputting bold text without changing the + color. + + Params: + str = The string to color + mode = The mode, such as bold/underline/etc... 
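+ + This mode-only overload is what call sites like the following one, taken from this patch's build generator, rely on when only bold text is wanted: + --- + logInfo("Linking", Color.light_green, "%s", buildsettings.targetName.color(Mode.bold)); + ---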
+*/ +string color(const string str, const Mode m = Mode.init) +{ + import dub.internal.colorize; + + if (_printColors) + return dub.internal.colorize.color(str, fg.init, bg.init, m); + else + return str; +} diff --git a/source/dub/internal/utils.d b/source/dub/internal/utils.d index 345385b..1b1b41b 100644 --- a/source/dub/internal/utils.d +++ b/source/dub/internal/utils.d @@ -8,11 +8,11 @@ module dub.internal.utils; import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.url; import dub.compilers.buildsettings : BuildSettings; import dub.version_; +import dub.internal.logging; import core.time : Duration; import std.algorithm : canFind, startsWith; @@ -20,6 +20,7 @@ import std.conv : to; import std.exception : enforce; import std.file; +import std.format; import std.string : format; import std.process; import std.traits : isIntegral; @@ -53,18 +54,21 @@ } /** - Obtain a lock for a file at the given path. If the file cannot be locked - within the given duration, an exception is thrown. The file will be created - if it does not yet exist. Deleting the file is not safe as another process - could create a new file with the same name. - The returned lock will get unlocked upon destruction. - - Params: - path = path to file that gets locked - timeout = duration after which locking failed - Returns: - The locked file or an Exception on timeout. -*/ + * Obtain a lock for a file at the given path. + * + * If the file cannot be locked within the given duration, + * an exception is thrown. The file will be created if it does not yet exist. + * Deleting the file is not safe as another process could create a new file + * with the same name. + * The returned lock will get unlocked upon destruction. + * + * Params: + * path = path to file that gets locked + * timeout = duration after which locking failed + * + * Returns: + * The locked file or an Exception on timeout. + */ auto lockFile(string path, Duration timeout) { import core.thread : Thread; @@ -601,9 +605,6 @@ path_skip = ipath.bySegment.walkLength; } - enforce(path_skip > 0, - format("Source file '%s' not found in any import path.", file.toNativeString())); - auto mpath = file.bySegment.array[path_skip .. $]; auto ret = appender!string; @@ -616,13 +617,21 @@ } //create module name from path - foreach (i; 0 .. mpath.length) { + if (path_skip == 0) + { import std.path; - auto p = mpath[i].name; - if (p == "package.d") break ; - if (ret.data.length > 0) ret ~= "."; - if (i+1 < mpath.length) ret ~= p; - else ret ~= p.baseName(".d"); + ret ~= mpath[$-1].name.baseName(".d"); + } + else + { + foreach (i; 0 .. mpath.length) { + import std.path; + auto p = mpath[i].name; + if (p == "package.d") break ; + if (ret.data.length > 0) ret ~= "."; + if (i+1 < mpath.length) ret ~= p; + else ret ~= p.baseName(".d"); + } } assert(ret.data.length > 0, "A module name was expected to be computed, and none was."); @@ -681,8 +690,62 @@ * Search for module keyword in file */ string getModuleNameFromFile(string filePath) { + if (!filePath.exists) + { + return null; + } string fileContent = filePath.readText; - logDiagnostic("Get module name from path: " ~ filePath); + logDiagnostic("Get module name from path: %s", filePath); return getModuleNameFromContent(fileContent); } + +/** + * Compare two instances of the same type for equality, + * providing a rich error message on failure. 
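+ * + * A usage sketch (the struct and values are invented for the example): + * --- + * struct Settings { string name; int[string] flags; } + * Settings a, b; + * b.name = "release"; + * // throws, with a message pinpointing "Settings.name" as the mismatch + * deepCompare(a, b); + * ---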
+ * + * This function will recurse into composite types (struct, AA, arrays) + * and compare element- / member-wise, taking opEquals into account, + * to provide the most accurate reason why comparison failed. + */ +void deepCompare (T) ( + in T result, in T expected, string file = __FILE__, size_t line = __LINE__) +{ + deepCompareImpl!T(result, expected, T.stringof, file, line); +} + +void deepCompareImpl (T) ( + in T result, in T expected, string path, string file, size_t line) +{ + static if (is(T == struct) && !is(typeof(T.init.opEquals(T.init)) : bool)) + { + static foreach (idx; 0 .. T.tupleof.length) + deepCompareImpl(result.tupleof[idx], expected.tupleof[idx], + format("%s.%s", path, __traits(identifier, T.tupleof[idx])), + file, line); + } + else static if (is(T : KeyT[ValueT], KeyT, ValueT)) + { + if (result.length != expected.length) + throw new Exception( + format("%s: AA has different number of entries (%s != %s): %s != %s", + path, result.length, expected.length, result, expected), + file, line); + foreach (key, value; expected) + { + if (auto ptr = key in result) + deepCompareImpl(*ptr, value, format("%s[%s]", path, key), file, line); + else + throw new Exception( + format("Expected key %s[%s] not present in result. %s != %s", + path, key, result, expected), file, line); + } + } + else if (result != expected) { + static if (is(T == struct) && is(typeof(T.init.opEquals(T.init)) : bool)) + path ~= ".opEquals"; + throw new Exception( + format("%s: result != expected: %s != %s", path, result, expected), + file, line); + } +} diff --git a/source/dub/internal/vibecompat/core/file.d b/source/dub/internal/vibecompat/core/file.d index f48176b..15ee255 100644 --- a/source/dub/internal/vibecompat/core/file.d +++ b/source/dub/internal/vibecompat/core/file.d @@ -9,7 +9,7 @@ public import dub.internal.vibecompat.inet.url; -import dub.internal.vibecompat.core.log; +import dub.internal.logging; import std.conv; import core.stdc.stdio; @@ -156,8 +156,52 @@ } } +private bool isWritable(NativePath name) +{ + version (Windows) + { + import core.sys.windows.windows; + + return (name.toNativeString.getAttributes & FILE_ATTRIBUTE_READONLY) == 0; + } + else version (Posix) + { + import core.sys.posix.sys.stat; + + return (name.toNativeString.getAttributes & S_IWUSR) != 0; + } + else + static assert(false, "Needs implementation."); +} + +private void makeWritable(NativePath name) +{ + makeWritable(name.toNativeString); +} + +private void makeWritable(string name) +{ + version (Windows) + { + import core.sys.windows.windows; + + name.setAttributes(name.getAttributes & ~FILE_ATTRIBUTE_READONLY); + } + else version (Posix) + { + import core.sys.posix.sys.stat; + + name.setAttributes(name.getAttributes | S_IWUSR); + } + else + static assert(false, "Needs implementation."); +} + /** - Creates a hardlink. + Creates a hardlink if possible, a copy otherwise. + + If `from` is read-only and `overwrite` is true, then a copy is made instead + and `to` is made writable, so that repeating the command will not fail. 
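+ + Example (the paths are invented for the sketch): + --- + // link (or, for a read-only source, copy) a build artifact into place, + // replacing a previous, possibly read-only, file + hardLinkFile(NativePath("build/myapp"), NativePath("bin/myapp"), true); + ---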
*/ void hardLinkFile(NativePath from, NativePath to, bool overwrite = false) { @@ -168,22 +212,27 @@ throw fe; } } - - version (Windows) + const writeAccessChangeRequired = overwrite && !isWritable(from); + if (!writeAccessChangeRequired) { - alias cstr = toUTFz!(const(wchar)*); - if (CreateHardLinkW(cstr(to.toNativeString), cstr(from.toNativeString))) - return; - } - else - { - import core.sys.posix.unistd : link; - alias cstr = toUTFz!(const(char)*); - if (!link(cstr(from.toNativeString), cstr(to.toNativeString))) - return; + version (Windows) + { + alias cstr = toUTFz!(const(wchar)*); + if (CreateHardLinkW(cstr(to.toNativeString), cstr(from.toNativeString))) + return; + } + else + { + import core.sys.posix.unistd : link; + alias cstr = toUTFz!(const(char)*); + if (!link(cstr(from.toNativeString), cstr(to.toNativeString))) + return; + } } // fallback to copy copyFile(from, to, overwrite); + if (writeAccessChangeRequired) + to.makeWritable; } /** diff --git a/source/dub/internal/vibecompat/core/log.d b/source/dub/internal/vibecompat/core/log.d deleted file mode 100644 index e48569e..0000000 --- a/source/dub/internal/vibecompat/core/log.d +++ /dev/null @@ -1,99 +0,0 @@ -/** - Central logging facility for vibe. - - Copyright: © 2012 rejectedsoftware e.K. - License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. - Authors: Sönke Ludwig -*/ -module dub.internal.vibecompat.core.log; - -import std.array; -import std.datetime; -import std.format; -import std.stdio; -import core.thread; - -private { - shared LogLevel s_minLevel = LogLevel.info; - shared LogLevel s_logFileLevel; -} - -/// Sets the minimum log level to be printed. -void setLogLevel(LogLevel level) nothrow -{ - s_minLevel = level; -} - -LogLevel getLogLevel() -{ - return s_minLevel; -} - -/** - Logs a message. 
- - Params: - level = The log level for the logged message - fmt = See http://dlang.org/phobos/std_format.html#format-string -*/ -void logDebug(T...)(string fmt, lazy T args) nothrow { log(LogLevel.debug_, fmt, args); } -/// ditto -void logDiagnostic(T...)(string fmt, lazy T args) nothrow { log(LogLevel.diagnostic, fmt, args); } -/// ditto -void logInfo(T...)(string fmt, lazy T args) nothrow { log(LogLevel.info, fmt, args); } -/// ditto -void logWarn(T...)(string fmt, lazy T args) nothrow { log(LogLevel.warn, fmt, args); } -/// ditto -void logError(T...)(string fmt, lazy T args) nothrow { log(LogLevel.error, fmt, args); } - -/// ditto -void log(T...)(LogLevel level, string fmt, lazy T args) -nothrow { - if( level < s_minLevel ) return; - string pref; - final switch( level ){ - case LogLevel.debug_: pref = "trc"; break; - case LogLevel.diagnostic: pref = "dbg"; break; - case LogLevel.info: pref = "INF"; break; - case LogLevel.warn: pref = "WRN"; break; - case LogLevel.error: pref = "ERR"; break; - case LogLevel.fatal: pref = "FATAL"; break; - case LogLevel.none: assert(false); - } - - try { - auto txt = appender!string(); - txt.reserve(256); - formattedWrite(txt, fmt, args); - - auto threadid = () @trusted { return cast(ulong)cast(void*)Thread.getThis(); } (); - auto fiberid = () @trusted { return cast(ulong)cast(void*)Fiber.getThis(); } (); - threadid ^= threadid >> 32; - fiberid ^= fiberid >> 32; - - if (level >= s_minLevel) { - File output; - if (level == LogLevel.info) () @trusted { output = stdout; } (); - else () @trusted { output = stderr; } (); - if (output.isOpen) { - output.writeln(txt.data); - output.flush(); - } - } - } catch( Exception e ){ - // this is bad but what can we do.. - debug assert(false, e.msg); - } -} - -/// Specifies the log level for a particular log message. -enum LogLevel { - debug_, - diagnostic, - info, - warn, - error, - fatal, - none -} - diff --git a/source/dub/internal/vibecompat/data/json.d b/source/dub/internal/vibecompat/data/json.d index b55166a..aced940 100644 --- a/source/dub/internal/vibecompat/data/json.d +++ b/source/dub/internal/vibecompat/data/json.d @@ -869,7 +869,6 @@ skipWhitespace(range, line); version(JsonLineNumbers) { - import dub.internal.vibecompat.core.log; int curline = line ? *line : 0; } diff --git a/source/dub/internal/vibecompat/inet/path.d b/source/dub/internal/vibecompat/inet/path.d index 0872b14..3baf10d 100644 --- a/source/dub/internal/vibecompat/inet/path.d +++ b/source/dub/internal/vibecompat/inet/path.d @@ -60,7 +60,7 @@ } /// Determines if the path is absolute. - @property bool absolute() const { return m_absolute; } + @property bool absolute() const scope @safe pure nothrow @nogc { return m_absolute; } /// Resolves all '.' and '..' path entries as far as possible. void normalize() @@ -84,7 +84,7 @@ /// Converts the Path back to a string representation using slashes. string toString() - const { + const @safe { if( m_nodes.empty ) return absolute ? "/" : ""; Appender!string ret; @@ -190,10 +190,10 @@ @property immutable(PathEntry)[] nodes() const { return m_nodes; } /// The number of path entries of which this path is composed - @property size_t length() const { return m_nodes.length; } + @property size_t length() const scope @safe pure nothrow @nogc { return m_nodes.length; } /// True if the path contains no entries - @property bool empty() const { return m_nodes.length == 0; } + @property bool empty() const scope @safe pure nothrow @nogc { return m_nodes.length == 0; } /// Determines if the path ends with a slash (i.e. 
is a directory) @property bool endsWithSlash() const { return m_endsWithSlash; } @@ -242,7 +242,7 @@ void opOpAssign(string OP)(NativePath rhs) if( OP == "~" ) { auto p = this ~ rhs; m_nodes = p.m_nodes; m_endsWithSlash = rhs.m_endsWithSlash; } /// Tests two paths for equality using '=='. - bool opEquals(ref const NativePath rhs) const { + bool opEquals(scope ref const NativePath rhs) const scope @safe { if( m_absolute != rhs.m_absolute ) return false; if( m_endsWithSlash != rhs.m_endsWithSlash ) return false; if( m_nodes.length != rhs.length ) return false; @@ -252,7 +252,7 @@ return true; } /// ditto - bool opEquals(const NativePath other) const { return opEquals(other); } + bool opEquals(scope const NativePath other) const scope @safe { return opEquals(other); } int opCmp(ref const NativePath rhs) const { if( m_absolute != rhs.m_absolute ) return cast(int)m_absolute - cast(int)rhs.m_absolute; @@ -287,17 +287,17 @@ m_name = str; } - string toString() const pure { return m_name; } + string toString() const return scope @safe pure nothrow @nogc { return m_name; } - @property string name() const { return m_name; } + @property string name() const return scope @safe pure nothrow @nogc { return m_name; } NativePath opBinary(string OP)(PathEntry rhs) const if( OP == "~" ) { return NativePath([this, rhs], false); } - bool opEquals(ref const PathEntry rhs) const { return m_name == rhs.m_name; } - bool opEquals(PathEntry rhs) const { return m_name == rhs.m_name; } - bool opEquals(string rhs) const { return m_name == rhs; } - int opCmp(ref const PathEntry rhs) const { return m_name.cmp(rhs.m_name); } - int opCmp(string rhs) const { return m_name.cmp(rhs); } + bool opEquals(scope ref const PathEntry rhs) const scope @safe pure nothrow @nogc { return m_name == rhs.m_name; } + bool opEquals(scope PathEntry rhs) const scope @safe pure nothrow @nogc { return m_name == rhs.m_name; } + bool opEquals(string rhs) const scope @safe pure nothrow @nogc { return m_name == rhs; } + int opCmp(scope ref const PathEntry rhs) const scope @safe pure nothrow @nogc { return m_name.cmp(rhs.m_name); } + int opCmp(string rhs) const scope @safe pure nothrow @nogc { return m_name.cmp(rhs); } } private bool isValidFilename(string str) diff --git a/source/dub/package_.d b/source/dub/package_.d index 98b463b..ce7d5a4 100644 --- a/source/dub/package_.d +++ b/source/dub/package_.d @@ -15,12 +15,14 @@ import dub.recipe.json; import dub.recipe.sdl; +import dub.internal.logging; import dub.internal.utils; -import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.path; +import configy.Read : StrictMode; + import std.algorithm; import std.array; import std.conv; @@ -69,7 +71,9 @@ "docs", "ddox", "cov", + "cov-ctfe", "unittest-cov", + "unittest-cov-ctfe", "syntax" ]; @@ -167,8 +171,11 @@ version_override = Optional version to associate to the package instead of the one declared in the package recipe, or the one determined by invoking the VCS (GIT currently). 
+ mode = Whether to issue errors, warnings, or ignore unknown keys in dub.json */ - static Package load(NativePath root, NativePath recipe_file = NativePath.init, Package parent = null, string version_override = "") + static Package load(NativePath root, NativePath recipe_file = NativePath.init, + Package parent = null, string version_override = "", + StrictMode mode = StrictMode.Ignore) { import dub.recipe.io; @@ -179,7 +186,7 @@ .format(root.toNativeString(), packageInfoFiles.map!(f => cast(string)f.filename).join("/"))); - auto recipe = readPackageRecipe(recipe_file, parent ? parent.name : null); + auto recipe = readPackageRecipe(recipe_file, parent ? parent.name : null, mode); auto ret = new Package(recipe, root, parent, version_override); ret.m_infoFile = recipe_file; @@ -298,7 +305,6 @@ /// ditto void storeInfo(NativePath path) const { - enforce(!version_.isUnknown, "Trying to store a package with an 'unknown' version, this is not supported."); auto filename = path ~ defaultPackageFilename; auto dstFile = openFile(filename.toNativeString(), FileMode.createTrunc); scope(exit) dstFile.close(); @@ -453,7 +459,9 @@ case "profile": settings.addOptions(profile, optimize, inline, debugInfo); break; case "profile-gc": settings.addOptions(profileGC, debugInfo); break; case "cov": settings.addOptions(coverage, debugInfo); break; + case "cov-ctfe": settings.addOptions(coverageCTFE, debugInfo); break; case "unittest-cov": settings.addOptions(unittests, coverage, debugMode, debugInfo); break; + case "unittest-cov-ctfe": settings.addOptions(unittests, coverageCTFE, debugMode, debugInfo); break; case "syntax": settings.addOptions(syntaxOnly); break; } } @@ -552,12 +560,17 @@ const(Dependency[string]) getDependencies(string config) const { Dependency[string] ret; - foreach (k, v; m_info.buildSettings.dependencies) - ret[k] = v; + foreach (k, v; m_info.buildSettings.dependencies) { + // DMD bug: Not giving `Dependency` here leads to RangeError + Dependency dep = v; + ret[k] = dep; + } foreach (ref conf; m_info.configurations) if (conf.name == config) { - foreach (k, v; conf.buildSettings.dependencies) - ret[k] = v; + foreach (k, v; conf.buildSettings.dependencies) { + Dependency dep = v; + ret[k] = dep; + } break; } return ret; } @@ -720,22 +733,22 @@ } } - // check for default app_main - string app_main_file; - auto pkg_name = m_info.name.length ? m_info.name : "unknown"; - foreach(sf; bs.sourcePaths.get("", null)){ - auto p = m_path ~ sf; - if( !existsFile(p) ) continue; - foreach(fil; ["app.d", "main.d", pkg_name ~ "/main.d", pkg_name ~ "/" ~ "app.d"]){ - if( existsFile(p ~ fil) ) { - app_main_file = (NativePath(sf) ~ fil).toNativeString(); - break; - } - } - } - // generate default configurations if none are defined if (m_info.configurations.length == 0) { + // check for default app_main + string app_main_file; + auto pkg_name = m_info.name.length ? 
m_info.name : "unknown"; + MainFileSearch: foreach_reverse(sf; bs.sourcePaths.get("", null)){ + auto p = m_path ~ sf; + if( !existsFile(p) ) continue; + foreach(fil; ["app.d", "main.d", pkg_name ~ "/main.d", pkg_name ~ "/" ~ "app.d"]){ + if( existsFile(p ~ fil) ) { + app_main_file = (NativePath(sf) ~ fil).toNativeString(); + break MainFileSearch; + } + } + } + if (bs.targetType == TargetType.executable) { BuildSettingsTemplate app_settings; app_settings.targetType = TargetType.executable; diff --git a/source/dub/packagemanager.d b/source/dub/packagemanager.d index af16b89..0b73fe9 100644 --- a/source/dub/packagemanager.d +++ b/source/dub/packagemanager.d @@ -10,31 +10,78 @@ import dub.dependency; import dub.internal.utils; import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.path; +import dub.internal.logging; import dub.package_; +import dub.recipe.io; +import configy.Exceptions; +public import configy.Read : StrictMode; -import std.algorithm : countUntil, filter, sort, canFind, remove; +import std.algorithm : countUntil, filter, map, sort, canFind, remove; import std.array; import std.conv; import std.digest.sha; import std.encoding : sanitize; import std.exception; import std.file; +import std.range; import std.string; +import std.sumtype; import std.zip; +/// Indicates where a package has been or should be placed to. +public enum PlacementLocation { + /// Packages retrieved with 'local' will be placed in the current folder + /// using the package name as destination. + local, + /// Packages with 'userWide' will be placed in a folder accessible by + /// all of the applications from the current user. + user, + /// Packages retrieved with 'systemWide' will be placed in a shared folder, + /// which can be accessed by all users of the system. + system, +} + +/// Converts a `PlacementLocation` to a string +public string toString (PlacementLocation loc) @safe pure nothrow @nogc +{ + final switch (loc) { + case PlacementLocation.local: + return "Local"; + case PlacementLocation.user: + return "User"; + case PlacementLocation.system: + return "System"; + } +} + /// The PackageManager can retrieve present packages and get / remove /// packages. class PackageManager { private { - Repository[] m_repositories; - NativePath[] m_searchPath; - Package[] m_packages; - Package[] m_temporaryPackages; - bool m_disableDefaultSearchPaths = false; + /** + * The 'internal' location, for packages not attributable to a location. + * + * There are two uses for this: + * - In `bare` mode, the search paths are set at this scope, + * and packages gathered are stored in `localPackage`; + * - In the general case, any path-based or SCM-based dependency + * is loaded in `fromPath`; + */ + Location m_internal; + /** + * List of locations that are managed by this `PackageManager` + * + * The `PackageManager` can be instantiated either in 'bare' mode, + * in which case this array will be empty, or in the normal mode, + * this array will have 3 entries, matching values + * in the `PlacementLocation` enum. 
+ * + * See_Also: `Location`, `PlacementLocation` + */ + Location[] m_repositories; } /** @@ -49,27 +96,16 @@ */ this(NativePath path) { - this.m_searchPath = [ path ]; - this.m_disableDefaultSearchPaths = true; + this.m_internal.searchPath = [ path ]; this.refresh(true); } - deprecated("Use the overload which accepts 3 `NativePath` arguments") - this(NativePath user_path, NativePath system_path, bool refresh_packages = true) - { - m_repositories = [ - Repository(user_path ~ "packages/"), - Repository(system_path ~ "packages/")]; - - if (refresh_packages) refresh(true); - } - this(NativePath package_path, NativePath user_path, NativePath system_path, bool refresh_packages = true) { m_repositories = [ - Repository(package_path ~ ".dub/packages/"), - Repository(user_path ~ "packages/"), - Repository(system_path ~ "packages/")]; + Location(package_path ~ ".dub/packages/"), + Location(user_path ~ "packages/"), + Location(system_path ~ "packages/")]; if (refresh_packages) refresh(true); } @@ -78,43 +114,23 @@ */ @property void searchPath(NativePath[] paths) { - if (paths == m_searchPath) return; - m_searchPath = paths.dup; + if (paths == this.m_internal.searchPath) return; + this.m_internal.searchPath = paths.dup; refresh(false); } /// ditto - @property const(NativePath)[] searchPath() const { return m_searchPath; } - - /** Disables searching DUB's predefined search paths. - */ - deprecated("Instantiate a PackageManager instance with the single-argument constructor: `new PackageManager(path)`") - @property void disableDefaultSearchPaths(bool val) - { - this._disableDefaultSearchPaths(val); - } - - // Non deprecated instance of the previous symbol, - // as `Dub.updatePackageSearchPath` calls it and while nothing in Dub app - // itself relies on it, just removing the call from `updatePackageSearchPath` - // could break the library use case. - package(dub) void _disableDefaultSearchPaths(bool val) - { - if (val == m_disableDefaultSearchPaths) return; - m_disableDefaultSearchPaths = val; - refresh(true); - } + @property const(NativePath)[] searchPath() const { return this.m_internal.searchPath; } /** Returns the effective list of search paths, including default ones. 
*/ + deprecated("Use the `PackageManager` facilities instead") @property const(NativePath)[] completeSearchPath() const { - auto ret = appender!(NativePath[])(); - ret.put(cast(NativePath[])m_searchPath); // work around Phobos 17251 - if (!m_disableDefaultSearchPaths) { - foreach (ref repo; m_repositories) { - ret.put(cast(NativePath[])repo.searchPath); - ret.put(cast(NativePath)repo.packagePath); - } + auto ret = appender!(const(NativePath)[])(); + ret.put(this.m_internal.searchPath); + foreach (ref repo; m_repositories) { + ret.put(repo.searchPath); + ret.put(repo.packagePath); } return ret.data; } @@ -132,8 +148,8 @@ import std.algorithm.iteration : map; import std.array : array; - m_repositories.length = LocalPackageType.max+1; - m_repositories ~= custom_cache_paths.map!(p => Repository(p)).array; + m_repositories.length = PlacementLocation.max+1; + m_repositories ~= custom_cache_paths.map!(p => Location(p)).array; refresh(false); } @@ -161,14 +177,19 @@ if (enable_overrides) { foreach (ref repo; m_repositories) foreach (ovr; repo.overrides) - if (ovr.package_ == name && ovr.version_.matches(ver)) { - Package pack; - if (!ovr.targetPath.empty) pack = getOrLoadPackage(ovr.targetPath); - else pack = getPackage(name, ovr.targetVersion, false); + if (ovr.package_ == name && ovr.source.matches(ver)) { + Package pack = ovr.target.match!( + (NativePath path) => getOrLoadPackage(path), + (Version vers) => getPackage(name, vers, false), + ); if (pack) return pack; - logWarn("Package override %s %s -> %s %s doesn't reference an existing package.", - ovr.package_, ovr.version_, ovr.targetVersion, ovr.targetPath); + ovr.target.match!( + (any) { + logWarn("Package override %s %s -> '%s' doesn't reference an existing package.", + ovr.package_, ovr.version_, any); + }, + ); } } @@ -180,12 +201,14 @@ } /// ditto + deprecated("Use the overload that accepts a `Version` as second argument") Package getPackage(string name, string ver, bool enable_overrides = true) { return getPackage(name, Version(ver), enable_overrides); } /// ditto + deprecated("Use the overload that takes a `PlacementLocation`") Package getPackage(string name, Version ver, NativePath path) { foreach (p; getPackageIterator(name)) { @@ -196,13 +219,24 @@ return null; } + /// Ditto + Package getPackage(string name, Version ver, PlacementLocation loc) + { + // Bare mode + if (loc >= this.m_repositories.length) + return null; + return this.m_repositories[loc].lookup(name, ver); + } + /// ditto + deprecated("Use the overload that accepts a `Version` as second argument") Package getPackage(string name, string ver, NativePath path) { return getPackage(name, Version(ver), path); } /// ditto + deprecated("Use another `PackageManager` API, open an issue if none suits you") Package getPackage(string name, NativePath path) { foreach( p; getPackageIterator(name) ) @@ -214,6 +248,7 @@ /** Looks up the first package matching the given name. */ + deprecated("Use `getBestPackage` instead") Package getFirstPackage(string name) { foreach (ep; getPackageIterator(name)) @@ -223,6 +258,7 @@ /** Looks up the latest package matching the given name. 
*/ + deprecated("Use `getBestPackage` with `name, Dependency.any` instead") Package getLatestPackage(string name) { Package pkg; @@ -241,18 +277,20 @@ path = NativePath to the root directory of the package recipe_path = Optional path to the recipe file of the package allow_sub_packages = Also return a sub package if it resides in the given folder + mode = Whether to issue errors, warning, or ignore unknown keys in dub.json Returns: The packages loaded from the given path Throws: Throws an exception if no package can be loaded */ - Package getOrLoadPackage(NativePath path, NativePath recipe_path = NativePath.init, bool allow_sub_packages = false) + Package getOrLoadPackage(NativePath path, NativePath recipe_path = NativePath.init, + bool allow_sub_packages = false, StrictMode mode = StrictMode.Ignore) { path.endsWithSlash = true; - foreach (p; getPackageIterator()) + foreach (p; this.m_internal.fromPath) if (p.path == path && (!p.parentPackage || (allow_sub_packages && p.parentPackage.path != p.path))) return p; - auto pack = Package.load(path, recipe_path); - addPackages(m_temporaryPackages, pack); + auto pack = Package.load(path, recipe_path, null, null, mode); + addPackages(this.m_internal.fromPath, pack); return pack; } @@ -272,33 +310,41 @@ The package loaded from the given SCM repository or null if the package couldn't be loaded. */ + deprecated("Use the overload that accepts a `dub.dependency : Repository`") Package loadSCMPackage(string name, Dependency dependency) in { assert(!dependency.repository.empty); } - do { - Package pack; + do { return this.loadSCMPackage(name, dependency.repository); } - with (dependency.repository) final switch (kind) - { - case Kind.git: - pack = loadGitPackage(name, dependency.versionSpec, dependency.repository.remote); - } - if (pack !is null) { - addPackages(m_temporaryPackages, pack); - } - return pack; + /// Ditto + Package loadSCMPackage(string name, Repository repo) + in { assert(!repo.empty); } + do { + Package pack; + + final switch (repo.kind) + { + case repo.Kind.git: + pack = loadGitPackage(name, repo); + } + if (pack !is null) { + addPackages(this.m_internal.fromPath, pack); + } + return pack; } - private Package loadGitPackage(string name, string versionSpec, string remote) - { + private Package loadGitPackage(string name, in Repository repo) + { import dub.internal.git : cloneRepository; - if (!versionSpec.startsWith("~") && !versionSpec.isGitHash) { + if (!repo.ref_.startsWith("~") && !repo.ref_.isGitHash) { return null; } - string gitReference = versionSpec.chompPrefix("~"); - const destination = m_repositories[LocalPackageType.user].packagePath ~ - NativePath(name ~ "-" ~ gitReference) ~ (name~"/"); + string gitReference = repo.ref_.chompPrefix("~"); + NativePath destination = this.getPackagePath(PlacementLocation.user, name, repo.ref_); + // For libraries leaking their import path + destination ~= name; + destination.endsWithSlash = true; foreach (p; getPackageIterator(name)) { if (p.path == destination) { @@ -306,15 +352,38 @@ } } - if (!cloneRepository(remote, gitReference, destination.toNativeString())) { + if (!cloneRepository(repo.remote, gitReference, destination.toNativeString())) { return null; } return Package.load(destination); - } + } + + /** + * Get the final destination a specific package needs to be stored in. + * + * See `Location.getPackagePath`. 
+ */ + package(dub) NativePath getPackagePath (PlacementLocation base, string name, string vers) + { + assert(this.m_repositories.length == 3, "getPackagePath called in bare mode"); + return this.m_repositories[base].getPackagePath(name, vers); + } /** Searches for the latest version of a package matching the given dependency. */ + Package getBestPackage(string name, VersionRange range = VersionRange.Any) + { + return this.getBestPackage(name, Dependency(range)); + } + + /// Ditto + Package getBestPackage(string name, Version vers) + { + return this.getBestPackage(name, VersionRange(vers, vers)); + } + + /// Ditto Package getBestPackage(string name, Dependency version_spec, bool enable_overrides = true) { Package ret; @@ -393,18 +462,18 @@ { int iterator(int delegate(ref Package) del) { - foreach (tp; m_temporaryPackages) - if (auto ret = del(tp)) return ret; + // Search scope by priority, internal has the highest + foreach (p; this.m_internal.fromPath) + if (auto ret = del(p)) return ret; + foreach (p; this.m_internal.localPackages) + if (auto ret = del(p)) return ret; - // first search local packages - foreach (ref repo; m_repositories) + foreach (ref repo; m_repositories) { foreach (p; repo.localPackages) if (auto ret = del(p)) return ret; - - // and then all packages gathered from the search path - foreach( p; m_packages ) - if( auto ret = del(p) ) - return ret; + foreach (p; repo.fromPath) + if (auto ret = del(p)) return ret; + } return 0; } @@ -431,61 +500,154 @@ /** Returns a list of all package overrides for the given scope. */ - const(PackageOverride)[] getOverrides(LocalPackageType scope_) + deprecated(OverrideDepMsg) + const(PackageOverride)[] getOverrides(PlacementLocation scope_) + const { + return cast(typeof(return)) this.getOverrides_(scope_); + } + + package(dub) const(PackageOverride_)[] getOverrides_(PlacementLocation scope_) const { return m_repositories[scope_].overrides; } /** Adds a new override for the given package. */ - void addOverride(LocalPackageType scope_, string package_, Dependency version_spec, Version target) + deprecated("Use the overload that accepts a `VersionRange` as 3rd argument") + void addOverride(PlacementLocation scope_, string package_, Dependency version_spec, Version target) { m_repositories[scope_].overrides ~= PackageOverride(package_, version_spec, target); - writeLocalPackageOverridesFile(scope_); + m_repositories[scope_].writeOverrides(); } /// ditto - void addOverride(LocalPackageType scope_, string package_, Dependency version_spec, NativePath target) + deprecated("Use the overload that accepts a `VersionRange` as 3rd argument") + void addOverride(PlacementLocation scope_, string package_, Dependency version_spec, NativePath target) { m_repositories[scope_].overrides ~= PackageOverride(package_, version_spec, target); - writeLocalPackageOverridesFile(scope_); + m_repositories[scope_].writeOverrides(); + } + + /// Ditto + deprecated(OverrideDepMsg) + void addOverride(PlacementLocation scope_, string package_, VersionRange source, Version target) + { + this.addOverride_(scope_, package_, source, target); + } + /// ditto + deprecated(OverrideDepMsg) + void addOverride(PlacementLocation scope_, string package_, VersionRange source, NativePath target) + { + this.addOverride_(scope_, package_, source, target); + } + + // Non deprecated version that is used by `commandline`. Do not use! 
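+	// (The public `addOverride` overloads are deprecated; see `OverrideDepMsg`
+	// at the bottom of this module for the rationale.)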
+	package(dub) void addOverride_(PlacementLocation scope_, string package_, VersionRange source, Version target)
+	{
+		m_repositories[scope_].overrides ~= PackageOverride_(package_, source, target);
+		m_repositories[scope_].writeOverrides();
+	}
+	// Non deprecated version that is used by `commandline`. Do not use!
+	package(dub) void addOverride_(PlacementLocation scope_, string package_, VersionRange source, NativePath target)
+	{
+		m_repositories[scope_].overrides ~= PackageOverride_(package_, source, target);
+		m_repositories[scope_].writeOverrides();
	}

	/** Removes an existing package override.
	*/
-	void removeOverride(LocalPackageType scope_, string package_, Dependency version_spec)
+	deprecated("Use the overload that accepts a `VersionRange` as 3rd argument")
+	void removeOverride(PlacementLocation scope_, string package_, Dependency version_spec)
	{
-		Repository* rep = &m_repositories[scope_];
-		foreach (i, ovr; rep.overrides) {
-			if (ovr.package_ != package_ || ovr.version_ != version_spec)
-				continue;
-			rep.overrides = rep.overrides[0 .. i] ~ rep.overrides[i+1 .. $];
-			writeLocalPackageOverridesFile(scope_);
-			return;
-		}
-		throw new Exception(format("No override exists for %s %s", package_, version_spec));
+		version_spec.visit!(
+			(VersionRange src) => this.removeOverride(scope_, package_, src),
+			(any) { throw new Exception(format("No override exists for %s %s", package_, version_spec)); },
+		);
	}

-	/// Extracts the package supplied as a path to it's zip file to the
-	/// destination and sets a version field in the package description.
+	deprecated(OverrideDepMsg)
+	void removeOverride(PlacementLocation scope_, string package_, VersionRange src)
+	{
+		this.removeOverride_(scope_, package_, src);
+	}
+
+	package(dub) void removeOverride_(PlacementLocation scope_, string package_, VersionRange src)
+	{
+		Location* rep = &m_repositories[scope_];
+		foreach (i, ovr; rep.overrides) {
+			if (ovr.package_ != package_ || ovr.source != src)
+				continue;
+			rep.overrides = rep.overrides[0 .. i] ~ rep.overrides[i+1 .. $];
+			(*rep).writeOverrides();
+			return;
+		}
+		throw new Exception(format("No override exists for %s %s", package_, src));
+	}
+
+	deprecated("Use `store(NativePath source, PlacementLocation dest, string name, Version vers)`")
	Package storeFetchedPackage(NativePath zip_file_path, Json package_info, NativePath destination)
	{
+		return this.store_(zip_file_path, destination, package_info["name"].get!string,
+			Version(package_info["version"].get!string));
+	}
+
+	/**
+	 * Store the zip file at `src` into the managed location `dest`
+	 *
+	 * This will extract the package supplied (as a zip file) to the
+	 * destination and set a version field in the package description.
+	 * In the future, we should aim not to alter the package description,
+	 * but this is done for backward compatibility.
+	 *
+	 * Params:
+	 *	 src = The path to the zip file containing the package
+	 *	 dest = At which `PlacementLocation` the package should be stored
+	 *	 name = Name of the package being stored
+	 *	 vers = Version of the package
+	 *
+	 * Returns:
+	 *	 The `Package` after it has been loaded.
+	 *
+	 * Throws:
+	 *	 If the package cannot be loaded / the zip is corrupted / the package
+	 *	 already exists, etc...
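+	 *
+	 * A minimal usage sketch (all paths, the package name, and the version
+	 * below are hypothetical):
+	 * ---
+	 * auto pm = new PackageManager(NativePath("/project/"),
+	 *     NativePath("/home/user/.dub/"), NativePath("/var/lib/dub/"));
+	 * auto pkg = pm.store(NativePath("/tmp/mypkg-1.2.3.zip"),
+	 *     PlacementLocation.user, "mypkg", Version("1.2.3"));
+	 * assert(pkg.name == "mypkg");
+	 * ---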
+	 */
+	Package store(NativePath src, PlacementLocation dest, string name, Version vers)
+	{
+		NativePath dstpath = this.getPackagePath(dest, name, vers.toString());
+		if (!dstpath.existsFile())
+			mkdirRecurse(dstpath.toNativeString());
+		// For libraries leaking their import path
+		dstpath = dstpath ~ name;
+
+		// possibly wait for other dub instance
+		import core.time : seconds;
+		auto lock = lockFile(dstpath.toNativeString() ~ ".lock", 30.seconds);
+		if (dstpath.existsFile()) {
+			this.refresh(false);
+			return this.getPackage(name, vers, dest);
+		}
+		return this.store_(src, dstpath, name, vers);
+	}
+
+	/// Backward-compatibility for deprecated overload, simplify once `storeFetchedPackage`
+	/// is removed
+	private Package store_(NativePath src, NativePath destination, string name, Version vers)
+	{
		import std.range : walkLength;

-		auto package_name = package_info["name"].get!string;
-		auto package_version = package_info["version"].get!string;
-
		logDebug("Placing package '%s' version '%s' to location '%s' from file '%s'",
-			package_name, package_version, destination.toNativeString(), zip_file_path.toNativeString());
+			name, vers, destination.toNativeString(), src.toNativeString());

		if( existsFile(destination) ){
-			throw new Exception(format("%s (%s) needs to be removed from '%s' prior placement.", package_name, package_version, destination));
+			throw new Exception(format("%s (%s) needs to be removed from '%s' prior to placement.",
+				name, vers, destination));
		}

		// open zip file
		ZipArchive archive;
		{
-			logDebug("Opening file %s", zip_file_path);
-			auto f = openFile(zip_file_path, FileMode.read);
+			logDebug("Opening file %s", src);
+			auto f = openFile(src, FileMode.read);
			scope(exit) f.close();
			archive = new ZipArchive(f.readAll());
		}
@@ -551,13 +713,13 @@
		logDebug("%s file(s) copied.", to!string(countFiles));

		// overwrite dub.json (this one includes a version field)
-		auto pack = Package.load(destination, NativePath.init, null, package_info["version"].get!string);
+		auto pack = Package.load(destination, NativePath.init, null, vers.toString());

		if (pack.recipePath.head != defaultPackageFilename)
			// Storeinfo saved a default file, this could be different to the file from the zip.
			removeFile(pack.recipePath);
		pack.storeInfo();
-		addPackages(m_packages, pack);
+		addPackages(this.m_internal.localPackages, pack);
		return pack;
	}

@@ -578,18 +740,24 @@
				return false;
			}
		foreach(repo; m_repositories) {
-			if(removeFrom(repo.localPackages, pack)) {
+			if (removeFrom(repo.fromPath, pack)) {
+				found = true;
+				break;
+			}
+			// Maintain backward compatibility with pre v1.30.0 behavior,
+			// this is equivalent to remove-local
+			if (removeFrom(repo.localPackages, pack)) {
				found = true;
				break;
			}
		}
		if(!found)
-			found = removeFrom(m_packages, pack);
+			found = removeFrom(this.m_internal.localPackages, pack);
		enforce(found, "Cannot remove, package not found: '"~ pack.name ~"', path: " ~ to!string(pack.path));

		logDebug("About to delete root folder for package '%s'.", pack.path);
		rmdirRecurse(pack.path.toNativeString());
-		logInfo("Removed package: '"~pack.name~"'");
+		logInfo("Removed", Color.yellow, "%s %s", pack.name.color(Mode.bold), pack.version_);
	}

	/// Compatibility overload. Use the version without a `force_remove` argument instead.
@@ -599,7 +767,7 @@ remove(pack); } - Package addLocalPackage(NativePath path, string verName, LocalPackageType type) + Package addLocalPackage(NativePath path, string verName, PlacementLocation type) { path.endsWithSlash = true; auto pack = Package.load(path); @@ -619,13 +787,13 @@ addPackages(*packs, pack); - writeLocalPackageList(type); + this.m_repositories[type].writeLocalPackageList(); logInfo("Registered package: %s (version: %s)", pack.name, pack.version_); return pack; } - void removeLocalPackage(NativePath path, LocalPackageType type) + void removeLocalPackage(NativePath path, PlacementLocation type) { path.endsWithSlash = true; @@ -637,176 +805,50 @@ enforce(to_remove.length > 0, "No "~type.to!string()~" package found at "~path.toNativeString()); string[Version] removed; - foreach_reverse( i; to_remove ) { + foreach (i; to_remove) removed[(*packs)[i].version_] = (*packs)[i].name; - *packs = (*packs)[0 .. i] ~ (*packs)[i+1 .. $]; - } - writeLocalPackageList(type); + *packs = (*packs).enumerate + .filter!(en => !to_remove.canFind(en.index)) + .map!(en => en.value).array; + + this.m_repositories[type].writeLocalPackageList(); foreach(ver, name; removed) logInfo("Deregistered package: %s (version: %s)", name, ver); } /// For the given type add another path where packages will be looked up. - void addSearchPath(NativePath path, LocalPackageType type) + void addSearchPath(NativePath path, PlacementLocation type) { m_repositories[type].searchPath ~= path; - writeLocalPackageList(type); + this.m_repositories[type].writeLocalPackageList(); } /// Removes a search path from the given type. - void removeSearchPath(NativePath path, LocalPackageType type) + void removeSearchPath(NativePath path, PlacementLocation type) { m_repositories[type].searchPath = m_repositories[type].searchPath.filter!(p => p != path)().array(); - writeLocalPackageList(type); + this.m_repositories[type].writeLocalPackageList(); } - void refresh(bool refresh_existing_packages) + void refresh(bool refresh) { - logDiagnostic("Refreshing local packages (refresh existing: %s)...", refresh_existing_packages); + logDiagnostic("Refreshing local packages (refresh existing: %s)...", refresh); - // load locally defined packages - void scanLocalPackages(LocalPackageType type) - { - NativePath list_path = m_repositories[type].packagePath; - Package[] packs; - NativePath[] paths; - try { - auto local_package_file = list_path ~ LocalPackagesFilename; - logDiagnostic("Looking for local package map at %s", local_package_file.toNativeString()); - if( !existsFile(local_package_file) ) return; - logDiagnostic("Try to load local package map at %s", local_package_file.toNativeString()); - auto packlist = jsonFromFile(list_path ~ LocalPackagesFilename); - enforce(packlist.type == Json.Type.array, LocalPackagesFilename~" must contain an array."); - foreach( pentry; packlist ){ - try { - auto name = pentry["name"].get!string; - auto path = NativePath(pentry["path"].get!string); - if (name == "*") { - paths ~= path; - } else { - auto ver = Version(pentry["version"].get!string); + foreach (ref repository; this.m_repositories) + repository.scanLocalPackages(refresh, this); - Package pp; - if (!refresh_existing_packages) { - foreach (p; m_repositories[type].localPackages) - if (p.path == path) { - pp = p; - break; - } - } + this.m_internal.scan(this, refresh); + foreach (ref repository; this.m_repositories) + repository.scan(this, refresh); - if (!pp) { - auto infoFile = Package.findPackageFile(path); - if (!infoFile.empty) pp = 
Package.load(path, infoFile); - else { - logWarn("Locally registered package %s %s was not found. Please run 'dub remove-local \"%s\"'.", - name, ver, path.toNativeString()); - auto info = Json.emptyObject; - info["name"] = name; - pp = new Package(info, path); - } - } - - if (pp.name != name) - logWarn("Local package at %s has different name than %s (%s)", path.toNativeString(), name, pp.name); - pp.version_ = ver; - - addPackages(packs, pp); - } - } catch( Exception e ){ - logWarn("Error adding local package: %s", e.msg); - } - } - } catch( Exception e ){ - logDiagnostic("Loading of local package list at %s failed: %s", list_path.toNativeString(), e.msg); - } - m_repositories[type].localPackages = packs; - m_repositories[type].searchPath = paths; - } - if (!m_disableDefaultSearchPaths) - { - scanLocalPackages(LocalPackageType.system); - scanLocalPackages(LocalPackageType.user); - scanLocalPackages(LocalPackageType.package_); - } - - auto old_packages = m_packages; - - // rescan the system and user package folder - void scanPackageFolder(NativePath path) - { - if( path.existsDirectory() ){ - logDebug("iterating dir %s", path.toNativeString()); - try foreach( pdir; iterateDirectory(path) ){ - logDebug("iterating dir %s entry %s", path.toNativeString(), pdir.name); - if (!pdir.isDirectory) continue; - - auto pack_path = path ~ (pdir.name ~ "/"); - - auto packageFile = Package.findPackageFile(pack_path); - - if (isManagedPath(path) && packageFile.empty) { - // Search for a single directory within this directory which happen to be a prefix of pdir - // This is to support new folder structure installed over the ancient one. - foreach (subdir; iterateDirectory(path ~ (pdir.name ~ "/"))) - if (subdir.isDirectory && pdir.name.startsWith(subdir.name)) {// eg: package vibe-d will be in "vibe-d-x.y.z/vibe-d" - pack_path ~= subdir.name ~ "/"; - packageFile = Package.findPackageFile(pack_path); - break; - } - } - - if (packageFile.empty) continue; - Package p; - try { - if (!refresh_existing_packages) - foreach (pp; old_packages) - if (pp.path == pack_path) { - p = pp; - break; - } - if (!p) p = Package.load(pack_path, packageFile); - addPackages(m_packages, p); - } catch( Exception e ){ - logError("Failed to load package in %s: %s", pack_path, e.msg); - logDiagnostic("Full error: %s", e.toString().sanitize()); - } - } - catch(Exception e) logDiagnostic("Failed to enumerate %s packages: %s", path.toNativeString(), e.toString()); - } - } - - m_packages = null; - foreach (p; this.completeSearchPath) - scanPackageFolder(p); - - void loadOverrides(LocalPackageType type) - { - m_repositories[type].overrides = null; - auto ovrfilepath = m_repositories[type].packagePath ~ LocalOverridesFilename; - if (existsFile(ovrfilepath)) { - foreach (entry; jsonFromFile(ovrfilepath)) { - PackageOverride ovr; - ovr.package_ = entry["name"].get!string; - ovr.version_ = Dependency(entry["version"].get!string); - if (auto pv = "targetVersion" in entry) ovr.targetVersion = Version(pv.get!string); - if (auto pv = "targetPath" in entry) ovr.targetPath = NativePath(pv.get!string); - m_repositories[type].overrides ~= ovr; - } - } - } - if (!m_disableDefaultSearchPaths) - { - loadOverrides(LocalPackageType.package_); - loadOverrides(LocalPackageType.user); - loadOverrides(LocalPackageType.system); - } + foreach (ref repository; this.m_repositories) + repository.loadOverrides(); } alias Hash = ubyte[]; - /// Generates a hash value for a given package. + /// Generates a hash digest for a given package. 
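+	/// (A SHA256 digest computed over the package's file names and contents.)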
/// Some files or folders are ignored during the generation (like .dub and /// .svn folders) Hash hashPackage(Package pack) @@ -814,65 +856,26 @@ string[] ignored_directories = [".git", ".dub", ".svn"]; // something from .dub_ignore or what? string[] ignored_files = []; - SHA1 sha1; + SHA256 hash; foreach(file; dirEntries(pack.path.toNativeString(), SpanMode.depth)) { - if(file.isDir && ignored_directories.canFind(NativePath(file.name).head.name)) + const isDir = file.isDir; + if(isDir && ignored_directories.canFind(NativePath(file.name).head.name)) continue; else if(ignored_files.canFind(NativePath(file.name).head.name)) continue; - sha1.put(cast(ubyte[])NativePath(file.name).head.name); - if(file.isDir) { + hash.put(cast(ubyte[])NativePath(file.name).head.name); + if(isDir) { logDebug("Hashed directory name %s", NativePath(file.name).head); } else { - sha1.put(openFile(NativePath(file.name)).readAll()); + hash.put(openFile(NativePath(file.name)).readAll()); logDebug("Hashed file contents from %s", NativePath(file.name).head); } } - auto hash = sha1.finish(); - logDebug("Project hash: %s", hash); - return hash[].dup; - } - - private void writeLocalPackageList(LocalPackageType type) - { - Json[] newlist; - foreach (p; m_repositories[type].searchPath) { - auto entry = Json.emptyObject; - entry["name"] = "*"; - entry["path"] = p.toNativeString(); - newlist ~= entry; - } - - foreach (p; m_repositories[type].localPackages) { - if (p.parentPackage) continue; // do not store sub packages - auto entry = Json.emptyObject; - entry["name"] = p.name; - entry["version"] = p.version_.toString(); - entry["path"] = p.path.toNativeString(); - newlist ~= entry; - } - - NativePath path = m_repositories[type].packagePath; - if( !existsDirectory(path) ) mkdirRecurse(path.toNativeString()); - writeJsonFile(path ~ LocalPackagesFilename, Json(newlist)); - } - - private void writeLocalPackageOverridesFile(LocalPackageType type) - { - Json[] newlist; - foreach (ovr; m_repositories[type].overrides) { - auto jovr = Json.emptyObject; - jovr["name"] = ovr.package_; - jovr["version"] = ovr.version_.versionSpec; - if (!ovr.targetPath.empty) jovr["targetPath"] = ovr.targetPath.toNativeString(); - else jovr["targetVersion"] = ovr.targetVersion.toString(); - newlist ~= jovr; - } - auto path = m_repositories[type].packagePath; - if (!existsDirectory(path)) mkdirRecurse(path.toNativeString()); - writeJsonFile(path ~ LocalOverridesFilename, Json(newlist)); + auto digest = hash.finish(); + logDebug("Project hash: %s", digest); + return digest[].dup; } /// Adds the package and scans for subpackages. 
@@ -911,45 +914,379 @@
	}
}

-struct PackageOverride {
-	string package_;
-	Dependency version_;
-	Version targetVersion;
-	NativePath targetPath;
+deprecated(OverrideDepMsg)
+alias PackageOverride = PackageOverride_;
+package(dub) struct PackageOverride_ {
+	private alias ResolvedDep = SumType!(NativePath, Version);
+	string package_;
+	VersionRange source;
+	ResolvedDep target;
+
+	deprecated("Use `source` instead")
+	@property inout(Dependency) version_ () inout return @safe {
+		return Dependency(this.source);
+	}
+
+	deprecated("Assign `source` instead")
+	@property ref PackageOverride version_ (Dependency v) scope return @safe pure {
+		this.source = v.visit!(
+			(VersionRange range) => range,
+			(any) {
+				int a; if (a) return VersionRange.init; // Trick the compiler
+				throw new Exception("Cannot use anything other than a `VersionRange` for overrides");
+			},
+		);
+		return this;
+	}
+
+	deprecated("Use `target.match` directly instead")
+	@property inout(Version) targetVersion () inout return @safe pure nothrow @nogc {
+		return this.target.match!(
+			(Version v) => v,
+			(any) => Version.init,
+		);
+	}
+
+	deprecated("Assign `target` directly instead")
+	@property ref PackageOverride targetVersion (Version v) scope return pure nothrow @nogc {
+		this.target = v;
+		return this;
+	}
+
+	deprecated("Use `target.match` directly instead")
+	@property inout(NativePath) targetPath () inout return @safe pure nothrow @nogc {
+		return this.target.match!(
+			(NativePath v) => v,
+			(any) => NativePath.init,
+		);
+	}
+
+	deprecated("Assign `target` directly instead")
+	@property ref PackageOverride targetPath (NativePath v) scope return pure nothrow @nogc {
+		this.target = v;
+		return this;
+	}
+
+	deprecated("Use the overload that accepts a `VersionRange` as 2nd argument")
	this(string package_, Dependency version_, Version target_version)
	{
		this.package_ = package_;
		this.version_ = version_;
-		this.targetVersion = target_version;
+		this.target = target_version;
	}

+	deprecated("Use the overload that accepts a `VersionRange` as 2nd argument")
	this(string package_, Dependency version_, NativePath target_path)
	{
		this.package_ = package_;
		this.version_ = version_;
-		this.targetPath = target_path;
+		this.target = target_path;
+	}
+
+	this(string package_, VersionRange src, Version target)
+	{
+		this.package_ = package_;
+		this.source = src;
+		this.target = target;
+	}
+
+	this(string package_, VersionRange src, NativePath target)
+	{
+		this.package_ = package_;
+		this.source = src;
+		this.target = target;
	}
}

-enum LocalPackageType {
-	package_,
-	user,
-	system
+deprecated("Use `PlacementLocation` instead")
+enum LocalPackageType : PlacementLocation {
+	package_ = PlacementLocation.local,
+	user = PlacementLocation.user,
+	system = PlacementLocation.system,
}

private enum LocalPackagesFilename = "local-packages.json";
private enum LocalOverridesFilename = "local-overrides.json";
-
-private struct Repository {
+/**
+ * A managed location, with packages, configuration, and overrides
+ *
+ * There exist three standard locations, listed in `PlacementLocation`.
+ * The user one is the default, with the system and local ones meeting
+ * different needs.
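+ *
+ * For instance, with dub's usual defaults on POSIX (an assumption; the
+ * actual roots are supplied by the `Dub` class, not hardcoded here):
+ * ---
+ * PlacementLocation.local  => $PACKAGE_ROOT/.dub/packages/
+ * PlacementLocation.user   => ~/.dub/packages/
+ * PlacementLocation.system => /var/lib/dub/packages/
+ * ---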
+ *
+ * Each location has a root, under which the following may be found:
+ * - A `packages/` directory, where packages are stored (see `packagePath`);
+ * - A `local-packages.json` file, with extra search paths
+ *   and manually added packages (see `dub add-local`);
+ * - A `local-overrides.json` file, with manually added overrides (`dub add-override`);
+ *
+ * Additionally, each location hosts a config file,
+ * which is not managed by this module, but by dub itself.
+ */
+private struct Location {
+	/// The absolute path to the root of the location
	NativePath packagePath;

-	NativePath[] searchPath;
-	Package[] localPackages;
-	PackageOverride[] overrides;

-	this(NativePath path)
+	/// Configured (extra) search paths for this `Location`
+	NativePath[] searchPath;
+
+	/**
+	 * List of manually registered packages at this `Location`
+	 * and stored in `local-packages.json`
+	 */
+	Package[] localPackages;
+
+	/// List of overrides stored at this `Location`
+	PackageOverride_[] overrides;
+
+	/**
+	 * List of packages stored under `packagePath` and automatically detected
+	 */
+	Package[] fromPath;
+
+	this(NativePath path) @safe pure nothrow @nogc
	{
		this.packagePath = path;
	}
+
+	void loadOverrides()
+	{
+		this.overrides = null;
+		auto ovrfilepath = this.packagePath ~ LocalOverridesFilename;
+		if (existsFile(ovrfilepath)) {
+			logWarn("Found local override file: %s", ovrfilepath);
+			logWarn(OverrideDepMsg);
+			logWarn("Replace with a path-based dependency in your project or a custom cache path");
+			foreach (entry; jsonFromFile(ovrfilepath)) {
+				PackageOverride_ ovr;
+				ovr.package_ = entry["name"].get!string;
+				ovr.source = VersionRange.fromString(entry["version"].get!string);
+				if (auto pv = "targetVersion" in entry) ovr.target = Version(pv.get!string);
+				if (auto pv = "targetPath" in entry) ovr.target = NativePath(pv.get!string);
+				this.overrides ~= ovr;
+			}
+		}
+	}
+
+	private void writeOverrides()
+	{
+		Json[] newlist;
+		foreach (ovr; this.overrides) {
+			auto jovr = Json.emptyObject;
+			jovr["name"] = ovr.package_;
+			jovr["version"] = ovr.source.toString();
+			ovr.target.match!(
+				(NativePath path) { jovr["targetPath"] = path.toNativeString(); },
+				(Version vers) { jovr["targetVersion"] = vers.toString(); },
+			);
+			newlist ~= jovr;
+		}
+		auto path = this.packagePath;
+		if (!existsDirectory(path)) mkdirRecurse(path.toNativeString());
+		writeJsonFile(path ~ LocalOverridesFilename, Json(newlist));
+	}
+
+	private void writeLocalPackageList()
+	{
+		Json[] newlist;
+		foreach (p; this.searchPath) {
+			auto entry = Json.emptyObject;
+			entry["name"] = "*";
+			entry["path"] = p.toNativeString();
+			newlist ~= entry;
+		}
+
+		foreach (p; this.localPackages) {
+			if (p.parentPackage) continue; // do not store sub packages
+			auto entry = Json.emptyObject;
+			entry["name"] = p.name;
+			entry["version"] = p.version_.toString();
+			entry["path"] = p.path.toNativeString();
+			newlist ~= entry;
+		}

+		NativePath path = this.packagePath;
+		if( !existsDirectory(path) ) mkdirRecurse(path.toNativeString());
+		writeJsonFile(path ~ LocalPackagesFilename, Json(newlist));
+	}
+
+	// load locally defined packages
+	void scanLocalPackages(bool refresh, PackageManager manager)
+	{
+		NativePath list_path = this.packagePath;
+		Package[] packs;
+		NativePath[] paths;
+		try {
+			auto local_package_file = list_path ~ LocalPackagesFilename;
+			if (!existsFile(local_package_file)) return;
+
+			logDiagnostic("Loading local package map at %s", local_package_file.toNativeString());
+			auto packlist = jsonFromFile(local_package_file);
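+			// Shape of local-packages.json, mirroring `writeLocalPackageList`
+			// above (entry values are examples):
+			// [ { "name": "*",     "path": "/extra/search/path/" },
+			//   { "name": "mypkg", "version": "1.2.3", "path": "/path/to/mypkg/" } ]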
+			enforce(packlist.type == Json.Type.array, LocalPackagesFilename ~ " must contain an array.");
+			foreach (pentry; packlist) {
+				try {
+					auto name = pentry["name"].get!string;
+					auto path = NativePath(pentry["path"].get!string);
+					if (name == "*") {
+						paths ~= path;
+					} else {
+						auto ver = Version(pentry["version"].get!string);
+
+						Package pp;
+						if (!refresh) {
+							foreach (p; this.localPackages)
+								if (p.path == path) {
+									pp = p;
+									break;
+								}
+						}
+
+						if (!pp) {
+							auto infoFile = Package.findPackageFile(path);
+							if (!infoFile.empty) pp = Package.load(path, infoFile);
+							else {
+								logWarn("Locally registered package %s %s was not found. Please run 'dub remove-local \"%s\"'.",
+									name, ver, path.toNativeString());
+								// Store a dummy package
+								pp = new Package(PackageRecipe(name), path);
+							}
+						}
+
+						if (pp.name != name)
+							logWarn("Local package at %s has different name than %s (%s)", path.toNativeString(), name, pp.name);
+						pp.version_ = ver;
+						manager.addPackages(packs, pp);
+					}
+				} catch (Exception e) {
+					logWarn("Error adding local package: %s", e.msg);
+				}
+			}
+		} catch (Exception e) {
+			logDiagnostic("Loading of local package list at %s failed: %s", list_path.toNativeString(), e.msg);
+		}
+		this.localPackages = packs;
+		this.searchPath = paths;
+	}
+
+	/**
+	 * Scan this location
+	 */
+	void scan(PackageManager mgr, bool refresh)
+	{
+		// If we're asked to refresh, reload the packages from scratch
+		auto existing = refresh ? null : this.fromPath;
+		if (this.packagePath !is NativePath.init) {
+			// Only clean the managed locations: the internal location
+			// (which has no `packagePath`) uses `fromPath` to store packages
+			// loaded by the user (e.g. the project and its subpackages).
+			this.fromPath = null;
+		}
+		foreach (path; this.searchPath)
+			this.scanPackageFolder(path, mgr, existing);
+		if (this.packagePath !is NativePath.init)
+			this.scanPackageFolder(this.packagePath, mgr, existing);
+	}
+
+	/**
+	 * Scan the content of a folder (`packagePath` or one of the `searchPath`
+	 * entries), and add all packages that were found to this location.
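+	 *
+	 * Two on-disk layouts are recognized (package name and version are
+	 * hypothetical):
+	 * ---
+	 * $ROOT/foo/dub.json            // old flat layout, e.g. for search paths
+	 * $ROOT/foo-1.2.3/foo/dub.json  // managed layout (since 2015)
+	 * ---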
+	 */
+	void scanPackageFolder(NativePath path, PackageManager mgr,
+		Package[] existing_packages)
+	{
+		if (!path.existsDirectory())
+			return;
+
+		logDebug("iterating dir %s", path.toNativeString());
+		try foreach (pdir; iterateDirectory(path)) {
+			logDebug("iterating dir %s entry %s", path.toNativeString(), pdir.name);
+			if (!pdir.isDirectory) continue;
+
+			// Old / flat directory structure, used for non-standard paths
+			// Packages are stored in `$ROOT/$SOMETHING/`
+			auto pack_path = path ~ (pdir.name ~ "/");
+			auto packageFile = Package.findPackageFile(pack_path);
+
+			// New (since 2015) managed structure:
+			// `$ROOT/$NAME-$VERSION/$NAME`
+			// This is the most common code path
+			if (mgr.isManagedPath(path) && packageFile.empty) {
+				foreach (subdir; iterateDirectory(path ~ (pdir.name ~ "/")))
+					if (subdir.isDirectory && pdir.name.startsWith(subdir.name)) {
+						pack_path ~= subdir.name ~ "/";
+						packageFile = Package.findPackageFile(pack_path);
+						break;
+					}
+			}
+
+			if (packageFile.empty) continue;
+			Package p;
+			try {
+				foreach (pp; existing_packages)
+					if (pp.path == pack_path) {
+						p = pp;
+						break;
+					}
+				if (!p) p = Package.load(pack_path, packageFile);
+				mgr.addPackages(this.fromPath, p);
+			} catch (ConfigException exc) {
+				// Configy error messages already include the path
+				logError("Invalid recipe for local package: %S", exc);
+			} catch (Exception e) {
+				logError("Failed to load package in %s: %s", pack_path, e.msg);
+				logDiagnostic("Full error: %s", e.toString().sanitize());
+			}
+		}
+		catch (Exception e)
+			logDiagnostic("Failed to enumerate %s packages: %s", path.toNativeString(), e.toString());
+	}
+
+	/**
+	 * Looks up already-loaded packages at a specific version
+	 *
+	 * Looks up a package according to this `Location`'s priority,
+	 * that is, packages from the search path and local packages
+	 * have the highest priority.
+	 *
+	 * Params:
+	 *	 name = The full name of the package to look up
+	 *	 ver = The version to look up
+	 *
+	 * Returns:
+	 *	 A `Package` if one was found, `null` if none exists.
+	 */
+	private inout(Package) lookup(string name, Version ver) inout {
+		foreach (pkg; this.localPackages)
+			if (pkg.name == name && pkg.version_.matches(ver, VersionMatchMode.strict))
+				return pkg;
+		foreach (pkg; this.fromPath)
+			if (pkg.name == name && pkg.version_.matches(ver, VersionMatchMode.strict))
+				return pkg;
+		return null;
+	}
+
+	/**
+	 * Get the final destination a specific package needs to be stored in.
+	 *
+	 * Note that there needs to be an extra level for libraries like `ae`
+	 * which expect their containing folder to have an exact name and use
+	 * `importPath "../"`.
+	 *
+	 * Hence the final format should be `$BASE/$NAME-$VERSION/$NAME`,
+	 * but this function returns `$BASE/$NAME-$VERSION/`, where `$BASE`
+	 * is `this.packagePath`.
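+	 *
+	 * For example (hypothetical values): with a `packagePath` of `/cache/`,
+	 * `getPackagePath("ae", "6.0.0+b1")` yields `/cache/ae-6.0.0_b1/`,
+	 * while the package content itself ends up in `/cache/ae-6.0.0_b1/ae/`.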
+ */ + private NativePath getPackagePath (string name, string vers) + { + // + has special meaning for Optlink + string clean_vers = vers.chompPrefix("~").replace("+", "_"); + NativePath result = this.packagePath ~ (name ~ "-" ~ clean_vers); + result.endsWithSlash = true; + return result; + } } + +private immutable string OverrideDepMsg = + "Overrides are deprecated as they are redundant with more fine-grained approaches"; diff --git a/source/dub/packagesuppliers/fallback.d b/source/dub/packagesuppliers/fallback.d index 39a61e6..1103e37 100644 --- a/source/dub/packagesuppliers/fallback.d +++ b/source/dub/packagesuppliers/fallback.d @@ -46,14 +46,14 @@ { import std.format : format; enum fallback = q{ - import dub.internal.vibecompat.core.log : logDiagnostic; + import dub.internal.logging : logDebug; Exception firstEx; try return m_suppliers[0].ps.%1$s(args); catch (Exception e) { - logDiagnostic("Package supplier %%s failed with '%%s', trying fallbacks.", + logDebug("Package supplier %%s failed with '%%s', trying fallbacks.", m_suppliers[0].ps.description, e.msg); firstEx = e; } @@ -65,13 +65,13 @@ continue; try { - scope (success) logDiagnostic("Fallback %%s succeeded", pair.ps.description); + scope (success) logDebug("Fallback %%s succeeded", pair.ps.description); return pair.ps.%1$s(args); } catch (Exception e) { pair.failTime = now; - logDiagnostic("Fallback package supplier %%s failed with '%%s'.", + logDebug("Fallback package supplier %%s failed with '%%s'.", pair.ps.description, e.msg); } } diff --git a/source/dub/packagesuppliers/filesystem.d b/source/dub/packagesuppliers/filesystem.d index 9cfb55e..fcb9efd 100644 --- a/source/dub/packagesuppliers/filesystem.d +++ b/source/dub/packagesuppliers/filesystem.d @@ -9,7 +9,8 @@ the form "[package name]-[version].zip". 
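+	For example, version 1.2.3 of package `foo` would be looked up at
+	`foo-1.2.3.zip` (hypothetical names).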
*/ class FileSystemPackageSupplier : PackageSupplier { - import dub.internal.vibecompat.core.log; + import dub.internal.logging; + version (Have_vibe_core) import dub.internal.vibecompat.inet.path : toNativeString; import std.exception : enforce; private { @@ -42,7 +43,7 @@ { import dub.internal.vibecompat.core.file : copyFile, existsFile; enforce(path.absolute); - logInfo("Storing package '"~packageId~"', version requirements: %s", dep); + logInfo("Storing package '%s', version requirements: %s", packageId, dep); auto filename = bestPackageFile(packageId, dep, pre_release); enforce(existsFile(filename)); copyFile(filename, path); diff --git a/source/dub/packagesuppliers/maven.d b/source/dub/packagesuppliers/maven.d index 7ac02ed..4899844 100644 --- a/source/dub/packagesuppliers/maven.d +++ b/source/dub/packagesuppliers/maven.d @@ -11,8 +11,8 @@ class MavenRegistryPackageSupplier : PackageSupplier { import dub.internal.utils : retryDownload, HTTPStatusException; import dub.internal.vibecompat.data.json : serializeToJson; - import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.inet.url : URL; + import dub.internal.logging; import std.datetime : Clock, Duration, hours, SysTime, UTC; @@ -63,7 +63,7 @@ } catch(HTTPStatusException e) { if (e.status == 404) throw e; - else logDebug("Failed to download package %s from %s", packageId, url); + else logDebug("Failed to download package %s from %s", packageId, url); } catch(Exception e) { logDebug("Failed to download package %s from %s", packageId, url); @@ -125,8 +125,7 @@ auto md = getMetadata(query); if (md.type == Json.Type.null_) return []; - auto json = getBestPackage(md, query, Dependency(">=0.0.0"), true); + auto json = getBestPackage(md, query, Dependency.any, true); return [SearchResult(json["name"].opt!string, "", json["version"].opt!string)]; } } - diff --git a/source/dub/packagesuppliers/packagesupplier.d b/source/dub/packagesuppliers/packagesupplier.d index 4018977..c0a75d5 100644 --- a/source/dub/packagesuppliers/packagesupplier.d +++ b/source/dub/packagesuppliers/packagesupplier.d @@ -29,8 +29,8 @@ Params: path = Absolute path of the target ZIP file package_id = Name of the package to retrieve - dep: Version constraint to match against - pre_release: If true, matches the latest pre-release version. + dep = Version constraint to match against + pre_release = If true, matches the latest pre-release version. Otherwise prefers stable versions. */ void fetchPackage(NativePath path, string package_id, Dependency dep, bool pre_release); @@ -39,8 +39,8 @@ Params: package_id = Name of the package of which to retrieve the recipe - dep: Version constraint to match against - pre_release: If true, matches the latest pre-release version. + dep = Version constraint to match against + pre_release = If true, matches the latest pre-release version. Otherwise prefers stable versions. 
*/ Json fetchPackageRecipe(string package_id, Dependency dep, bool pre_release); diff --git a/source/dub/packagesuppliers/registry.d b/source/dub/packagesuppliers/registry.d index 83b01e5..a5afa64 100644 --- a/source/dub/packagesuppliers/registry.d +++ b/source/dub/packagesuppliers/registry.d @@ -12,9 +12,9 @@ */ class RegistryPackageSupplier : PackageSupplier { import dub.internal.utils : download, retryDownload, HTTPStatusException; - import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.data.json : parseJson, parseJsonString, serializeToJson; import dub.internal.vibecompat.inet.url : URL; + import dub.internal.logging; import std.uri : encodeComponent; import std.datetime : Clock, Duration, hours, SysTime, UTC; @@ -131,4 +131,3 @@ .array; } } - diff --git a/source/dub/platform.d b/source/dub/platform.d index 39e5557..37122d7 100644 --- a/source/dub/platform.d +++ b/source/dub/platform.d @@ -188,16 +188,14 @@ */ bool matchesSpecification(in BuildPlatform platform, const(char)[] specification) { - import std.string : format; + import std.string : chompPrefix, format; import std.algorithm : canFind, splitter; import std.exception : enforce; if (specification.empty) return true; if (platform == BuildPlatform.any) return true; - auto splitted = specification.splitter('-'); - assert(!splitted.empty, "No valid platform specification! The leading hyphen is required!"); - splitted.popFront(); // Drop leading empty match. + auto splitted = specification.chompPrefix("-").splitter('-'); enforce(!splitted.empty, format("Platform specification, if present, must not be empty: \"%s\"", specification)); if (platform.platform.canFind(splitted.front)) { @@ -220,16 +218,19 @@ /// unittest { - auto platform=BuildPlatform(["posix", "linux"], ["x86_64"], "dmd"); + auto platform = BuildPlatform(["posix", "linux"], ["x86_64"], "dmd"); assert(platform.matchesSpecification("")); - assert(platform.matchesSpecification("-posix")); - assert(platform.matchesSpecification("-linux")); - assert(platform.matchesSpecification("-linux-dmd")); - assert(platform.matchesSpecification("-linux-x86_64-dmd")); + assert(platform.matchesSpecification("posix")); + assert(platform.matchesSpecification("linux")); + assert(platform.matchesSpecification("linux-dmd")); + assert(platform.matchesSpecification("linux-x86_64-dmd")); + assert(platform.matchesSpecification("x86_64")); + assert(!platform.matchesSpecification("windows")); + assert(!platform.matchesSpecification("ldc")); + assert(!platform.matchesSpecification("windows-dmd")); + + // Before PR#2279, a leading '-' was required assert(platform.matchesSpecification("-x86_64")); - assert(!platform.matchesSpecification("-windows")); - assert(!platform.matchesSpecification("-ldc")); - assert(!platform.matchesSpecification("-windows-dmd")); } /// Represents a platform a package can be build upon. 
@@ -266,6 +267,18 @@ bp.frontendVersion = 2067; assert(bp.frontendVersionString == "2.067"); } + + /// Checks to see if platform field contains windows + bool isWindows() const { + import std.algorithm : canFind; + return this.platform.canFind("windows"); + } + /// + unittest { + BuildPlatform bp; + bp.platform = ["windows"]; + assert(bp.isWindows); + bp.platform = ["posix"]; + assert(!bp.isWindows); + } } - - diff --git a/source/dub/project.d b/source/dub/project.d index e7361d0..b1545e3 100644 --- a/source/dub/project.d +++ b/source/dub/project.d @@ -10,22 +10,25 @@ import dub.compilers.compiler; import dub.dependency; import dub.description; +import dub.generators.generator; import dub.internal.utils; import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.path; +import dub.internal.logging; import dub.package_; import dub.packagemanager; -import dub.generators.generator; +import dub.recipe.selection; + +import configy.Read; import std.algorithm; import std.array; import std.conv : to; import std.datetime; +import std.encoding : sanitize; import std.exception : enforce; import std.string; -import std.encoding : sanitize; /** Represents a full project, a root package with its dependencies and package @@ -38,7 +41,6 @@ class Project { private { PackageManager m_packageManager; - Json m_packageSettings; Package m_rootPackage; Package[] m_dependencies; Package[][Package] m_dependees; @@ -63,7 +65,7 @@ logWarn("There was no package description found for the application in '%s'.", project_path.toNativeString()); pack = new Package(PackageRecipe.init, project_path); } else { - pack = package_manager.getOrLoadPackage(project_path, packageFile); + pack = package_manager.getOrLoadPackage(project_path, packageFile, false, StrictMode.Warn); } this(package_manager, pack); @@ -74,19 +76,15 @@ { m_packageManager = package_manager; m_rootPackage = pack; - m_packageSettings = Json.emptyObject; - try m_packageSettings = jsonFromFile(m_rootPackage.path ~ ".dub/dub.json", true); - catch(Exception t) logDiagnostic("Failed to read .dub/dub.json: %s", t.msg); - - auto selverfile = m_rootPackage.path ~ SelectedVersions.defaultFile; + auto selverfile = (m_rootPackage.path ~ SelectedVersions.defaultFile).toNativeString(); if (existsFile(selverfile)) { - try m_selections = new SelectedVersions(selverfile); - catch(Exception e) { - logWarn("Failed to load %s: %s", SelectedVersions.defaultFile, e.msg); - logDiagnostic("Full error: %s", e.toString().sanitize); - m_selections = new SelectedVersions; - } + // TODO: Remove `StrictMode.Warn` after v1.40 release + // The default is to error, but as the previous parser wasn't + // complaining, we should first warn the user. + auto selected = parseConfigFileSimple!Selected(selverfile, StrictMode.Warn); + m_selections = !selected.isNull() ? + new SelectedVersions(selected.get()) : new SelectedVersions(); } else m_selections = new SelectedVersions; reinit(); @@ -199,7 +197,7 @@ possible configuration instead of the first "executable" configuration. 
*/ - string getDefaultConfiguration(BuildPlatform platform, bool allow_non_library_configs = true) + string getDefaultConfiguration(in BuildPlatform platform, bool allow_non_library_configs = true) const { auto cfgs = getPackageConfigs(platform, null, allow_non_library_configs); return cfgs[m_rootPackage.name]; @@ -226,6 +224,159 @@ m_overriddenConfigs[package_] = config; } + /** Adds a test runner configuration for the root package. + + Params: + generate_main = Whether to generate the main.d file + base_config = Optional base configuration + custom_main_file = Optional path to file with custom main entry point + + Returns: + Name of the added test runner configuration, or null for base configurations with target type `none` + */ + string addTestRunnerConfiguration(in GeneratorSettings settings, bool generate_main = true, string base_config = "", NativePath custom_main_file = NativePath()) + { + if (base_config.length == 0) { + // if a custom main file was given, favor the first library configuration, so that it can be applied + if (!custom_main_file.empty) base_config = getDefaultConfiguration(settings.platform, false); + // else look for a "unittest" configuration + if (!base_config.length && rootPackage.configurations.canFind("unittest")) base_config = "unittest"; + // if not found, fall back to the first "library" configuration + if (!base_config.length) base_config = getDefaultConfiguration(settings.platform, false); + // if still nothing found, use the first executable configuration + if (!base_config.length) base_config = getDefaultConfiguration(settings.platform, true); + } + + BuildSettings lbuildsettings = settings.buildSettings.dup; + addBuildSettings(lbuildsettings, settings, base_config, null, true); + + if (lbuildsettings.targetType == TargetType.none) { + logInfo(`Configuration '%s' has target type "none". Skipping test runner configuration.`, base_config); + return null; + } + + if (lbuildsettings.targetType == TargetType.executable && base_config == "unittest") { + if (!custom_main_file.empty) logWarn("Ignoring custom main file."); + return base_config; + } + + if (lbuildsettings.sourceFiles.empty) { + logInfo(`No source files found in configuration '%s'. 
Falling back to default configuration for test runner.`, base_config); + if (!custom_main_file.empty) logWarn("Ignoring custom main file."); + return getDefaultConfiguration(settings.platform); + } + + const config = format("%s-test-%s", rootPackage.name.replace(".", "-").replace(":", "-"), base_config); + logInfo(`Generating test runner configuration '%s' for '%s' (%s).`, config, base_config, lbuildsettings.targetType); + + BuildSettingsTemplate tcinfo = rootPackage.recipe.getConfiguration(base_config).buildSettings.dup; + tcinfo.targetType = TargetType.executable; + + // set targetName unless specified explicitly in unittest base configuration + if (tcinfo.targetName.empty || base_config != "unittest") + tcinfo.targetName = config; + + auto mainfil = tcinfo.mainSourceFile; + if (!mainfil.length) mainfil = rootPackage.recipe.buildSettings.mainSourceFile; + + string custommodname; + if (!custom_main_file.empty) { + import std.path; + tcinfo.sourceFiles[""] ~= custom_main_file.relativeTo(rootPackage.path).toNativeString(); + tcinfo.importPaths[""] ~= custom_main_file.parentPath.toNativeString(); + custommodname = custom_main_file.head.name.baseName(".d"); + } + + // prepare the list of tested modules + + string[] import_modules; + if (settings.single) + lbuildsettings.importPaths ~= NativePath(mainfil).parentPath.toNativeString; + bool firstTimePackage = true; + foreach (file; lbuildsettings.sourceFiles) { + if (file.endsWith(".d")) { + auto fname = NativePath(file).head.name; + NativePath msf = NativePath(mainfil); + if (msf.absolute) + msf = msf.relativeTo(rootPackage.path); + if (!settings.single && NativePath(file).relativeTo(rootPackage.path) == msf) { + logWarn("Excluding main source file %s from test.", mainfil); + tcinfo.excludedSourceFiles[""] ~= mainfil; + continue; + } + if (fname == "package.d") { + if (firstTimePackage) { + firstTimePackage = false; + logDiagnostic("Excluding package.d file from test due to https://issues.dlang.org/show_bug.cgi?id=11847"); + } + continue; + } + import_modules ~= dub.internal.utils.determineModuleName(lbuildsettings, NativePath(file), rootPackage.path); + } + } + + NativePath mainfile; + if (settings.tempBuild) + mainfile = getTempFile("dub_test_root", ".d"); + else { + import dub.generators.build : computeBuildName; + mainfile = rootPackage.path ~ format(".dub/code/%s/dub_test_root.d", computeBuildName(config, settings, import_modules)); + } + + auto escapedMainFile = mainfile.toNativeString().replace("$", "$$"); + tcinfo.sourceFiles[""] ~= escapedMainFile; + tcinfo.mainSourceFile = escapedMainFile; + if (!settings.tempBuild) { + // add the directory containing dub_test_root.d to the import paths + tcinfo.importPaths[""] ~= NativePath(escapedMainFile).parentPath.toNativeString(); + } + + if (generate_main && (settings.force || !existsFile(mainfile))) { + import std.file : mkdirRecurse; + mkdirRecurse(mainfile.parentPath.toNativeString()); + + auto fil = openFile(mainfile, FileMode.createTrunc); + scope(exit) fil.close(); + fil.write("module dub_test_root;\n"); + fil.write("import std.typetuple;\n"); + foreach (mod; import_modules) fil.write(format("static import %s;\n", mod)); + fil.write("alias allModules = TypeTuple!("); + foreach (i, mod; import_modules) { + if (i > 0) fil.write(", "); + fil.write(mod); + } + fil.write(");\n"); + if (custommodname.length) { + fil.write(format("import %s;\n", custommodname)); + } else { + fil.write(q{ +import core.runtime; + +void main() { + version (D_Coverage) { + } else { + import std.stdio : writeln; + 
writeln("All unit tests have been run successfully."); + } +} +shared static this() { + version (Have_tested) { + import tested; + import core.runtime; + import std.exception; + Runtime.moduleUnitTester = () => true; + enforce(runUnitTests!allModules(new ConsoleTestResultWriter), "Unit tests failed."); + } +} + }); + } + } + + rootPackage.recipe.configurations ~= ConfigurationInfo(config, tcinfo); + + return config; + } + /** Performs basic validation of various aspects of the package. This will emit warnings to `stderr` if any discouraged names or @@ -256,7 +407,7 @@ enforce(!m_rootPackage.name.canFind(' '), "Aborting due to the package name containing spaces."); foreach (d; m_rootPackage.getAllDependencies()) - if (d.spec.isExactVersion && d.spec.version_.isBranch && d.spec.repository.empty) { + if (d.spec.isExactVersion && d.spec.version_.isBranch) { logWarn("WARNING: A deprecated branch based version specification is used " ~ "for the dependency %s. Please use numbered versions instead. Also " ~ "note that you can still use the %s file to override a certain " @@ -266,16 +417,17 @@ // search for orphan sub configurations void warnSubConfig(string pack, string config) { - logWarn("The sub configuration directive \"%s\" -> \"%s\" " + logWarn("The sub configuration directive \"%s\" -> [%s] " ~ "references a package that is not specified as a dependency " - ~ "and will have no effect.", pack, config); + ~ "and will have no effect.", pack.color(Mode.bold), config.color(Color.blue)); } + void checkSubConfig(string pack, string config) { auto p = getDependency(pack, true); if (p && !p.configurations.canFind(config)) { - logWarn("The sub configuration directive \"%s\" -> \"%s\" " + logWarn("The sub configuration directive \"%s\" -> [%s] " ~ "references a configuration that does not exist.", - pack, config); + pack.color(Mode.bold), config.color(Color.red)); } } auto globalbs = m_rootPackage.getBuildSettings(); @@ -300,13 +452,19 @@ foreach (d; pack.getAllDependencies()) { auto basename = getBasePackageName(d.name); - if (m_selections.hasSelectedVersion(basename)) { - auto selver = m_selections.getSelectedVersion(basename); - if (d.spec.merge(selver) == Dependency.invalid) { - logWarn("Selected package %s %s does not match the dependency specification %s in package %s. Need to \"dub upgrade\"?", - basename, selver, d.spec, pack.name); - } - } + d.spec.visit!( + (NativePath path) { /* Valid */ }, + (Repository repo) { /* Valid */ }, + (VersionRange vers) { + if (m_selections.hasSelectedVersion(basename)) { + auto selver = m_selections.getSelectedVersion(basename); + if (d.spec.merge(selver) == Dependency.invalid) { + logWarn(`Selected package %s %s does not match the dependency specification %s in package %s. Need to "%s"?`, + basename.color(Mode.bold), selver, vers, pack.name.color(Mode.bold), "dub upgrade".color(Mode.bold)); + } + } + }, + ); auto deppack = getDependency(d.name, true); if (deppack in visited) continue; @@ -324,6 +482,10 @@ m_missingDependencies = []; m_packageManager.refresh(false); + Package resolveSubPackage(Package p, string subname, bool silentFail) { + return subname.length ? 
m_packageManager.getSubPackage(p, subname, silentFail) : p;
+		}
+
		void collectDependenciesRec(Package pack, int depth = 0)
		{
			auto indent = replicate("  ", depth);
@@ -341,10 +503,6 @@
				// need to be satisfied
				bool is_desired = !vspec.optional || m_selections.hasSelectedVersion(basename) || (vspec.default_ && m_selections.bare);

-				Package resolveSubPackage(Package p, in bool silentFail) {
-					return subname.length ? m_packageManager.getSubPackage(p, subname, silentFail) : p;
-				}
-
				if (dep.name == m_rootPackage.basePackage.name) {
					vspec = Dependency(m_rootPackage.version_);
					p = m_rootPackage.basePackage;
@@ -358,51 +516,54 @@
					}
				} else if (m_selections.hasSelectedVersion(basename)) {
					vspec = m_selections.getSelectedVersion(basename);
-					if (!vspec.path.empty) {
-						auto path = vspec.path;
-						if (!path.absolute) path = m_rootPackage.path ~ path;
-						p = m_packageManager.getOrLoadPackage(path, NativePath.init, true);
-						p = resolveSubPackage(p, true);
-					} else if (!vspec.repository.empty) {
-						p = m_packageManager.loadSCMPackage(basename, vspec);
-						p = resolveSubPackage(p, true);
-					} else {
-						p = m_packageManager.getBestPackage(dep.name, vspec);
-					}
+					p = vspec.visit!(
+						(NativePath path_) {
+							auto path = path_.absolute ? path_ : m_rootPackage.path ~ path_;
+							auto tmp = m_packageManager.getOrLoadPackage(path, NativePath.init, true);
+							return resolveSubPackage(tmp, subname, true);
+						},
+						(Repository repo) {
+							auto tmp = m_packageManager.loadSCMPackage(basename, repo);
+							return resolveSubPackage(tmp, subname, true);
+						},
+						(VersionRange range) {
+							return m_packageManager.getBestPackage(dep.name, range);
+						},
+					);
				} else if (m_dependencies.canFind!(d => getBasePackageName(d.name) == basename)) {
					auto idx = m_dependencies.countUntil!(d => getBasePackageName(d.name) == basename);
					auto bp = m_dependencies[idx].basePackage;
					vspec = Dependency(bp.path);
-					p = resolveSubPackage(bp, false);
+					p = resolveSubPackage(bp, subname, false);
				} else {
					logDiagnostic("%sVersion selection for dependency %s (%s) of %s is missing.",
						indent, basename, dep.name, pack.name);
				}

-				if (!p && !vspec.repository.empty) {
-					p = m_packageManager.loadSCMPackage(basename, vspec);
-					resolveSubPackage(p, false);
-				}
-
-				if (!p && !vspec.path.empty && is_desired) {
-					NativePath path = vspec.path;
-					if (!path.absolute) path = pack.path ~ path;
-					logDiagnostic("%sAdding local %s in %s", indent, dep.name, path);
-					p = m_packageManager.getOrLoadPackage(path, NativePath.init, true);
-					if (p.parentPackage !is null) {
-						logWarn("%sSub package %s must be referenced using the path to it's parent package.", indent, dep.name);
-						p = p.parentPackage;
+				// We didn't find the package
+				if (p is null)
+				{
+					if (!vspec.repository.empty) {
+						p = m_packageManager.loadSCMPackage(basename, vspec.repository);
+						p = resolveSubPackage(p, subname, false);
+					} else if (!vspec.path.empty && is_desired) {
+						NativePath path = vspec.path;
+						if (!path.absolute) path = pack.path ~ path;
+						logDiagnostic("%sAdding local %s in %s", indent, dep.name, path);
+						p = m_packageManager.getOrLoadPackage(path, NativePath.init, true);
+						if (p.parentPackage !is null) {
+							logWarn("%sSub package %s must be referenced using the path to its parent package.", indent, dep.name);
+							p = p.parentPackage;
+						}
+						p = resolveSubPackage(p, subname, false);
+						enforce(p.name == dep.name,
+							format("Path based dependency %s is referenced with a wrong name: %s vs.
%s", + path.toNativeString(), dep.name, p.name)); + } else { + logDiagnostic("%sMissing dependency %s %s of %s", indent, dep.name, vspec, pack.name); + if (is_desired) m_missingDependencies ~= dep.name; + continue; } - p = resolveSubPackage(p, false); - enforce(p.name == dep.name, - format("Path based dependency %s is referenced with a wrong name: %s vs. %s", - path.toNativeString(), dep.name, p.name)); - } - - if (!p) { - logDiagnostic("%sMissing dependency %s %s of %s", indent, dep.name, vspec, pack.name); - if (is_desired) m_missingDependencies ~= dep.name; - continue; } if (!m_dependencies.canFind(p)) { @@ -908,9 +1069,9 @@ return value.byKeyValue.map!(a => a.key ~ "=" ~ a.value); else static if( is(typeof(value) == enum) ) return only(value); - else static if( is(typeof(value) == BuildRequirements) ) + else static if( is(typeof(value) == Flags!BuildRequirement) ) return only(cast(BuildRequirement) cast(int) value.values); - else static if( is(typeof(value) == BuildOptions) ) + else static if( is(typeof(value) == Flags!BuildOption) ) return only(cast(BuildOption) cast(int) value.values); else static assert(false, "Type of BuildSettings."~attributeName~" is unsupported."); @@ -1134,42 +1295,6 @@ { return null; } - - /** Sets a new set of versions for the upgrade cache. - */ - void setUpgradeCache(Dependency[string] versions) - { - logDebug("markUpToDate"); - Json create(ref Json json, string object) { - if (json[object].type == Json.Type.undefined) json[object] = Json.emptyObject; - return json[object]; - } - create(m_packageSettings, "dub"); - m_packageSettings["dub"]["lastUpgrade"] = Clock.currTime().toISOExtString(); - - create(m_packageSettings["dub"], "cachedUpgrades"); - foreach (p, d; versions) - m_packageSettings["dub"]["cachedUpgrades"][p] = SelectedVersions.dependencyToJson(d); - - writeDubJson(); - } - - private void writeDubJson() { - import std.file : exists, mkdir; - // don't bother to write an empty file - if( m_packageSettings.length == 0 ) return; - - try { - logDebug("writeDubJson"); - auto dubpath = m_rootPackage.path~".dub"; - if( !exists(dubpath.toNativeString()) ) mkdir(dubpath.toNativeString()); - auto dstFile = openFile((dubpath~"dub.json").toString(), FileMode.createTrunc); - scope(exit) dstFile.close(); - dstFile.writePrettyJsonString(m_packageSettings); - } catch( Exception e ){ - logWarn("Could not write .dub/dub.json."); - } - } } @@ -1181,19 +1306,8 @@ commandLineNul, /// NUL character separated list entries (unescaped, data lists separated by two NUL characters) } - -/// Indicates where a package has been or should be placed to. -enum PlacementLocation { - /// Packages retrieved with 'local' will be placed in the current folder - /// using the package name as destination. - local, - /// Packages with 'userWide' will be placed in a folder accessible by - /// all of the applications from the current user. - user, - /// Packages retrieved with 'systemWide' will be placed in a shared folder, - /// which can be accessed by all users of the system. 
- system -} +deprecated("Use `dub.packagemanager : PlacementLocation` instead") +public alias PlacementLocation = dub.packagemanager.PlacementLocation; void processVars(ref BuildSettings dst, in Project project, in Package pack, BuildSettings settings, in GeneratorSettings gsettings, bool include_target_settings = false) @@ -1313,6 +1427,7 @@ { case '*', '?', '[', '{': break loop; case '/': sepIdx = i; goto default; + version (Windows) { case '\\': sepIdx = i; goto default; } default: ++i; break switch_; } } @@ -1410,6 +1525,25 @@ assert(expandVars!expandVar("$${DUB_EXE:-dub}") == "${DUB_EXE:-dub}"); } +/// Expands the variables in the input string with the same rules as command +/// variables inside custom dub commands. +/// +/// Params: +/// s = the input string where environment variables in form `$VAR` should be replaced +/// throwIfMissing = if true, throw an exception if the given variable is not found, +/// otherwise replace unknown variables with the empty string. +string expandEnvironmentVariables(string s, bool throwIfMissing = true) +{ + import std.process : environment; + + return expandVars!((v) { + auto ret = environment.get(v); + if (ret is null && throwIfMissing) + throw new Exception("Specified environment variable `$" ~ v ~ "` is not set"); + return ret; + })(s); +} + // Keep the following list up-to-date if adding more build settings variables. /// List of variables that can be used in build settings package(dub) immutable buildSettingsVars = [ @@ -1568,13 +1702,9 @@ "dub.selections.json" within a package's directory. */ final class SelectedVersions { - private struct Selected { - Dependency dep; - //Dependency[string] packages; - } private { enum FileVersion = 1; - Selected[string] m_selections; + Selected m_selections; bool m_dirty = false; // has changes since last save bool m_bare = true; } @@ -1583,13 +1713,24 @@ enum defaultFile = "dub.selections.json"; /// Constructs a new empty version selection. - this() {} + public this(uint version_ = FileVersion) @safe pure nothrow @nogc + { + this.m_selections = Selected(version_); + } + + /// Constructs a new non-empty version selection. + public this(Selected data) @safe pure nothrow @nogc + { + this.m_selections = data; + this.m_bare = false; + } /** Constructs a new version selection from JSON data. The structure of the JSON document must match the contents of the "dub.selections.json" file. */ + deprecated("Pass a `dub.recipe.selection : Selected` directly") this(Json data) { deserialize(data); @@ -1598,6 +1739,7 @@ /** Constructs a new version selections from an existing JSON file. */ + deprecated("JSON deserialization is deprecated") this(NativePath path) { auto json = jsonFromFile(path); @@ -1607,7 +1749,7 @@ } /// Returns a list of names for all packages that have a version selection. - @property string[] selectedPackages() const { return m_selections.keys; } + @property string[] selectedPackages() const { return m_selections.versions.keys; } /// Determines if any changes have been made after loading the selections from a file. @property bool dirty() const { return m_dirty; } @@ -1618,62 +1760,69 @@ /// Removes all selections. void clear() { - m_selections = null; + m_selections.versions = null; m_dirty = true; } /// Duplicates the set of selected versions from another instance. 
void set(SelectedVersions versions) { - m_selections = versions.m_selections.dup; + m_selections.fileVersion = versions.m_selections.fileVersion; + m_selections.versions = versions.m_selections.versions.dup; m_dirty = true; } /// Selects a certain version for a specific package. void selectVersion(string package_id, Version version_) { - if (auto ps = package_id in m_selections) { - if (ps.dep == Dependency(version_)) + if (auto pdep = package_id in m_selections.versions) { + if (*pdep == Dependency(version_)) return; } - m_selections[package_id] = Selected(Dependency(version_)/*, issuer*/); + m_selections.versions[package_id] = Dependency(version_); m_dirty = true; } /// Selects a certain path for a specific package. void selectVersion(string package_id, NativePath path) { - if (auto ps = package_id in m_selections) { - if (ps.dep == Dependency(path)) + if (auto pdep = package_id in m_selections.versions) { + if (*pdep == Dependency(path)) return; } - m_selections[package_id] = Selected(Dependency(path)); + m_selections.versions[package_id] = Dependency(path); m_dirty = true; } /// Selects a certain Git reference for a specific package. - void selectVersionWithRepository(string package_id, Repository repository, string spec) + void selectVersion(string package_id, Repository repository) { - const dependency = Dependency(repository, spec); - if (auto ps = package_id in m_selections) { - if (ps.dep == dependency) + const dependency = Dependency(repository); + if (auto pdep = package_id in m_selections.versions) { + if (*pdep == dependency) return; } - m_selections[package_id] = Selected(dependency); + m_selections.versions[package_id] = dependency; m_dirty = true; } + deprecated("Move `spec` inside of the `repository` parameter and call `selectVersion`") + void selectVersionWithRepository(string package_id, Repository repository, string spec) + { + this.selectVersion(package_id, Repository(repository.remote(), spec)); + } + /// Removes the selection for a particular package. void deselectVersion(string package_id) { - m_selections.remove(package_id); + m_selections.versions.remove(package_id); m_dirty = true; } /// Determines if a particular package has a selection set. bool hasSelectedVersion(string packageId) const { - return (packageId in m_selections) !is null; + return (packageId in m_selections.versions) !is null; } /** Returns the selection for a particular package. @@ -1686,7 +1835,7 @@ Dependency getSelectedVersion(string packageId) const { enforce(hasSelectedVersion(packageId)); - return m_selections[packageId].dep; + return m_selections.versions[packageId]; } /** Stores the selections to disk. 
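For illustration, the reworked selection API above can be driven as in the following sketch; it is not part of the patch, the package name and version are hypothetical, and only members visible in this diff (`selectVersion`, `hasSelectedVersion`, `getSelectedVersion`, `deselectVersion`, `dirty`) are used:

unittest {
	import dub.dependency : Dependency, Version;

	auto sel = new SelectedVersions();  // starts empty, with the current FileVersion
	sel.selectVersion("somepkg", Version("1.2.3"));
	assert(sel.dirty);  // any mutation marks the selection as having unsaved changes
	assert(sel.hasSelectedVersion("somepkg"));
	assert(sel.getSelectedVersion("somepkg") == Dependency(Version("1.2.3")));
	sel.deselectVersion("somepkg");
	assert(!sel.hasSelectedVersion("somepkg"));
}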
@@ -1723,17 +1872,13 @@ m_bare = false; } + deprecated("Use `dub.dependency : Dependency.toJson(true)`") static Json dependencyToJson(Dependency d) { - if (!d.repository.empty) { - return serializeToJson([ - "version": d.version_.toString(), - "repository": d.repository.toString, - ]); - } else if (d.path.empty) return Json(d.version_.toString()); - else return serializeToJson(["path": d.path.toString()]); + return d.toJson(true); } + deprecated("JSON deserialization is deprecated") static Dependency dependencyFromJson(Json j) { if (j.type == Json.Type.string) @@ -1741,8 +1886,8 @@ else if (j.type == Json.Type.object && "path" in j) return Dependency(NativePath(j["path"].get!string)); else if (j.type == Json.Type.object && "repository" in j) - return Dependency(Repository(j["repository"].get!string), - enforce("version" in j, "Expected \"version\" field in repository version object").get!string); + return Dependency(Repository(j["repository"].get!string, + enforce("version" in j, "Expected \"version\" field in repository version object").get!string)); else throw new Exception(format("Unexpected type for dependency: %s", j)); } @@ -1750,19 +1895,22 @@ const { Json json = serializeToJson(m_selections); Json serialized = Json.emptyObject; - serialized["fileVersion"] = FileVersion; + serialized["fileVersion"] = m_selections.fileVersion; serialized["versions"] = Json.emptyObject; - foreach (p, v; m_selections) - serialized["versions"][p] = dependencyToJson(v.dep); + foreach (p, dep; m_selections.versions) + serialized["versions"][p] = dep.toJson(true); return serialized; } + deprecated("JSON deserialization is deprecated") private void deserialize(Json json) { - enforce(cast(int)json["fileVersion"] == FileVersion, "Mismatched dub.select.json version: " ~ to!string(cast(int)json["fileVersion"]) ~ "vs. " ~to!string(FileVersion)); + const fileVersion = json["fileVersion"].get!int; + enforce(fileVersion == FileVersion, "Mismatched dub.selections.json version: " ~ to!string(fileVersion) ~ " vs. " ~ to!string(FileVersion)); clear(); + m_selections.fileVersion = fileVersion; scope(failure) clear(); - foreach (string p, v; json["versions"]) - m_selections[p] = Selected(dependencyFromJson(v)); + foreach (string p, dep; json["versions"]) + m_selections.versions[p] = dependencyFromJson(dep); } } diff --git a/source/dub/recipe/io.d b/source/dub/recipe/io.d index 994b3da..67d0199 100644 --- a/source/dub/recipe/io.d +++ b/source/dub/recipe/io.d @@ -8,8 +8,9 @@ module dub.recipe.io; import dub.recipe.packagerecipe; +import dub.internal.logging; import dub.internal.vibecompat.inet.path; - +import configy.Read; /** Reads a package recipe from a file. 
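As a sketch of the replacement named in the deprecation above, `Dependency.toJson(true)` emits the `dub.selections.json`-style representation; assuming it mirrors the removed `dependencyToJson` behavior, a plain version becomes a JSON string and a path selection an object (the version and path below are made up):

unittest {
	import dub.dependency : Dependency, Version;
	import dub.internal.vibecompat.data.json : Json;
	import dub.internal.vibecompat.inet.path : NativePath;

	// A plain version selection serializes to a bare string ...
	assert(Dependency(Version("1.2.3")).toJson(true) == Json("1.2.3"));
	// ... while a path-based selection becomes a {"path": ...} object
	// (assuming NativePath.toString round-trips the relative path).
	assert(Dependency(NativePath("../local/pkg")).toJson(true)["path"].get!string == "../local/pkg");
}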
@@ -18,16 +19,20 @@
	Params:
		filename = NativePath of the package recipe file
		parent_name = Optional name of the parent package (if this is a sub package)
+		mode = Whether to issue errors, warnings, or ignore unknown keys in dub.json

	Returns: Returns the package recipe contents
	Throws: Throws an exception if an I/O or syntax error occurs
*/
-PackageRecipe readPackageRecipe(string filename, string parent_name = null)
+PackageRecipe readPackageRecipe(
+	string filename, string parent_name = null, StrictMode mode = StrictMode.Ignore)
{
-	return readPackageRecipe(NativePath(filename), parent_name);
+	return readPackageRecipe(NativePath(filename), parent_name, mode);
}
+
/// ditto
-PackageRecipe readPackageRecipe(NativePath filename, string parent_name = null)
+PackageRecipe readPackageRecipe(
+	NativePath filename, string parent_name = null, StrictMode mode = StrictMode.Ignore)
{
	import dub.internal.utils : stripUTF8Bom;
	import dub.internal.vibecompat.core.file : openFile, FileMode;
@@ -40,7 +45,7 @@
		text = stripUTF8Bom(cast(string)f.readAll());
	}

-	return parsePackageRecipe(text, filename.toNativeString(), parent_name);
+	return parsePackageRecipe(text, filename.toNativeString(), parent_name, null, mode);
}

/** Parses an in-memory package recipe.
@@ -55,14 +60,16 @@
			package)
		default_package_name = Optional default package name (if no package name
			is found in the recipe this value will be used)
+		mode = Whether to issue errors, warnings, or ignore unknown keys in dub.json

	Returns: Returns the package recipe contents
	Throws: Throws an exception if an I/O or syntax error occurs
*/
PackageRecipe parsePackageRecipe(string contents, string filename, string parent_name = null,
-	string default_package_name = null)
+	string default_package_name = null, StrictMode mode = StrictMode.Ignore)
{
	import std.algorithm : endsWith;
+	import dub.compilers.buildsettings : TargetType;
	import dub.internal.vibecompat.data.json;
	import dub.recipe.json : parseJson;
	import dub.recipe.sdl : parseSDL;
@@ -71,9 +78,64 @@

	ret.name = default_package_name;

-	if (filename.endsWith(".json")) parseJson(ret, parseJsonString(contents, filename), parent_name);
+	if (filename.endsWith(".json"))
+	{
+		try {
+			ret = parseConfigString!PackageRecipe(contents, filename, mode);
+			fixDependenciesNames(ret.name, ret);
+		} catch (ConfigException exc) {
+			logWarn("Your `dub.json` file uses non-conventional features that are deprecated");
+			logWarn("Please adjust your `dub.json` file as those warnings will turn into errors in dub v1.40.0");
+			logWarn("Error was: %s", exc);
+			// Fallback to JSON parser
+			ret = PackageRecipe.init;
+			parseJson(ret, parseJsonString(contents, filename), parent_name);
+		} catch (Exception exc) {
+			logWarn("Your `dub.json` file uses non-conventional features that are deprecated");
+			logWarn("This is most likely due to duplicated keys.");
+			logWarn("Please adjust your `dub.json` file as those warnings will turn into errors in dub v1.40.0");
+			logWarn("Error was: %s", exc);
+			// Fallback to JSON parser
+			ret = PackageRecipe.init;
+			parseJson(ret, parseJsonString(contents, filename), parent_name);
+		}
+		// `debug = ConfigFillerDebug` also enables verbose parser output
+		debug (ConfigFillerDebug)
+		{
+			import std.stdio;
+
+			PackageRecipe jsonret;
+			parseJson(jsonret, parseJsonString(contents, filename), parent_name);
+			if (ret != jsonret)
+			{
+				writeln("Content of JSON and YAML parsing differ for file: ", filename);
+				writeln("-------------------------------------------------------------------");
+				writeln("JSON 
(expected): ", jsonret);
+				writeln("-------------------------------------------------------------------");
+				writeln("YAML (actual  ): ", ret);
+				writeln("========================================");
+				ret = jsonret;
+			}
+		}
+	}
+	else if (filename.endsWith(".sdl")) parseSDL(ret, contents, parent_name, filename);
	else assert(false, "readPackageRecipe called with filename with unknown extension: "~filename);
+
+	// Fix for issue #711: `targetType` should be inherited, or default to library
+	static void sanitizeTargetType(ref PackageRecipe r) {
+		TargetType defaultTT = (r.buildSettings.targetType == TargetType.autodetect) ?
+			TargetType.library : r.buildSettings.targetType;
+		foreach (ref conf; r.configurations)
+			if (conf.buildSettings.targetType == TargetType.autodetect)
+				conf.buildSettings.targetType = defaultTT;
+
+		// recurse into sub packages
+		foreach (ref subPackage; r.subPackages)
+			sanitizeTargetType(subPackage.recipe);
+	}
+
+	sanitizeTargetType(ret);
+
	return ret;
}
@@ -123,6 +185,23 @@
	}
}

+unittest { // make sure the targetType of sub packages is sanitized too
+	import dub.compilers.buildsettings : TargetType;
+	auto inputs = [
+		"dub.sdl": "name \"test\"\nsubPackage {\nname \"sub\"\ntargetType \"sourceLibrary\"\nconfiguration \"a\" {\n}\n}",
+		"dub.json": "{\"name\": \"test\", \"subPackages\": [ { \"name\": \"sub\", \"targetType\": \"sourceLibrary\", \"configurations\": [{\"name\": \"a\"}] } ] }"
+	];
+	foreach (file, content; inputs) {
+		auto pr = parsePackageRecipe(content, file);
+		assert(pr.name == "test");
+		const spr = pr.subPackages[0].recipe;
+		assert(spr.name == "sub");
+		assert(spr.configurations.length == 1);
+		assert(spr.configurations[0].name == "a");
+		assert(spr.configurations[0].buildSettings.targetType == TargetType.sourceLibrary);
+	}
+}
+
/** Writes the textual representation of a package recipe to a file. 
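To make the `sanitizeTargetType` fix above concrete, here is a small illustrative sketch (the recipe content is made up): a configuration without an explicit `targetType` now inherits the recipe-level one instead of being left on `autodetect`:

unittest {
	import dub.compilers.buildsettings : TargetType;

	auto rec = parsePackageRecipe(
		`{"name": "demo", "targetType": "executable", "configurations": [{"name": "a"}]}`,
		"dub.json");
	// "a" declares no targetType, so it inherits the top-level "executable"
	assert(rec.configurations[0].buildSettings.targetType == TargetType.executable);
}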
@@ -160,3 +239,56 @@ toSDL(recipe).toSDLDocument(dst); else assert(false, "writePackageRecipe called with filename with unknown extension: "~filename); } + +unittest { + import std.format; + import dub.dependency; + import dub.internal.utils : deepCompare; + + static void success (string source, in PackageRecipe expected, size_t line = __LINE__) { + const result = parseConfigString!PackageRecipe(source, "dub.json"); + deepCompare(result, expected, __FILE__, line); + } + + static void error (string source, string expected, size_t line = __LINE__) { + try + { + auto result = parseConfigString!PackageRecipe(source, "dub.json"); + assert(0, + format("[%s:%d] Exception should have been thrown but wasn't: %s", + __FILE__, line, result)); + } + catch (Exception exc) + assert(exc.toString() == expected, + format("[%s:%s] result != expected: '%s' != '%s'", + __FILE__, line, exc.toString(), expected)); + } + + alias YAMLDep = typeof(BuildSettingsTemplate.dependencies[string.init]); + const PackageRecipe expected1 = + { + name: "foo", + buildSettings: { + dependencies: RecipeDependencyAA([ + "repo": YAMLDep(Dependency(Repository( + "git+https://github.com/dlang/dmd", + "09d04945bdbc0cba36f7bb1e19d5bd009d4b0ff2", + ))), + "path": YAMLDep(Dependency(NativePath("/foo/bar/jar/"))), + "version": YAMLDep(Dependency(VersionRange.fromString("~>1.0"))), + "version2": YAMLDep(Dependency(Version("4.2.0"))), + ])}, + }; + success( + `{ "name": "foo", "dependencies": { + "repo": { "repository": "git+https://github.com/dlang/dmd", + "version": "09d04945bdbc0cba36f7bb1e19d5bd009d4b0ff2" }, + "path": { "path": "/foo/bar/jar/" }, + "version": { "version": "~>1.0" }, + "version2": "4.2.0" +}}`, expected1); + + + error(`{ "name": "bar", "dependencies": {"bad": { "repository": "git+https://github.com/dlang/dmd" }}}`, + "dub.json(0:41): dependencies[bad]: Need to provide a commit hash in 'version' field with 'repository' dependency"); +} diff --git a/source/dub/recipe/json.d b/source/dub/recipe/json.d index c1d776d..7993fa2 100644 --- a/source/dub/recipe/json.d +++ b/source/dub/recipe/json.d @@ -57,13 +57,9 @@ recipe.buildSettings.parseJson(json, fullname); if (auto pv = "configurations" in json) { - TargetType deftargettp = TargetType.library; - if (recipe.buildSettings.targetType != TargetType.autodetect) - deftargettp = recipe.buildSettings.targetType; - foreach (settings; *pv) { ConfigurationInfo ci; - ci.parseJson(settings, recipe.name, deftargettp); + ci.parseJson(settings, recipe.name); recipe.configurations ~= ci; } } @@ -133,10 +129,8 @@ } } -private void parseJson(ref ConfigurationInfo config, Json json, string package_name, TargetType default_target_type = TargetType.library) +private void parseJson(ref ConfigurationInfo config, Json json, string package_name) { - config.buildSettings.targetType = default_target_type; - foreach (string name, value; json) { switch (name) { default: break; @@ -166,7 +160,7 @@ { auto idx = indexOf(name, "-"); string basename, suffix; - if( idx >= 0 ) { basename = name[0 .. idx]; suffix = name[idx .. $]; } + if( idx >= 0 ) { basename = name[0 .. idx]; suffix = name[idx + 1 .. $]; } else basename = name; switch(basename){ default: break; @@ -179,11 +173,7 @@ enforce(pkg !in bs.dependencies, "The dependency '"~pkg~"' is specified more than once." 
); bs.dependencies[pkg] = Dependency.fromJson(verspec); if (verspec.type == Json.Type.object) - { - BuildSettingsTemplate dbs; - dbs.parseJson(verspec, package_name); - bs.dependencyBuildSettings[pkg] = dbs; - } + bs.dependencies[pkg].settings.parseJson(verspec, package_name); } break; case "systemDependencies": @@ -246,13 +236,13 @@ case "preRunEnvironments": bs.preRunEnvironments[suffix] = deserializeJson!(string[string])(value); break; case "postRunEnvironments": bs.postRunEnvironments[suffix] = deserializeJson!(string[string])(value); break; case "buildRequirements": - BuildRequirements reqs; + Flags!BuildRequirement reqs; foreach (req; deserializeJson!(string[])(value)) reqs |= to!BuildRequirement(req); bs.buildRequirements[suffix] = reqs; break; case "buildOptions": - BuildOptions options; + Flags!BuildOption options; foreach (opt; deserializeJson!(string[])(value)) options |= to!BuildOption(opt); bs.buildOptions[suffix] = options; @@ -263,6 +253,13 @@ private Json toJson(const scope ref BuildSettingsTemplate bs) { + static string withSuffix (string pre, string post) + { + if (!post.length) + return pre; + return pre ~ "-" ~ post; + } + auto ret = Json.emptyObject; if( bs.dependencies !is null ){ auto deps = Json.emptyObject; @@ -277,47 +274,47 @@ if (!bs.workingDirectory.empty) ret["workingDirectory"] = bs.workingDirectory; if (!bs.mainSourceFile.empty) ret["mainSourceFile"] = bs.mainSourceFile; if (bs.subConfigurations.length > 0) ret["subConfigurations"] = serializeToJson(bs.subConfigurations); - foreach (suffix, arr; bs.dflags) ret["dflags"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.lflags) ret["lflags"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.libs) ret["libs"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.sourceFiles) ret["sourceFiles"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.sourcePaths) ret["sourcePaths"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.excludedSourceFiles) ret["excludedSourceFiles"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.injectSourceFiles) ret["injectSourceFiles"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.copyFiles) ret["copyFiles"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.extraDependencyFiles) ret["extraDependencyFiles"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.versions) ret["versions"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.debugVersions) ret["debugVersions"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.versionFilters) ret["-versionFilters"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.debugVersionFilters) ret["-debugVersionFilters"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.importPaths) ret["importPaths"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.stringImportPaths) ret["stringImportPaths"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.preGenerateCommands) ret["preGenerateCommands"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.postGenerateCommands) ret["postGenerateCommands"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.preBuildCommands) ret["preBuildCommands"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.postBuildCommands) ret["postBuildCommands"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.preRunCommands) ret["preRunCommands"~suffix] = serializeToJson(arr); - foreach (suffix, arr; bs.postRunCommands) ret["postRunCommands"~suffix] = serializeToJson(arr); - foreach (suffix, aa; 
bs.environments) ret["environments"~suffix] = serializeToJson(aa); - foreach (suffix, aa; bs.buildEnvironments) ret["buildEnvironments"~suffix] = serializeToJson(aa); - foreach (suffix, aa; bs.runEnvironments) ret["runEnvironments"~suffix] = serializeToJson(aa); - foreach (suffix, aa; bs.preGenerateEnvironments) ret["preGenerateEnvironments"~suffix] = serializeToJson(aa); - foreach (suffix, aa; bs.postGenerateEnvironments) ret["postGenerateEnvironments"~suffix] = serializeToJson(aa); - foreach (suffix, aa; bs.preBuildEnvironments) ret["preBuildEnvironments"~suffix] = serializeToJson(aa); - foreach (suffix, aa; bs.postBuildEnvironments) ret["postBuildEnvironments"~suffix] = serializeToJson(aa); - foreach (suffix, aa; bs.preRunEnvironments) ret["preRunEnvironments"~suffix] = serializeToJson(aa); - foreach (suffix, aa; bs.postRunEnvironments) ret["postRunEnvironments"~suffix] = serializeToJson(aa); + foreach (suffix, arr; bs.dflags) ret[withSuffix("dflags", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.lflags) ret[withSuffix("lflags", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.libs) ret[withSuffix("libs", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.sourceFiles) ret[withSuffix("sourceFiles", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.sourcePaths) ret[withSuffix("sourcePaths", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.excludedSourceFiles) ret[withSuffix("excludedSourceFiles", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.injectSourceFiles) ret[withSuffix("injectSourceFiles", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.copyFiles) ret[withSuffix("copyFiles", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.extraDependencyFiles) ret[withSuffix("extraDependencyFiles", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.versions) ret[withSuffix("versions", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.debugVersions) ret[withSuffix("debugVersions", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.versionFilters) ret[withSuffix("-versionFilters", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.debugVersionFilters) ret[withSuffix("-debugVersionFilters", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.importPaths) ret[withSuffix("importPaths", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.stringImportPaths) ret[withSuffix("stringImportPaths", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.preGenerateCommands) ret[withSuffix("preGenerateCommands", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.postGenerateCommands) ret[withSuffix("postGenerateCommands", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.preBuildCommands) ret[withSuffix("preBuildCommands", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.postBuildCommands) ret[withSuffix("postBuildCommands", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.preRunCommands) ret[withSuffix("preRunCommands", suffix)] = serializeToJson(arr); + foreach (suffix, arr; bs.postRunCommands) ret[withSuffix("postRunCommands", suffix)] = serializeToJson(arr); + foreach (suffix, aa; bs.environments) ret[withSuffix("environments", suffix)] = serializeToJson(aa); + foreach (suffix, aa; bs.buildEnvironments) ret[withSuffix("buildEnvironments", suffix)] = serializeToJson(aa); + foreach (suffix, aa; bs.runEnvironments) ret[withSuffix("runEnvironments", suffix)] = serializeToJson(aa); + foreach (suffix, aa; 
bs.preGenerateEnvironments) ret[withSuffix("preGenerateEnvironments", suffix)] = serializeToJson(aa);
+	foreach (suffix, aa; bs.postGenerateEnvironments) ret[withSuffix("postGenerateEnvironments", suffix)] = serializeToJson(aa);
+	foreach (suffix, aa; bs.preBuildEnvironments) ret[withSuffix("preBuildEnvironments", suffix)] = serializeToJson(aa);
+	foreach (suffix, aa; bs.postBuildEnvironments) ret[withSuffix("postBuildEnvironments", suffix)] = serializeToJson(aa);
+	foreach (suffix, aa; bs.preRunEnvironments) ret[withSuffix("preRunEnvironments", suffix)] = serializeToJson(aa);
+	foreach (suffix, aa; bs.postRunEnvironments) ret[withSuffix("postRunEnvironments", suffix)] = serializeToJson(aa);
	foreach (suffix, arr; bs.buildRequirements) {
		string[] val;
		foreach (i; [EnumMembers!BuildRequirement])
			if (arr & i) val ~= to!string(i);
-		ret["buildRequirements"~suffix] = serializeToJson(val);
+		ret[withSuffix("buildRequirements", suffix)] = serializeToJson(val);
	}
	foreach (suffix, arr; bs.buildOptions) {
		string[] val;
		foreach (i; [EnumMembers!BuildOption])
			if (arr & i) val ~= to!string(i);
-		ret["buildOptions"~suffix] = serializeToJson(val);
+		ret[withSuffix("buildOptions", suffix)] = serializeToJson(val);
	}
	return ret;
}
diff --git a/source/dub/recipe/packagerecipe.d b/source/dub/recipe/packagerecipe.d
index e1e4663..deedc67 100644
--- a/source/dub/recipe/packagerecipe.d
+++ b/source/dub/recipe/packagerecipe.d
@@ -10,11 +10,13 @@
import dub.compilers.compiler;
import dub.compilers.utils : warnOnSpecialCompilerFlags;
import dub.dependency;
+import dub.internal.logging;
import dub.internal.vibecompat.core.file;
-import dub.internal.vibecompat.core.log;
import dub.internal.vibecompat.inet.path;

+import configy.Attributes;
+
import std.algorithm : findSplit, sort;
import std.array : join, split;
import std.exception : enforce;
@@ -73,22 +75,92 @@
	For higher level package handling, see the $(D Package) class.
*/
struct PackageRecipe {
+	/**
+	 * Name of the package, used to uniquely identify the package.
+	 *
+	 * This field is the only mandatory one.
+	 * Must be comprised of only lower case ASCII alpha-numeric characters,
+	 * "-" or "_".
+	 */
	string name;
-	string version_;
-	string description;
-	string homepage;
-	string[] authors;
-	string copyright;
-	string license;
-	string[] ddoxFilterArgs;
-	string ddoxTool;
-	BuildSettingsTemplate buildSettings;
-	ConfigurationInfo[] configurations;
-	BuildSettingsTemplate[string] buildTypes;
-	ToolchainRequirements toolchainRequirements;
+	/// Brief description of the package.
+	@Optional string description;

-	SubPackage[] subPackages;
+	/// URL of the project website
+	@Optional string homepage;
+
+	/**
+	 * List of project authors
+	 *
+	 * The suggested format is either:
+	 * "Peter Parker"
+	 * or
+	 * "Peter Parker <pparker@example.com>"
+	 */
+	@Optional string[] authors;
+
+	/// Copyright declaration string
+	@Optional string copyright;
+
+	/// License(s) under which the project can be used
+	@Optional string license;
+
+	/// Set of version requirements for DUB, compilers and/or language frontend.
+	@Optional ToolchainRequirements toolchainRequirements;
+
+	/**
+	 * Specifies an optional list of build configurations
+	 *
+	 * By default, the first configuration present in the package recipe
+	 * will be used, except for special configurations (e.g. "unittest").
+	 * A specific configuration can be chosen from the command line using
+	 * `--config=name` or `-c name`. 
A package can select a specific
+	 * configuration in one of its dependencies by using the `subConfigurations`
+	 * build setting.
+	 * Build settings defined at the top level affect all configurations.
+	 */
+	@Optional @Key("name") ConfigurationInfo[] configurations;
+
+	/**
+	 * Defines additional custom build types or overrides the default ones
+	 *
+	 * Build types can be selected from the command line using `--build=name`
+	 * or `-b name`. The default build type is `debug`.
+	 */
+	@Optional BuildSettingsTemplate[string] buildTypes;
+
+	/**
+	 * Build settings influence the command line arguments and options passed
+	 * to the compiler and linker.
+	 *
+	 * All build settings can be present at the top level, and are optional.
+	 * Build settings can also be found in `configurations`.
+	 */
+	@Optional BuildSettingsTemplate buildSettings;
+	alias buildSettings this;
+
+	/**
+	 * Specifies a list of command line flags usable for controlling
+	 * filter behavior for `--build=ddox` [experimental]
+	 */
+	@Optional @Name("-ddoxFilterArgs") string[] ddoxFilterArgs;
+
+	/// Specify which tool to use with `--build=ddox` (experimental)
+	@Optional @Name("-ddoxTool") string ddoxTool;
+
+	/**
+	 * Sub-package paths or definitions
+	 *
+	 * Sub-packages allow breaking components of a large framework into smaller
+	 * packages. In the recipe file, a subpackage entry can take one of two forms:
+	 * either the path to a sub-folder where a recipe file exists,
+	 * or an object of the same format as a recipe file (or `PackageRecipe`).
+	 */
+	@Optional SubPackage[] subPackages;
+
+	/// Usually unused by users, this is set by dub automatically
+	@Optional @Name("version") string version_;

	inout(ConfigurationInfo) getConfiguration(string name)
	inout {
@@ -107,6 +179,27 @@
{
	string path;
	PackageRecipe recipe;
+
+	/**
+	 * Given a YAML parser, recurses into `recipe` or uses `path`
+	 * depending on the node type.
+	 *
+	 * Two formats are supported for `subpackages`: a string format,
+	 * which is just the path to the subpackage, and embedding the
+	 * full subpackage recipe into the parent package recipe.
+	 *
+	 * To support such a dual syntax, Configy requires the use
+	 * of a `fromYAML` method, as it exposes the underlying format.
+	 */
+	static SubPackage fromYAML (scope ConfigParser!SubPackage p)
+	{
+		import dyaml.node;
+
+		if (p.node.nodeID == NodeID.mapping)
+			return SubPackage(null, p.parseAs!PackageRecipe);
+		else
+			return SubPackage(p.parseAs!string);
+	}
}

/// Describes minimal toolchain requirements
@@ -114,15 +207,23 @@
{
	import std.typecons : Tuple, tuple;

+	// TODO: We can remove `@Optional` once bosagora/configy#30 is resolved,
+	// currently it fails because `Dependency.opCmp` is not CTFE-able.
+
	/// DUB version requirement
+	@Optional @converter((scope ConfigParser!Dependency p) => p.node.as!string.parseDependency)
	Dependency dub = Dependency.any;
	/// D front-end version requirement
+	@Optional @converter((scope ConfigParser!Dependency p) => p.node.as!string.parseDMDDependency)
	Dependency frontend = Dependency.any;
	/// DMD version requirement
+	@Optional @converter((scope ConfigParser!Dependency p) => p.node.as!string.parseDMDDependency)
	Dependency dmd = Dependency.any;
	/// LDC version requirement
+	@Optional @converter((scope ConfigParser!Dependency p) => p.node.as!string.parseDependency)
	Dependency ldc = Dependency.any;
	/// GDC version requirement
+	@Optional @converter((scope ConfigParser!Dependency p) => p.node.as!string.parseDependency)
	Dependency gdc = Dependency.any;

	/** Get the list of supported compilers. 
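For illustration, the converters above mean `toolchainRequirements` entries are plain version strings in the recipe; a sketch with hypothetical requirement values, relying on unset fields keeping their `Dependency.any` default:

unittest {
	import dub.recipe.io : parsePackageRecipe;

	auto rec = parsePackageRecipe(
		`{"name": "demo", "toolchainRequirements": {"dub": ">=1.14.0"}}`,
		"dub.json");
	assert(rec.toolchainRequirements.dub != Dependency.any); // explicitly constrained
	assert(rec.toolchainRequirements.gdc == Dependency.any); // left at the default
}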
@@ -151,8 +252,20 @@
/// Bundles information about a build configuration.
struct ConfigurationInfo {
	string name;
-	string[] platforms;
-	BuildSettingsTemplate buildSettings;
+	@Optional string[] platforms;
+	@Optional BuildSettingsTemplate buildSettings;
+	alias buildSettings this;
+
+	/**
+	 * Equivalent to the default constructor, used by Configy
+	 */
+	this(string name, string[] p, BuildSettingsTemplate build_settings)
+		@safe pure nothrow @nogc
+	{
+		this.name = name;
+		this.platforms = p;
+		this.buildSettings = build_settings;
+	}

	this(string name, BuildSettingsTemplate build_settings)
	{
@@ -165,58 +278,187 @@
	const {
		if( platforms.empty ) return true;
		foreach(p; platforms)
-			if( platform.matchesSpecification("-"~p) )
+			if (platform.matchesSpecification(p))
				return true;
		return false;
	}
}

+/**
+ * A dependency with possible `BuildSettingsTemplate`
+ *
+ * Currently only `dflags` is taken into account, but the parser accepts any
+ * value that is in `BuildSettingsTemplate`.
+ * This feature was originally introduced to support `-preview`, as setting
+ * a `-preview` in `dflags` does not propagate down to dependencies.
+ */
+public struct RecipeDependency
+{
+	/// The dependency itself
+	public Dependency dependency;
+
+	/// Additional dflags, if any
+	public BuildSettingsTemplate settings;
+
+	/// Convenience alias as most uses just want to deal with the `Dependency`
+	public alias dependency this;
+
+	/**
+	 * Read a `Dependency` and `BuildSettingsTemplate` from the config file
+	 *
+	 * Required to support both short and long form
+	 */
+	static RecipeDependency fromYAML (scope ConfigParser!RecipeDependency p)
+	{
+		import dyaml.node;
+
+		if (p.node.nodeID == NodeID.scalar) {
+			auto d = YAMLFormat(p.node.as!string);
+			return RecipeDependency(d.toDependency());
+		}
+		auto d = p.parseAs!YAMLFormat;
+		return RecipeDependency(d.toDependency(), d.settings);
+	}
+
+	/// In-file representation of a dependency as specified by the user
+	private struct YAMLFormat
+	{
+		@Name("version") @Optional string version_;
+		@Optional string path;
+		@Optional string repository;
+		bool optional;
+		@Name("default") bool default_;
+
+		@Optional BuildSettingsTemplate settings;
+		alias settings this;
+
+		/**
+		 * Used by Configy to provide rich error messages when parsing.
+		 *
+		 * Exceptions thrown from `validate` methods will be wrapped with field/file
+		 * information and rethrown from Configy, providing the user
+		 * with the location of the configuration that triggered the error. 
+		 */
+		public void validate () const
+		{
+			enforce(this.optional || !this.default_,
+				"Setting default to 'true' has no effect if 'optional' is not set");
+			enforce(this.version_.length || this.path.length || this.repository.length,
+				"Need to provide one of the following fields: 'version', 'path', or 'repository'");
+
+			enforce(!this.path.length || !this.repository.length,
+				"Cannot provide a 'path' dependency if a repository dependency is used");
+			enforce(!this.repository.length || this.version_.length,
+				"Need to provide a commit hash in 'version' field with 'repository' dependency");
+
+			// Need to deprecate this as it's fairly common
+			version (none) {
+				enforce(!this.path.length || !this.version_.length,
+					"Cannot provide a 'path' dependency if a 'version' dependency is used");
+			}
+		}
+
+		/// Turns this struct into a `Dependency`
+		public Dependency toDependency () const
+		{
+			auto result = () {
+				if (this.path.length)
+					return Dependency(NativePath(this.path));
+				if (this.repository.length)
+					return Dependency(Repository(this.repository, this.version_));
+				return Dependency(VersionRange.fromString(this.version_));
+			}();
+			result.optional = this.optional;
+			result.default_ = this.default_;
+			return result;
+		}
+	}
+}
+
+/// Type used to avoid a breaking change when `Dependency[string]`
+/// was changed to `RecipeDependency[string]`
+package struct RecipeDependencyAA
+{
+	/// The underlying data, kept `public` because `alias this` to a `private`
+	/// field doesn't always work.
+	public RecipeDependency[string] data;
+
+	/// Expose base functions, e.g. `clear`
+	alias data this;
+
+	/// Supports assignment from a `RecipeDependency` (used in the parser)
+	public void opIndexAssign(RecipeDependency dep, string key)
+		pure nothrow
+	{
+		this.data[key] = dep;
+	}
+
+	/// Supports assignment from a `Dependency`, used in user code mostly
+	public void opIndexAssign(Dependency dep, string key)
+		pure nothrow
+	{
+		this.data[key] = RecipeDependency(dep);
+	}
+
+	/// Configy doesn't like `alias this` to an AA
+	static RecipeDependencyAA fromYAML (scope ConfigParser!RecipeDependencyAA p)
+	{
+		return RecipeDependencyAA(p.parseAs!(typeof(this.data)));
+	}
+}
+
/// This keeps general information about how to build a package.
/// It contains functions to create a specific BuildSetting, targeted at
/// a certain BuildPlatform. 
struct BuildSettingsTemplate { - Dependency[string] dependencies; - BuildSettingsTemplate[string] dependencyBuildSettings; - string systemDependencies; - TargetType targetType = TargetType.autodetect; - string targetPath; - string targetName; - string workingDirectory; - string mainSourceFile; - string[string] subConfigurations; - string[][string] dflags; - string[][string] lflags; - string[][string] libs; - string[][string] sourceFiles; - string[][string] sourcePaths; - string[][string] excludedSourceFiles; - string[][string] injectSourceFiles; - string[][string] copyFiles; - string[][string] extraDependencyFiles; - string[][string] versions; - string[][string] debugVersions; - string[][string] versionFilters; - string[][string] debugVersionFilters; - string[][string] importPaths; - string[][string] stringImportPaths; - string[][string] preGenerateCommands; - string[][string] postGenerateCommands; - string[][string] preBuildCommands; - string[][string] postBuildCommands; - string[][string] preRunCommands; - string[][string] postRunCommands; - string[string][string] environments; - string[string][string] buildEnvironments; - string[string][string] runEnvironments; - string[string][string] preGenerateEnvironments; - string[string][string] postGenerateEnvironments; - string[string][string] preBuildEnvironments; - string[string][string] postBuildEnvironments; - string[string][string] preRunEnvironments; - string[string][string] postRunEnvironments; - BuildRequirements[string] buildRequirements; - BuildOptions[string] buildOptions; + @Optional RecipeDependencyAA dependencies; + @Optional string systemDependencies; + @Optional TargetType targetType = TargetType.autodetect; + @Optional string targetPath; + @Optional string targetName; + @Optional string workingDirectory; + @Optional string mainSourceFile; + @Optional string[string] subConfigurations; + @StartsWith("dflags") string[][string] dflags; + @StartsWith("lflags") string[][string] lflags; + @StartsWith("libs") string[][string] libs; + @StartsWith("sourceFiles") string[][string] sourceFiles; + @StartsWith("sourcePaths") string[][string] sourcePaths; + @StartsWith("excludedSourceFiles") string[][string] excludedSourceFiles; + @StartsWith("injectSourceFiles") string[][string] injectSourceFiles; + @StartsWith("copyFiles") string[][string] copyFiles; + @StartsWith("extraDependencyFiles") string[][string] extraDependencyFiles; + @StartsWith("versions") string[][string] versions; + @StartsWith("debugVersions") string[][string] debugVersions; + @StartsWith("versionFilters") string[][string] versionFilters; + @StartsWith("debugVersionFilters") string[][string] debugVersionFilters; + @StartsWith("importPaths") string[][string] importPaths; + @StartsWith("stringImportPaths") string[][string] stringImportPaths; + @StartsWith("preGenerateCommands") string[][string] preGenerateCommands; + @StartsWith("postGenerateCommands") string[][string] postGenerateCommands; + @StartsWith("preBuildCommands") string[][string] preBuildCommands; + @StartsWith("postBuildCommands") string[][string] postBuildCommands; + @StartsWith("preRunCommands") string[][string] preRunCommands; + @StartsWith("postRunCommands") string[][string] postRunCommands; + @StartsWith("environments") string[string][string] environments; + @StartsWith("buildEnvironments")string[string][string] buildEnvironments; + @StartsWith("runEnvironments") string[string][string] runEnvironments; + @StartsWith("preGenerateEnvironments") string[string][string] preGenerateEnvironments; + 
@StartsWith("postGenerateEnvironments") string[string][string] postGenerateEnvironments; + @StartsWith("preBuildEnvironments") string[string][string] preBuildEnvironments; + @StartsWith("postBuildEnvironments") string[string][string] postBuildEnvironments; + @StartsWith("preRunEnvironments") string[string][string] preRunEnvironments; + @StartsWith("postRunEnvironments") string[string][string] postRunEnvironments; + @StartsWith("buildRequirements") @Optional + Flags!BuildRequirement[string] buildRequirements; + @StartsWith("buildOptions") @Optional + Flags!BuildOption[string] buildOptions; + + + BuildSettingsTemplate dup() const { + return clone(this); + } /// Constructs a BuildSettings object from this template. void getPlatformSettings(ref BuildSettings dst, in BuildPlatform platform, NativePath base_path) @@ -354,7 +596,7 @@ logWarn(""); } else { string[] all_dflags; - BuildOptions all_options; + Flags!BuildOption all_options; foreach (flags; this.dflags) all_dflags ~= flags; foreach (options; this.buildOptions) all_options |= options; .warnOnSpecialCompilerFlags(all_dflags, all_options, package_name, config_name); @@ -461,6 +703,7 @@ private T clone(T)(ref const(T) val) { + import std.sumtype; import std.traits : isSomeString, isDynamicArray, isAssociativeArray, isBasicType, ValueType; static if (is(T == immutable)) return val; @@ -480,6 +723,8 @@ foreach (k, ref f; val) ret[k] = clone!V(f); return ret; + } else static if (is(T == SumType!A, A...)) { + return val.match!((any) => T(clone(any))); } else static if (is(T == struct)) { T ret; foreach (i, M; typeof(T.tupleof)) @@ -507,3 +752,26 @@ assert(bs.sourceFiles == ["src\\foo.d"]); }} } + +/** + * Edit all dependency names from `:foo` to `name:foo`. + * + * TODO: Remove the special case in the parser and remove this hack. 
+ */ +package void fixDependenciesNames (T) (string root, ref T aggr) nothrow +{ + static foreach (idx, FieldRef; T.tupleof) { + static if (is(immutable typeof(FieldRef) == immutable RecipeDependencyAA)) { + string[] toReplace; + foreach (key; aggr.tupleof[idx].byKey) + if (key.length && key[0] == ':') + toReplace ~= key; + foreach (k; toReplace) { + aggr.tupleof[idx][root ~ k] = aggr.tupleof[idx][k]; + aggr.tupleof[idx].data.remove(k); + } + } + else static if (is(typeof(FieldRef) == struct)) + fixDependenciesNames(root, aggr.tupleof[idx]); + } +} diff --git a/source/dub/recipe/sdl.d b/source/dub/recipe/sdl.d index e005e11..facc603 100644 --- a/source/dub/recipe/sdl.d +++ b/source/dub/recipe/sdl.d @@ -9,8 +9,8 @@ import dub.compilers.compiler; import dub.dependency; +import dub.internal.logging; import dub.internal.sdlang; -import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.inet.path; import dub.recipe.packagerecipe; @@ -64,15 +64,9 @@ // parse general build settings parseBuildSettings(sdl, recipe.buildSettings, full_name); - // determine default target type for configurations - auto defttype = recipe.buildSettings.targetType; - if (defttype == TargetType.autodetect) - defttype = TargetType.library; - // parse configurations recipe.configurations.length = configs.length; foreach (i, n; configs) { - recipe.configurations[i].buildSettings.targetType = defttype; parseConfiguration(n, recipe.configurations[i], full_name); } @@ -193,18 +187,15 @@ auto attrs = t.attributes; if ("path" in attrs) { - if ("version" in attrs) - logDiagnostic("Ignoring version specification (%s) for path based dependency %s", attrs["version"][0].value.get!string, attrs["path"][0].value.get!string); - dep.versionSpec = "*"; - dep.path = NativePath(attrs["path"][0].value.get!string); + dep = Dependency(NativePath(attrs["path"][0].value.get!string)); } else if ("repository" in attrs) { enforceSDL("version" in attrs, "Missing version specification.", t); - dep.repository = Repository(attrs["repository"][0].value.get!string); - dep.versionSpec = attrs["version"][0].value.get!string; + dep = Dependency(Repository(attrs["repository"][0].value.get!string, + attrs["version"][0].value.get!string)); } else { enforceSDL("version" in attrs, "Missing version specification.", t); - dep.versionSpec = attrs["version"][0].value.get!string; + dep = Dependency(attrs["version"][0].value.get!string); } if ("optional" in attrs) @@ -216,8 +207,7 @@ bs.dependencies[pkg] = dep; BuildSettingsTemplate dbs; - parseBuildSettings(t, dbs, package_name); - bs.dependencyBuildSettings[pkg] = dbs; + parseBuildSettings(t, bs.dependencies[pkg].settings, package_name); } private void parseConfiguration(Tag t, ref ConfigurationInfo ret, string package_name) @@ -245,12 +235,12 @@ void add(string name, string value, string namespace = null) { ret ~= new Tag(namespace, name, [Value(value)]); } void adda(string name, string suffix, in string[] values, string namespace = null) { ret ~= new Tag(namespace, name, values[].map!(v => Value(v)).array, - suffix.length ? [new Attribute(null, "platform", Value(suffix[1 .. $]))] : null); + suffix.length ? [new Attribute(null, "platform", Value(suffix))] : null); } void addaa(string name, string suffix, in string[string] values, string namespace = null) { foreach (k, v; values) { ret ~= new Tag(namespace, name, [Value(k), Value(v)], - suffix.length ? [new Attribute(null, "platform", Value(suffix[1 .. $]))] : null); + suffix.length ? 
[new Attribute(null, "platform", Value(suffix))] : null); } } @@ -264,13 +254,22 @@ foreach (pack, d; bs.dependencies) { Attribute[] attribs; - if (!d.repository.empty) attribs ~= new Attribute(null, "repository", Value(d.repository.toString())); - if (!d.path.empty) attribs ~= new Attribute(null, "path", Value(d.path.toString())); - else attribs ~= new Attribute(null, "version", Value(d.versionSpec)); + d.visit!( + (const Repository r) { + attribs ~= new Attribute(null, "repository", Value(r.toString())); + attribs ~= new Attribute(null, "version", Value(r.ref_)); + }, + (const NativePath p) { + attribs ~= new Attribute(null, "path", Value(p.toString())); + }, + (const VersionRange v) { + attribs ~= new Attribute(null, "version", Value(v.toString())); + }, + ); if (d.optional) attribs ~= new Attribute(null, "optional", Value(true)); auto t = new Tag(null, "dependency", [Value(pack)], attribs); - if (pack in bs.dependencyBuildSettings) - t.add(bs.dependencyBuildSettings[pack].toSDL()); + if (d.settings !is typeof(d.settings).init) + t.add(d.settings.toSDL()); ret ~= t; } if (bs.systemDependencies !is null) add("systemDependencies", bs.systemDependencies); @@ -373,7 +372,7 @@ { string platform; if ("platform" in t.attributes) - platform = "-" ~ t.attributes["platform"][0].value.get!string; + platform = t.attributes["platform"][0].value.get!string; dst[platform] ~= t.values.map!(v => v.get!string).array; } private void parsePlatformStringAA(Tag t, ref string[string][string] dst) @@ -381,7 +380,7 @@ import std.string : format; string platform; if ("platform" in t.attributes) - platform = "-" ~ t.attributes["platform"][0].value.get!string; + platform = t.attributes["platform"][0].value.get!string; enforceSDL(t.values.length == 2, format("Values for '%s' must be 2 required.", t.fullName), t); enforceSDL(t.values[0].peek!string !is null, format("Values for '%s' must be strings.", t.fullName), t); enforceSDL(t.values[1].peek!string !is null, format("Values for '%s' must be strings.", t.fullName), t); @@ -392,7 +391,7 @@ { string platform; if ("platform" in t.attributes) - platform = "-" ~ t.attributes["platform"][0].value.get!string; + platform = t.attributes["platform"][0].value.get!string; foreach (v; t.values) { if (platform !in dst) dst[platform] = Es.init; dst[platform] |= v.get!string.to!E; @@ -551,10 +550,9 @@ assert(rec.buildSettings.dependencies.length == 2); assert(rec.buildSettings.dependencies["projectname:subpackage1"].optional == false); assert(rec.buildSettings.dependencies["projectname:subpackage1"].path == NativePath(".")); - assert(rec.buildSettings.dependencyBuildSettings["projectname:subpackage1"].dflags == ["":["-g", "-debug"]]); - assert(rec.buildSettings.dependencies["somedep"].versionSpec == "1.0.0"); + assert(rec.buildSettings.dependencies["projectname:subpackage1"].settings.dflags == ["":["-g", "-debug"]]); + assert(rec.buildSettings.dependencies["somedep"].version_.toString() == "1.0.0"); assert(rec.buildSettings.dependencies["somedep"].optional == true); - assert(rec.buildSettings.dependencies["somedep"].path.empty); assert(rec.buildSettings.systemDependencies == "system dependencies"); assert(rec.buildSettings.targetType == TargetType.executable); assert(rec.buildSettings.targetName == "target name"); @@ -562,8 +560,8 @@ assert(rec.buildSettings.workingDirectory == "working directory"); assert(rec.buildSettings.subConfigurations.length == 1); assert(rec.buildSettings.subConfigurations["projectname:subpackage2"] == "library"); - 
assert(rec.buildSettings.buildRequirements == ["": cast(BuildRequirements)(BuildRequirement.allowWarnings | BuildRequirement.silenceDeprecations)]);
-	assert(rec.buildSettings.buildOptions == ["": cast(BuildOptions)(BuildOption.verbose | BuildOption.ignoreUnknownPragmas)]);
+	assert(rec.buildSettings.buildRequirements == ["": cast(Flags!BuildRequirement)(BuildRequirement.allowWarnings | BuildRequirement.silenceDeprecations)]);
+	assert(rec.buildSettings.buildOptions == ["": cast(Flags!BuildOption)(BuildOption.verbose | BuildOption.ignoreUnknownPragmas)]);
	assert(rec.buildSettings.libs == ["": ["lib1", "lib2", "lib3"]]);
	assert(rec.buildSettings.sourceFiles == ["": ["source1", "source2", "source3"]]);
	assert(rec.buildSettings.sourcePaths == ["": ["sourcepath1", "sourcepath2", "sourcepath3"]]);
@@ -610,9 +608,9 @@
	PackageRecipe rec;
	parseSDL(rec, sdl, null, "testfile");
	assert(rec.buildSettings.dflags.length == 3);
-	assert(rec.buildSettings.dflags["-windows-x86"] == ["-a", "-b", "-c"]);
+	assert(rec.buildSettings.dflags["windows-x86"] == ["-a", "-b", "-c"]);
	assert(rec.buildSettings.dflags[""] == ["-e", "-f", "-g"]);
-	assert(rec.buildSettings.dflags["-linux"] == ["-h", "-i", "-j"]);
+	assert(rec.buildSettings.dflags["linux"] == ["-h", "-i", "-j"]);
}

unittest { // test for missing name field
@@ -642,7 +640,7 @@
	PackageRecipe p;
	p.name = "test";
	p.authors = ["foo", "bar"];
-	p.buildSettings.dflags["-windows"] = ["-a"];
+	p.buildSettings.dflags["windows"] = ["-a"];
	p.buildSettings.lflags[""] = ["-b", "-c"];
	auto sdl = toSDL(p).toSDLDocument();
	assert(sdl ==
@@ -669,15 +667,15 @@
	parseSDL(rec, sdl, null, "testfile");
	auto dependency = rec.buildSettings.dependencies["package"];
	assert(!dependency.repository.empty);
-	assert(dependency.versionSpec == "12345678");
+	assert(dependency.repository.ref_ == "12345678");
}

unittest {
	PackageRecipe p;
	p.name = "test";
-	auto repository = Repository("git+https://some.url");
-	p.buildSettings.dependencies["package"] = Dependency(repository, "12345678");
+	auto repository = Repository("git+https://some.url", "12345678");
+	p.buildSettings.dependencies["package"] = Dependency(repository);
	auto sdl = toSDL(p).toSDLDocument();
	assert(sdl ==
`name "test"
diff --git a/source/dub/recipe/selection.d b/source/dub/recipe/selection.d
new file mode 100644
index 0000000..ced50e4
--- /dev/null
+++ b/source/dub/recipe/selection.d
@@ -0,0 +1,109 @@
+/**
+ * Contains type definitions for `dub.selections.json`
+ */
+module dub.recipe.selection;
+
+import dub.dependency;
+import dub.internal.vibecompat.core.file : NativePath;
+
+import configy.Attributes;
+
+import std.exception;
+
+public struct Selected
+{
+	/// The current version of the file format
+	public uint fileVersion;
+
+	/// The selected packages and their matching versions
+	public SelectedDependency[string] versions;
+}
+
+
+/// Wrapper around `Dependency` to do deserialization but still provide
+/// a `Dependency` object to client code. 
+private struct SelectedDependency +{ + public Dependency actual; + alias actual this; + + /// Constructor, used in `fromYAML` + public this (inout(Dependency) dep) inout @safe pure nothrow @nogc + { + this.actual = dep; + } + + /// Allow external code to assign to this object as if it was a `Dependency` + public ref SelectedDependency opAssign (Dependency dep) return pure nothrow @nogc + { + this.actual = dep; + return this; + } + + /// Read a `Dependency` from the config file - Required to support both short and long form + static SelectedDependency fromYAML (scope ConfigParser!SelectedDependency p) + { + import dyaml.node; + + if (p.node.nodeID == NodeID.scalar) + return SelectedDependency(Dependency(Version(p.node.as!string))); + + auto d = p.parseAs!YAMLFormat; + if (d.path.length) + return SelectedDependency(Dependency(NativePath(d.path))); + else + { + assert(d.version_.length); + if (d.repository.length) + return SelectedDependency(Dependency(Repository(d.repository, d.version_))); + return SelectedDependency(Dependency(Version(d.version_))); + } + } + + /// In-file representation of a dependency as permitted in `dub.selections.json` + private struct YAMLFormat + { + @Optional @Name("version") string version_; + @Optional string path; + @Optional string repository; + + public void validate () const scope @safe pure + { + enforce(this.version_.length || this.path.length || this.repository.length, + "Need to provide a version string, or an object with one of the following fields: `version`, `path`, or `repository`"); + enforce(!this.path.length || !this.repository.length, + "Cannot provide a `path` dependency if a repository dependency is used"); + enforce(!this.path.length || !this.version_.length, + "Cannot provide a `path` dependency if a `version` dependency is used"); + enforce(!this.repository.length || this.version_.length, + "Cannot provide a `repository` dependency without a `version`"); + } + } +} + +// Ensure we can read all type of dependencies +unittest +{ + import configy.Read : parseConfigString; + import dub.internal.vibecompat.core.file : NativePath; + + immutable string content = `{ + "fileVersion": 1, + "versions": { + "simple": "1.5.6", + "branch": "~master", + "branch2": "~main", + "path": { "path": "../some/where" }, + "repository": { "repository": "git+https://github.com/dlang/dub", "version": "123456123456123456" } + } +}`; + + auto s = parseConfigString!Selected(content, "/dev/null"); + assert(s.fileVersion == 1); + assert(s.versions.length == 5); + assert(s.versions["simple"] == Dependency(Version("1.5.6"))); + assert(s.versions["branch"] == Dependency(Version("~master"))); + assert(s.versions["branch2"] == Dependency(Version("~main"))); + assert(s.versions["path"] == Dependency(NativePath("../some/where"))); + assert(s.versions["repository"] == Dependency(Repository("git+https://github.com/dlang/dub", "123456123456123456"))); +} diff --git a/source/dub/semver.d b/source/dub/semver.d index c04a9c5..674b324 100644 --- a/source/dub/semver.d +++ b/source/dub/semver.d @@ -24,7 +24,7 @@ /** Validates a version string according to the SemVer specification. */ -bool isValidVersion(string ver) +bool isValidVersion(scope string ver) pure @nogc { // NOTE: this is not by spec, but to ensure sane input if (ver.length > 256) return false; @@ -101,7 +101,7 @@ /** Determines if a given valid SemVer version has a pre-release suffix. 
*/ -bool isPreReleaseVersion(string ver) pure @nogc +bool isPreReleaseVersion(scope string ver) pure @nogc nothrow in { assert(isValidVersion(ver)); } do { foreach (i; 0 .. 2) { @@ -134,16 +134,30 @@ Returns a negative number if `a` is a lower version than `b`, `0` if they are equal, and a positive number otherwise. */ -int compareVersions(string a, string b) +int compareVersions(scope string a, scope string b) pure @nogc { + // This needs to be a nested function as we can't pass local scope + // variables by `ref` + int compareNumber() @safe pure @nogc { + int res = 0; + while (true) { + if (a[0] != b[0] && res == 0) res = a[0] - b[0]; + a = a[1 .. $]; b = b[1 .. $]; + auto aempty = !a.length || (a[0] < '0' || a[0] > '9'); + auto bempty = !b.length || (b[0] < '0' || b[0] > '9'); + if (aempty != bempty) return bempty - aempty; + if (aempty) return res; + } + } + // compare a.b.c numerically - if (auto ret = compareNumber(a, b)) return ret; + if (auto ret = compareNumber()) return ret; assert(a[0] == '.' && b[0] == '.'); a = a[1 .. $]; b = b[1 .. $]; - if (auto ret = compareNumber(a, b)) return ret; + if (auto ret = compareNumber()) return ret; assert(a[0] == '.' && b[0] == '.'); a = a[1 .. $]; b = b[1 .. $]; - if (auto ret = compareNumber(a, b)) return ret; + if (auto ret = compareNumber()) return ret; // give precedence to non-prerelease versions bool apre = a.length > 0 && a[0] == '-'; @@ -314,7 +328,7 @@ assert("1.0.0-pre.release+meta" == expandVersion("1-pre.release+meta")); } -private int compareIdentifier(ref string a, ref string b) +private int compareIdentifier(scope ref string a, scope ref string b) pure @nogc { bool anumber = true; bool bnumber = true; @@ -344,20 +358,7 @@ } } -private int compareNumber(ref string a, ref string b) -pure @nogc { - int res = 0; - while (true) { - if (a[0] != b[0] && res == 0) res = a[0] - b[0]; - a = a[1 .. $]; b = b[1 .. $]; - auto aempty = !a.length || (a[0] < '0' || a[0] > '9'); - auto bempty = !b.length || (b[0] < '0' || b[0] > '9'); - if (aempty != bempty) return bempty - aempty; - if (aempty) return res; - } -} - -private bool isValidIdentifierChain(string str, bool allow_leading_zeros = false) +private bool isValidIdentifierChain(scope string str, bool allow_leading_zeros = false) pure @nogc { if (str.length == 0) return false; while (str.length) { @@ -370,7 +371,7 @@ return true; } -private bool isValidIdentifier(string str, bool allow_leading_zeros = false) +private bool isValidIdentifier(scope string str, bool allow_leading_zeros = false) pure @nogc { if (str.length < 1) return false; @@ -394,7 +395,7 @@ } private bool isValidNumber(string str) -pure @nogc { +pure @nogc nothrow { if (str.length < 1) return false; foreach (ch; str) if (ch < '0' || ch > '9') @@ -406,7 +407,7 @@ return true; } -private ptrdiff_t indexOfAny(string str, in char[] chars) +private ptrdiff_t indexOfAny(scope string str, in char[] chars) pure @nogc { ptrdiff_t ret = -1; foreach (ch; chars) { diff --git a/source/dyaml/composer.d b/source/dyaml/composer.d new file mode 100644 index 0000000..e7b083a --- /dev/null +++ b/source/dyaml/composer.d @@ -0,0 +1,401 @@ + +// Copyright Ferdinand Majerech 2011. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +/** + * Composes nodes from YAML events provided by parser. 
+ * Code based on PyYAML: http://www.pyyaml.org + */ +module dyaml.composer; + +import core.memory; + +import std.algorithm; +import std.array; +import std.conv; +import std.exception; +import std.format; +import std.range; +import std.typecons; + +import dyaml.constructor; +import dyaml.event; +import dyaml.exception; +import dyaml.node; +import dyaml.parser; +import dyaml.resolver; + + +package: +/** + * Exception thrown at composer errors. + * + * See_Also: MarkedYAMLException + */ +class ComposerException : MarkedYAMLException +{ + mixin MarkedExceptionCtors; +} + +///Composes YAML documents from events provided by a Parser. +struct Composer +{ + private: + ///Parser providing YAML events. + Parser parser_; + ///Resolver resolving tags (data types). + Resolver resolver_; + ///Nodes associated with anchors. Used by YAML aliases. + Node[string] anchors_; + + ///Used to reduce allocations when creating pair arrays. + /// + ///We need one appender for each nesting level that involves + ///a pair array, as the inner levels are processed as a + ///part of the outer levels. Used as a stack. + Appender!(Node.Pair[])[] pairAppenders_; + ///Used to reduce allocations when creating node arrays. + /// + ///We need one appender for each nesting level that involves + ///a node array, as the inner levels are processed as a + ///part of the outer levels. Used as a stack. + Appender!(Node[])[] nodeAppenders_; + + public: + /** + * Construct a composer. + * + * Params: parser = Parser to provide YAML events. + * resolver = Resolver to resolve tags (data types). + */ + this(Parser parser, Resolver resolver) @safe + { + parser_ = parser; + resolver_ = resolver; + } + + /** + * Determine if there are any nodes left. + * + * Must be called before loading as it handles the stream start event. + */ + bool checkNode() @safe + { + // If next event is stream start, skip it + parser_.skipOver!"a.id == b"(EventID.streamStart); + + //True if there are more documents available. + return parser_.front.id != EventID.streamEnd; + } + + ///Get a YAML document as a node (the root of the document). + Node getNode() @safe + { + //Get the root node of the next document. + assert(parser_.front.id != EventID.streamEnd, + "Trying to get a node from Composer when there is no node to " ~ + "get. use checkNode() to determine if there is a node."); + + return composeDocument(); + } + + private: + + void skipExpected(const EventID id) @safe + { + const foundExpected = parser_.skipOver!"a.id == b"(id); + assert(foundExpected, text("Expected ", id, " not found.")); + } + ///Ensure that appenders for specified nesting levels exist. + /// + ///Params: pairAppenderLevel = Current level in the pair appender stack. + /// nodeAppenderLevel = Current level the node appender stack. + void ensureAppendersExist(const uint pairAppenderLevel, const uint nodeAppenderLevel) + @safe + { + while(pairAppenders_.length <= pairAppenderLevel) + { + pairAppenders_ ~= appender!(Node.Pair[])(); + } + while(nodeAppenders_.length <= nodeAppenderLevel) + { + nodeAppenders_ ~= appender!(Node[])(); + } + } + + ///Compose a YAML document and return its root node. + Node composeDocument() @safe + { + skipExpected(EventID.documentStart); + + //Compose the root node. + Node node = composeNode(0, 0); + + skipExpected(EventID.documentEnd); + + anchors_.destroy(); + return node; + } + + /// Compose a node. + /// + /// Params: pairAppenderLevel = Current level of the pair appender stack. + /// nodeAppenderLevel = Current level of the node appender stack. 
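+    ///
+    /// Note: while a node is being composed, its anchor (if any) maps to an
+    /// uninitialized `Node()` in `anchors_`; this placeholder is what the
+    /// duplicate- and recursive-alias checks below test against.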
+ Node composeNode(const uint pairAppenderLevel, const uint nodeAppenderLevel) @safe + { + if(parser_.front.id == EventID.alias_) + { + const event = parser_.front; + parser_.popFront(); + const anchor = event.anchor; + enforce((anchor in anchors_) !is null, + new ComposerException("Found undefined alias: " ~ anchor, + event.startMark)); + + //If the node referenced by the anchor is uninitialized, + //it's not finished, i.e. we're currently composing it + //and trying to use it recursively here. + enforce(anchors_[anchor] != Node(), + new ComposerException("Found recursive alias: " ~ anchor, + event.startMark)); + + return anchors_[anchor]; + } + + const event = parser_.front; + const anchor = event.anchor; + if((anchor !is null) && (anchor in anchors_) !is null) + { + throw new ComposerException("Found duplicate anchor: " ~ anchor, + event.startMark); + } + + Node result; + //Associate the anchor, if any, with an uninitialized node. + //used to detect duplicate and recursive anchors. + if(anchor !is null) + { + anchors_[anchor] = Node(); + } + + switch (parser_.front.id) + { + case EventID.scalar: + result = composeScalarNode(); + break; + case EventID.sequenceStart: + result = composeSequenceNode(pairAppenderLevel, nodeAppenderLevel); + break; + case EventID.mappingStart: + result = composeMappingNode(pairAppenderLevel, nodeAppenderLevel); + break; + default: assert(false, "This code should never be reached"); + } + + if(anchor !is null) + { + anchors_[anchor] = result; + } + return result; + } + + ///Compose a scalar node. + Node composeScalarNode() @safe + { + const event = parser_.front; + parser_.popFront(); + const tag = resolver_.resolve(NodeID.scalar, event.tag, event.value, + event.implicit); + + Node node = constructNode(event.startMark, event.endMark, tag, + event.value); + node.scalarStyle = event.scalarStyle; + + return node; + } + + /// Compose a sequence node. + /// + /// Params: pairAppenderLevel = Current level of the pair appender stack. + /// nodeAppenderLevel = Current level of the node appender stack. + Node composeSequenceNode(const uint pairAppenderLevel, const uint nodeAppenderLevel) + @safe + { + ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel); + auto nodeAppender = &(nodeAppenders_[nodeAppenderLevel]); + + const startEvent = parser_.front; + parser_.popFront(); + const tag = resolver_.resolve(NodeID.sequence, startEvent.tag, null, + startEvent.implicit); + + while(parser_.front.id != EventID.sequenceEnd) + { + nodeAppender.put(composeNode(pairAppenderLevel, nodeAppenderLevel + 1)); + } + + Node node = constructNode(startEvent.startMark, parser_.front.endMark, + tag, nodeAppender.data.dup); + node.collectionStyle = startEvent.collectionStyle; + parser_.popFront(); + nodeAppender.clear(); + + return node; + } + + /** + * Flatten a node, merging it with nodes referenced through YAMLMerge data type. + * + * Node must be a mapping or a sequence of mappings. + * + * Params: root = Node to flatten. + * startMark = Start position of the node. + * endMark = End position of the node. + * pairAppenderLevel = Current level of the pair appender stack. + * nodeAppenderLevel = Current level of the node appender stack. + * + * Returns: Flattened mapping as pairs. + */ + Node.Pair[] flatten(ref Node root, const Mark startMark, const Mark endMark, + const uint pairAppenderLevel, const uint nodeAppenderLevel) @safe + { + void error(Node node) + { + //this is Composer, but the code is related to Constructor. 
+ throw new ConstructorException("While constructing a mapping, " ~ + "expected a mapping or a list of " ~ + "mappings for merging, but found: " ~ + text(node.type) ~ + " NOTE: line/column shows topmost parent " ~ + "to which the content is being merged", + startMark, endMark); + } + + ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel); + auto pairAppender = &(pairAppenders_[pairAppenderLevel]); + + final switch (root.nodeID) + { + case NodeID.mapping: + Node[] toMerge; + toMerge.reserve(root.length); + foreach (ref Node key, ref Node value; root) + { + if(key.type == NodeType.merge) + { + toMerge ~= value; + } + else + { + auto temp = Node.Pair(key, value); + pairAppender.put(temp); + } + } + foreach (node; toMerge) + { + pairAppender.put(flatten(node, startMark, endMark, + pairAppenderLevel + 1, nodeAppenderLevel)); + } + break; + case NodeID.sequence: + foreach (ref Node node; root) + { + if (node.nodeID != NodeID.mapping) + { + error(node); + } + pairAppender.put(flatten(node, startMark, endMark, + pairAppenderLevel + 1, nodeAppenderLevel)); + } + break; + case NodeID.scalar: + case NodeID.invalid: + error(root); + break; + } + + auto flattened = pairAppender.data.dup; + pairAppender.clear(); + + return flattened; + } + + /// Compose a mapping node. + /// + /// Params: pairAppenderLevel = Current level of the pair appender stack. + /// nodeAppenderLevel = Current level of the node appender stack. + Node composeMappingNode(const uint pairAppenderLevel, const uint nodeAppenderLevel) + @safe + { + ensureAppendersExist(pairAppenderLevel, nodeAppenderLevel); + const startEvent = parser_.front; + parser_.popFront(); + const tag = resolver_.resolve(NodeID.mapping, startEvent.tag, null, + startEvent.implicit); + auto pairAppender = &(pairAppenders_[pairAppenderLevel]); + + Tuple!(Node, Mark)[] toMerge; + while(parser_.front.id != EventID.mappingEnd) + { + auto pair = Node.Pair(composeNode(pairAppenderLevel + 1, nodeAppenderLevel), + composeNode(pairAppenderLevel + 1, nodeAppenderLevel)); + + //Need to flatten and merge the node referred by YAMLMerge. + if(pair.key.type == NodeType.merge) + { + toMerge ~= tuple(pair.value, cast(Mark)parser_.front.endMark); + } + //Not YAMLMerge, just add the pair. + else + { + pairAppender.put(pair); + } + } + foreach(node; toMerge) + { + merge(*pairAppender, flatten(node[0], startEvent.startMark, node[1], + pairAppenderLevel + 1, nodeAppenderLevel)); + } + + auto sorted = pairAppender.data.dup.sort!((x,y) => x.key > y.key); + if (sorted.length) { + foreach (index, const ref value; sorted[0 .. 
$ - 1].enumerate) + if (value.key == sorted[index + 1].key) { + const message = () @trusted { + return format("Key '%s' appears multiple times in mapping (first: %s)", + value.key.get!string, value.key.startMark); + }(); + throw new ComposerException(message, sorted[index + 1].key.startMark); + } + } + + Node node = constructNode(startEvent.startMark, parser_.front.endMark, + tag, pairAppender.data.dup); + node.collectionStyle = startEvent.collectionStyle; + parser_.popFront(); + + pairAppender.clear(); + return node; + } +} + +// Provide good error message on multiple keys (which JSON supports) +@safe unittest +{ + import dyaml.loader : Loader; + + const str = `{ + "comment": "This is a common technique", + "name": "foobar", + "comment": "To write down comments pre-JSON5" +}`; + + try + auto node = Loader.fromString(str).load(); + catch (ComposerException exc) + assert(exc.message() == + "Key 'comment' appears multiple times in mapping " ~ + "(first: file ,line 2,column 5)\nfile ,line 4,column 5"); +} diff --git a/source/dyaml/constructor.d b/source/dyaml/constructor.d new file mode 100644 index 0000000..4cd1546 --- /dev/null +++ b/source/dyaml/constructor.d @@ -0,0 +1,611 @@ + +// Copyright Ferdinand Majerech 2011. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +/** + * Class that processes YAML mappings, sequences and scalars into nodes. + * This can be used to add custom data types. A tutorial can be found + * $(LINK2 https://dlang-community.github.io/D-YAML/, here). + */ +module dyaml.constructor; + + +import std.array; +import std.algorithm; +import std.base64; +import std.container; +import std.conv; +import std.datetime; +import std.exception; +import std.regex; +import std.string; +import std.typecons; +import std.utf; + +import dyaml.node; +import dyaml.exception; +import dyaml.style; + +package: + +// Exception thrown at constructor errors. +class ConstructorException : YAMLException +{ + /// Construct a ConstructorException. + /// + /// Params: msg = Error message. + /// start = Start position of the error context. + /// end = End position of the error context. + this(string msg, Mark start, Mark end, string file = __FILE__, size_t line = __LINE__) + @safe pure nothrow + { + super(msg ~ "\nstart: " ~ start.toString() ~ "\nend: " ~ end.toString(), + file, line); + } +} + +/** Constructs YAML values. + * + * Each YAML scalar, sequence or mapping has a tag specifying its data type. + * Constructor uses user-specifyable functions to create a node of desired + * data type from a scalar, sequence or mapping. + * + * + * Each of these functions is associated with a tag, and can process either + * a scalar, a sequence, or a mapping. The constructor passes each value to + * the function with corresponding tag, which then returns the resulting value + * that can be stored in a node. + * + * If a tag is detected with no known constructor function, it is considered an error. + */ +/* + * Construct a node. + * + * Params: start = Start position of the node. + * end = End position of the node. + * tag = Tag (data type) of the node. + * value = Value to construct node from (string, nodes or pairs). + * style = Style of the node (scalar or collection style). + * + * Returns: Constructed node. 
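+ *
+ * As a sketch: constructing from the scalar `"42"` with tag
+ * `tag:yaml.org,2002:int` goes through `constructLong` and yields a node
+ * holding `42L`; handing that tag a sequence instead is rejected
+ * ("Only scalars can be ints") and surfaces as a `ConstructorException`.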
+ */
+Node constructNode(T)(const Mark start, const Mark end, const string tag,
+                T value) @safe
+    if((is(T : string) || is(T == Node[]) || is(T == Node.Pair[])))
+{
+    Node newNode;
+    try
+    {
+        switch(tag)
+        {
+            case "tag:yaml.org,2002:null":
+                newNode = Node(YAMLNull(), tag);
+                break;
+            case "tag:yaml.org,2002:bool":
+                static if(is(T == string))
+                {
+                    newNode = Node(constructBool(value), tag);
+                    break;
+                }
+                else throw new Exception("Only scalars can be bools");
+            case "tag:yaml.org,2002:int":
+                static if(is(T == string))
+                {
+                    newNode = Node(constructLong(value), tag);
+                    break;
+                }
+                else throw new Exception("Only scalars can be ints");
+            case "tag:yaml.org,2002:float":
+                static if(is(T == string))
+                {
+                    newNode = Node(constructReal(value), tag);
+                    break;
+                }
+                else throw new Exception("Only scalars can be floats");
+            case "tag:yaml.org,2002:binary":
+                static if(is(T == string))
+                {
+                    newNode = Node(constructBinary(value), tag);
+                    break;
+                }
+                else throw new Exception("Only scalars can be binary data");
+            case "tag:yaml.org,2002:timestamp":
+                static if(is(T == string))
+                {
+                    newNode = Node(constructTimestamp(value), tag);
+                    break;
+                }
+                else throw new Exception("Only scalars can be timestamps");
+            case "tag:yaml.org,2002:str":
+                static if(is(T == string))
+                {
+                    newNode = Node(constructString(value), tag);
+                    break;
+                }
+                else throw new Exception("Only scalars can be strings");
+            case "tag:yaml.org,2002:value":
+                static if(is(T == string))
+                {
+                    newNode = Node(constructString(value), tag);
+                    break;
+                }
+                else throw new Exception("Only scalars can be values");
+            case "tag:yaml.org,2002:omap":
+                static if(is(T == Node[]))
+                {
+                    newNode = Node(constructOrderedMap(value), tag);
+                    break;
+                }
+                else throw new Exception("Only sequences can be ordered maps");
+            case "tag:yaml.org,2002:pairs":
+                static if(is(T == Node[]))
+                {
+                    newNode = Node(constructPairs(value), tag);
+                    break;
+                }
+                else throw new Exception("Only sequences can be pairs");
+            case "tag:yaml.org,2002:set":
+                static if(is(T == Node.Pair[]))
+                {
+                    newNode = Node(constructSet(value), tag);
+                    break;
+                }
+                else throw new Exception("Only mappings can be sets");
+            case "tag:yaml.org,2002:seq":
+                static if(is(T == Node[]))
+                {
+                    newNode = Node(constructSequence(value), tag);
+                    break;
+                }
+                else throw new Exception("Only sequences can be sequences");
+            case "tag:yaml.org,2002:map":
+                static if(is(T == Node.Pair[]))
+                {
+                    newNode = Node(constructMap(value), tag);
+                    break;
+                }
+                else throw new Exception("Only mappings can be maps");
+            case "tag:yaml.org,2002:merge":
+                newNode = Node(YAMLMerge(), tag);
+                break;
+            default:
+                newNode = Node(value, tag);
+                break;
+        }
+    }
+    catch(Exception e)
+    {
+        throw new ConstructorException("Error constructing " ~ typeid(T).toString()
+                        ~ ":\n" ~ e.msg, start, end);
+    }
+
+    newNode.startMark_ = start;
+
+    return newNode;
+}
+
+private:
+// Construct a boolean _node.
+bool constructBool(const string str) @safe
+{
+    string value = str.toLower();
+    if(value.among!("yes", "true", "on")){return true;}
+    if(value.among!("no", "false", "off")){return false;}
+    throw new Exception("Unable to parse boolean value: " ~ value);
+}
+
+// Construct an integer (long) _node.
+long constructLong(const string str) @safe
+{
+    string value = str.replace("_", "");
+    const char c = value[0];
+    const long sign = c != '-' ? 1 : -1;
+    if(c == '-' || c == '+')
+    {
+        value = value[1 .. $];
+    }
+
+    enforce(value != "", new Exception("Unable to parse integer value: " ~ value));
+
+    long result;
+    try
+    {
+        //Zero.
+ if(value == "0") {result = cast(long)0;} + //Binary. + else if(value.startsWith("0b")){result = sign * to!int(value[2 .. $], 2);} + //Hexadecimal. + else if(value.startsWith("0x")){result = sign * to!int(value[2 .. $], 16);} + //Octal. + else if(value[0] == '0') {result = sign * to!int(value, 8);} + //Sexagesimal. + else if(value.canFind(":")) + { + long val; + long base = 1; + foreach_reverse(digit; value.split(":")) + { + val += to!long(digit) * base; + base *= 60; + } + result = sign * val; + } + //Decimal. + else{result = sign * to!long(value);} + } + catch(ConvException e) + { + throw new Exception("Unable to parse integer value: " ~ value); + } + + return result; +} +@safe unittest +{ + string canonical = "685230"; + string decimal = "+685_230"; + string octal = "02472256"; + string hexadecimal = "0x_0A_74_AE"; + string binary = "0b1010_0111_0100_1010_1110"; + string sexagesimal = "190:20:30"; + + assert(685230 == constructLong(canonical)); + assert(685230 == constructLong(decimal)); + assert(685230 == constructLong(octal)); + assert(685230 == constructLong(hexadecimal)); + assert(685230 == constructLong(binary)); + assert(685230 == constructLong(sexagesimal)); +} + +// Construct a floating point (real) _node. +real constructReal(const string str) @safe +{ + string value = str.replace("_", "").toLower(); + const char c = value[0]; + const real sign = c != '-' ? 1.0 : -1.0; + if(c == '-' || c == '+') + { + value = value[1 .. $]; + } + + enforce(value != "" && value != "nan" && value != "inf" && value != "-inf", + new Exception("Unable to parse float value: " ~ value)); + + real result; + try + { + //Infinity. + if (value == ".inf"){result = sign * real.infinity;} + //Not a Number. + else if(value == ".nan"){result = real.nan;} + //Sexagesimal. + else if(value.canFind(":")) + { + real val = 0.0; + real base = 1.0; + foreach_reverse(digit; value.split(":")) + { + val += to!real(digit) * base; + base *= 60.0; + } + result = sign * val; + } + //Plain floating point. + else{result = sign * to!real(value);} + } + catch(ConvException e) + { + throw new Exception("Unable to parse float value: \"" ~ value ~ "\""); + } + + return result; +} +@safe unittest +{ + bool eq(real a, real b, real epsilon = 0.2) @safe + { + return a >= (b - epsilon) && a <= (b + epsilon); + } + + string canonical = "6.8523015e+5"; + string exponential = "685.230_15e+03"; + string fixed = "685_230.15"; + string sexagesimal = "190:20:30.15"; + string negativeInf = "-.inf"; + string NaN = ".NaN"; + + assert(eq(685230.15, constructReal(canonical))); + assert(eq(685230.15, constructReal(exponential))); + assert(eq(685230.15, constructReal(fixed))); + assert(eq(685230.15, constructReal(sexagesimal))); + assert(eq(-real.infinity, constructReal(negativeInf))); + assert(to!string(constructReal(NaN)) == "nan"); +} + +// Construct a binary (base64) _node. +ubyte[] constructBinary(const string value) @safe +{ + import std.ascii : newline; + import std.array : array; + + // For an unknown reason, this must be nested to work (compiler bug?). 
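+    // (The filter below strips newline characters first, since base64 content
+    // in YAML may be wrapped across several lines.)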
+ try + { + return Base64.decode(value.representation.filter!(c => !newline.canFind(c)).array); + } + catch(Base64Exception e) + { + throw new Exception("Unable to decode base64 value: " ~ e.msg); + } +} + +@safe unittest +{ + auto test = "The Answer: 42".representation; + char[] buffer; + buffer.length = 256; + string input = Base64.encode(test, buffer).idup; + const value = constructBinary(input); + assert(value == test); + assert(value == [84, 104, 101, 32, 65, 110, 115, 119, 101, 114, 58, 32, 52, 50]); +} + +// Construct a timestamp (SysTime) _node. +SysTime constructTimestamp(const string str) @safe +{ + string value = str; + + auto YMDRegexp = regex("^([0-9][0-9][0-9][0-9])-([0-9][0-9]?)-([0-9][0-9]?)"); + auto HMSRegexp = regex("^[Tt \t]+([0-9][0-9]?):([0-9][0-9]):([0-9][0-9])(\\.[0-9]*)?"); + auto TZRegexp = regex("^[ \t]*Z|([-+][0-9][0-9]?)(:[0-9][0-9])?"); + + try + { + // First, get year, month and day. + auto matches = match(value, YMDRegexp); + + enforce(!matches.empty, + new Exception("Unable to parse timestamp value: " ~ value)); + + auto captures = matches.front.captures; + const year = to!int(captures[1]); + const month = to!int(captures[2]); + const day = to!int(captures[3]); + + // If available, get hour, minute, second and fraction, if present. + value = matches.front.post; + matches = match(value, HMSRegexp); + if(matches.empty) + { + return SysTime(DateTime(year, month, day), UTC()); + } + + captures = matches.front.captures; + const hour = to!int(captures[1]); + const minute = to!int(captures[2]); + const second = to!int(captures[3]); + const hectonanosecond = cast(int)(to!real("0" ~ captures[4]) * 10_000_000); + + // If available, get timezone. + value = matches.front.post; + matches = match(value, TZRegexp); + if(matches.empty || matches.front.captures[0] == "Z") + { + // No timezone. + return SysTime(DateTime(year, month, day, hour, minute, second), + hectonanosecond.dur!"hnsecs", UTC()); + } + + // We have a timezone, so parse it. + captures = matches.front.captures; + int sign = 1; + int tzHours; + if(!captures[1].empty) + { + if(captures[1][0] == '-') {sign = -1;} + tzHours = to!int(captures[1][1 .. $]); + } + const tzMinutes = (!captures[2].empty) ? to!int(captures[2][1 .. 
$]) : 0; + const tzOffset = dur!"minutes"(sign * (60 * tzHours + tzMinutes)); + + return SysTime(DateTime(year, month, day, hour, minute, second), + hectonanosecond.dur!"hnsecs", + new immutable SimpleTimeZone(tzOffset)); + } + catch(ConvException e) + { + throw new Exception("Unable to parse timestamp value " ~ value ~ " : " ~ e.msg); + } + catch(DateTimeException e) + { + throw new Exception("Invalid timestamp value " ~ value ~ " : " ~ e.msg); + } + + assert(false, "This code should never be reached"); +} +@safe unittest +{ + string timestamp(string value) + { + return constructTimestamp(value).toISOString(); + } + + string canonical = "2001-12-15T02:59:43.1Z"; + string iso8601 = "2001-12-14t21:59:43.10-05:00"; + string spaceSeparated = "2001-12-14 21:59:43.10 -5"; + string noTZ = "2001-12-15 2:59:43.10"; + string noFraction = "2001-12-15 2:59:43"; + string ymd = "2002-12-14"; + + assert(timestamp(canonical) == "20011215T025943.1Z"); + //avoiding float conversion errors + assert(timestamp(iso8601) == "20011214T215943.0999999-05:00" || + timestamp(iso8601) == "20011214T215943.1-05:00"); + assert(timestamp(spaceSeparated) == "20011214T215943.0999999-05:00" || + timestamp(spaceSeparated) == "20011214T215943.1-05:00"); + assert(timestamp(noTZ) == "20011215T025943.0999999Z" || + timestamp(noTZ) == "20011215T025943.1Z"); + assert(timestamp(noFraction) == "20011215T025943Z"); + assert(timestamp(ymd) == "20021214T000000Z"); +} + +// Construct a string _node. +string constructString(const string str) @safe +{ + return str; +} + +// Convert a sequence of single-element mappings into a sequence of pairs. +Node.Pair[] getPairs(string type, const Node[] nodes) @safe +{ + Node.Pair[] pairs; + pairs.reserve(nodes.length); + foreach(node; nodes) + { + enforce(node.nodeID == NodeID.mapping && node.length == 1, + new Exception("While constructing " ~ type ~ + ", expected a mapping with single element")); + + pairs ~= node.as!(Node.Pair[]); + } + + return pairs; +} + +// Construct an ordered map (ordered sequence of key:value pairs without duplicates) _node. +Node.Pair[] constructOrderedMap(const Node[] nodes) @safe +{ + auto pairs = getPairs("ordered map", nodes); + + //Detect duplicates. + //TODO this should be replaced by something with deterministic memory allocation. + auto keys = new RedBlackTree!Node(); + foreach(ref pair; pairs) + { + enforce(!(pair.key in keys), + new Exception("Duplicate entry in an ordered map: " + ~ pair.key.debugString())); + keys.insert(pair.key); + } + return pairs; +} +@safe unittest +{ + Node[] alternateTypes(uint length) @safe + { + Node[] pairs; + foreach(long i; 0 .. length) + { + auto pair = (i % 2) ? Node.Pair(i.to!string, i) : Node.Pair(i, i.to!string); + pairs ~= Node([pair]); + } + return pairs; + } + + Node[] sameType(uint length) @safe + { + Node[] pairs; + foreach(long i; 0 .. length) + { + auto pair = Node.Pair(i.to!string, i); + pairs ~= Node([pair]); + } + return pairs; + } + + assertThrown(constructOrderedMap(alternateTypes(8) ~ alternateTypes(2))); + assertNotThrown(constructOrderedMap(alternateTypes(8))); + assertThrown(constructOrderedMap(sameType(64) ~ sameType(16))); + assertThrown(constructOrderedMap(alternateTypes(64) ~ alternateTypes(16))); + assertNotThrown(constructOrderedMap(sameType(64))); + assertNotThrown(constructOrderedMap(alternateTypes(64))); +} + +// Construct a pairs (ordered sequence of key: value pairs allowing duplicates) _node. 
+Node.Pair[] constructPairs(const Node[] nodes) @safe +{ + return getPairs("pairs", nodes); +} + +// Construct a set _node. +Node[] constructSet(const Node.Pair[] pairs) @safe +{ + // In future, the map here should be replaced with something with deterministic + // memory allocation if possible. + // Detect duplicates. + ubyte[Node] map; + Node[] nodes; + nodes.reserve(pairs.length); + foreach(pair; pairs) + { + enforce((pair.key in map) is null, new Exception("Duplicate entry in a set")); + map[pair.key] = 0; + nodes ~= pair.key; + } + + return nodes; +} +@safe unittest +{ + Node.Pair[] set(uint length) @safe + { + Node.Pair[] pairs; + foreach(long i; 0 .. length) + { + pairs ~= Node.Pair(i.to!string, YAMLNull()); + } + + return pairs; + } + + auto DuplicatesShort = set(8) ~ set(2); + auto noDuplicatesShort = set(8); + auto DuplicatesLong = set(64) ~ set(4); + auto noDuplicatesLong = set(64); + + bool eq(Node.Pair[] a, Node[] b) + { + if(a.length != b.length){return false;} + foreach(i; 0 .. a.length) + { + if(a[i].key != b[i]) + { + return false; + } + } + return true; + } + + auto nodeDuplicatesShort = DuplicatesShort.dup; + auto nodeNoDuplicatesShort = noDuplicatesShort.dup; + auto nodeDuplicatesLong = DuplicatesLong.dup; + auto nodeNoDuplicatesLong = noDuplicatesLong.dup; + + assertThrown(constructSet(nodeDuplicatesShort)); + assertNotThrown(constructSet(nodeNoDuplicatesShort)); + assertThrown(constructSet(nodeDuplicatesLong)); + assertNotThrown(constructSet(nodeNoDuplicatesLong)); +} + +// Construct a sequence (array) _node. +Node[] constructSequence(Node[] nodes) @safe +{ + return nodes; +} + +// Construct an unordered map (unordered set of key:value _pairs without duplicates) _node. +Node.Pair[] constructMap(Node.Pair[] pairs) @safe +{ + //Detect duplicates. + //TODO this should be replaced by something with deterministic memory allocation. + auto keys = new RedBlackTree!Node(); + foreach(ref pair; pairs) + { + enforce(!(pair.key in keys), + new Exception("Duplicate entry in a map: " ~ pair.key.debugString())); + keys.insert(pair.key); + } + return pairs; +} diff --git a/source/dyaml/dumper.d b/source/dyaml/dumper.d new file mode 100644 index 0000000..03d3620 --- /dev/null +++ b/source/dyaml/dumper.d @@ -0,0 +1,298 @@ + +// Copyright Ferdinand Majerech 2011. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +/** + * YAML dumper. + * + * Code based on $(LINK2 http://www.pyyaml.org, PyYAML). + */ +module dyaml.dumper; + +import std.array; +import std.range.primitives; +import std.typecons; + +import dyaml.emitter; +import dyaml.event; +import dyaml.exception; +import dyaml.linebreak; +import dyaml.node; +import dyaml.representer; +import dyaml.resolver; +import dyaml.serializer; +import dyaml.style; +import dyaml.tagdirective; + + +/** + * Dumps YAML documents to files or streams. + * + * User specified Representer and/or Resolver can be used to support new + * tags / data types. + * + * Setters are provided to affect output details (style, etc.). + */ +auto dumper() +{ + auto dumper = Dumper(); + dumper.resolver = Resolver.withDefaultResolvers; + return dumper; +} + +struct Dumper +{ + private: + //Indentation width. + int indent_ = 2; + //Tag directives to use. + TagDirective[] tags_; + public: + //Resolver to resolve tags. + Resolver resolver; + //Write scalars in canonical form? + bool canonical; + //Preferred text width. + uint textWidth = 80; + //Line break to use. 
Unix by default. + LineBreak lineBreak = LineBreak.unix; + //YAML version string. Default is 1.1. + string YAMLVersion = "1.1"; + //Always explicitly write document start? Default is no explicit start. + bool explicitStart = false; + //Always explicitly write document end? Default is no explicit end. + bool explicitEnd = false; + + //Name of the output file or stream, used in error messages. + string name = ""; + + // Default style for scalar nodes. If style is $(D ScalarStyle.invalid), the _style is chosen automatically. + ScalarStyle defaultScalarStyle = ScalarStyle.invalid; + // Default style for collection nodes. If style is $(D CollectionStyle.invalid), the _style is chosen automatically. + CollectionStyle defaultCollectionStyle = CollectionStyle.invalid; + + @disable bool opEquals(ref Dumper); + @disable int opCmp(ref Dumper); + + ///Set indentation width. 2 by default. Must not be zero. + @property void indent(uint indent) pure @safe nothrow + in + { + assert(indent != 0, "Can't use zero YAML indent width"); + } + do + { + indent_ = indent; + } + + /** + * Specify tag directives. + * + * A tag directive specifies a shorthand notation for specifying _tags. + * Each tag directive associates a handle with a prefix. This allows for + * compact tag notation. + * + * Each handle specified MUST start and end with a '!' character + * (a single character "!" handle is allowed as well). + * + * Only alphanumeric characters, '-', and '__' may be used in handles. + * + * Each prefix MUST not be empty. + * + * The "!!" handle is used for default YAML _tags with prefix + * "tag:yaml.org,2002:". This can be overridden. + * + * Params: tags = Tag directives (keys are handles, values are prefixes). + */ + @property void tagDirectives(string[string] tags) pure @safe + { + TagDirective[] t; + foreach(handle, prefix; tags) + { + assert(handle.length >= 1 && handle[0] == '!' && handle[$ - 1] == '!', + "A tag handle is empty or does not start and end with a " ~ + "'!' character : " ~ handle); + assert(prefix.length >= 1, "A tag prefix is empty"); + t ~= TagDirective(handle, prefix); + } + tags_ = t; + } + /// + @safe unittest + { + auto dumper = dumper(); + string[string] directives; + directives["!short!"] = "tag:long.org,2011:"; + //This will emit tags starting with "tag:long.org,2011" + //with a "!short!" prefix instead. + dumper.tagDirectives(directives); + dumper.dump(new Appender!string(), Node("foo")); + } + + /** + * Dump one or more YAML _documents to the file/stream. + * + * Note that while you can call dump() multiple times on the same + * dumper, you will end up writing multiple YAML "files" to the same + * file/stream. + * + * Params: documents = Documents to _dump (root nodes of the _documents). + * + * Throws: YAMLException on error (e.g. invalid nodes, + * unable to write to file/stream). + */ + void dump(CharacterType = char, Range)(Range range, Node[] documents ...) + if (isOutputRange!(Range, CharacterType) && + isOutputRange!(Range, char) || isOutputRange!(Range, wchar) || isOutputRange!(Range, dchar)) + { + try + { + auto emitter = new Emitter!(Range, CharacterType)(range, canonical, indent_, textWidth, lineBreak); + auto serializer = Serializer(resolver, explicitStart ? Yes.explicitStart : No.explicitStart, + explicitEnd ? 
Yes.explicitEnd : No.explicitEnd, YAMLVersion, tags_); + serializer.startStream(emitter); + foreach(ref document; documents) + { + auto data = representData(document, defaultScalarStyle, defaultCollectionStyle); + serializer.serialize(emitter, data); + } + serializer.endStream(emitter); + } + catch(YAMLException e) + { + throw new YAMLException("Unable to dump YAML to stream " + ~ name ~ " : " ~ e.msg, e.file, e.line); + } + } +} +///Write to a file +@safe unittest +{ + auto node = Node([1, 2, 3, 4, 5]); + dumper().dump(new Appender!string(), node); +} +///Write multiple YAML documents to a file +@safe unittest +{ + auto node1 = Node([1, 2, 3, 4, 5]); + auto node2 = Node("This document contains only one string"); + dumper().dump(new Appender!string(), node1, node2); + //Or with an array: + dumper().dump(new Appender!string(), [node1, node2]); +} +///Write to memory +@safe unittest +{ + auto stream = new Appender!string(); + auto node = Node([1, 2, 3, 4, 5]); + dumper().dump(stream, node); +} +///Use a custom resolver to support custom data types and/or implicit tags +@safe unittest +{ + import std.regex : regex; + auto node = Node([1, 2, 3, 4, 5]); + auto dumper = dumper(); + dumper.resolver.addImplicitResolver("!tag", regex("A.*"), "A"); + dumper.dump(new Appender!string(), node); +} +/// Set default scalar style +@safe unittest +{ + auto stream = new Appender!string(); + auto node = Node("Hello world!"); + auto dumper = dumper(); + dumper.defaultScalarStyle = ScalarStyle.singleQuoted; + dumper.dump(stream, node); +} +/// Set default collection style +@safe unittest +{ + auto stream = new Appender!string(); + auto node = Node(["Hello", "world!"]); + auto dumper = dumper(); + dumper.defaultCollectionStyle = CollectionStyle.flow; + dumper.dump(stream, node); +} +// Make sure the styles are actually used +@safe unittest +{ + auto stream = new Appender!string(); + auto node = Node([Node("Hello world!"), Node(["Hello", "world!"])]); + auto dumper = dumper(); + dumper.defaultScalarStyle = ScalarStyle.singleQuoted; + dumper.defaultCollectionStyle = CollectionStyle.flow; + dumper.explicitEnd = false; + dumper.explicitStart = false; + dumper.YAMLVersion = null; + dumper.dump(stream, node); + assert(stream.data == "['Hello world!', ['Hello', 'world!']]\n"); +} +// Explicit document start/end markers +@safe unittest +{ + auto stream = new Appender!string(); + auto node = Node([1, 2, 3, 4, 5]); + auto dumper = dumper(); + dumper.explicitEnd = true; + dumper.explicitStart = true; + dumper.YAMLVersion = null; + dumper.dump(stream, node); + //Skip version string + assert(stream.data[0..3] == "---"); + //account for newline at end + assert(stream.data[$-4..$-1] == "..."); +} +@safe unittest +{ + auto stream = new Appender!string(); + auto node = Node([Node("Te, st2")]); + auto dumper = dumper(); + dumper.explicitStart = true; + dumper.explicitEnd = false; + dumper.YAMLVersion = null; + dumper.dump(stream, node); + assert(stream.data == "--- ['Te, st2']\n"); +} +// No explicit document start/end markers +@safe unittest +{ + auto stream = new Appender!string(); + auto node = Node([1, 2, 3, 4, 5]); + auto dumper = dumper(); + dumper.explicitEnd = false; + dumper.explicitStart = false; + dumper.YAMLVersion = null; + dumper.dump(stream, node); + //Skip version string + assert(stream.data[0..3] != "---"); + //account for newline at end + assert(stream.data[$-4..$-1] != "..."); +} +// Windows, macOS line breaks +@safe unittest +{ + auto node = Node(0); + { + auto stream = new Appender!string(); + auto dumper = 
dumper(); + dumper.explicitEnd = true; + dumper.explicitStart = true; + dumper.YAMLVersion = null; + dumper.lineBreak = LineBreak.windows; + dumper.dump(stream, node); + assert(stream.data == "--- 0\r\n...\r\n"); + } + { + auto stream = new Appender!string(); + auto dumper = dumper(); + dumper.explicitEnd = true; + dumper.explicitStart = true; + dumper.YAMLVersion = null; + dumper.lineBreak = LineBreak.macintosh; + dumper.dump(stream, node); + assert(stream.data == "--- 0\r...\r"); + } +} diff --git a/source/dyaml/emitter.d b/source/dyaml/emitter.d new file mode 100644 index 0000000..5aafc0e --- /dev/null +++ b/source/dyaml/emitter.d @@ -0,0 +1,1690 @@ +// Copyright Ferdinand Majerech 2011. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +/** + * YAML emitter. + * Code based on PyYAML: http://www.pyyaml.org + */ +module dyaml.emitter; + + +import std.algorithm; +import std.array; +import std.ascii; +import std.conv; +import std.encoding; +import std.exception; +import std.format; +import std.range; +import std.string; +import std.system; +import std.typecons; +import std.utf; + +import dyaml.encoding; +import dyaml.escapes; +import dyaml.event; +import dyaml.exception; +import dyaml.linebreak; +import dyaml.queue; +import dyaml.scanner; +import dyaml.style; +import dyaml.tagdirective; + + +package: + +//Stores results of analysis of a scalar, determining e.g. what scalar style to use. +struct ScalarAnalysis +{ + //Scalar itself. + string scalar; + + enum AnalysisFlags + { + empty = 1<<0, + multiline = 1<<1, + allowFlowPlain = 1<<2, + allowBlockPlain = 1<<3, + allowSingleQuoted = 1<<4, + allowDoubleQuoted = 1<<5, + allowBlock = 1<<6, + isNull = 1<<7 + } + + ///Analysis results. + BitFlags!AnalysisFlags flags; +} + +private alias isNewLine = among!('\n', '\u0085', '\u2028', '\u2029'); + +private alias isSpecialChar = among!('#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\\', '\'', '"', '%', '@', '`'); + +private alias isFlowIndicator = among!(',', '?', '[', ']', '{', '}'); + +private alias isSpace = among!('\0', '\n', '\r', '\u0085', '\u2028', '\u2029', ' ', '\t'); + +//Emits YAML events into a file/stream. +struct Emitter(Range, CharType) if (isOutputRange!(Range, CharType)) +{ + private: + ///Default tag handle shortcuts and replacements. + static TagDirective[] defaultTagDirectives_ = + [TagDirective("!", "!"), TagDirective("!!", "tag:yaml.org,2002:")]; + + ///Stream to write to. + Range stream_; + + /// Type used for upcoming emitter steps + alias EmitterFunction = void function(scope typeof(this)*) @safe; + + ///Stack of states. + Appender!(EmitterFunction[]) states_; + + ///Current state. + EmitterFunction state_; + + ///Event queue. + Queue!Event events_; + ///Event we're currently emitting. + Event event_; + + ///Stack of previous indentation levels. + Appender!(int[]) indents_; + ///Current indentation level. + int indent_ = -1; + + ///Level of nesting in flow context. If 0, we're in block context. + uint flowLevel_ = 0; + + /// Describes context (where we are in the document). + enum Context + { + /// Root node of a document. + root, + /// Sequence. + sequence, + /// Mapping. + mappingNoSimpleKey, + /// Mapping, in a simple key. + mappingSimpleKey, + } + /// Current context. + Context context_; + + ///Characteristics of the last emitted character: + + ///Line. + uint line_ = 0; + ///Column. + uint column_ = 0; + ///Whitespace character? 
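+    ///(Consulted when writing indicators, to decide whether a separating
+    ///space is still needed.)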
+ bool whitespace_ = true; + ///indentation space, '-', '?', or ':'? + bool indentation_ = true; + + ///Does the document require an explicit document indicator? + bool openEnded_; + + ///Formatting details. + + ///Canonical scalar format? + bool canonical_; + ///Best indentation width. + uint bestIndent_ = 2; + ///Best text width. + uint bestWidth_ = 80; + ///Best line break character/s. + LineBreak bestLineBreak_; + + ///Tag directive handle - prefix pairs. + TagDirective[] tagDirectives_; + + ///Anchor/alias to process. + string preparedAnchor_ = null; + ///Tag to process. + string preparedTag_ = null; + + ///Analysis result of the current scalar. + ScalarAnalysis analysis_; + ///Style of the current scalar. + ScalarStyle style_ = ScalarStyle.invalid; + + public: + @disable int opCmp(ref Emitter); + @disable bool opEquals(ref Emitter); + + /** + * Construct an emitter. + * + * Params: stream = Output range to write to. + * canonical = Write scalars in canonical form? + * indent = Indentation width. + * lineBreak = Line break character/s. + */ + this(Range stream, const bool canonical, const int indent, const int width, + const LineBreak lineBreak) @safe + { + states_.reserve(32); + indents_.reserve(32); + stream_ = stream; + canonical_ = canonical; + nextExpected!"expectStreamStart"(); + + if(indent > 1 && indent < 10){bestIndent_ = indent;} + if(width > bestIndent_ * 2) {bestWidth_ = width;} + bestLineBreak_ = lineBreak; + + analysis_.flags.isNull = true; + } + + ///Emit an event. + void emit(Event event) @safe + { + events_.push(event); + while(!needMoreEvents()) + { + event_ = events_.pop(); + callNext(); + event_.destroy(); + } + } + + private: + ///Pop and return the newest state in states_. + EmitterFunction popState() @safe + in(states_.data.length > 0, + "Emitter: Need to pop a state but there are no states left") + { + const result = states_.data[$-1]; + states_.shrinkTo(states_.data.length - 1); + return result; + } + + void pushState(string D)() @safe + { + states_ ~= mixin("function(typeof(this)* self) { self."~D~"(); }"); + } + + ///Pop and return the newest indent in indents_. + int popIndent() @safe + in(indents_.data.length > 0, + "Emitter: Need to pop an indent level but there" ~ + " are no indent levels left") + { + const result = indents_.data[$-1]; + indents_.shrinkTo(indents_.data.length - 1); + return result; + } + + ///Write a string to the file/stream. + void writeString(const scope char[] str) @safe + { + static if(is(CharType == char)) + { + copy(str, stream_); + } + static if(is(CharType == wchar)) + { + const buffer = to!wstring(str); + copy(buffer, stream_); + } + static if(is(CharType == dchar)) + { + const buffer = to!dstring(str); + copy(buffer, stream_); + } + } + + ///In some cases, we wait for a few next events before emitting. + bool needMoreEvents() @safe nothrow + { + if(events_.length == 0){return true;} + + const event = events_.peek(); + if(event.id == EventID.documentStart){return needEvents(1);} + if(event.id == EventID.sequenceStart){return needEvents(2);} + if(event.id == EventID.mappingStart) {return needEvents(3);} + + return false; + } + + ///Determines if we need specified number of more events. 
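+    ///
+    ///Together with needMoreEvents() above, this delays emission: e.g. a
+    ///mappingStart is only processed once up to three further events are
+    ///queued, so the checkEmpty*() peeks below can work reliably.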
+ bool needEvents(in uint count) @safe nothrow + { + int level; + + foreach(const event; events_.range) + { + if(event.id.among!(EventID.documentStart, EventID.sequenceStart, EventID.mappingStart)) {++level;} + else if(event.id.among!(EventID.documentEnd, EventID.sequenceEnd, EventID.mappingEnd)) {--level;} + else if(event.id == EventID.streamStart){level = -1;} + + if(level < 0) + { + return false; + } + } + + return events_.length < (count + 1); + } + + ///Increase indentation level. + void increaseIndent(const Flag!"flow" flow = No.flow, const bool indentless = false) @safe + { + indents_ ~= indent_; + if(indent_ == -1) + { + indent_ = flow ? bestIndent_ : 0; + } + else if(!indentless) + { + indent_ += bestIndent_; + } + } + + ///Determines if the type of current event is as specified. Throws if no event. + bool eventTypeIs(in EventID id) const pure @safe + in(!event_.isNull, "Expected an event, but no event is available.") + { + return event_.id == id; + } + + + //States. + + + //Stream handlers. + + ///Handle start of a file/stream. + void expectStreamStart() @safe + in(eventTypeIs(EventID.streamStart), + "Expected streamStart, but got " ~ event_.idString) + { + + writeStreamStart(); + nextExpected!"expectDocumentStart!(Yes.first)"(); + } + + ///Expect nothing, throwing if we still have something. + void expectNothing() @safe + { + assert(0, "Expected nothing, but got " ~ event_.idString); + } + + //Document handlers. + + ///Handle start of a document. + void expectDocumentStart(Flag!"first" first)() @safe + in(eventTypeIs(EventID.documentStart) || eventTypeIs(EventID.streamEnd), + "Expected documentStart or streamEnd, but got " ~ event_.idString) + { + + if(event_.id == EventID.documentStart) + { + const YAMLVersion = event_.value; + auto tagDirectives = event_.tagDirectives; + if(openEnded_ && (YAMLVersion !is null || tagDirectives !is null)) + { + writeIndicator("...", Yes.needWhitespace); + writeIndent(); + } + + if(YAMLVersion !is null) + { + writeVersionDirective(prepareVersion(YAMLVersion)); + } + + if(tagDirectives !is null) + { + tagDirectives_ = tagDirectives; + sort!"icmp(a.handle, b.handle) < 0"(tagDirectives_); + + foreach(ref pair; tagDirectives_) + { + writeTagDirective(prepareTagHandle(pair.handle), + prepareTagPrefix(pair.prefix)); + } + } + + bool eq(ref TagDirective a, ref TagDirective b){return a.handle == b.handle;} + //Add any default tag directives that have not been overriden. + foreach(ref def; defaultTagDirectives_) + { + if(!std.algorithm.canFind!eq(tagDirectives_, def)) + { + tagDirectives_ ~= def; + } + } + + const implicit = first && !event_.explicitDocument && !canonical_ && + YAMLVersion is null && tagDirectives is null && + !checkEmptyDocument(); + if(!implicit) + { + writeIndent(); + writeIndicator("---", Yes.needWhitespace); + if(canonical_){writeIndent();} + } + nextExpected!"expectRootNode"(); + } + else if(event_.id == EventID.streamEnd) + { + if(openEnded_) + { + writeIndicator("...", Yes.needWhitespace); + writeIndent(); + } + writeStreamEnd(); + nextExpected!"expectNothing"(); + } + } + + ///Handle end of a document. + void expectDocumentEnd() @safe + in(eventTypeIs(EventID.documentEnd), + "Expected DocumentEnd, but got " ~ event_.idString) + { + + writeIndent(); + if(event_.explicitDocument) + { + writeIndicator("...", Yes.needWhitespace); + writeIndent(); + } + nextExpected!"expectDocumentStart!(No.first)"(); + } + + ///Handle the root node of a document. 
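+    ///
+    ///Pushes expectDocumentEnd so it runs once the whole node tree of the
+    ///document has been emitted.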
+ void expectRootNode() @safe + { + pushState!"expectDocumentEnd"(); + expectNode(Context.root); + } + + ///Handle a mapping node. + // + //Params: simpleKey = Are we in a simple key? + void expectMappingNode(const bool simpleKey = false) @safe + { + expectNode(simpleKey ? Context.mappingSimpleKey : Context.mappingNoSimpleKey); + } + + ///Handle a sequence node. + void expectSequenceNode() @safe + { + expectNode(Context.sequence); + } + + ///Handle a new node. Context specifies where in the document we are. + void expectNode(const Context context) @safe + { + context_ = context; + + const flowCollection = event_.collectionStyle == CollectionStyle.flow; + + switch(event_.id) + { + case EventID.alias_: expectAlias(); break; + case EventID.scalar: + processAnchor("&"); + processTag(); + expectScalar(); + break; + case EventID.sequenceStart: + processAnchor("&"); + processTag(); + if(flowLevel_ > 0 || canonical_ || flowCollection || checkEmptySequence()) + { + expectFlowSequence(); + } + else + { + expectBlockSequence(); + } + break; + case EventID.mappingStart: + processAnchor("&"); + processTag(); + if(flowLevel_ > 0 || canonical_ || flowCollection || checkEmptyMapping()) + { + expectFlowMapping(); + } + else + { + expectBlockMapping(); + } + break; + default: + assert(0, "Expected alias_, scalar, sequenceStart or " ~ + "mappingStart, but got: " ~ event_.idString); + } + } + ///Handle an alias. + void expectAlias() @safe + in(event_.anchor != "", "Anchor is not specified for alias") + { + processAnchor("*"); + nextExpected(popState()); + } + + ///Handle a scalar. + void expectScalar() @safe + { + increaseIndent(Yes.flow); + processScalar(); + indent_ = popIndent(); + nextExpected(popState()); + } + + //Flow sequence handlers. + + ///Handle a flow sequence. + void expectFlowSequence() @safe + { + writeIndicator("[", Yes.needWhitespace, Yes.whitespace); + ++flowLevel_; + increaseIndent(Yes.flow); + nextExpected!"expectFlowSequenceItem!(Yes.first)"(); + } + + ///Handle a flow sequence item. + void expectFlowSequenceItem(Flag!"first" first)() @safe + { + if(event_.id == EventID.sequenceEnd) + { + indent_ = popIndent(); + --flowLevel_; + static if(!first) if(canonical_) + { + writeIndicator(",", No.needWhitespace); + writeIndent(); + } + writeIndicator("]", No.needWhitespace); + nextExpected(popState()); + return; + } + static if(!first){writeIndicator(",", No.needWhitespace);} + if(canonical_ || column_ > bestWidth_){writeIndent();} + pushState!"expectFlowSequenceItem!(No.first)"(); + expectSequenceNode(); + } + + //Flow mapping handlers. + + ///Handle a flow mapping. + void expectFlowMapping() @safe + { + writeIndicator("{", Yes.needWhitespace, Yes.whitespace); + ++flowLevel_; + increaseIndent(Yes.flow); + nextExpected!"expectFlowMappingKey!(Yes.first)"(); + } + + ///Handle a key in a flow mapping. 
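+    ///
+    ///Simple keys are emitted inline as `key: value`; keys failing
+    ///checkSimpleKey() (e.g. longer than 128 characters) fall back to the
+    ///explicit `? key` form.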
+ void expectFlowMappingKey(Flag!"first" first)() @safe + { + if(event_.id == EventID.mappingEnd) + { + indent_ = popIndent(); + --flowLevel_; + static if (!first) if(canonical_) + { + writeIndicator(",", No.needWhitespace); + writeIndent(); + } + writeIndicator("}", No.needWhitespace); + nextExpected(popState()); + return; + } + + static if(!first){writeIndicator(",", No.needWhitespace);} + if(canonical_ || column_ > bestWidth_){writeIndent();} + if(!canonical_ && checkSimpleKey()) + { + pushState!"expectFlowMappingSimpleValue"(); + expectMappingNode(true); + return; + } + + writeIndicator("?", Yes.needWhitespace); + pushState!"expectFlowMappingValue"(); + expectMappingNode(); + } + + ///Handle a simple value in a flow mapping. + void expectFlowMappingSimpleValue() @safe + { + writeIndicator(":", No.needWhitespace); + pushState!"expectFlowMappingKey!(No.first)"(); + expectMappingNode(); + } + + ///Handle a complex value in a flow mapping. + void expectFlowMappingValue() @safe + { + if(canonical_ || column_ > bestWidth_){writeIndent();} + writeIndicator(":", Yes.needWhitespace); + pushState!"expectFlowMappingKey!(No.first)"(); + expectMappingNode(); + } + + //Block sequence handlers. + + ///Handle a block sequence. + void expectBlockSequence() @safe + { + const indentless = (context_ == Context.mappingNoSimpleKey || + context_ == Context.mappingSimpleKey) && !indentation_; + increaseIndent(No.flow, indentless); + nextExpected!"expectBlockSequenceItem!(Yes.first)"(); + } + + ///Handle a block sequence item. + void expectBlockSequenceItem(Flag!"first" first)() @safe + { + static if(!first) if(event_.id == EventID.sequenceEnd) + { + indent_ = popIndent(); + nextExpected(popState()); + return; + } + + writeIndent(); + writeIndicator("-", Yes.needWhitespace, No.whitespace, Yes.indentation); + pushState!"expectBlockSequenceItem!(No.first)"(); + expectSequenceNode(); + } + + //Block mapping handlers. + + ///Handle a block mapping. + void expectBlockMapping() @safe + { + increaseIndent(No.flow); + nextExpected!"expectBlockMappingKey!(Yes.first)"(); + } + + ///Handle a key in a block mapping. + void expectBlockMappingKey(Flag!"first" first)() @safe + { + static if(!first) if(event_.id == EventID.mappingEnd) + { + indent_ = popIndent(); + nextExpected(popState()); + return; + } + + writeIndent(); + if(checkSimpleKey()) + { + pushState!"expectBlockMappingSimpleValue"(); + expectMappingNode(true); + return; + } + + writeIndicator("?", Yes.needWhitespace, No.whitespace, Yes.indentation); + pushState!"expectBlockMappingValue"(); + expectMappingNode(); + } + + ///Handle a simple value in a block mapping. + void expectBlockMappingSimpleValue() @safe + { + writeIndicator(":", No.needWhitespace); + pushState!"expectBlockMappingKey!(No.first)"(); + expectMappingNode(); + } + + ///Handle a complex value in a block mapping. + void expectBlockMappingValue() @safe + { + writeIndent(); + writeIndicator(":", Yes.needWhitespace, No.whitespace, Yes.indentation); + pushState!"expectBlockMappingKey!(No.first)"(); + expectMappingNode(); + } + + //Checkers. + + ///Check if an empty sequence is next. + bool checkEmptySequence() const @safe pure nothrow + { + return event_.id == EventID.sequenceStart && events_.length > 0 + && events_.peek().id == EventID.sequenceEnd; + } + + ///Check if an empty mapping is next. 
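+    ///
+    ///Mirrors checkEmptySequence() above: peeks at the next queued event for
+    ///the matching end marker.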
+ bool checkEmptyMapping() const @safe pure nothrow + { + return event_.id == EventID.mappingStart && events_.length > 0 + && events_.peek().id == EventID.mappingEnd; + } + + ///Check if an empty document is next. + bool checkEmptyDocument() const @safe pure nothrow + { + if(event_.id != EventID.documentStart || events_.length == 0) + { + return false; + } + + const event = events_.peek(); + const emptyScalar = event.id == EventID.scalar && (event.anchor is null) && + (event.tag is null) && event.implicit && event.value == ""; + return emptyScalar; + } + + ///Check if a simple key is next. + bool checkSimpleKey() @safe + { + uint length; + const id = event_.id; + const scalar = id == EventID.scalar; + const collectionStart = id == EventID.mappingStart || + id == EventID.sequenceStart; + + if((id == EventID.alias_ || scalar || collectionStart) + && (event_.anchor !is null)) + { + if(preparedAnchor_ is null) + { + preparedAnchor_ = prepareAnchor(event_.anchor); + } + length += preparedAnchor_.length; + } + + if((scalar || collectionStart) && (event_.tag !is null)) + { + if(preparedTag_ is null){preparedTag_ = prepareTag(event_.tag);} + length += preparedTag_.length; + } + + if(scalar) + { + if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);} + length += analysis_.scalar.length; + } + + if(length >= 128){return false;} + + return id == EventID.alias_ || + (scalar && !analysis_.flags.empty && !analysis_.flags.multiline) || + checkEmptySequence() || + checkEmptyMapping(); + } + + ///Process and write a scalar. + void processScalar() @safe + { + if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);} + if(style_ == ScalarStyle.invalid) + { + style_ = chooseScalarStyle(); + } + + //if(analysis_.flags.multiline && (context_ != Context.mappingSimpleKey) && + // ([ScalarStyle.invalid, ScalarStyle.plain, ScalarStyle.singleQuoted, ScalarStyle.doubleQuoted) + // .canFind(style_)) + //{ + // writeIndent(); + //} + auto writer = ScalarWriter!(Range, CharType)(&this, analysis_.scalar, + context_ != Context.mappingSimpleKey); + final switch(style_) + { + case ScalarStyle.invalid: assert(false); + case ScalarStyle.doubleQuoted: writer.writeDoubleQuoted(); break; + case ScalarStyle.singleQuoted: writer.writeSingleQuoted(); break; + case ScalarStyle.folded: writer.writeFolded(); break; + case ScalarStyle.literal: writer.writeLiteral(); break; + case ScalarStyle.plain: writer.writePlain(); break; + } + analysis_.flags.isNull = true; + style_ = ScalarStyle.invalid; + } + + ///Process and write an anchor/alias. + void processAnchor(const string indicator) @safe + { + if(event_.anchor is null) + { + preparedAnchor_ = null; + return; + } + if(preparedAnchor_ is null) + { + preparedAnchor_ = prepareAnchor(event_.anchor); + } + if(preparedAnchor_ !is null && preparedAnchor_ != "") + { + writeIndicator(indicator, Yes.needWhitespace); + writeString(preparedAnchor_); + } + preparedAnchor_ = null; + } + + ///Process and write a tag. + void processTag() @safe + { + string tag = event_.tag; + + if(event_.id == EventID.scalar) + { + if(style_ == ScalarStyle.invalid){style_ = chooseScalarStyle();} + if((!canonical_ || (tag is null)) && + ((tag == "tag:yaml.org,2002:str") || (style_ == ScalarStyle.plain ? 
event_.implicit : !event_.implicit && (tag is null))))
+                {
+                    preparedTag_ = null;
+                    return;
+                }
+                if(event_.implicit && (tag is null))
+                {
+                    tag = "!";
+                    preparedTag_ = null;
+                }
+            }
+            else if((!canonical_ || (tag is null)) && event_.implicit)
+            {
+                preparedTag_ = null;
+                return;
+            }
+
+            assert(tag != "", "Tag is not specified");
+            if(preparedTag_ is null){preparedTag_ = prepareTag(tag);}
+            if(preparedTag_ !is null && preparedTag_ != "")
+            {
+                writeIndicator(preparedTag_, Yes.needWhitespace);
+            }
+            preparedTag_ = null;
+        }
+
+        ///Determine style to write the current scalar in.
+        ScalarStyle chooseScalarStyle() @safe
+        {
+            if(analysis_.flags.isNull){analysis_ = analyzeScalar(event_.value);}
+
+            const style = event_.scalarStyle;
+            const invalidOrPlain = style == ScalarStyle.invalid || style == ScalarStyle.plain;
+            const block = style == ScalarStyle.literal || style == ScalarStyle.folded;
+            const singleQuoted = style == ScalarStyle.singleQuoted;
+            const doubleQuoted = style == ScalarStyle.doubleQuoted;
+
+            const allowPlain = flowLevel_ > 0 ? analysis_.flags.allowFlowPlain
+                                              : analysis_.flags.allowBlockPlain;
+            //simple empty or multiline scalars can't be written in plain style
+            const simpleNonPlain = (context_ == Context.mappingSimpleKey) &&
+                                   (analysis_.flags.empty || analysis_.flags.multiline);
+
+            if(doubleQuoted || canonical_)
+            {
+                return ScalarStyle.doubleQuoted;
+            }
+
+            if(invalidOrPlain && event_.implicit && !simpleNonPlain && allowPlain)
+            {
+                return ScalarStyle.plain;
+            }
+
+            if(block && flowLevel_ == 0 && context_ != Context.mappingSimpleKey &&
+               analysis_.flags.allowBlock)
+            {
+                return style;
+            }
+
+            if((invalidOrPlain || singleQuoted) &&
+               analysis_.flags.allowSingleQuoted &&
+               !(context_ == Context.mappingSimpleKey && analysis_.flags.multiline))
+            {
+                return ScalarStyle.singleQuoted;
+            }
+
+            return ScalarStyle.doubleQuoted;
+        }
+
+        ///Prepare YAML version string for output.
+        static string prepareVersion(const string YAMLVersion) @safe
+        in(YAMLVersion.split(".")[0] == "1",
+           "Unsupported YAML version: " ~ YAMLVersion)
+        {
+            return YAMLVersion;
+        }
+
+        ///Encode a Unicode character for a tag directive and write it to writer.
+        static void encodeChar(Writer)(ref Writer writer, in dchar c) @safe
+        {
+            char[4] data;
+            const bytes = encode(data, c);
+            //For each byte add a string in format %AB, where AB are hex digits of the byte.
+            foreach(const char b; data[0 .. bytes])
+            {
+                formattedWrite(writer, "%%%02X", cast(ubyte)b);
+            }
+        }
+
+        ///Prepare tag directive handle for output.
+        static string prepareTagHandle(const string handle) @safe
+        in(handle != "", "Tag handle must not be empty")
+        in(handle.drop(1).dropBack(1).all!(c => isAlphaNum(c) || c.among!('-', '_')),
+           "Tag handle contains invalid characters")
+        {
+            return handle;
+        }
+
+        ///Prepare tag directive prefix for output.
+        static string prepareTagPrefix(const string prefix) @safe
+        in(prefix != "", "Tag prefix must not be empty")
+        {
+            auto appender = appender!string();
+            const int offset = prefix[0] == '!';
+            size_t start, end;
+
+            foreach(const size_t i, const dchar c; prefix)
+            {
+                const size_t idx = i + offset;
+                if(isAlphaNum(c) || c.among!('-', ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '!', '~', '*', '\\', '\'', '(', ')', '[', ']', '%'))
+                {
+                    end = idx + 1;
+                    continue;
+                }
+
+                if(start < idx){appender.put(prefix[start .. idx]);}
+                start = end = idx + 1;
+
+                encodeChar(appender, c);
+            }
+
+            end = min(end, prefix.length);
+            if(start < end){appender.put(prefix[start ..
end]);}
+            return appender.data;
+        }
+
+        ///Prepare tag for output.
+        string prepareTag(in string tag) @safe
+        in(tag != "", "Tag must not be empty")
+        {
+            string tagString = tag;
+            if (tagString == "!") return "!";
+            string handle;
+            string suffix = tagString;
+
+            //Sort lexicographically by prefix.
+            sort!"icmp(a.prefix, b.prefix) < 0"(tagDirectives_);
+            foreach(ref pair; tagDirectives_)
+            {
+                auto prefix = pair.prefix;
+                if(tagString.startsWith(prefix) &&
+                   (prefix != "!" || prefix.length < tagString.length))
+                {
+                    handle = pair.handle;
+                    suffix = tagString[prefix.length .. $];
+                }
+            }
+
+            auto appender = appender!string();
+            appender.put(handle !is null && handle != "" ? handle : "!<");
+            size_t start, end;
+            foreach(const dchar c; suffix)
+            {
+                if(isAlphaNum(c) || c.among!('-', ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\\', '\'', '(', ')', '[', ']') ||
+                   (c == '!' && handle != "!"))
+                {
+                    ++end;
+                    continue;
+                }
+                if(start < end){appender.put(suffix[start .. end]);}
+                start = end = end + 1;
+
+                encodeChar(appender, c);
+            }
+
+            if(start < end){appender.put(suffix[start .. end]);}
+            if(handle is null || handle == ""){appender.put(">");}
+
+            return appender.data;
+        }
+
+        ///Prepare anchor for output.
+        static string prepareAnchor(const string anchor) @safe
+        in(anchor != "", "Anchor must not be empty")
+        in(anchor.all!isNSAnchorName, "Anchor contains invalid characters")
+        {
+            return anchor;
+        }
+
+        ///Analyze the specified scalar and return the analysis result.
+        static ScalarAnalysis analyzeScalar(string scalar) @safe
+        {
+            ScalarAnalysis analysis;
+            analysis.flags.isNull = false;
+            analysis.scalar = scalar;
+
+            //Empty scalar is a special case.
+            if(scalar is null || scalar == "")
+            {
+                with(ScalarAnalysis.AnalysisFlags)
+                    analysis.flags =
+                        empty |
+                        allowBlockPlain |
+                        allowSingleQuoted |
+                        allowDoubleQuoted;
+                return analysis;
+            }
+
+            //Indicators and special characters (All false by default).
+            bool blockIndicators, flowIndicators, lineBreaks, specialCharacters;
+
+            //Important whitespace combinations (All false by default).
+            bool leadingSpace, leadingBreak, trailingSpace, trailingBreak,
+                 breakSpace, spaceBreak;
+
+            //Check document indicators.
+            if(scalar.startsWith("---", "..."))
+            {
+                blockIndicators = flowIndicators = true;
+            }
+
+            //First character or preceded by a whitespace.
+            bool preceededByWhitespace = true;
+
+            //Last character or followed by a whitespace.
+            bool followedByWhitespace = scalar.length == 1 ||
+                scalar[1].among!(' ', '\t', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029');
+
+            //The previous character is a space/break (false by default).
+            bool previousSpace, previousBreak;
+
+            foreach(const size_t index, const dchar c; scalar)
+            {
+                //Check for indicators.
+                if(index == 0)
+                {
+                    //Leading indicators are special characters.
+                    if(c.isSpecialChar)
+                    {
+                        flowIndicators = blockIndicators = true;
+                    }
+                    if(':' == c || '?' == c)
+                    {
+                        flowIndicators = true;
+                        if(followedByWhitespace){blockIndicators = true;}
+                    }
+                    if(c == '-' && followedByWhitespace)
+                    {
+                        flowIndicators = blockIndicators = true;
+                    }
+                }
+                else
+                {
+                    //Some indicators cannot appear within a scalar as well.
+                    if(c.isFlowIndicator){flowIndicators = true;}
+                    if(c == ':')
+                    {
+                        flowIndicators = true;
+                        if(followedByWhitespace){blockIndicators = true;}
+                    }
+                    if(c == '#' && preceededByWhitespace)
+                    {
+                        flowIndicators = blockIndicators = true;
+                    }
+                }
+
+                //Check for line breaks, special, and Unicode characters.
+                if(c.isNewLine){lineBreaks = true;}
+                if(!(c == '\n' || (c >= '\x20' && c <= '\x7E')) &&
+                   !((c == '\u0085' || (c >= '\xA0' && c <= '\uD7FF') ||
+                     (c >= '\uE000' && c <= '\uFFFD')) && c != '\uFEFF'))
+                {
+                    specialCharacters = true;
+                }
+
+                //Detect important whitespace combinations.
+                if(c == ' ')
+                {
+                    if(index == 0){leadingSpace = true;}
+                    if(index == scalar.length - 1){trailingSpace = true;}
+                    if(previousBreak){breakSpace = true;}
+                    previousSpace = true;
+                    previousBreak = false;
+                }
+                else if(c.isNewLine)
+                {
+                    if(index == 0){leadingBreak = true;}
+                    if(index == scalar.length - 1){trailingBreak = true;}
+                    if(previousSpace){spaceBreak = true;}
+                    previousSpace = false;
+                    previousBreak = true;
+                }
+                else
+                {
+                    previousSpace = previousBreak = false;
+                }
+
+                //Prepare for the next character.
+                preceededByWhitespace = c.isSpace != 0;
+                followedByWhitespace = index + 2 >= scalar.length ||
+                                       scalar[index + 2].isSpace;
+            }
+
+            with(ScalarAnalysis.AnalysisFlags)
+            {
+                //Let's decide what styles are allowed.
+                analysis.flags |= allowFlowPlain | allowBlockPlain | allowSingleQuoted |
+                                  allowDoubleQuoted | allowBlock;
+
+                //Leading and trailing whitespaces are bad for plain scalars.
+                if(leadingSpace || leadingBreak || trailingSpace || trailingBreak)
+                {
+                    analysis.flags &= ~(allowFlowPlain | allowBlockPlain);
+                }
+
+                //We do not permit trailing spaces for block scalars.
+                if(trailingSpace)
+                {
+                    analysis.flags &= ~allowBlock;
+                }
+
+                //Spaces at the beginning of a new line are only acceptable for block
+                //scalars.
+                if(breakSpace)
+                {
+                    analysis.flags &= ~(allowFlowPlain | allowBlockPlain | allowSingleQuoted);
+                }
+
+                //Spaces followed by breaks, as well as special characters, are only
+                //allowed for double quoted scalars.
+                if(spaceBreak || specialCharacters)
+                {
+                    analysis.flags &= ~(allowFlowPlain | allowBlockPlain | allowSingleQuoted | allowBlock);
+                }
+
+                //Although the plain scalar writer supports breaks, we never emit
+                //multiline plain scalars.
+                if(lineBreaks)
+                {
+                    analysis.flags &= ~(allowFlowPlain | allowBlockPlain);
+                    analysis.flags |= multiline;
+                }
+
+                //Flow indicators are forbidden for flow plain scalars.
+                if(flowIndicators)
+                {
+                    analysis.flags &= ~allowFlowPlain;
+                }
+
+                //Block indicators are forbidden for block plain scalars.
+ if(blockIndicators) + { + analysis.flags &= ~allowBlockPlain; + } + } + return analysis; + } + + @safe unittest + { + with(analyzeScalar("").flags) + { + // workaround for empty being std.range.primitives.empty here + alias empty = ScalarAnalysis.AnalysisFlags.empty; + assert(empty && allowBlockPlain && allowSingleQuoted && allowDoubleQuoted); + } + with(analyzeScalar("a").flags) + { + assert(allowFlowPlain && allowBlockPlain && allowSingleQuoted && allowDoubleQuoted && allowBlock); + } + with(analyzeScalar(" ").flags) + { + assert(allowSingleQuoted && allowDoubleQuoted); + } + with(analyzeScalar(" a").flags) + { + assert(allowSingleQuoted && allowDoubleQuoted); + } + with(analyzeScalar("a ").flags) + { + assert(allowSingleQuoted && allowDoubleQuoted); + } + with(analyzeScalar("\na").flags) + { + assert(allowSingleQuoted && allowDoubleQuoted); + } + with(analyzeScalar("a\n").flags) + { + assert(allowSingleQuoted && allowDoubleQuoted); + } + with(analyzeScalar("\n").flags) + { + assert(multiline && allowSingleQuoted && allowDoubleQuoted && allowBlock); + } + with(analyzeScalar(" \n").flags) + { + assert(multiline && allowDoubleQuoted); + } + with(analyzeScalar("\n a").flags) + { + assert(multiline && allowDoubleQuoted && allowBlock); + } + } + + //Writers. + + ///Start the YAML stream (write the unicode byte order mark). + void writeStreamStart() @safe + { + //Write BOM (except for UTF-8) + static if(is(CharType == wchar) || is(CharType == dchar)) + { + stream_.put(cast(CharType)'\uFEFF'); + } + } + + ///End the YAML stream. + void writeStreamEnd() @safe {} + + ///Write an indicator (e.g. ":", "[", ">", etc.). + void writeIndicator(const scope char[] indicator, + const Flag!"needWhitespace" needWhitespace, + const Flag!"whitespace" whitespace = No.whitespace, + const Flag!"indentation" indentation = No.indentation) @safe + { + const bool prefixSpace = !whitespace_ && needWhitespace; + whitespace_ = whitespace; + indentation_ = indentation_ && indentation; + openEnded_ = false; + column_ += indicator.length; + if(prefixSpace) + { + ++column_; + writeString(" "); + } + writeString(indicator); + } + + ///Write indentation. + void writeIndent() @safe + { + const indent = indent_ == -1 ? 0 : indent_; + + if(!indentation_ || column_ > indent || (column_ == indent && !whitespace_)) + { + writeLineBreak(); + } + if(column_ < indent) + { + whitespace_ = true; + + //Used to avoid allocation of arbitrary length strings. + static immutable spaces = " "; + size_t numSpaces = indent - column_; + column_ = indent; + while(numSpaces >= spaces.length) + { + writeString(spaces); + numSpaces -= spaces.length; + } + writeString(spaces[0 .. numSpaces]); + } + } + + ///Start new line. + void writeLineBreak(const scope char[] data = null) @safe + { + whitespace_ = indentation_ = true; + ++line_; + column_ = 0; + writeString(data is null ? lineBreak(bestLineBreak_) : data); + } + + ///Write a YAML version directive. + void writeVersionDirective(const string versionText) @safe + { + writeString("%YAML "); + writeString(versionText); + writeLineBreak(); + } + + ///Write a tag directive. 
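+        ///
+        ///E.g. `writeTagDirective("!e!", "tag:example.com,2000:")` writes the
+        ///line `%TAG !e! tag:example.com,2000:` (handle and prefix here are
+        ///illustrative values).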
+ void writeTagDirective(const string handle, const string prefix) @safe + { + writeString("%TAG "); + writeString(handle); + writeString(" "); + writeString(prefix); + writeLineBreak(); + } + void nextExpected(string D)() @safe + { + state_ = mixin("function(typeof(this)* self) { self."~D~"(); }"); + } + void nextExpected(EmitterFunction f) @safe + { + state_ = f; + } + void callNext() @safe + { + state_(&this); + } +} + + +private: + +///RAII struct used to write out scalar values. +struct ScalarWriter(Range, CharType) +{ + invariant() + { + assert(emitter_.bestIndent_ > 0 && emitter_.bestIndent_ < 10, + "Emitter bestIndent must be 1 to 9 for one-character indent hint"); + } + + private: + @disable int opCmp(ref Emitter!(Range, CharType)); + @disable bool opEquals(ref Emitter!(Range, CharType)); + + ///Used as "null" UTF-32 character. + static immutable dcharNone = dchar.max; + + ///Emitter used to emit the scalar. + Emitter!(Range, CharType)* emitter_; + + ///UTF-8 encoded text of the scalar to write. + string text_; + + ///Can we split the scalar into multiple lines? + bool split_; + ///Are we currently going over spaces in the text? + bool spaces_; + ///Are we currently going over line breaks in the text? + bool breaks_; + + ///Start and end byte of the text range we're currently working with. + size_t startByte_, endByte_; + ///End byte of the text range including the currently processed character. + size_t nextEndByte_; + ///Start and end character of the text range we're currently working with. + long startChar_, endChar_; + + public: + ///Construct a ScalarWriter using emitter to output text. + this(Emitter!(Range, CharType)* emitter, string text, const bool split = true) @safe nothrow + { + emitter_ = emitter; + text_ = text; + split_ = split; + } + + ///Write text as single quoted scalar. + void writeSingleQuoted() @safe + { + emitter_.writeIndicator("\'", Yes.needWhitespace); + spaces_ = breaks_ = false; + resetTextPosition(); + + do + { + const dchar c = nextChar(); + if(spaces_) + { + if(c != ' ' && tooWide() && split_ && + startByte_ != 0 && endByte_ != text_.length) + { + writeIndent(Flag!"ResetSpace".no); + updateRangeStart(); + } + else if(c != ' ') + { + writeCurrentRange(Flag!"UpdateColumn".yes); + } + } + else if(breaks_) + { + if(!c.isNewLine) + { + writeStartLineBreak(); + writeLineBreaks(); + emitter_.writeIndent(); + } + } + else if((c == dcharNone || c == '\'' || c == ' ' || c.isNewLine) + && startChar_ < endChar_) + { + writeCurrentRange(Flag!"UpdateColumn".yes); + } + if(c == '\'') + { + emitter_.column_ += 2; + emitter_.writeString("\'\'"); + startByte_ = endByte_ + 1; + startChar_ = endChar_ + 1; + } + updateBreaks(c, Flag!"UpdateSpaces".yes); + }while(endByte_ < text_.length); + + emitter_.writeIndicator("\'", No.needWhitespace); + } + + ///Write text as double quoted scalar. + void writeDoubleQuoted() @safe + { + resetTextPosition(); + emitter_.writeIndicator("\"", Yes.needWhitespace); + do + { + const dchar c = nextChar(); + //handle special characters + if(c == dcharNone || c.among!('\"', '\\', '\u0085', '\u2028', '\u2029', '\uFEFF') || + !((c >= '\x20' && c <= '\x7E') || + ((c >= '\xA0' && c <= '\uD7FF') || (c >= '\uE000' && c <= '\uFFFD')))) + { + if(startChar_ < endChar_) + { + writeCurrentRange(Flag!"UpdateColumn".yes); + } + if(c != dcharNone) + { + auto appender = appender!string(); + if(const dchar es = toEscape(c)) + { + appender.put('\\'); + appender.put(es); + } + else + { + //Write an escaped Unicode character. + const format = c <= 255 ? 
"\\x%02X": + c <= 65535 ? "\\u%04X": "\\U%08X"; + formattedWrite(appender, format, cast(uint)c); + } + + emitter_.column_ += appender.data.length; + emitter_.writeString(appender.data); + startChar_ = endChar_ + 1; + startByte_ = nextEndByte_; + } + } + if((endByte_ > 0 && endByte_ < text_.length - strideBack(text_, text_.length)) + && (c == ' ' || startChar_ >= endChar_) + && (emitter_.column_ + endChar_ - startChar_ > emitter_.bestWidth_) + && split_) + { + //text_[2:1] is ok in Python but not in D, so we have to use min() + emitter_.writeString(text_[min(startByte_, endByte_) .. endByte_]); + emitter_.writeString("\\"); + emitter_.column_ += startChar_ - endChar_ + 1; + startChar_ = max(startChar_, endChar_); + startByte_ = max(startByte_, endByte_); + + writeIndent(Flag!"ResetSpace".yes); + if(charAtStart() == ' ') + { + emitter_.writeString("\\"); + ++emitter_.column_; + } + } + }while(endByte_ < text_.length); + emitter_.writeIndicator("\"", No.needWhitespace); + } + + ///Write text as folded block scalar. + void writeFolded() @safe + { + initBlock('>'); + bool leadingSpace = true; + spaces_ = false; + breaks_ = true; + resetTextPosition(); + + do + { + const dchar c = nextChar(); + if(breaks_) + { + if(!c.isNewLine) + { + if(!leadingSpace && c != dcharNone && c != ' ') + { + writeStartLineBreak(); + } + leadingSpace = (c == ' '); + writeLineBreaks(); + if(c != dcharNone){emitter_.writeIndent();} + } + } + else if(spaces_) + { + if(c != ' ' && tooWide()) + { + writeIndent(Flag!"ResetSpace".no); + updateRangeStart(); + } + else if(c != ' ') + { + writeCurrentRange(Flag!"UpdateColumn".yes); + } + } + else if(c == dcharNone || c.isNewLine || c == ' ') + { + writeCurrentRange(Flag!"UpdateColumn".yes); + if(c == dcharNone){emitter_.writeLineBreak();} + } + updateBreaks(c, Flag!"UpdateSpaces".yes); + }while(endByte_ < text_.length); + } + + ///Write text as literal block scalar. + void writeLiteral() @safe + { + initBlock('|'); + breaks_ = true; + resetTextPosition(); + + do + { + const dchar c = nextChar(); + if(breaks_) + { + if(!c.isNewLine) + { + writeLineBreaks(); + if(c != dcharNone){emitter_.writeIndent();} + } + } + else if(c == dcharNone || c.isNewLine) + { + writeCurrentRange(Flag!"UpdateColumn".no); + if(c == dcharNone){emitter_.writeLineBreak();} + } + updateBreaks(c, Flag!"UpdateSpaces".no); + }while(endByte_ < text_.length); + } + + ///Write text as plain scalar. + void writePlain() @safe + { + if(emitter_.context_ == Emitter!(Range, CharType).Context.root){emitter_.openEnded_ = true;} + if(text_ == ""){return;} + if(!emitter_.whitespace_) + { + ++emitter_.column_; + emitter_.writeString(" "); + } + emitter_.whitespace_ = emitter_.indentation_ = false; + spaces_ = breaks_ = false; + resetTextPosition(); + + do + { + const dchar c = nextChar(); + if(spaces_) + { + if(c != ' ' && tooWide() && split_) + { + writeIndent(Flag!"ResetSpace".yes); + updateRangeStart(); + } + else if(c != ' ') + { + writeCurrentRange(Flag!"UpdateColumn".yes); + } + } + else if(breaks_) + { + if(!c.isNewLine) + { + writeStartLineBreak(); + writeLineBreaks(); + writeIndent(Flag!"ResetSpace".yes); + } + } + else if(c == dcharNone || c.isNewLine || c == ' ') + { + writeCurrentRange(Flag!"UpdateColumn".yes); + } + updateBreaks(c, Flag!"UpdateSpaces".yes); + }while(endByte_ < text_.length); + } + + private: + ///Get next character and move end of the text range to it. 
+ @property dchar nextChar() pure @safe + { + ++endChar_; + endByte_ = nextEndByte_; + if(endByte_ >= text_.length){return dcharNone;} + const c = text_[nextEndByte_]; + //c is ascii, no need to decode. + if(c < 0x80) + { + ++nextEndByte_; + return c; + } + return decode(text_, nextEndByte_); + } + + ///Get character at start of the text range. + @property dchar charAtStart() const pure @safe + { + size_t idx = startByte_; + return decode(text_, idx); + } + + ///Is the current line too wide? + @property bool tooWide() const pure @safe nothrow + { + return startChar_ + 1 == endChar_ && + emitter_.column_ > emitter_.bestWidth_; + } + + ///Determine hints (indicators) for block scalar. + size_t determineBlockHints(char[] hints, uint bestIndent) const pure @safe + { + size_t hintsIdx; + if(text_.length == 0) + return hintsIdx; + + dchar lastChar(const string str, ref size_t end) + { + size_t idx = end = end - strideBack(str, end); + return decode(text_, idx); + } + + size_t end = text_.length; + const last = lastChar(text_, end); + const secondLast = end > 0 ? lastChar(text_, end) : 0; + + if(text_[0].isNewLine || text_[0] == ' ') + { + hints[hintsIdx++] = cast(char)('0' + bestIndent); + } + if(!last.isNewLine) + { + hints[hintsIdx++] = '-'; + } + else if(std.utf.count(text_) == 1 || secondLast.isNewLine) + { + hints[hintsIdx++] = '+'; + } + return hintsIdx; + } + + ///Initialize for block scalar writing with specified indicator. + void initBlock(const char indicator) @safe + { + char[4] hints; + hints[0] = indicator; + const hintsLength = 1 + determineBlockHints(hints[1 .. $], emitter_.bestIndent_); + emitter_.writeIndicator(hints[0 .. hintsLength], Yes.needWhitespace); + if(hints.length > 0 && hints[$ - 1] == '+') + { + emitter_.openEnded_ = true; + } + emitter_.writeLineBreak(); + } + + ///Write out the current text range. + void writeCurrentRange(const Flag!"UpdateColumn" updateColumn) @safe + { + emitter_.writeString(text_[startByte_ .. endByte_]); + if(updateColumn){emitter_.column_ += endChar_ - startChar_;} + updateRangeStart(); + } + + ///Write line breaks in the text range. + void writeLineBreaks() @safe + { + foreach(const dchar br; text_[startByte_ .. endByte_]) + { + if(br == '\n'){emitter_.writeLineBreak();} + else + { + char[4] brString; + const bytes = encode(brString, br); + emitter_.writeLineBreak(brString[0 .. bytes]); + } + } + updateRangeStart(); + } + + ///Write line break if start of the text range is a newline. + void writeStartLineBreak() @safe + { + if(charAtStart == '\n'){emitter_.writeLineBreak();} + } + + ///Write indentation, optionally resetting whitespace/indentation flags. + void writeIndent(const Flag!"ResetSpace" resetSpace) @safe + { + emitter_.writeIndent(); + if(resetSpace) + { + emitter_.whitespace_ = emitter_.indentation_ = false; + } + } + + ///Move start of text range to its end. + void updateRangeStart() pure @safe nothrow + { + startByte_ = endByte_; + startChar_ = endChar_; + } + + ///Update the line breaks_ flag, optionally updating the spaces_ flag. + void updateBreaks(in dchar c, const Flag!"UpdateSpaces" updateSpaces) pure @safe + { + if(c == dcharNone){return;} + breaks_ = (c.isNewLine != 0); + if(updateSpaces){spaces_ = c == ' ';} + } + + ///Move to the beginning of text. 
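+        ///(Byte indices are reset to 0, character indices to -1.)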
+ void resetTextPosition() pure @safe nothrow + { + startByte_ = endByte_ = nextEndByte_ = 0; + startChar_ = endChar_ = -1; + } +} diff --git a/source/dyaml/encoding.d b/source/dyaml/encoding.d new file mode 100644 index 0000000..50c10b9 --- /dev/null +++ b/source/dyaml/encoding.d @@ -0,0 +1,11 @@ +// Copyright Ferdinand Majerech 2014. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +module dyaml.encoding; + + +import tinyendian; + +alias Encoding = tinyendian.UTFEncoding; diff --git a/source/dyaml/escapes.d b/source/dyaml/escapes.d new file mode 100644 index 0000000..36fd744 --- /dev/null +++ b/source/dyaml/escapes.d @@ -0,0 +1,106 @@ + + +// Copyright Ferdinand Majerech 2011. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +module dyaml.escapes; + +package: + +import std.meta : AliasSeq; +alias escapes = AliasSeq!('0', 'a', 'b', 't', '\t', 'n', 'v', 'f', 'r', 'e', ' ', + '/', '\"', '\\', 'N', '_', 'L', 'P'); + +/// YAML hex codes specifying the length of the hex number. +alias escapeHexCodeList = AliasSeq!('x', 'u', 'U'); + +/// Convert a YAML escape to a dchar. +dchar fromEscape(dchar escape) @safe pure nothrow @nogc +{ + switch(escape) + { + case '0': return '\0'; + case 'a': return '\x07'; + case 'b': return '\x08'; + case 't': return '\x09'; + case '\t': return '\x09'; + case 'n': return '\x0A'; + case 'v': return '\x0B'; + case 'f': return '\x0C'; + case 'r': return '\x0D'; + case 'e': return '\x1B'; + case '/': return '/'; + case ' ': return '\x20'; + case '\"': return '\"'; + case '\\': return '\\'; + case 'N': return '\x85'; //'\u0085'; + case '_': return '\xA0'; + case 'L': return '\u2028'; + case 'P': return '\u2029'; + default: assert(false, "No such YAML escape"); + } +} + +/** + * Convert a dchar to a YAML escape. + * + * Params: + * value = The possibly escapable character. + * + * Returns: + * If the character passed as parameter can be escaped, returns the matching + * escape, otherwise returns a null character. + */ +dchar toEscape(dchar value) @safe pure nothrow @nogc +{ + switch(value) + { + case '\0': return '0'; + case '\x07': return 'a'; + case '\x08': return 'b'; + case '\x09': return 't'; + case '\x0A': return 'n'; + case '\x0B': return 'v'; + case '\x0C': return 'f'; + case '\x0D': return 'r'; + case '\x1B': return 'e'; + case '\"': return '\"'; + case '\\': return '\\'; + case '\xA0': return '_'; + case '\x85': return 'N'; + case '\u2028': return 'L'; + case '\u2029': return 'P'; + default: return 0; + } +} + +/// Get the length of a hexadecimal number determined by its hex code. +/// +/// Need a function as associative arrays don't work with @nogc. +/// (And this may be even faster with a function.) 
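+///
+/// For example, `escapeHexLength('u')` is 4: a `\u` escape is followed by
+/// four hexadecimal digits.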
+uint escapeHexLength(dchar hexCode) @safe pure nothrow @nogc +{ + switch(hexCode) + { + case 'x': return 2; + case 'u': return 4; + case 'U': return 8; + default: assert(false, "No such YAML hex code"); + } +} + +// Issue #302: Support optional escaping of forward slashes in string +// for JSON compatibility +@safe unittest +{ + import dyaml.loader : Loader; + + const str = `{ + "forward/slashes": "can\/be\/optionally\/escaped" +}`; + + auto node = Loader.fromString(str).load(); + assert(node["forward/slashes"] == "can/be/optionally/escaped"); +} diff --git a/source/dyaml/event.d b/source/dyaml/event.d new file mode 100644 index 0000000..f4a747f --- /dev/null +++ b/source/dyaml/event.d @@ -0,0 +1,243 @@ + +// Copyright Ferdinand Majerech 2011. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +/** + * YAML events. + * Code based on PyYAML: http://www.pyyaml.org + */ +module dyaml.event; + +import std.array; +import std.conv; + +import dyaml.exception; +import dyaml.reader; +import dyaml.tagdirective; +import dyaml.style; + + +package: +///Event types. +enum EventID : ubyte +{ + invalid = 0, /// Invalid (uninitialized) event. + streamStart, /// Stream start + streamEnd, /// Stream end + documentStart, /// Document start + documentEnd, /// Document end + alias_, /// Alias + scalar, /// Scalar + sequenceStart, /// Sequence start + sequenceEnd, /// Sequence end + mappingStart, /// Mapping start + mappingEnd /// Mapping end +} + +/** + * YAML event produced by parser. + * + * 48 bytes on 64bit. + */ +struct Event +{ + @disable int opCmp(ref Event); + + ///Value of the event, if any. + string value; + ///Start position of the event in file/stream. + Mark startMark; + ///End position of the event in file/stream. + Mark endMark; + union + { + struct + { + ///Anchor of the event, if any. + string _anchor; + ///Tag of the event, if any. + string _tag; + } + ///Tag directives, if this is a DocumentStart. + //TagDirectives tagDirectives; + TagDirective[] _tagDirectives; + } + ///Event type. + EventID id = EventID.invalid; + ///Style of scalar event, if this is a scalar event. + ScalarStyle scalarStyle = ScalarStyle.invalid; + union + { + ///Should the tag be implicitly resolved? + bool implicit; + /** + * Is this document event explicit? + * + * Used if this is a DocumentStart or DocumentEnd. + */ + bool explicitDocument; + } + ///Collection style, if this is a SequenceStart or MappingStart. + CollectionStyle collectionStyle = CollectionStyle.invalid; + + ///Is this a null (uninitialized) event? + @property bool isNull() const pure @safe nothrow {return id == EventID.invalid;} + + ///Get string representation of the token ID. + @property string idString() const @safe {return to!string(id);} + + auto ref anchor() inout @trusted pure { + assert(id != EventID.documentStart, "DocumentStart events cannot have anchors."); + return _anchor; + } + + auto ref tag() inout @trusted pure { + assert(id != EventID.documentStart, "DocumentStart events cannot have tags."); + return _tag; + } + + auto ref tagDirectives() inout @trusted pure { + assert(id == EventID.documentStart, "Only DocumentStart events have tag directives."); + return _tagDirectives; + } +} + +/** + * Construct a simple event. + * + * Params: start = Start position of the event in the file/stream. + * end = End position of the event in the file/stream. + * anchor = Anchor, if this is an alias event. 
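+ *
+ * A sketch of how the aliases below use this helper (the Mark values are
+ * placeholders):
+ * --------------------
+ * auto e = event!(EventID.streamEnd)(Mark.init, Mark.init);
+ * assert(e.id == EventID.streamEnd);
+ * --------------------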
+ */ +Event event(EventID id)(const Mark start, const Mark end, const string anchor = null) + @safe + in(!(id == EventID.alias_ && anchor == ""), "Missing anchor for alias event") +{ + Event result; + result.startMark = start; + result.endMark = end; + result.anchor = anchor; + result.id = id; + return result; +} + +/** + * Construct a collection (mapping or sequence) start event. + * + * Params: start = Start position of the event in the file/stream. + * end = End position of the event in the file/stream. + * anchor = Anchor of the sequence, if any. + * tag = Tag of the sequence, if specified. + * implicit = Should the tag be implicitly resolved? + * style = Style to use when outputting document. + */ +Event collectionStartEvent(EventID id) + (const Mark start, const Mark end, const string anchor, const string tag, + const bool implicit, const CollectionStyle style) pure @safe nothrow +{ + static assert(id == EventID.sequenceStart || id == EventID.sequenceEnd || + id == EventID.mappingStart || id == EventID.mappingEnd); + Event result; + result.startMark = start; + result.endMark = end; + result.anchor = anchor; + result.tag = tag; + result.id = id; + result.implicit = implicit; + result.collectionStyle = style; + return result; +} + +/** + * Construct a stream start event. + * + * Params: start = Start position of the event in the file/stream. + * end = End position of the event in the file/stream. + */ +Event streamStartEvent(const Mark start, const Mark end) + pure @safe nothrow +{ + Event result; + result.startMark = start; + result.endMark = end; + result.id = EventID.streamStart; + return result; +} + +///Aliases for simple events. +alias streamEndEvent = event!(EventID.streamEnd); +alias aliasEvent = event!(EventID.alias_); +alias sequenceEndEvent = event!(EventID.sequenceEnd); +alias mappingEndEvent = event!(EventID.mappingEnd); + +///Aliases for collection start events. +alias sequenceStartEvent = collectionStartEvent!(EventID.sequenceStart); +alias mappingStartEvent = collectionStartEvent!(EventID.mappingStart); + +/** + * Construct a document start event. + * + * Params: start = Start position of the event in the file/stream. + * end = End position of the event in the file/stream. + * explicit = Is this an explicit document start? + * YAMLVersion = YAML version string of the document. + * tagDirectives = Tag directives of the document. + */ +Event documentStartEvent(const Mark start, const Mark end, const bool explicit, string YAMLVersion, + TagDirective[] tagDirectives) pure @safe nothrow +{ + Event result; + result.value = YAMLVersion; + result.startMark = start; + result.endMark = end; + result.id = EventID.documentStart; + result.explicitDocument = explicit; + result.tagDirectives = tagDirectives; + return result; +} + +/** + * Construct a document end event. + * + * Params: start = Start position of the event in the file/stream. + * end = End position of the event in the file/stream. + * explicit = Is this an explicit document end? + */ +Event documentEndEvent(const Mark start, const Mark end, const bool explicit) pure @safe nothrow +{ + Event result; + result.startMark = start; + result.endMark = end; + result.id = EventID.documentEnd; + result.explicitDocument = explicit; + return result; +} + +/// Construct a scalar event. +/// +/// Params: start = Start position of the event in the file/stream. +/// end = End position of the event in the file/stream. +/// anchor = Anchor of the scalar, if any. +/// tag = Tag of the scalar, if specified. 
+/// implicit = Should the tag be implicitly resolved? +/// value = String value of the scalar. +/// style = Scalar style. +Event scalarEvent(const Mark start, const Mark end, const string anchor, const string tag, + const bool implicit, const string value, + const ScalarStyle style = ScalarStyle.invalid) @safe pure nothrow @nogc +{ + Event result; + result.value = value; + result.startMark = start; + result.endMark = end; + + result.anchor = anchor; + result.tag = tag; + + result.id = EventID.scalar; + result.scalarStyle = style; + result.implicit = implicit; + return result; +} diff --git a/source/dyaml/exception.d b/source/dyaml/exception.d new file mode 100644 index 0000000..145e9c3 --- /dev/null +++ b/source/dyaml/exception.d @@ -0,0 +1,171 @@ + +// Copyright Ferdinand Majerech 2011. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +///Exceptions thrown by D:YAML and _exception related code. +module dyaml.exception; + + +import std.algorithm; +import std.array; +import std.string; +import std.conv; + + +/// Base class for all exceptions thrown by D:YAML. +class YAMLException : Exception +{ + /// Construct a YAMLException with specified message and position where it was thrown. + public this(string msg, string file = __FILE__, size_t line = __LINE__) + @safe pure nothrow @nogc + { + super(msg, file, line); + } +} + +/// Position in a YAML stream, used for error messages. +struct Mark +{ + package: + /// File name. + string name_; + /// Line number. + ushort line_; + /// Column number. + ushort column_; + + public: + /// Construct a Mark with specified line and column in the file. + this(string name, const uint line, const uint column) @safe pure nothrow @nogc + { + name_ = name; + line_ = cast(ushort)min(ushort.max, line); + // This *will* overflow on extremely wide files but saves CPU time + // (mark ctor takes ~5% of time) + column_ = cast(ushort)column; + } + + /// Get a file name. + @property string name() @safe pure nothrow @nogc const + { + return name_; + } + + /// Get a line number. + @property ushort line() @safe pure nothrow @nogc const + { + return line_; + } + + /// Get a column number. + @property ushort column() @safe pure nothrow @nogc const + { + return column_; + } + + /// Duplicate a mark + Mark dup () const scope @safe pure nothrow + { + return Mark(this.name_.idup, this.line_, this.column_); + } + + /// Get a string representation of the mark. + string toString() const scope @safe pure nothrow + { + // Line/column numbers start at zero internally, make them start at 1. + static string clamped(ushort v) @safe pure nothrow + { + return text(v + 1, v == ushort.max ? " or higher" : ""); + } + return "file " ~ name_ ~ ",line " ~ clamped(line_) ~ ",column " ~ clamped(column_); + } +} + +// Base class of YAML exceptions with marked positions of the problem. +abstract class MarkedYAMLException : YAMLException +{ + /// Position of the error. + Mark mark; + + // Construct a MarkedYAMLException with specified context and problem. + this(string context, scope const Mark contextMark, + string problem, scope const Mark problemMark, + string file = __FILE__, size_t line = __LINE__) @safe pure nothrow + { + const msg = context ~ '\n' ~ + (contextMark != problemMark ? contextMark.toString() ~ '\n' : "") ~ + problem ~ '\n' ~ problemMark.toString() ~ '\n'; + super(msg, file, line); + mark = problemMark.dup; + } + + // Construct a MarkedYAMLException with specified problem. 
+    this(string problem, scope const Mark problemMark,
+         string file = __FILE__, size_t line = __LINE__)
+        @safe pure nothrow
+    {
+        super(problem ~ '\n' ~ problemMark.toString(), file, line);
+        mark = problemMark.dup;
+    }
+
+    /// Construct a MarkedYAMLException from a struct storing constructor parameters.
+    this(ref const(MarkedYAMLExceptionData) data) @safe pure nothrow
+    {
+        with(data) this(context, contextMark, problem, problemMark);
+    }
+}
+
+package:
+// A struct storing parameters to the MarkedYAMLException constructor.
+struct MarkedYAMLExceptionData
+{
+    // Context of the error.
+    string context;
+    // Position of the context in a YAML buffer.
+    Mark contextMark;
+    // The error itself.
+    string problem;
+    // Position of the error.
+    Mark problemMark;
+}
+
+// Constructors of YAML exceptions are mostly the same, so we use a mixin.
+//
+// See_Also: YAMLException
+template ExceptionCtors()
+{
+    public this(string msg, string file = __FILE__, size_t line = __LINE__)
+        @safe pure nothrow
+    {
+        super(msg, file, line);
+    }
+}
+
+// Constructors of marked YAML exceptions are mostly the same, so we use a mixin.
+//
+// See_Also: MarkedYAMLException
+template MarkedExceptionCtors()
+{
+    public:
+        this(string context, const Mark contextMark, string problem,
+             const Mark problemMark, string file = __FILE__, size_t line = __LINE__)
+            @safe pure nothrow
+        {
+            super(context, contextMark, problem, problemMark,
+                  file, line);
+        }
+
+        this(string problem, const Mark problemMark,
+             string file = __FILE__, size_t line = __LINE__)
+            @safe pure nothrow
+        {
+            super(problem, problemMark, file, line);
+        }
+
+        this(ref const(MarkedYAMLExceptionData) data) @safe pure nothrow
+        {
+            super(data);
+        }
+}
diff --git a/source/dyaml/linebreak.d b/source/dyaml/linebreak.d
new file mode 100644
index 0000000..1f0f661
--- /dev/null
+++ b/source/dyaml/linebreak.d
@@ -0,0 +1,32 @@
+
+// Copyright Ferdinand Majerech 2011.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+module dyaml.linebreak;
+
+
+///Enumerates platform-specific line breaks.
+enum LineBreak
+{
+    ///Unix line break ("\n").
+    unix,
+    ///Windows line break ("\r\n").
+    windows,
+    ///Macintosh line break ("\r").
+    macintosh
+}
+
+package:
+
+//Get line break string for specified line break.
+string lineBreak(in LineBreak b) pure @safe nothrow
+{
+    final switch(b)
+    {
+        case LineBreak.unix: return "\n";
+        case LineBreak.windows: return "\r\n";
+        case LineBreak.macintosh: return "\r";
+    }
+}
diff --git a/source/dyaml/loader.d b/source/dyaml/loader.d
new file mode 100644
index 0000000..6638dfc
--- /dev/null
+++ b/source/dyaml/loader.d
@@ -0,0 +1,413 @@
+
+// Copyright Ferdinand Majerech 2011.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+/// Class used to load YAML documents.
+module dyaml.loader;
+
+
+import std.exception;
+import std.file;
+import std.stdio : File;
+import std.string;
+
+import dyaml.composer;
+import dyaml.constructor;
+import dyaml.event;
+import dyaml.exception;
+import dyaml.node;
+import dyaml.parser;
+import dyaml.reader;
+import dyaml.resolver;
+import dyaml.scanner;
+import dyaml.token;
+
+
+/** Loads YAML documents from files or char[].
+ *
+ * A user-specified Constructor and/or Resolver can be used to support new
+ * tags / data types.
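+ *
+ * A minimal usage sketch (the same calls appear in the unittests further
+ * below):
+ * --------------------
+ * auto root = Loader.fromString("answer: 42").load();
+ * assert(root["answer"].as!int == 42);
+ * --------------------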
+ */ +struct Loader +{ + private: + // Processes character data to YAML tokens. + Scanner scanner_; + // Processes tokens to YAML events. + Parser parser_; + // Resolves tags (data types). + Resolver resolver_; + // Name of the input file or stream, used in error messages. + string name_ = ""; + // Are we done loading? + bool done_; + // Last node read from stream + Node currentNode; + // Has the range interface been initialized yet? + bool rangeInitialized; + + public: + @disable this(); + @disable int opCmp(ref Loader); + @disable bool opEquals(ref Loader); + + /** Construct a Loader to load YAML from a file. + * + * Params: filename = Name of the file to load from. + * file = Already-opened file to load from. + * + * Throws: YAMLException if the file could not be opened or read. + */ + static Loader fromFile(string filename) @trusted + { + try + { + auto loader = Loader(std.file.read(filename), filename); + return loader; + } + catch(FileException e) + { + throw new YAMLException("Unable to open file %s for YAML loading: %s" + .format(filename, e.msg), e.file, e.line); + } + } + /// ditto + static Loader fromFile(File file) @system + { + auto loader = Loader(file.byChunk(4096).join, file.name); + return loader; + } + + /** Construct a Loader to load YAML from a string. + * + * Params: + * data = String to load YAML from. The char[] version $(B will) + * overwrite its input during parsing as D:YAML reuses memory. + * filename = The filename to give to the Loader, defaults to `""` + * + * Returns: Loader loading YAML from given string. + * + * Throws: + * + * YAMLException if data could not be read (e.g. a decoding error) + */ + static Loader fromString(char[] data, string filename = "") @safe + { + return Loader(cast(ubyte[])data, filename); + } + /// Ditto + static Loader fromString(string data, string filename = "") @safe + { + return fromString(data.dup, filename); + } + /// Load a char[]. + @safe unittest + { + assert(Loader.fromString("42".dup).load().as!int == 42); + } + /// Load a string. + @safe unittest + { + assert(Loader.fromString("42").load().as!int == 42); + } + + /** Construct a Loader to load YAML from a buffer. + * + * Params: yamlData = Buffer with YAML data to load. This may be e.g. a file + * loaded to memory or a string with YAML data. Note that + * buffer $(B will) be overwritten, as D:YAML minimizes + * memory allocations by reusing the input _buffer. + * $(B Must not be deleted or modified by the user as long + * as nodes loaded by this Loader are in use!) - Nodes may + * refer to data in this buffer. + * + * Note that D:YAML looks for byte-order-marks YAML files encoded in + * UTF-16/UTF-32 (and sometimes UTF-8) use to specify the encoding and + * endianness, so it should be enough to load an entire file to a buffer and + * pass it to D:YAML, regardless of Unicode encoding. + * + * Throws: YAMLException if yamlData contains data illegal in YAML. 
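+         *
+         * A small sketch, analogous to the fromString example above:
+         * --------------------
+         * ubyte[] data = cast(ubyte[])"42".dup;
+         * assert(Loader.fromBuffer(data).load().as!int == 42);
+         * --------------------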
+ */ + static Loader fromBuffer(ubyte[] yamlData) @safe + { + return Loader(yamlData); + } + /// Ditto + static Loader fromBuffer(void[] yamlData) @system + { + return Loader(yamlData); + } + /// Ditto + private this(void[] yamlData, string name = "") @system + { + this(cast(ubyte[])yamlData, name); + } + /// Ditto + private this(ubyte[] yamlData, string name = "") @safe + { + resolver_ = Resolver.withDefaultResolvers; + name_ = name; + try + { + auto reader_ = new Reader(yamlData, name); + scanner_ = Scanner(reader_); + parser_ = new Parser(scanner_); + } + catch(YAMLException e) + { + throw new YAMLException("Unable to open %s for YAML loading: %s" + .format(name_, e.msg), e.file, e.line); + } + } + + + /// Set stream _name. Used in debugging messages. + void name(string name) pure @safe nothrow @nogc + { + name_ = name; + scanner_.name = name; + } + + /// Specify custom Resolver to use. + auto ref resolver() pure @safe nothrow @nogc + { + return resolver_; + } + + /** Load single YAML document. + * + * If none or more than one YAML document is found, this throws a YAMLException. + * + * This can only be called once; this is enforced by contract. + * + * Returns: Root node of the document. + * + * Throws: YAMLException if there wasn't exactly one document + * or on a YAML parsing error. + */ + Node load() @safe + { + enforce!YAMLException(!empty, "Zero documents in stream"); + auto output = front; + popFront(); + enforce!YAMLException(empty, "More than one document in stream"); + return output; + } + + /** Implements the empty range primitive. + * + * If there's no more documents left in the stream, this will be true. + * + * Returns: `true` if no more documents left, `false` otherwise. + */ + bool empty() @safe + { + // currentNode and done_ are both invalid until popFront is called once + if (!rangeInitialized) + { + popFront(); + } + return done_; + } + /** Implements the popFront range primitive. + * + * Reads the next document from the stream, if possible. + */ + void popFront() @safe + { + // Composer initialization is done here in case the constructor is + // modified, which is a pretty common case. + static Composer composer; + if (!rangeInitialized) + { + composer = Composer(parser_, resolver_); + rangeInitialized = true; + } + assert(!done_, "Loader.popFront called on empty range"); + if (composer.checkNode()) + { + currentNode = composer.getNode(); + } + else + { + done_ = true; + } + } + /** Implements the front range primitive. + * + * Returns: the current document as a Node. + */ + Node front() @safe + { + // currentNode and done_ are both invalid until popFront is called once + if (!rangeInitialized) + { + popFront(); + } + return currentNode; + } + + // Scan all tokens, throwing them away. Used for benchmarking. + void scanBench() @safe + { + try + { + while(!scanner_.empty) + { + scanner_.popFront(); + } + } + catch(YAMLException e) + { + throw new YAMLException("Unable to scan YAML from stream " ~ + name_ ~ " : " ~ e.msg, e.file, e.line); + } + } + + + // Parse and return all events. Used for debugging. 
+        auto parse() @safe
+        {
+            return parser_;
+        }
+}
+/// Load single YAML document from a file:
+@safe unittest
+{
+    write("example.yaml", "Hello world!");
+    auto rootNode = Loader.fromFile("example.yaml").load();
+    assert(rootNode == "Hello world!");
+}
+/// Load single YAML document from an already-opened file:
+@system unittest
+{
+    // Open a temporary file
+    auto file = File.tmpfile;
+    // Write valid YAML
+    file.write("Hello world!");
+    // Return to the beginning
+    file.seek(0);
+    // Load document
+    auto rootNode = Loader.fromFile(file).load();
+    assert(rootNode == "Hello world!");
+}
+/// Load all YAML documents from a file:
+@safe unittest
+{
+    import std.array : array;
+    import std.file : write;
+    write("example.yaml",
+        "---\n"~
+        "Hello world!\n"~
+        "...\n"~
+        "---\n"~
+        "Hello world 2!\n"~
+        "...\n"
+    );
+    auto nodes = Loader.fromFile("example.yaml").array;
+    assert(nodes.length == 2);
+}
+/// Iterate over YAML documents in a file, lazily loading them:
+@safe unittest
+{
+    import std.file : write;
+    write("example.yaml",
+        "---\n"~
+        "Hello world!\n"~
+        "...\n"~
+        "---\n"~
+        "Hello world 2!\n"~
+        "...\n"
+    );
+    auto loader = Loader.fromFile("example.yaml");
+
+    foreach(ref node; loader)
+    {
+        //Do something
+    }
+}
+/// Load YAML from a string:
+@safe unittest
+{
+    string yaml_input = ("red: '#ff0000'\n" ~
+                         "green: '#00ff00'\n" ~
+                         "blue: '#0000ff'");
+
+    auto colors = Loader.fromString(yaml_input).load();
+
+    foreach(string color, string value; colors)
+    {
+        // Do something with the color and its value...
+    }
+}
+
+/// Load a file into a buffer in memory and then load YAML from that buffer:
+@safe unittest
+{
+    import std.file : readText, write, FileException;
+    import std.stdio : writeln;
+    // Create a yaml document
+    write("example.yaml",
+        "---\n"~
+        "Hello world!\n"~
+        "...\n"~
+        "---\n"~
+        "Hello world 2!\n"~
+        "...\n"
+    );
+    try
+    {
+        string buffer = readText("example.yaml");
+        auto yamlNode = Loader.fromString(buffer);
+
+        // Read data from yamlNode here...
+    }
+    catch(FileException e)
+    {
+        writeln("Failed to read file 'example.yaml'");
+    }
+}
+/// Use a custom resolver to support custom data types and/or implicit tags:
+@safe unittest
+{
+    import std.file : write;
+    // Create a yaml document
+    write("example.yaml",
+        "---\n"~
+        "Hello world!\n"~
+        "...\n"
+    );
+
+    auto loader = Loader.fromFile("example.yaml");
+
+    // Add resolver expressions here...
+    // loader.resolver.addImplicitResolver(...);
+
+    auto rootNode = loader.load();
+}
+
+//Issue #258 - https://github.com/dlang-community/D-YAML/issues/258
+@safe unittest
+{
+    auto yaml = "{\n\"root\": {\n\t\"key\": \"value\"\n }\n}";
+    auto doc = Loader.fromString(yaml).load();
+    assert(doc.isValid);
+}
+
+@safe unittest
+{
+    import std.exception : collectException;
+
+    auto yaml = q"EOS
+ value: invalid: string
+EOS";
+    auto filename = "invalid.yml";
+    auto loader = Loader.fromString(yaml);
+    loader.name = filename;
+
+    Node unused;
+    auto e = loader.load().collectException!ScannerException(unused);
+    assert(e.mark.name == filename);
+}
diff --git a/source/dyaml/node.d b/source/dyaml/node.d
new file mode 100644
index 0000000..4c3c5eb
--- /dev/null
+++ b/source/dyaml/node.d
@@ -0,0 +1,2638 @@
+
+// Copyright Ferdinand Majerech 2011.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+/// Node of a YAML document. Used to read YAML data once it's loaded,
+/// and to prepare data to emit.
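+///
+/// A minimal sketch of reading data back out of a loaded node (see the
+/// unittests throughout this module for more):
+/// --------------------
+/// auto node = Node([1, 2, 3]);
+/// assert(node[2].as!int == 3);
+/// --------------------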
+module dyaml.node; + + +import std.algorithm; +import std.array; +import std.conv; +import std.datetime; +import std.exception; +import std.format; +import std.math; +import std.meta : AliasSeq; +import std.range; +import std.string; +import std.traits; +import std.typecons; + +// FIXME: Switch back to upstream's when v2.101 is the oldest +// supported version (recommended: after v2.111 release). +import dyaml.stdsumtype; + +import dyaml.event; +import dyaml.exception; +import dyaml.style; + +/// Exception thrown at node related errors. +class NodeException : MarkedYAMLException +{ + package: + // Construct a NodeException. + // + // Params: msg = Error message. + // start = Start position of the node. + this(string msg, const scope Mark start, + string file = __FILE__, size_t line = __LINE__) + @safe pure nothrow + { + super(msg, start, file, line); + } +} + +// Node kinds. +enum NodeID : ubyte +{ + scalar, + sequence, + mapping, + invalid +} + +/// Null YAML type. Used in nodes with _null values. +struct YAMLNull +{ + /// Used for string conversion. + string toString() const pure @safe nothrow {return "null";} +} + +/// Invalid YAML type, used internally by SumType +private struct YAMLInvalid {} + +// Merge YAML type, used to support "tag:yaml.org,2002:merge". +package struct YAMLMerge{} + +// Key-value pair of YAML nodes, used in mappings. +private struct Pair +{ + public: + /// Key node. + Node key; + /// Value node. + Node value; + + /// Construct a Pair from two values. Will be converted to Nodes if needed. + this(K, V)(K key, V value) + { + static if(is(Unqual!K == Node)){this.key = key;} + else {this.key = Node(key);} + static if(is(Unqual!V == Node)){this.value = value;} + else {this.value = Node(value);} + } + + /// Equality test with another Pair. + bool opEquals(const ref Pair rhs) const scope @safe + { + return key == rhs.key && value == rhs.value; + } + + // Comparison with another Pair. + int opCmp(const scope ref Pair rhs) const scope @safe + { + const keyCmp = key.opCmp(rhs.key); + return keyCmp != 0 ? keyCmp + : value.opCmp(rhs.value); + } + + /// + public void toString (scope void delegate(scope const(char)[]) @safe sink) + const scope @safe + { + // formattedWrite does not accept `scope` parameters + () @trusted { + formattedWrite(sink, "%s: %s", this.key, this.value); + }(); + } +} + +enum NodeType +{ + null_, + merge, + boolean, + integer, + decimal, + binary, + timestamp, + string, + mapping, + sequence, + invalid +} + +/** YAML node. + * + * This is a pseudo-dynamic type that can store any YAML value, including a + * sequence or mapping of nodes. You can get data from a Node directly or + * iterate over it if it's a collection. + */ +struct Node +{ + public: + alias Pair = .Pair; + + package: + // YAML value type. + alias Value = SumType!( + YAMLInvalid, YAMLNull, YAMLMerge, + bool, long, real, ubyte[], SysTime, string, + Node.Pair[], Node[]); + + // Can Value hold this type naturally? + enum allowed(T) = isIntegral!T || + isFloatingPoint!T || + isSomeString!T || + is(typeof({ Value i = T.init; })); + + // Stored value. + Value value_; + // Start position of the node. + Mark startMark_; + + // Tag of the node. + string tag_; + // Node scalar style. Used to remember style this node was loaded with. + ScalarStyle scalarStyle = ScalarStyle.invalid; + // Node collection style. Used to remember style this node was loaded with. + CollectionStyle collectionStyle = CollectionStyle.invalid; + + public: + /** Construct a Node from a value. 
+ * + * Any type except for Node can be stored in a Node, but default YAML + * types (integers, floats, strings, timestamps, etc.) will be stored + * more efficiently. To create a node representing a null value, + * construct it from YAMLNull. + * + * If value is a node, its value will be copied directly. The tag and + * other information attached to the original node will be discarded. + * + * If value is an array of nodes or pairs, it is stored directly. + * Otherwise, every value in the array is converted to a node, and + * those nodes are stored. + * + * Note that to emit any non-default types you store + * in a node, you need a Representer to represent them in YAML - + * otherwise emitting will fail. + * + * Params: value = Value to store in the node. + * tag = Overrides tag of the node when emitted, regardless + * of tag determined by Representer. Representer uses + * this to determine YAML data type when a D data type + * maps to multiple different YAML data types. Tag must + * be in full form, e.g. "tag:yaml.org,2002:int", not + * a shortcut, like "!!int". + */ + this(T)(T value, const string tag = null) @safe + if (allowed!T || isArray!T || isAssociativeArray!T || is(Unqual!T == Node) || castableToNode!T) + { + tag_ = tag; + + //Unlike with assignment, we're just copying the value. + static if (is(Unqual!T == Node)) + { + setValue(value.value_); + } + else static if(isSomeString!T) + { + setValue(value.to!string); + } + else static if(is(Unqual!T == bool)) + { + setValue(cast(bool)value); + } + else static if(isIntegral!T) + { + setValue(cast(long)value); + } + else static if(isFloatingPoint!T) + { + setValue(cast(real)value); + } + else static if (isArray!T) + { + alias ElementT = Unqual!(ElementType!T); + // Construction from raw node or pair array. + static if(is(ElementT == Node) || is(ElementT == Node.Pair)) + { + setValue(value); + } + // Need to handle byte buffers separately. + else static if(is(ElementT == byte) || is(ElementT == ubyte)) + { + setValue(cast(ubyte[]) value); + } + else + { + Node[] nodes; + foreach(ref v; value) + { + nodes ~= Node(v); + } + setValue(nodes); + } + } + else static if (isAssociativeArray!T) + { + Node.Pair[] pairs; + foreach(k, ref v; value) + { + pairs ~= Pair(k, v); + } + setValue(pairs); + } + // User defined type. 
+            else
+            {
+                setValue(value);
+            }
+        }
+        /// Construct a scalar node
+        @safe unittest
+        {
+            // Integer
+            {
+                auto node = Node(5);
+            }
+            // String
+            {
+                auto node = Node("Hello world!");
+            }
+            // Floating point
+            {
+                auto node = Node(5.0f);
+            }
+            // Boolean
+            {
+                auto node = Node(true);
+            }
+            // Time
+            {
+                auto node = Node(SysTime(DateTime(2005, 6, 15, 20, 0, 0), UTC()));
+            }
+            // Integer, dumped as a string
+            {
+                auto node = Node(5, "tag:yaml.org,2002:str");
+            }
+        }
+        /// Construct a sequence node
+        @safe unittest
+        {
+            // Will be emitted as a sequence (default for arrays)
+            {
+                auto seq = Node([1, 2, 3, 4, 5]);
+            }
+            // Will be emitted as a set (overridden tag)
+            {
+                auto set = Node([1, 2, 3, 4, 5], "tag:yaml.org,2002:set");
+            }
+            // Can also store arrays of arrays
+            {
+                auto node = Node([[1,2], [3,4]]);
+            }
+        }
+        /// Construct a mapping node
+        @safe unittest
+        {
+            // Will be emitted as an unordered mapping (default for mappings)
+            auto map = Node([1 : "a", 2 : "b"]);
+            // Will be emitted as an ordered map (overridden tag)
+            auto omap = Node([1 : "a", 2 : "b"], "tag:yaml.org,2002:omap");
+            // Will be emitted as pairs (overridden tag)
+            auto pairs = Node([1 : "a", 2 : "b"], "tag:yaml.org,2002:pairs");
+        }
+        @safe unittest
+        {
+            {
+                auto node = Node(42);
+                assert(node.nodeID == NodeID.scalar);
+                assert(node.as!int == 42 && node.as!float == 42.0f && node.as!string == "42");
+            }
+
+            {
+                auto node = Node("string");
+                assert(node.as!string == "string");
+            }
+        }
+        @safe unittest
+        {
+            with(Node([1, 2, 3]))
+            {
+                assert(nodeID == NodeID.sequence);
+                assert(length == 3);
+                assert(opIndex(2).as!int == 3);
+            }
+
+        }
+        @safe unittest
+        {
+            int[string] aa;
+            aa["1"] = 1;
+            aa["2"] = 2;
+            with(Node(aa))
+            {
+                assert(nodeID == NodeID.mapping);
+                assert(length == 2);
+                assert(opIndex("2").as!int == 2);
+            }
+        }
+        @safe unittest
+        {
+            auto node = Node(Node(4, "tag:yaml.org,2002:str"));
+            assert(node == 4);
+            assert(node.tag_ == "");
+        }
+
+        /** Construct a node from arrays of _keys and _values.
+         *
+         * Constructs a mapping node with key-value pairs from
+         * _keys and _values, keeping their order. Useful when order
+         * is important (ordered maps, pairs).
+         *
+         * keys and values must have equal length.
+         *
+         * If _keys and/or _values are nodes, they are stored directly.
+         * Otherwise they are converted to nodes and then stored.
+         *
+         * Params: keys   = Keys of the mapping, from first to last pair.
+         *         values = Values of the mapping, from first to last pair.
+         *         tag    = Overrides tag of the node when emitted, regardless
+         *                  of tag determined by Representer. Representer uses
+         *                  this to determine YAML data type when a D data type
+         *                  maps to multiple different YAML data types.
+         *                  This is used to differentiate between YAML unordered
+         *                  mappings ("!!map"), ordered mappings ("!!omap"), and
+         *                  pairs ("!!pairs") which are all internally
+         *                  represented as an array of node pairs. Tag must be
+         *                  in full form, e.g. "tag:yaml.org,2002:omap", not a
+         *                  shortcut, like "!!omap".
+         *
+         */
+        this(K, V)(K[] keys, V[] values, const string tag = null)
+            if(!(isSomeString!(K[]) || isSomeString!(V[])))
+        in(keys.length == values.length,
+           "Lengths of keys and values arrays to construct " ~
+           "a YAML node from don't match")
+        {
+            tag_ = tag;
+
+            Node.Pair[] pairs;
+            foreach(i; 0 ..
keys.length){pairs ~= Pair(keys[i], values[i]);} + setValue(pairs); + } + /// + @safe unittest + { + // Will be emitted as an unordered mapping (default for mappings) + auto map = Node([1, 2], ["a", "b"]); + // Will be emitted as an ordered map (overridden tag) + auto omap = Node([1, 2], ["a", "b"], "tag:yaml.org,2002:omap"); + // Will be emitted as pairs (overriden tag) + auto pairs = Node([1, 2], ["a", "b"], "tag:yaml.org,2002:pairs"); + } + @safe unittest + { + with(Node(["1", "2"], [1, 2])) + { + assert(nodeID == NodeID.mapping); + assert(length == 2); + assert(opIndex("2").as!int == 2); + } + + } + + /// Is this node valid (initialized)? + @property bool isValid() const scope @safe pure nothrow @nogc + { + return value_.match!((const YAMLInvalid _) => false, _ => true); + } + + /// Return tag of the node. + @property string tag() const return scope @safe pure nothrow @nogc + { + return tag_; + } + + /// Return the start position of the node. + @property Mark startMark() const return scope @safe pure nothrow @nogc + { + return startMark_; + } + + /** Equality test. + * + * If T is Node, recursively compares all subnodes. + * This might be quite expensive if testing entire documents. + * + * If T is not Node, gets a value of type T from the node and tests + * equality with that. + * + * To test equality with a null YAML value, use YAMLNull. + * + * Params: rhs = Variable to test equality with. + * + * Returns: true if equal, false otherwise. + */ + bool opEquals(const scope Node rhs) const scope @safe + { + return opCmp(rhs) == 0; + } + bool opEquals(T)(const scope auto ref T rhs) const @safe + { + try + { + auto stored = get!(T, No.stringConversion); + // NaNs aren't normally equal to each other, but we'll pretend they are. + static if(isFloatingPoint!T) + { + return rhs == stored || (isNaN(rhs) && isNaN(stored)); + } + else + { + return rhs == stored; + } + } + catch(NodeException e) + { + return false; + } + } + /// + @safe unittest + { + auto node = Node(42); + + assert(node == 42); + assert(node != "42"); + assert(node != "43"); + + auto node2 = Node(YAMLNull()); + assert(node2 == YAMLNull()); + + const node3 = Node(42); + assert(node3 == 42); + } + + /// Shortcut for get(). + alias as = get; + + /** Get the value of the node as specified type. + * + * If the specifed type does not match type in the node, + * conversion is attempted. The stringConversion template + * parameter can be used to disable conversion from non-string + * types to strings. + * + * Numeric values are range checked, throwing if out of range of + * requested type. + * + * Timestamps are stored as std.datetime.SysTime. + * Binary values are decoded and stored as ubyte[]. + * + * To get a null value, use get!YAMLNull . This is to + * prevent getting null values for types such as strings or classes. + * + * $(BR)$(B Mapping default values:) + * + * $(PBR + * The '=' key can be used to denote the default value of a mapping. + * This can be used when a node is scalar in early versions of a program, + * but is replaced by a mapping later. Even if the node is a mapping, the + * get method can be used as if it was a scalar if it has a default value. + * This way, new YAML files where the node is a mapping can still be read + * by old versions of the program, which expect the node to be a scalar. + * ) + * + * Returns: Value of the node as specified type. + * + * Throws: NodeException if unable to convert to specified type, or if + * the value is out of range of requested type. 
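+ *
+ * Example (an illustrative sketch, not part of the original documentation):
+ * ---
+ * // A setting that used to be a plain scalar is now a mapping with extra
+ * // fields; the old scalar value lives under the "=" key.
+ * auto n = Node(["=", "unit"], [Node(42), Node("mm")]);
+ * assert(n.get!int == 42); // get() falls back to the "=" entry
+ * ---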
+ */ + inout(T) get(T, Flag!"stringConversion" stringConversion = Yes.stringConversion)() inout @safe return scope + { + static assert (allowed!(Unqual!T) || + hasNodeConstructor!(inout(Unqual!T)) || + (!hasIndirections!(Unqual!T) && hasNodeConstructor!(Unqual!T))); + + static if(!allowed!(Unqual!T)) + { + static if (hasSimpleNodeConstructor!(Unqual!T) || hasSimpleNodeConstructor!(inout(Unqual!T))) + { + alias params = AliasSeq!(this); + } + else static if (hasExpandedNodeConstructor!(Unqual!T) || hasExpandedNodeConstructor!(inout(Unqual!T))) + { + alias params = AliasSeq!(this, tag_); + } + else + { + static assert(0, "Unknown Node constructor?"); + } + + static if (is(T == class)) + { + return new inout T(params); + } + else static if (is(T == struct)) + { + return T(params); + } + else + { + static assert(0, "Unhandled user type"); + } + } else { + static if (canBeType!T) + if (isType!(Unqual!T)) { return getValue!T; } + + // If we're getting from a mapping and we're not getting Node.Pair[], + // we're getting the default value. + if(nodeID == NodeID.mapping){return this["="].get!( T, stringConversion);} + + static if(isSomeString!T) + { + static if(!stringConversion) + { + enforce(type == NodeType.string, new NodeException( + "Node stores unexpected type: " ~ text(type) ~ + ". Expected: " ~ typeid(T).toString(), startMark_)); + return to!T(getValue!string); + } + else + { + // Try to convert to string. + try + { + return coerceValue!T().dup; + } + catch (MatchException e) + { + throw new NodeException("Unable to convert node value to string", startMark_); + } + } + } + else static if(isFloatingPoint!T) + { + final switch (type) + { + case NodeType.integer: + return to!T(getValue!long); + case NodeType.decimal: + return to!T(getValue!real); + case NodeType.binary: + case NodeType.string: + case NodeType.boolean: + case NodeType.null_: + case NodeType.merge: + case NodeType.invalid: + case NodeType.timestamp: + case NodeType.mapping: + case NodeType.sequence: + throw new NodeException("Node stores unexpected type: " ~ text(type) ~ + ". Expected: " ~ typeid(T).toString, startMark_); + } + } + else static if(isIntegral!T) + { + enforce(type == NodeType.integer, new NodeException("Node stores unexpected type: " ~ text(type) ~ + ". Expected: " ~ typeid(T).toString, startMark_)); + immutable temp = getValue!long; + enforce(temp >= T.min && temp <= T.max, + new NodeException("Integer value of type " ~ typeid(T).toString() ~ + " out of range. Value: " ~ to!string(temp), startMark_)); + return temp.to!T; + } + else throw new NodeException("Node stores unexpected type: " ~ text(type) ~ + ". 
Expected: " ~ typeid(T).toString, startMark_); + } + } + /// ditto + T get(T)() const + if (hasIndirections!(Unqual!T) && hasNodeConstructor!(Unqual!T) && (!hasNodeConstructor!(inout(Unqual!T)))) + { + static if (hasSimpleNodeConstructor!T) + { + alias params = AliasSeq!(this); + } + else static if (hasExpandedNodeConstructor!T) + { + alias params = AliasSeq!(this, tag_); + } + else + { + static assert(0, "Unknown Node constructor?"); + } + static if (is(T == class)) + { + return new T(params); + } + else static if (is(T == struct)) + { + return T(params); + } + else + { + static assert(0, "Unhandled user type"); + } + } + /// Automatic type conversion + @safe unittest + { + auto node = Node(42); + + assert(node.get!int == 42); + assert(node.get!string == "42"); + assert(node.get!double == 42.0); + } + /// Scalar node to struct and vice versa + @safe unittest + { + import dyaml.dumper : dumper; + import dyaml.loader : Loader; + static struct MyStruct + { + int x, y, z; + + this(int x, int y, int z) @safe + { + this.x = x; + this.y = y; + this.z = z; + } + + this(scope const Node node) @safe + { + // `std.array.split` is not marked as taking a `scope` range, + // but we don't escape a reference. + scope parts = () @trusted { return node.as!string().split(":"); }(); + x = parts[0].to!int; + y = parts[1].to!int; + z = parts[2].to!int; + } + + Node opCast(T: Node)() @safe + { + //Using custom scalar format, x:y:z. + auto scalar = format("%s:%s:%s", x, y, z); + //Representing as a scalar, with custom tag to specify this data type. + return Node(scalar, "!mystruct.tag"); + } + } + + auto appender = new Appender!string; + + // Dump struct to yaml document + dumper().dump(appender, Node(MyStruct(1,2,3))); + + // Read yaml document back as a MyStruct + auto loader = Loader.fromString(appender.data); + Node node = loader.load(); + assert(node.as!MyStruct == MyStruct(1,2,3)); + } + /// Sequence node to struct and vice versa + @safe unittest + { + import dyaml.dumper : dumper; + import dyaml.loader : Loader; + static struct MyStruct + { + int x, y, z; + + this(int x, int y, int z) @safe + { + this.x = x; + this.y = y; + this.z = z; + } + + this(Node node) @safe + { + x = node[0].as!int; + y = node[1].as!int; + z = node[2].as!int; + } + + Node opCast(T: Node)() + { + return Node([x, y, z], "!mystruct.tag"); + } + } + + auto appender = new Appender!string; + + // Dump struct to yaml document + dumper().dump(appender, Node(MyStruct(1,2,3))); + + // Read yaml document back as a MyStruct + auto loader = Loader.fromString(appender.data); + Node node = loader.load(); + assert(node.as!MyStruct == MyStruct(1,2,3)); + } + /// Mapping node to struct and vice versa + @safe unittest + { + import dyaml.dumper : dumper; + import dyaml.loader : Loader; + static struct MyStruct + { + int x, y, z; + + Node opCast(T: Node)() + { + auto pairs = [Node.Pair("x", x), + Node.Pair("y", y), + Node.Pair("z", z)]; + return Node(pairs, "!mystruct.tag"); + } + + this(int x, int y, int z) + { + this.x = x; + this.y = y; + this.z = z; + } + + this(Node node) @safe + { + x = node["x"].as!int; + y = node["y"].as!int; + z = node["z"].as!int; + } + } + + auto appender = new Appender!string; + + // Dump struct to yaml document + dumper().dump(appender, Node(MyStruct(1,2,3))); + + // Read yaml document back as a MyStruct + auto loader = Loader.fromString(appender.data); + Node node = loader.load(); + assert(node.as!MyStruct == MyStruct(1,2,3)); + } + /// Classes can be used too + @system unittest { + import dyaml.dumper : dumper; + import 
dyaml.loader : Loader; + + static class MyClass + { + int x, y, z; + + this(int x, int y, int z) + { + this.x = x; + this.y = y; + this.z = z; + } + + this(scope const Node node) @safe inout + { + // `std.array.split` is not marked as taking a `scope` range, + // but we don't escape a reference. + scope parts = () @trusted { return node.as!string().split(":"); }(); + x = parts[0].to!int; + y = parts[1].to!int; + z = parts[2].to!int; + } + + ///Useful for Node.as!string. + override string toString() + { + return format("MyClass(%s, %s, %s)", x, y, z); + } + + Node opCast(T: Node)() @safe + { + //Using custom scalar format, x:y:z. + auto scalar = format("%s:%s:%s", x, y, z); + //Representing as a scalar, with custom tag to specify this data type. + return Node(scalar, "!myclass.tag"); + } + override bool opEquals(Object o) + { + if (auto other = cast(MyClass)o) + { + return (other.x == x) && (other.y == y) && (other.z == z); + } + return false; + } + } + auto appender = new Appender!string; + + // Dump class to yaml document + dumper().dump(appender, Node(new MyClass(1,2,3))); + + // Read yaml document back as a MyClass + auto loader = Loader.fromString(appender.data); + Node node = loader.load(); + assert(node.as!MyClass == new MyClass(1,2,3)); + } + // Make sure custom tags and styles are kept. + @safe unittest + { + static struct MyStruct + { + Node opCast(T: Node)() + { + auto node = Node("hi", "!mystruct.tag"); + node.setStyle(ScalarStyle.doubleQuoted); + return node; + } + } + + auto node = Node(MyStruct.init); + assert(node.tag == "!mystruct.tag"); + assert(node.scalarStyle == ScalarStyle.doubleQuoted); + } + // ditto, but for collection style + @safe unittest + { + static struct MyStruct + { + Node opCast(T: Node)() + { + auto node = Node(["hi"], "!mystruct.tag"); + node.setStyle(CollectionStyle.flow); + return node; + } + } + + auto node = Node(MyStruct.init); + assert(node.tag == "!mystruct.tag"); + assert(node.collectionStyle == CollectionStyle.flow); + } + @safe unittest + { + assertThrown!NodeException(Node("42").get!int); + assertThrown!NodeException(Node("42").get!double); + assertThrown!NodeException(Node(long.max).get!ushort); + Node(YAMLNull()).get!YAMLNull; + } + @safe unittest + { + const node = Node(42); + assert(node.get!int == 42); + assert(node.get!string == "42"); + assert(node.get!double == 42.0); + + immutable node2 = Node(42); + assert(node2.get!int == 42); + assert(node2.get!(const int) == 42); + assert(node2.get!(immutable int) == 42); + assert(node2.get!string == "42"); + assert(node2.get!(const string) == "42"); + assert(node2.get!(immutable string) == "42"); + assert(node2.get!double == 42.0); + assert(node2.get!(const double) == 42.0); + assert(node2.get!(immutable double) == 42.0); + } + + /** If this is a collection, return its _length. + * + * Otherwise, throw NodeException. + * + * Returns: Number of elements in a sequence or key-value pairs in a mapping. + * + * Throws: NodeException if this is not a sequence nor a mapping. 
+ */ + @property size_t length() const @safe + { + final switch(nodeID) + { + case NodeID.sequence: + return getValue!(Node[]).length; + case NodeID.mapping: + return getValue!(Pair[]).length; + case NodeID.scalar: + case NodeID.invalid: + throw new NodeException("Trying to get length of a " ~ nodeTypeString ~ " node", + startMark_); + } + } + @safe unittest + { + auto node = Node([1,2,3]); + assert(node.length == 3); + const cNode = Node([1,2,3]); + assert(cNode.length == 3); + immutable iNode = Node([1,2,3]); + assert(iNode.length == 3); + } + + /** Get the element at specified index. + * + * If the node is a sequence, index must be integral. + * + * + * If the node is a mapping, return the value corresponding to the first + * key equal to index. containsKey() can be used to determine if a mapping + * has a specific key. + * + * To get element at a null index, use YAMLNull for index. + * + * Params: index = Index to use. + * + * Returns: Value corresponding to the index. + * + * Throws: NodeException if the index could not be found, + * non-integral index is used with a sequence or the node is + * not a collection. + */ + ref inout(Node) opIndex(T)(T index) inout return scope @safe + { + final switch (nodeID) + { + case NodeID.sequence: + checkSequenceIndex(index); + static if(isIntegral!T) + { + return getValue!(Node[])[index]; + } + else + { + assert(false, "Only integers may index sequence nodes"); + } + case NodeID.mapping: + auto idx = findPair(index); + if(idx >= 0) + { + return getValue!(Pair[])[idx].value; + } + + string msg = "Mapping index not found" ~ (isSomeString!T ? ": " ~ to!string(index) : ""); + throw new NodeException(msg, startMark_); + case NodeID.scalar: + case NodeID.invalid: + throw new NodeException("Trying to index a " ~ nodeTypeString ~ " node", startMark_); + } + } + /// + @safe unittest + { + Node narray = Node([11, 12, 13, 14]); + Node nmap = Node(["11", "12", "13", "14"], [11, 12, 13, 14]); + + assert(narray[0].as!int == 11); + assert(null !is collectException(narray[42])); + assert(nmap["11"].as!int == 11); + assert(nmap["14"].as!int == 14); + } + @safe unittest + { + Node narray = Node([11, 12, 13, 14]); + Node nmap = Node(["11", "12", "13", "14"], [11, 12, 13, 14]); + + assert(narray[0].as!int == 11); + assert(null !is collectException(narray[42])); + assert(nmap["11"].as!int == 11); + assert(nmap["14"].as!int == 14); + assert(null !is collectException(nmap["42"])); + + narray.add(YAMLNull()); + nmap.add(YAMLNull(), "Nothing"); + assert(narray[4].as!YAMLNull == YAMLNull()); + assert(nmap[YAMLNull()].as!string == "Nothing"); + + assertThrown!NodeException(nmap[11]); + assertThrown!NodeException(nmap[14]); + } + + /** Determine if a collection contains specified value. + * + * If the node is a sequence, check if it contains the specified value. + * If it's a mapping, check if it has a value that matches specified value. + * + * Params: rhs = Item to look for. Use YAMLNull to check for a null value. + * + * Returns: true if rhs was found, false otherwise. + * + * Throws: NodeException if the node is not a collection. + */ + bool contains(T)(T rhs) const + { + return contains_!(T, No.key, "contains")(rhs); + } + @safe unittest + { + auto mNode = Node(["1", "2", "3"]); + assert(mNode.contains("2")); + const cNode = Node(["1", "2", "3"]); + assert(cNode.contains("2")); + immutable iNode = Node(["1", "2", "3"]); + assert(iNode.contains("2")); + } + + + /** Determine if a mapping contains specified key. + * + * Params: rhs = Key to look for. 
Use YAMLNull to check for a null key.
+ *
+ * Returns: true if rhs was found, false otherwise.
+ *
+ * Throws: NodeException if the node is not a mapping.
+ */
+ bool containsKey(T)(T rhs) const
+ {
+ return contains_!(T, Yes.key, "containsKey")(rhs);
+ }
+
+ // Unittest for contains() and containsKey().
+ @safe unittest
+ {
+ auto seq = Node([1, 2, 3, 4, 5]);
+ assert(seq.contains(3));
+ assert(seq.contains(5));
+ assert(!seq.contains("5"));
+ assert(!seq.contains(6));
+ assert(!seq.contains(float.nan));
+ assertThrown!NodeException(seq.containsKey(5));
+
+ auto seq2 = Node(["1", "2"]);
+ assert(seq2.contains("1"));
+ assert(!seq2.contains(1));
+
+ auto map = Node(["1", "2", "3", "4"], [1, 2, 3, 4]);
+ assert(map.contains(1));
+ assert(!map.contains("1"));
+ assert(!map.contains(5));
+ assert(!map.contains(float.nan));
+ assert(map.containsKey("1"));
+ assert(map.containsKey("4"));
+ assert(!map.containsKey(1));
+ assert(!map.containsKey("5"));
+
+ assert(!seq.contains(YAMLNull()));
+ assert(!map.contains(YAMLNull()));
+ assert(!map.containsKey(YAMLNull()));
+ seq.add(YAMLNull());
+ map.add("Nothing", YAMLNull());
+ assert(seq.contains(YAMLNull()));
+ assert(map.contains(YAMLNull()));
+ assert(!map.containsKey(YAMLNull()));
+ map.add(YAMLNull(), "Nothing");
+ assert(map.containsKey(YAMLNull()));
+
+ auto map2 = Node([1, 2, 3, 4], [1, 2, 3, 4]);
+ assert(!map2.contains("1"));
+ assert(map2.contains(1));
+ assert(!map2.containsKey("1"));
+ assert(map2.containsKey(1));
+
+ // scalar
+ assertThrown!NodeException(Node(1).contains(4));
+ assertThrown!NodeException(Node(1).containsKey(4));
+
+ auto mapNan = Node([1.0, 2, double.nan], [1, double.nan, 5]);
+
+ assert(mapNan.contains(double.nan));
+ assert(mapNan.containsKey(double.nan));
+ }
+
+ /// Assignment (shallow copy) by value.
+ void opAssign()(auto ref Node rhs)
+ {
+ assumeWontThrow(setValue(rhs.value_));
+ startMark_ = rhs.startMark_;
+ tag_ = rhs.tag_;
+ scalarStyle = rhs.scalarStyle;
+ collectionStyle = rhs.collectionStyle;
+ }
+ // Unittest for opAssign().
+ @safe unittest
+ {
+ auto seq = Node([1, 2, 3, 4, 5]);
+ auto assigned = seq;
+ assert(seq == assigned,
+ "Node.opAssign() doesn't produce an equivalent copy");
+ }
+
+ /** Set element at specified index in a collection.
+ *
+ * This method can only be called on collection nodes.
+ *
+ * If the node is a sequence, index must be integral.
+ *
+ * If the node is a mapping, sets the _value corresponding to the first
+ * key matching index (including conversion, so e.g. "42" matches 42).
+ *
+ * If the node is a mapping and no key matches index, a new key-value
+ * pair is added to the mapping. In sequences the index must be in
+ * range. This ensures behavior similar to D arrays and associative
+ * arrays.
+ *
+ * To set element at a null index, use YAMLNull for index.
+ *
+ * Params:
+ * value = Value to assign.
+ * index = Index of the value to set.
+ *
+ * Throws: NodeException if the node is not a collection, index is out
+ * of range or if a non-integral index is used on a sequence node.
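+ *
+ * Example (an illustrative sketch, not part of the original documentation):
+ * ---
+ * auto m = Node(["a" : 1]);
+ * m["a"] = 2; // key exists: its value is replaced
+ * m["b"] = 3; // key missing: a new pair is added
+ * assert(m["b"].as!int == 3);
+ * ---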
+ */ + void opIndexAssign(K, V)(V value, K index) + { + final switch (nodeID) + { + case NodeID.sequence: + checkSequenceIndex(index); + static if(isIntegral!K || is(Unqual!K == bool)) + { + auto nodes = getValue!(Node[]); + static if(is(Unqual!V == Node)){nodes[index] = value;} + else {nodes[index] = Node(value);} + setValue(nodes); + return; + } + assert(false, "Only integers may index sequence nodes"); + case NodeID.mapping: + const idx = findPair(index); + if(idx < 0){add(index, value);} + else + { + auto pairs = as!(Node.Pair[])(); + static if(is(Unqual!V == Node)){pairs[idx].value = value;} + else {pairs[idx].value = Node(value);} + setValue(pairs); + } + return; + case NodeID.scalar: + case NodeID.invalid: + throw new NodeException("Trying to index a " ~ nodeTypeString ~ " node", startMark_); + } + } + @safe unittest + { + with(Node([1, 2, 3, 4, 3])) + { + opIndexAssign(42, 3); + assert(length == 5); + assert(opIndex(3).as!int == 42); + + opIndexAssign(YAMLNull(), 0); + assert(opIndex(0) == YAMLNull()); + } + with(Node(["1", "2", "3"], [4, 5, 6])) + { + opIndexAssign(42, "3"); + opIndexAssign(123, 456); + assert(length == 4); + assert(opIndex("3").as!int == 42); + assert(opIndex(456).as!int == 123); + + opIndexAssign(43, 3); + //3 and "3" should be different + assert(length == 5); + assert(opIndex("3").as!int == 42); + assert(opIndex(3).as!int == 43); + + opIndexAssign(YAMLNull(), "2"); + assert(opIndex("2") == YAMLNull()); + } + } + + /** Return a range object iterating over a sequence, getting each + * element as T. + * + * If T is Node, simply iterate over the nodes in the sequence. + * Otherwise, convert each node to T during iteration. + * + * Throws: NodeException if the node is not a sequence or an element + * could not be converted to specified type. + */ + template sequence(T = Node) + { + struct Range(N) + { + N subnodes; + size_t position; + + this(N nodes) + { + subnodes = nodes; + position = 0; + } + + /* Input range functionality. */ + bool empty() const @property { return position >= subnodes.length; } + + void popFront() + { + enforce(!empty, "Attempted to popFront an empty sequence"); + position++; + } + + T front() const @property + { + enforce(!empty, "Attempted to take the front of an empty sequence"); + static if (is(Unqual!T == Node)) + return subnodes[position]; + else + return subnodes[position].as!T; + } + + /* Forward range functionality. */ + Range save() { return this; } + + /* Bidirectional range functionality. */ + void popBack() + { + enforce(!empty, "Attempted to popBack an empty sequence"); + subnodes = subnodes[0 .. $ - 1]; + } + + T back() + { + enforce(!empty, "Attempted to take the back of an empty sequence"); + static if (is(Unqual!T == Node)) + return subnodes[$ - 1]; + else + return subnodes[$ - 1].as!T; + } + + /* Random-access range functionality. 
*/
+ size_t length() const @property { return subnodes.length; }
+ T opIndex(size_t index)
+ {
+ static if (is(Unqual!T == Node))
+ return subnodes[index];
+ else
+ return subnodes[index].as!T;
+ }
+
+ static assert(isInputRange!Range);
+ static assert(isForwardRange!Range);
+ static assert(isBidirectionalRange!Range);
+ static assert(isRandomAccessRange!Range);
+ }
+ auto sequence()
+ {
+ enforce(nodeID == NodeID.sequence,
+ new NodeException("Trying to 'sequence'-iterate over a " ~ nodeTypeString ~ " node",
+ startMark_));
+ return Range!(Node[])(get!(Node[]));
+ }
+ auto sequence() const
+ {
+ enforce(nodeID == NodeID.sequence,
+ new NodeException("Trying to 'sequence'-iterate over a " ~ nodeTypeString ~ " node",
+ startMark_));
+ return Range!(const(Node)[])(get!(Node[]));
+ }
+ }
+ @safe unittest
+ {
+ Node n1 = Node([1, 2, 3, 4]);
+ int[int] array;
+ Node n2 = Node(array);
+ const n3 = Node([1, 2, 3, 4]);
+
+ auto r = n1.sequence!int.map!(x => x * 10);
+ assert(r.equal([10, 20, 30, 40]));
+
+ assertThrown(n2.sequence);
+
+ auto r2 = n3.sequence!int.map!(x => x * 10);
+ assert(r2.equal([10, 20, 30, 40]));
+ }
+
+ /** Return a range object iterating over mapping's pairs.
+ *
+ * Throws: NodeException if the node is not a mapping.
+ *
+ */
+ template mapping()
+ {
+ struct Range(T)
+ {
+ T pairs;
+ size_t position;
+
+ this(T pairs) @safe
+ {
+ this.pairs = pairs;
+ position = 0;
+ }
+
+ /* Input range functionality. */
+ bool empty() @safe { return position >= pairs.length; }
+
+ void popFront() @safe
+ {
+ enforce(!empty, "Attempted to popFront an empty mapping");
+ position++;
+ }
+
+ auto front() @safe
+ {
+ enforce(!empty, "Attempted to take the front of an empty mapping");
+ return pairs[position];
+ }
+
+ /* Forward range functionality. */
+ Range save() @safe { return this; }
+
+ /* Bidirectional range functionality. */
+ void popBack() @safe
+ {
+ enforce(!empty, "Attempted to popBack an empty mapping");
+ pairs = pairs[0 .. $ - 1];
+ }
+
+ auto back() @safe
+ {
+ enforce(!empty, "Attempted to take the back of an empty mapping");
+ return pairs[$ - 1];
+ }
+
+ /* Random-access range functionality. */
+ size_t length() const @property @safe { return pairs.length; }
+ auto opIndex(size_t index) @safe { return pairs[index]; }
+
+ static assert(isInputRange!Range);
+ static assert(isForwardRange!Range);
+ static assert(isBidirectionalRange!Range);
+ static assert(isRandomAccessRange!Range);
+ }
+
+ auto mapping()
+ {
+ enforce(nodeID == NodeID.mapping,
+ new NodeException("Trying to 'mapping'-iterate over a "
+ ~ nodeTypeString ~ " node", startMark_));
+ return Range!(Node.Pair[])(get!(Node.Pair[]));
+ }
+ auto mapping() const
+ {
+ enforce(nodeID == NodeID.mapping,
+ new NodeException("Trying to 'mapping'-iterate over a "
+ ~ nodeTypeString ~ " node", startMark_));
+ return Range!(const(Node.Pair)[])(get!(Node.Pair[]));
+ }
+ }
+ @safe unittest
+ {
+ int[int] array;
+ Node n = Node(array);
+ n[1] = "foo";
+ n[2] = "bar";
+ n[3] = "baz";
+
+ string[int] test;
+ foreach (pair; n.mapping)
+ test[pair.key.as!int] = pair.value.as!string.idup;
+
+ assert(test[1] == "foo");
+ assert(test[2] == "bar");
+ assert(test[3] == "baz");
+
+ int[int] constArray = [1: 2, 3: 4];
+ const x = Node(constArray);
+ foreach (pair; x.mapping)
+ assert(pair.value == constArray[pair.key.as!int]);
+ }
+
+ /** Return a range object iterating over mapping's keys.
+ *
+ * If K is Node, simply iterate over the keys in the mapping.
+ * Otherwise, convert each key to K during iteration.
+ *
+ * Throws: NodeException if the node is not a mapping or an element
+ * could not be converted to specified type.
+ */
+ auto mappingKeys(K = Node)() const
+ {
+ enforce(nodeID == NodeID.mapping,
+ new NodeException("Trying to 'mappingKeys'-iterate over a "
+ ~ nodeTypeString ~ " node", startMark_));
+ static if (is(Unqual!K == Node))
+ return mapping.map!(pair => pair.key);
+ else
+ return mapping.map!(pair => pair.key.as!K);
+ }
+ @safe unittest
+ {
+ int[int] array;
+ Node m1 = Node(array);
+ m1["foo"] = 2;
+ m1["bar"] = 3;
+
+ assert(m1.mappingKeys.equal(["foo", "bar"]) || m1.mappingKeys.equal(["bar", "foo"]));
+
+ const cm1 = Node(["foo": 2, "bar": 3]);
+
+ assert(cm1.mappingKeys.equal(["foo", "bar"]) || cm1.mappingKeys.equal(["bar", "foo"]));
+ }
+
+ /** Return a range object iterating over mapping's values.
+ *
+ * If V is Node, simply iterate over the values in the mapping.
+ * Otherwise, convert each value to V during iteration.
+ *
+ * Throws: NodeException if the node is not a mapping or an element
+ * could not be converted to specified type.
+ */
+ auto mappingValues(V = Node)() const
+ {
+ enforce(nodeID == NodeID.mapping,
+ new NodeException("Trying to 'mappingValues'-iterate over a "
+ ~ nodeTypeString ~ " node", startMark_));
+ static if (is(Unqual!V == Node))
+ return mapping.map!(pair => pair.value);
+ else
+ return mapping.map!(pair => pair.value.as!V);
+ }
+ @safe unittest
+ {
+ int[int] array;
+ Node m1 = Node(array);
+ m1["foo"] = 2;
+ m1["bar"] = 3;
+
+ assert(m1.mappingValues.equal([2, 3]) || m1.mappingValues.equal([3, 2]));
+
+ const cm1 = Node(["foo": 2, "bar": 3]);
+
+ assert(cm1.mappingValues.equal([2, 3]) || cm1.mappingValues.equal([3, 2]));
+ }
+
+
+ /** Foreach over a sequence, getting each element as T.
+ *
+ * If T is Node, simply iterate over the nodes in the sequence.
+ * Otherwise, convert each node to T during iteration.
+ *
+ * Throws: NodeException if the node is not a sequence or an
+ * element could not be converted to specified type.
+ */ + int opApply(D)(D dg) if (isDelegate!D && (Parameters!D.length == 1)) + { + enforce(nodeID == NodeID.sequence, + new NodeException("Trying to sequence-foreach over a " ~ nodeTypeString ~ " node", + startMark_)); + + int result; + foreach(ref node; get!(Node[])) + { + static if(is(Unqual!(Parameters!D[0]) == Node)) + { + result = dg(node); + } + else + { + Parameters!D[0] temp = node.as!(Parameters!D[0]); + result = dg(temp); + } + if(result){break;} + } + return result; + } + /// ditto + int opApply(D)(D dg) const if (isDelegate!D && (Parameters!D.length == 1)) + { + enforce(nodeID == NodeID.sequence, + new NodeException("Trying to sequence-foreach over a " ~ nodeTypeString ~ " node", + startMark_)); + + int result; + foreach(ref node; get!(Node[])) + { + static if(is(Unqual!(Parameters!D[0]) == Node)) + { + result = dg(node); + } + else + { + Parameters!D[0] temp = node.as!(Parameters!D[0]); + result = dg(temp); + } + if(result){break;} + } + return result; + } + @safe unittest + { + Node n1 = Node(11); + Node n2 = Node(12); + Node n3 = Node(13); + Node n4 = Node(14); + Node narray = Node([n1, n2, n3, n4]); + const cNArray = narray; + + int[] array, array2, array3; + foreach(int value; narray) + { + array ~= value; + } + foreach(Node node; narray) + { + array2 ~= node.as!int; + } + foreach (const Node node; cNArray) + { + array3 ~= node.as!int; + } + assert(array == [11, 12, 13, 14]); + assert(array2 == [11, 12, 13, 14]); + assert(array3 == [11, 12, 13, 14]); + } + @safe unittest + { + string[] testStrs = ["1", "2", "3"]; + auto node1 = Node(testStrs); + int i = 0; + foreach (string elem; node1) + { + assert(elem == testStrs[i]); + i++; + } + const node2 = Node(testStrs); + i = 0; + foreach (string elem; node2) + { + assert(elem == testStrs[i]); + i++; + } + immutable node3 = Node(testStrs); + i = 0; + foreach (string elem; node3) + { + assert(elem == testStrs[i]); + i++; + } + } + @safe unittest + { + auto node = Node(["a":1, "b":2, "c":3]); + const cNode = node; + assertThrown({foreach (Node n; node) {}}()); + assertThrown({foreach (const Node n; cNode) {}}()); + } + + /** Foreach over a mapping, getting each key/value as K/V. + * + * If the K and/or V is Node, simply iterate over the nodes in the mapping. + * Otherwise, convert each key/value to T during iteration. + * + * Throws: NodeException if the node is not a mapping or an + * element could not be converted to specified type. 
+ */ + int opApply(DG)(DG dg) if (isDelegate!DG && (Parameters!DG.length == 2)) + { + alias K = Parameters!DG[0]; + alias V = Parameters!DG[1]; + enforce(nodeID == NodeID.mapping, + new NodeException("Trying to mapping-foreach over a " ~ nodeTypeString ~ " node", + startMark_)); + + int result; + foreach(ref pair; get!(Node.Pair[])) + { + static if(is(Unqual!K == Node) && is(Unqual!V == Node)) + { + result = dg(pair.key, pair.value); + } + else static if(is(Unqual!K == Node)) + { + V tempValue = pair.value.as!V; + result = dg(pair.key, tempValue); + } + else static if(is(Unqual!V == Node)) + { + K tempKey = pair.key.as!K; + result = dg(tempKey, pair.value); + } + else + { + K tempKey = pair.key.as!K; + V tempValue = pair.value.as!V; + result = dg(tempKey, tempValue); + } + + if(result){break;} + } + return result; + } + /// ditto + int opApply(DG)(DG dg) const if (isDelegate!DG && (Parameters!DG.length == 2)) + { + alias K = Parameters!DG[0]; + alias V = Parameters!DG[1]; + enforce(nodeID == NodeID.mapping, + new NodeException("Trying to mapping-foreach over a " ~ nodeTypeString ~ " node", + startMark_)); + + int result; + foreach(ref pair; get!(Node.Pair[])) + { + static if(is(Unqual!K == Node) && is(Unqual!V == Node)) + { + result = dg(pair.key, pair.value); + } + else static if(is(Unqual!K == Node)) + { + V tempValue = pair.value.as!V; + result = dg(pair.key, tempValue); + } + else static if(is(Unqual!V == Node)) + { + K tempKey = pair.key.as!K; + result = dg(tempKey, pair.value); + } + else + { + K tempKey = pair.key.as!K; + V tempValue = pair.value.as!V; + result = dg(tempKey, tempValue); + } + + if(result){break;} + } + return result; + } + @safe unittest + { + Node n1 = Node(cast(long)11); + Node n2 = Node(cast(long)12); + Node n3 = Node(cast(long)13); + Node n4 = Node(cast(long)14); + + Node k1 = Node("11"); + Node k2 = Node("12"); + Node k3 = Node("13"); + Node k4 = Node("14"); + + Node nmap1 = Node([Pair(k1, n1), + Pair(k2, n2), + Pair(k3, n3), + Pair(k4, n4)]); + + int[string] expected = ["11" : 11, + "12" : 12, + "13" : 13, + "14" : 14]; + int[string] array; + foreach(string key, int value; nmap1) + { + array[key] = value; + } + assert(array == expected); + + Node nmap2 = Node([Pair(k1, Node(cast(long)5)), + Pair(k2, Node(true)), + Pair(k3, Node(cast(real)1.0)), + Pair(k4, Node("yarly"))]); + + foreach(scope string key, scope Node value; nmap2) + { + switch(key) + { + case "11": assert(value.as!int == 5 ); break; + case "12": assert(value.as!bool == true ); break; + case "13": assert(value.as!float == 1.0 ); break; + case "14": assert(value.as!string == "yarly"); break; + default: assert(false); + } + } + const nmap3 = nmap2; + + foreach(const Node key, const Node value; nmap3) + { + switch(key.as!string) + { + case "11": assert(value.as!int == 5 ); break; + case "12": assert(value.as!bool == true ); break; + case "13": assert(value.as!float == 1.0 ); break; + case "14": assert(value.as!string == "yarly"); break; + default: assert(false); + } + } + } + @safe unittest + { + string[int] testStrs = [0: "1", 1: "2", 2: "3"]; + auto node1 = Node(testStrs); + foreach (const int i, string elem; node1) + { + assert(elem == testStrs[i]); + } + const node2 = Node(testStrs); + foreach (const int i, string elem; node2) + { + assert(elem == testStrs[i]); + } + immutable node3 = Node(testStrs); + foreach (const int i, string elem; node3) + { + assert(elem == testStrs[i]); + } + } + @safe unittest + { + auto node = Node(["a", "b", "c"]); + const cNode = node; + assertThrown({foreach (Node a, 
Node b; node) {}}()); + assertThrown({foreach (const Node a, const Node b; cNode) {}}()); + } + + /** Add an element to a sequence. + * + * This method can only be called on sequence nodes. + * + * If value is a node, it is copied to the sequence directly. Otherwise + * value is converted to a node and then stored in the sequence. + * + * $(P When emitting, all values in the sequence will be emitted. When + * using the !!set tag, the user needs to ensure that all elements in + * the sequence are unique, otherwise $(B invalid) YAML code will be + * emitted.) + * + * Params: value = Value to _add to the sequence. + */ + void add(T)(T value) + { + if (!isValid) + { + setValue(Node[].init); + } + enforce(nodeID == NodeID.sequence, + new NodeException("Trying to add an element to a " ~ nodeTypeString ~ " node", startMark_)); + + auto nodes = get!(Node[])(); + static if(is(Unqual!T == Node)){nodes ~= value;} + else {nodes ~= Node(value);} + setValue(nodes); + } + @safe unittest + { + with(Node([1, 2, 3, 4])) + { + add(5.0f); + assert(opIndex(4).as!float == 5.0f); + } + with(Node()) + { + add(5.0f); + assert(opIndex(0).as!float == 5.0f); + } + with(Node(5.0f)) + { + assertThrown!NodeException(add(5.0f)); + } + with(Node([5.0f : true])) + { + assertThrown!NodeException(add(5.0f)); + } + } + + /** Add a key-value pair to a mapping. + * + * This method can only be called on mapping nodes. + * + * If key and/or value is a node, it is copied to the mapping directly. + * Otherwise it is converted to a node and then stored in the mapping. + * + * $(P It is possible for the same key to be present more than once in a + * mapping. When emitting, all key-value pairs will be emitted. + * This is useful with the "!!pairs" tag, but will result in + * $(B invalid) YAML with "!!map" and "!!omap" tags.) + * + * Params: key = Key to _add. + * value = Value to _add. + */ + void add(K, V)(K key, V value) + { + if (!isValid) + { + setValue(Node.Pair[].init); + } + enforce(nodeID == NodeID.mapping, + new NodeException("Trying to add a key-value pair to a " ~ + nodeTypeString ~ " node", + startMark_)); + + auto pairs = get!(Node.Pair[])(); + pairs ~= Pair(key, value); + setValue(pairs); + } + @safe unittest + { + with(Node([1, 2], [3, 4])) + { + add(5, "6"); + assert(opIndex(5).as!string == "6"); + } + with(Node()) + { + add(5, "6"); + assert(opIndex(5).as!string == "6"); + } + with(Node(5.0f)) + { + assertThrown!NodeException(add(5, "6")); + } + with(Node([5.0f])) + { + assertThrown!NodeException(add(5, "6")); + } + } + + /** Determine whether a key is in a mapping, and access its value. + * + * This method can only be called on mapping nodes. + * + * Params: key = Key to search for. + * + * Returns: A pointer to the value (as a Node) corresponding to key, + * or null if not found. + * + * Note: Any modification to the node can invalidate the returned + * pointer. 
+ *
+ * See_Also: contains
+ */
+ inout(Node*) opBinaryRight(string op, K)(K key) inout
+ if (op == "in")
+ {
+ enforce(nodeID == NodeID.mapping, new NodeException("Trying to use 'in' on a " ~
+ nodeTypeString ~ " node", startMark_));
+
+ auto idx = findPair(key);
+ if(idx < 0)
+ {
+ return null;
+ }
+ else
+ {
+ return &(get!(Node.Pair[])[idx].value);
+ }
+ }
+ @safe unittest
+ {
+ auto mapping = Node(["foo", "baz"], ["bar", "qux"]);
+ assert("bad" !in mapping && ("bad" in mapping) is null);
+ Node* foo = "foo" in mapping;
+ assert(foo !is null);
+ assert(*foo == Node("bar"));
+ assert(foo.get!string == "bar");
+ *foo = Node("newfoo");
+ assert(mapping["foo"] == Node("newfoo"));
+ }
+ @safe unittest
+ {
+ auto mNode = Node(["a": 2]);
+ assert("a" in mNode);
+ const cNode = Node(["a": 2]);
+ assert("a" in cNode);
+ immutable iNode = Node(["a": 2]);
+ assert("a" in iNode);
+ }
+
+ /** Remove first (if any) occurrence of a value in a collection.
+ *
+ * This method can only be called on collection nodes.
+ *
+ * If the node is a sequence, the first node matching value is removed.
+ * If the node is a mapping, the first key-value pair where _value
+ * matches specified value is removed.
+ *
+ * Params: rhs = Value to _remove.
+ *
+ * Throws: NodeException if the node is not a collection.
+ */
+ void remove(T)(T rhs)
+ {
+ remove_!(T, No.key, "remove")(rhs);
+ }
+ @safe unittest
+ {
+ with(Node([1, 2, 3, 4, 3]))
+ {
+ remove(3);
+ assert(length == 4);
+ assert(opIndex(2).as!int == 4);
+ assert(opIndex(3).as!int == 3);
+
+ add(YAMLNull());
+ assert(length == 5);
+ remove(YAMLNull());
+ assert(length == 4);
+ }
+ with(Node(["1", "2", "3"], [4, 5, 6]))
+ {
+ remove(4);
+ assert(length == 2);
+ add("nullkey", YAMLNull());
+ assert(length == 3);
+ remove(YAMLNull());
+ assert(length == 2);
+ }
+ }
+
+ /** Remove element at the specified index of a collection.
+ *
+ * This method can only be called on collection nodes.
+ *
+ * If the node is a sequence, index must be integral.
+ *
+ * If the node is a mapping, remove the first key-value pair where
+ * key matches index.
+ *
+ * If the node is a mapping and no key matches index, nothing is removed
+ * and no exception is thrown. This ensures behavior similar to D arrays
+ * and associative arrays.
+ *
+ * Params: index = Index to remove at.
+ *
+ * Throws: NodeException if the node is not a collection, index is out
+ * of range or if a non-integral index is used on a sequence node.
+ */
+ void removeAt(T)(T index)
+ {
+ remove_!(T, Yes.key, "removeAt")(index);
+ }
+ @safe unittest
+ {
+ with(Node([1, 2, 3, 4, 3]))
+ {
+ removeAt(3);
+ assertThrown!NodeException(removeAt("3"));
+ assert(length == 4);
+ assert(opIndex(3).as!int == 3);
+ }
+ with(Node(["1", "2", "3"], [4, 5, 6]))
+ {
+ // no integer 2 key, so don't remove anything
+ removeAt(2);
+ assert(length == 3);
+ removeAt("2");
+ assert(length == 2);
+ add(YAMLNull(), "nullval");
+ assert(length == 3);
+ removeAt(YAMLNull());
+ assert(length == 2);
+ }
+ }
+
+ /// Compare with another _node.
+ int opCmp(const scope ref Node rhs) const scope @safe
+ {
+ const bool hasNullTag = this.tag_ is null;
+ // Only one of them is null: we can order nodes
+ if ((hasNullTag) ^ (rhs.tag is null))
+ return hasNullTag ? -1 : 1;
+ // Either both `null` or both have a value
+ if (!hasNullTag)
+ if (int result = std.algorithm.comparison.cmp(tag_, rhs.tag_))
+ return result;
+
+ static int cmp(T1, T2)(T1 a, T2 b)
+ {
+ return a > b ? 1 :
+ a < b ?
-1 : + 0; + } + + // Compare validity: if both valid, we have to compare further. + if (!this.isValid()) + return rhs.isValid() ? -1 : 0; + if (!rhs.isValid()) + return 1; + if (const typeCmp = cmp(type, rhs.type)) + return typeCmp; + + static int compareCollections(T)(const scope ref Node lhs, const scope ref Node rhs) + { + const c1 = lhs.getValue!T; + const c2 = rhs.getValue!T; + if(c1 is c2){return 0;} + if(c1.length != c2.length) + { + return cmp(c1.length, c2.length); + } + // Equal lengths, compare items. + foreach(i; 0 .. c1.length) + { + const itemCmp = c1[i].opCmp(c2[i]); + if(itemCmp != 0){return itemCmp;} + } + return 0; + } + + final switch(type) + { + case NodeType.string: + return std.algorithm.cmp(getValue!string, + rhs.getValue!string); + case NodeType.integer: + return cmp(getValue!long, rhs.getValue!long); + case NodeType.boolean: + const b1 = getValue!bool; + const b2 = rhs.getValue!bool; + return b1 ? b2 ? 0 : 1 + : b2 ? -1 : 0; + case NodeType.binary: + const b1 = getValue!(ubyte[]); + const b2 = rhs.getValue!(ubyte[]); + return std.algorithm.cmp(b1, b2); + case NodeType.null_: + return 0; + case NodeType.decimal: + const r1 = getValue!real; + const r2 = rhs.getValue!real; + if(isNaN(r1)) + { + return isNaN(r2) ? 0 : -1; + } + if(isNaN(r2)) + { + return 1; + } + // Fuzzy equality. + if(r1 <= r2 + real.epsilon && r1 >= r2 - real.epsilon) + { + return 0; + } + return cmp(r1, r2); + case NodeType.timestamp: + const t1 = getValue!SysTime; + const t2 = rhs.getValue!SysTime; + return cmp(t1, t2); + case NodeType.mapping: + return compareCollections!(Pair[])(this, rhs); + case NodeType.sequence: + return compareCollections!(Node[])(this, rhs); + case NodeType.merge: + assert(false, "Cannot compare merge nodes"); + case NodeType.invalid: + assert(false, "Cannot compare invalid nodes"); + } + } + + // Ensure opCmp is symmetric for collections + @safe unittest + { + auto node1 = Node( + [ + Node("New York Yankees", "tag:yaml.org,2002:str"), + Node("Atlanta Braves", "tag:yaml.org,2002:str") + ], "tag:yaml.org,2002:seq" + ); + auto node2 = Node( + [ + Node("Detroit Tigers", "tag:yaml.org,2002:str"), + Node("Chicago cubs", "tag:yaml.org,2002:str") + ], "tag:yaml.org,2002:seq" + ); + assert(node1 > node2); + assert(node2 < node1); + } + + // Compute hash of the node. + hash_t toHash() nothrow const @trusted + { + const valueHash = value_.match!(v => hashOf(v)); + + return tag_ is null ? valueHash : tag_.hashOf(valueHash); + } + @safe unittest + { + assert(Node(42).toHash() != Node(41).toHash()); + assert(Node(42).toHash() != Node(42, "some-tag").toHash()); + } + + /// Get type of the node value. + @property NodeType type() const scope @safe pure nothrow @nogc + { + return this.value_.match!( + (const bool _) => NodeType.boolean, + (const long _) => NodeType.integer, + (const Node[] _) => NodeType.sequence, + (const ubyte[] _) => NodeType.binary, + (const string _) => NodeType.string, + (const Node.Pair[] _) => NodeType.mapping, + (const SysTime _) => NodeType.timestamp, + (const YAMLNull _) => NodeType.null_, + (const YAMLMerge _) => NodeType.merge, + (const real _) => NodeType.decimal, + (const YAMLInvalid _) => NodeType.invalid, + ); + } + + /// Get the kind of node this is. 
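+ /// (scalar, sequence, mapping or invalid; see the NodeID enum)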
+ @property NodeID nodeID() const scope @safe pure nothrow @nogc + { + final switch (type) + { + case NodeType.sequence: + return NodeID.sequence; + case NodeType.mapping: + return NodeID.mapping; + case NodeType.boolean: + case NodeType.integer: + case NodeType.binary: + case NodeType.string: + case NodeType.timestamp: + case NodeType.null_: + case NodeType.merge: + case NodeType.decimal: + return NodeID.scalar; + case NodeType.invalid: + return NodeID.invalid; + } + } + package: + + // Get a string representation of the node tree. Used for debugging. + // + // Params: level = Level of the node in the tree. + // + // Returns: String representing the node tree. + @property string debugString(uint level = 0) const scope @safe + { + string indent; + foreach(i; 0 .. level){indent ~= " ";} + + final switch (nodeID) + { + case NodeID.invalid: + return indent ~ "invalid"; + case NodeID.sequence: + string result = indent ~ "sequence:\n"; + foreach(ref node; get!(Node[])) + { + result ~= node.debugString(level + 1); + } + return result; + case NodeID.mapping: + string result = indent ~ "mapping:\n"; + foreach(ref pair; get!(Node.Pair[])) + { + result ~= indent ~ " pair\n"; + result ~= pair.key.debugString(level + 2); + result ~= pair.value.debugString(level + 2); + } + return result; + case NodeID.scalar: + return indent ~ "scalar(" ~ + (convertsTo!string ? get!string : text(type)) ~ ")\n"; + } + } + + + public: + @property string nodeTypeString() const scope @safe pure nothrow @nogc + { + final switch (nodeID) + { + case NodeID.mapping: + return "mapping"; + case NodeID.sequence: + return "sequence"; + case NodeID.scalar: + return "scalar"; + case NodeID.invalid: + return "invalid"; + } + } + + // Determine if the value can be converted to specified type. + @property bool convertsTo(T)() const + { + if(isType!T){return true;} + + // Every type allowed in Value should be convertible to string. + static if(isSomeString!T) {return true;} + else static if(isFloatingPoint!T){return type.among!(NodeType.integer, NodeType.decimal);} + else static if(isIntegral!T) {return type == NodeType.integer;} + else static if(is(Unqual!T==bool)){return type == NodeType.boolean;} + else {return false;} + } + /** + * Sets the style of this node when dumped. + * + * Params: style = Any valid style. 
+ */ + void setStyle(CollectionStyle style) @safe + { + enforce(!isValid || (nodeID.among(NodeID.mapping, NodeID.sequence)), new NodeException( + "Cannot set collection style for non-collection nodes", startMark_)); + collectionStyle = style; + } + /// Ditto + void setStyle(ScalarStyle style) @safe + { + enforce(!isValid || (nodeID == NodeID.scalar), new NodeException( + "Cannot set scalar style for non-scalar nodes", startMark_)); + scalarStyle = style; + } + /// + @safe unittest + { + import dyaml.dumper; + auto stream = new Appender!string(); + auto node = Node([1, 2, 3, 4, 5]); + node.setStyle(CollectionStyle.block); + + auto dumper = dumper(); + dumper.dump(stream, node); + } + /// + @safe unittest + { + import dyaml.dumper; + auto stream = new Appender!string(); + auto node = Node(4); + node.setStyle(ScalarStyle.literal); + + auto dumper = dumper(); + dumper.dump(stream, node); + } + @safe unittest + { + assertThrown!NodeException(Node(4).setStyle(CollectionStyle.block)); + assertThrown!NodeException(Node([4]).setStyle(ScalarStyle.literal)); + } + @safe unittest + { + import dyaml.dumper; + { + auto stream = new Appender!string(); + auto node = Node([1, 2, 3, 4, 5]); + node.setStyle(CollectionStyle.block); + auto dumper = dumper(); + dumper.explicitEnd = false; + dumper.explicitStart = false; + dumper.YAMLVersion = null; + dumper.dump(stream, node); + + //Block style should start with a hyphen. + assert(stream.data[0] == '-'); + } + { + auto stream = new Appender!string(); + auto node = Node([1, 2, 3, 4, 5]); + node.setStyle(CollectionStyle.flow); + auto dumper = dumper(); + dumper.explicitEnd = false; + dumper.explicitStart = false; + dumper.YAMLVersion = null; + dumper.dump(stream, node); + + //Flow style should start with a bracket. + assert(stream.data[0] == '['); + } + { + auto stream = new Appender!string(); + auto node = Node(1); + node.setStyle(ScalarStyle.singleQuoted); + auto dumper = dumper(); + dumper.explicitEnd = false; + dumper.explicitStart = false; + dumper.YAMLVersion = null; + dumper.dump(stream, node); + + assert(stream.data == "!!int '1'\n"); + } + { + auto stream = new Appender!string(); + auto node = Node(1); + node.setStyle(ScalarStyle.doubleQuoted); + auto dumper = dumper(); + dumper.explicitEnd = false; + dumper.explicitStart = false; + dumper.YAMLVersion = null; + dumper.dump(stream, node); + + assert(stream.data == "!!int \"1\"\n"); + } + } + + private: + // Determine if the value stored by the node is of specified type. + // + // This only works for default YAML types, not for user defined types. + @property bool isType(T)() const + { + return value_.match!( + (const T _) => true, + _ => false, + ); + } + + /// Check at compile time if a type is stored natively + enum canBeType (T) = is(typeof({ value_.match!((const T _) => true, _ => false); })); + + + // Implementation of contains() and containsKey(). 
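+ // (the `key` flag selects whether mapping lookups match keys or values;
+ // `func` only appears in the thrown error message)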
+ bool contains_(T, Flag!"key" key, string func)(T rhs) const + { + final switch (nodeID) + { + case NodeID.mapping: + return findPair!(T, key)(rhs) >= 0; + case NodeID.sequence: + static if(!key) + { + foreach(ref node; getValue!(Node[])) + { + if(node == rhs){return true;} + } + return false; + } + else + { + throw new NodeException("Trying to use " ~ func ~ "() on a " ~ nodeTypeString ~ " node", + startMark_); + } + case NodeID.scalar: + case NodeID.invalid: + throw new NodeException("Trying to use " ~ func ~ "() on a " ~ nodeTypeString ~ " node", + startMark_); + } + + } + + // Implementation of remove() and removeAt() + void remove_(T, Flag!"key" key, string func)(T rhs) + { + static void removeElem(E, I)(ref Node node, I index) + { + auto elems = node.getValue!(E[]); + moveAll(elems[cast(size_t)index + 1 .. $], elems[cast(size_t)index .. $ - 1]); + elems.length = elems.length - 1; + node.setValue(elems); + } + + final switch (nodeID) + { + case NodeID.mapping: + const index = findPair!(T, key)(rhs); + if(index >= 0){removeElem!Pair(this, index);} + break; + case NodeID.sequence: + static long getIndex(ref Node node, ref T rhs) + { + foreach(idx, ref elem; node.get!(Node[])) + { + if(elem.convertsTo!T && elem.as!(T, No.stringConversion) == rhs) + { + return idx; + } + } + return -1; + } + + const index = select!key(rhs, getIndex(this, rhs)); + + // This throws if the index is not integral. + checkSequenceIndex(index); + + static if(isIntegral!(typeof(index))){removeElem!Node(this, index); break; } + else {assert(false, "Non-integral sequence index");} + case NodeID.scalar: + case NodeID.invalid: + throw new NodeException("Trying to " ~ func ~ "() from a " ~ nodeTypeString ~ " node", + startMark_); + } + } + + // Get index of pair with key (or value, if key is false) matching index. + // Cannot be inferred @safe due to https://issues.dlang.org/show_bug.cgi?id=16528 + sizediff_t findPair(T, Flag!"key" key = Yes.key)(const scope ref T index) + const scope @safe + { + const pairs = getValue!(Pair[])(); + const(Node)* node; + foreach(idx, ref const(Pair) pair; pairs) + { + static if(key){node = &pair.key;} + else {node = &pair.value;} + + + const bool typeMatch = (isFloatingPoint!T && (node.type.among!(NodeType.integer, NodeType.decimal))) || + (isIntegral!T && node.type == NodeType.integer) || + (is(Unqual!T==bool) && node.type == NodeType.boolean) || + (isSomeString!T && node.type == NodeType.string) || + (node.isType!T); + if(typeMatch && *node == index) + { + return idx; + } + } + return -1; + } + + // Check if index is integral and in range. + void checkSequenceIndex(T)(T index) const scope @safe + { + assert(nodeID == NodeID.sequence, + "checkSequenceIndex() called on a " ~ nodeTypeString ~ " node"); + + static if(!isIntegral!T) + { + throw new NodeException("Indexing a sequence with a non-integral type.", startMark_); + } + else + { + enforce(index >= 0 && index < getValue!(Node[]).length, + new NodeException("Sequence index out of range: " ~ to!string(index), + startMark_)); + } + } + // Safe wrapper for getting a value out of the variant. + inout(T) getValue(T)() @safe return scope inout + { + alias RType = typeof(return); + return value_.tryMatch!((RType r) => r); + } + // Safe wrapper for coercing a value out of the variant. 
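+ // Unlike getValue, which requires the stored type to match exactly, this
+ // converts the stored value with std.conv.to (e.g. a stored long to string).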
+ inout(T) coerceValue(T)() @trusted scope return inout + { + alias RType = typeof(return); + static if (is(typeof({ RType rt = T.init; T t = RType.init; }))) + alias TType = T; + else // `inout` matters (indirection) + alias TType = RType; + + // `inout(Node[]).to!string` apparently is not safe: + // struct SumTypeBug { + // import std.conv; + // Node[] data; + // + // string bug () inout @safe + // { + // return this.data.to!string; + // } + // } + // Doesn't compile with DMD v2.100.0 + return this.value_.tryMatch!( + (inout bool v) @safe => v.to!TType, + (inout long v) @safe => v.to!TType, + (inout Node[] v) @trusted => v.to!TType, + (inout ubyte[] v) @safe => v.to!TType, + (inout string v) @safe => v.to!TType, + (inout Node.Pair[] v) @trusted => v.to!TType, + (inout SysTime v) @trusted => v.to!TType, + (inout real v) @safe => v.to!TType, + (inout YAMLNull v) @safe => null.to!TType, + ); + } + // Safe wrapper for setting a value for the variant. + void setValue(T)(T value) @trusted + { + static if (allowed!T) + { + value_ = value; + } + else + { + auto tmpNode = cast(Node)value; + tag_ = tmpNode.tag; + scalarStyle = tmpNode.scalarStyle; + collectionStyle = tmpNode.collectionStyle; + value_ = tmpNode.value_; + } + } + + /// + public void toString (DGT) (scope DGT sink) + const scope @safe + { + this.value_.match!( + (const bool v) => formattedWrite(sink, v ? "true" : "false"), + (const long v) => formattedWrite(sink, "%s", v), + (const Node[] v) => formattedWrite(sink, "[%(%s, %)]", v), + (const ubyte[] v) => formattedWrite(sink, "%s", v), + (const string v) => formattedWrite(sink, `"%s"`, v), + (const Node.Pair[] v) => formattedWrite(sink, "{%(%s, %)}", v), + (const SysTime v) => formattedWrite(sink, "%s", v), + (const YAMLNull v) => formattedWrite(sink, "%s", v), + (const YAMLMerge v) => formattedWrite(sink, "%s", v), + (const real v) => formattedWrite(sink, "%s", v), + (const YAMLInvalid v) => formattedWrite(sink, "%s", v), + ); + } +} + +package: +// Merge pairs into an array of pairs based on merge rules in the YAML spec. +// +// Any new pair will only be added if there is not already a pair +// with the same key. +// +// Params: pairs = Appender managing the array of pairs to merge into. +// toMerge = Pairs to merge. 
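+//
+// Example (an illustrative sketch, not part of the original source):
+// ---
+// Appender!(Node.Pair[]) acc;
+// acc.put(Node.Pair("x", 1));
+// merge(acc, [Node.Pair("x", 9), Node.Pair("y", 2)]);
+// assert(acc.data.length == 2); // key "x" already present, so only "y" is added
+// ---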
+void merge(ref Appender!(Node.Pair[]) pairs, Node.Pair[] toMerge) @safe +{ + bool eq(ref Node.Pair a, ref Node.Pair b) @safe + { + return a.key == b.key; + } + + foreach(ref pair; toMerge) if(!canFind!eq(pairs.data, pair)) + { + pairs.put(pair); + } +} + +enum hasNodeConstructor(T) = hasSimpleNodeConstructor!T || hasExpandedNodeConstructor!T; +template hasSimpleNodeConstructor(T) +{ + static if (is(T == struct)) + { + enum hasSimpleNodeConstructor = is(typeof(T(Node.init))); + } + else static if (is(T == class)) + { + enum hasSimpleNodeConstructor = is(typeof(new T(Node.init))); + } + else enum hasSimpleNodeConstructor = false; +} +template hasExpandedNodeConstructor(T) +{ + static if (is(T == struct)) + { + enum hasExpandedNodeConstructor = is(typeof(T(Node.init, ""))); + } + else static if (is(T == class)) + { + enum hasExpandedNodeConstructor = is(typeof(new T(Node.init, ""))); + } + else enum hasExpandedNodeConstructor = false; +} +enum castableToNode(T) = (is(T == struct) || is(T == class)) && is(typeof(T.opCast!Node()) : Node); + +@safe unittest +{ + import dyaml : Loader, Node; + + static struct Foo + { + string[] bars; + + this(const Node node) + { + foreach(value; node["bars"].sequence) + { + bars ~= value.as!string.idup; + } + } + } + + Loader.fromString(`{ bars: ["a", "b"] }`) + .load + .as!(Foo); +} +@safe unittest +{ + import dyaml : Loader, Node; + import std : split, to; + + static class MyClass + { + int x, y, z; + + this(Node node) + { + auto parts = node.as!string().split(":"); + x = parts[0].to!int; + y = parts[1].to!int; + z = parts[2].to!int; + } + } + + auto loader = Loader.fromString(`"1:2:3"`); + Node node = loader.load(); + auto mc = node.get!MyClass; +} +@safe unittest +{ + import dyaml : Loader, Node; + import std : split, to; + + static class MyClass + { + int x, y, z; + + this(Node node) + { + auto parts = node.as!string().split(":"); + x = parts[0].to!int; + y = parts[1].to!int; + z = parts[2].to!int; + } + } + + auto loader = Loader.fromString(`"1:2:3"`); + const node = loader.load(); + auto mc = node.get!MyClass; +} diff --git a/source/dyaml/package.d b/source/dyaml/package.d new file mode 100644 index 0000000..e61b716 --- /dev/null +++ b/source/dyaml/package.d @@ -0,0 +1,15 @@ +// Copyright Ferdinand Majerech 2011. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +module dyaml; + +public import dyaml.dumper; +public import dyaml.encoding; +public import dyaml.exception; +public import dyaml.linebreak; +public import dyaml.loader; +public import dyaml.resolver; +public import dyaml.style; +public import dyaml.node; diff --git a/source/dyaml/parser.d b/source/dyaml/parser.d new file mode 100644 index 0000000..befdfa4 --- /dev/null +++ b/source/dyaml/parser.d @@ -0,0 +1,958 @@ + +// Copyright Ferdinand Majerech 2011-2014. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +/** + * YAML parser. + * Code based on PyYAML: http://www.pyyaml.org + */ +module dyaml.parser; + + +import std.algorithm; +import std.array; +import std.conv; +import std.exception; +import std.typecons; + +import dyaml.event; +import dyaml.exception; +import dyaml.scanner; +import dyaml.style; +import dyaml.token; +import dyaml.tagdirective; + + +/** + * The following YAML grammar is LL(1) and is parsed by a recursive descent + * parser. 
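+ *
+ * (Illustrative note, not from the original source: a document such as
+ * `a: 1` yields the event sequence STREAM-START, DOCUMENT-START,
+ * MAPPING-START, SCALAR "a", SCALAR "1", MAPPING-END, DOCUMENT-END,
+ * STREAM-END.)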
+ * + * stream ::= STREAM-START implicit_document? explicit_document* STREAM-END + * implicit_document ::= block_node DOCUMENT-END* + * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* + * block_node_or_indentless_sequence ::= + * ALIAS + * | properties (block_content | indentless_block_sequence)? + * | block_content + * | indentless_block_sequence + * block_node ::= ALIAS + * | properties block_content? + * | block_content + * flow_node ::= ALIAS + * | properties flow_content? + * | flow_content + * properties ::= TAG ANCHOR? | ANCHOR TAG? + * block_content ::= block_collection | flow_collection | SCALAR + * flow_content ::= flow_collection | SCALAR + * block_collection ::= block_sequence | block_mapping + * flow_collection ::= flow_sequence | flow_mapping + * block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END + * indentless_sequence ::= (BLOCK-ENTRY block_node?)+ + * block_mapping ::= BLOCK-MAPPING_START + * ((KEY block_node_or_indentless_sequence?)? + * (VALUE block_node_or_indentless_sequence?)?)* + * BLOCK-END + * flow_sequence ::= FLOW-SEQUENCE-START + * (flow_sequence_entry FLOW-ENTRY)* + * flow_sequence_entry? + * FLOW-SEQUENCE-END + * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + * flow_mapping ::= FLOW-MAPPING-START + * (flow_mapping_entry FLOW-ENTRY)* + * flow_mapping_entry? + * FLOW-MAPPING-END + * flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? + * + * FIRST sets: + * + * stream: { STREAM-START } + * explicit_document: { DIRECTIVE DOCUMENT-START } + * implicit_document: FIRST(block_node) + * block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START } + * flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START } + * block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } + * flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } + * block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START } + * flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } + * block_sequence: { BLOCK-SEQUENCE-START } + * block_mapping: { BLOCK-MAPPING-START } + * block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY } + * indentless_sequence: { ENTRY } + * flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } + * flow_sequence: { FLOW-SEQUENCE-START } + * flow_mapping: { FLOW-MAPPING-START } + * flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } + * flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } + */ + + +/** + * Marked exception thrown at parser errors. + * + * See_Also: MarkedYAMLException + */ +class ParserException : MarkedYAMLException +{ + mixin MarkedExceptionCtors; +} + +package: +/// Generates events from tokens provided by a Scanner. +/// +/// While Parser receives tokens with non-const character slices, the events it +/// produces are immutable strings, which are usually the same slices, cast to string. +/// Parser is the last layer of D:YAML that may possibly do any modifications to these +/// slices. +final class Parser +{ + private: + ///Default tag handle shortcuts and replacements. + static TagDirective[] defaultTagDirectives_ = + [TagDirective("!", "!"), TagDirective("!!", "tag:yaml.org,2002:")]; + + ///Scanner providing YAML tokens. 
+ Scanner scanner_; + + ///Event produced by the most recent state. + Event currentEvent_; + + ///YAML version string. + string YAMLVersion_ = null; + ///Tag handle shortcuts and replacements. + TagDirective[] tagDirectives_; + + ///Stack of states. + Appender!(Event delegate() @safe[]) states_; + ///Stack of marks used to keep track of extents of e.g. YAML collections. + Appender!(Mark[]) marks_; + + ///Current state. + Event delegate() @safe state_; + + public: + ///Construct a Parser using specified Scanner. + this(Scanner scanner) @safe + { + state_ = &parseStreamStart; + scanner_ = scanner; + states_.reserve(32); + marks_.reserve(32); + } + + /** + * Check if any events are left. May have side effects in some cases. + */ + bool empty() @safe + { + ensureState(); + return currentEvent_.isNull; + } + + /** + * Return the current event. + * + * Must not be called if there are no events left. + */ + Event front() @safe + { + ensureState(); + assert(!currentEvent_.isNull, "No event left to peek"); + return currentEvent_; + } + + /** + * Skip to the next event. + * + * Must not be called if there are no events left. + */ + void popFront() @safe + { + currentEvent_.id = EventID.invalid; + ensureState(); + } + + private: + /// If current event is invalid, load the next valid one if possible. + void ensureState() @safe + { + if(currentEvent_.isNull && state_ !is null) + { + currentEvent_ = state_(); + } + } + ///Pop and return the newest state in states_. + Event delegate() @safe popState() @safe + { + enforce(states_.data.length > 0, + new YAMLException("Parser: Need to pop state but no states left to pop")); + const result = states_.data.back; + states_.shrinkTo(states_.data.length - 1); + return result; + } + + ///Pop and return the newest mark in marks_. + Mark popMark() @safe + { + enforce(marks_.data.length > 0, + new YAMLException("Parser: Need to pop mark but no marks left to pop")); + const result = marks_.data.back; + marks_.shrinkTo(marks_.data.length - 1); + return result; + } + + /// Push a state on the stack + void pushState(Event delegate() @safe state) @safe + { + states_ ~= state; + } + /// Push a mark on the stack + void pushMark(Mark mark) @safe + { + marks_ ~= mark; + } + + /** + * stream ::= STREAM-START implicit_document? explicit_document* STREAM-END + * implicit_document ::= block_node DOCUMENT-END* + * explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* + */ + + ///Parse stream start. + Event parseStreamStart() @safe + { + const token = scanner_.front; + scanner_.popFront(); + state_ = &parseImplicitDocumentStart; + return streamStartEvent(token.startMark, token.endMark); + } + + /// Parse implicit document start, unless explicit detected: if so, parse explicit. + Event parseImplicitDocumentStart() @safe + { + // Parse an implicit document. + if(!scanner_.front.id.among!(TokenID.directive, TokenID.documentStart, + TokenID.streamEnd)) + { + tagDirectives_ = defaultTagDirectives_; + const token = scanner_.front; + + pushState(&parseDocumentEnd); + state_ = &parseBlockNode; + + return documentStartEvent(token.startMark, token.endMark, false, null, null); + } + return parseDocumentStart(); + } + + ///Parse explicit document start. + Event parseDocumentStart() @safe + { + //Parse any extra document end indicators. + while(scanner_.front.id == TokenID.documentEnd) + { + scanner_.popFront(); + } + + //Parse an explicit document. 
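+            // (An explicit document is introduced by a "---" marker, optionally
+            // preceded by %YAML/%TAG directives.)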
+            if(scanner_.front.id != TokenID.streamEnd)
+            {
+                const startMark = scanner_.front.startMark;
+
+                auto tagDirectives = processDirectives();
+                enforce(scanner_.front.id == TokenID.documentStart,
+                        new ParserException("Expected document start but found " ~
+                                            scanner_.front.idString,
+                                            scanner_.front.startMark));
+
+                const endMark = scanner_.front.endMark;
+                scanner_.popFront();
+                pushState(&parseDocumentEnd);
+                state_ = &parseDocumentContent;
+                return documentStartEvent(startMark, endMark, true, YAMLVersion_, tagDirectives);
+            }
+            else
+            {
+                //Parse the end of the stream.
+                const token = scanner_.front;
+                scanner_.popFront();
+                assert(states_.data.length == 0);
+                assert(marks_.data.length == 0);
+                state_ = null;
+                return streamEndEvent(token.startMark, token.endMark);
+            }
+        }
+
+        ///Parse document end (explicit or implicit).
+        Event parseDocumentEnd() @safe
+        {
+            Mark startMark = scanner_.front.startMark;
+            const bool explicit = scanner_.front.id == TokenID.documentEnd;
+            Mark endMark = startMark;
+            if (explicit)
+            {
+                endMark = scanner_.front.endMark;
+                scanner_.popFront();
+            }
+
+            state_ = &parseDocumentStart;
+
+            return documentEndEvent(startMark, endMark, explicit);
+        }
+
+        ///Parse document content.
+        Event parseDocumentContent() @safe
+        {
+            if(scanner_.front.id.among!(TokenID.directive, TokenID.documentStart,
+                                        TokenID.documentEnd, TokenID.streamEnd))
+            {
+                state_ = popState();
+                return processEmptyScalar(scanner_.front.startMark);
+            }
+            return parseBlockNode();
+        }
+
+        /// Process directives at the beginning of a document.
+        TagDirective[] processDirectives() @safe
+        {
+            // Destroy version and tag handles from previous document.
+            YAMLVersion_ = null;
+            tagDirectives_.length = 0;
+
+            // Process directives.
+            while(scanner_.front.id == TokenID.directive)
+            {
+                const token = scanner_.front;
+                scanner_.popFront();
+                string value = token.value.idup;
+                if(token.directive == DirectiveType.yaml)
+                {
+                    enforce(YAMLVersion_ is null,
+                            new ParserException("Duplicate YAML directive", token.startMark));
+                    const major = value.split(".")[0];
+                    enforce(major == "1",
+                            new ParserException("Incompatible document (version 1.x is required)",
+                                                token.startMark));
+                    YAMLVersion_ = value;
+                }
+                else if(token.directive == DirectiveType.tag)
+                {
+                    auto handle = value[0 .. token.valueDivider];
+
+                    foreach(ref pair; tagDirectives_)
+                    {
+                        // A tag handle may only be declared once per document.
+                        const h = pair.handle;
+                        enforce(h != handle, new ParserException("Duplicate tag handle: " ~ handle,
+                                                                 token.startMark));
+                    }
+                    tagDirectives_ ~=
+                        TagDirective(handle, value[token.valueDivider .. $]);
+                }
+                // Any other directive type is ignored (only YAML and TAG are in YAML
+                // 1.1/1.2, any other directives are "reserved")
+            }
+
+            TagDirective[] value = tagDirectives_;
+
+            //Add any default tag handles that haven't been overridden.
+            foreach(ref defaultPair; defaultTagDirectives_)
+            {
+                bool found;
+                foreach(ref pair; tagDirectives_) if(defaultPair.handle == pair.handle)
+                {
+                    found = true;
+                    break;
+                }
+                if(!found) { tagDirectives_ ~= defaultPair; }
+            }
+
+            return value;
+        }
+
+        /**
+         * block_node_or_indentless_sequence ::= ALIAS
+         *               | properties (block_content | indentless_block_sequence)?
+         *               | block_content
+         *               | indentless_block_sequence
+         * block_node    ::= ALIAS
+         *                   | properties block_content?
+         *                   | block_content
+         * flow_node     ::= ALIAS
+         *                   | properties flow_content?
+         *                   | flow_content
+         * properties    ::= TAG ANCHOR? | ANCHOR TAG?
+ * block_content ::= block_collection | flow_collection | SCALAR + * flow_content ::= flow_collection | SCALAR + * block_collection ::= block_sequence | block_mapping + * flow_collection ::= flow_sequence | flow_mapping + */ + + ///Parse a node. + Event parseNode(const Flag!"block" block, + const Flag!"indentlessSequence" indentlessSequence = No.indentlessSequence) + @trusted + { + if(scanner_.front.id == TokenID.alias_) + { + const token = scanner_.front; + scanner_.popFront(); + state_ = popState(); + return aliasEvent(token.startMark, token.endMark, + cast(string)token.value); + } + + string anchor; + string tag; + Mark startMark, endMark, tagMark; + bool invalidMarks = true; + // The index in the tag string where tag handle ends and tag suffix starts. + uint tagHandleEnd; + + //Get anchor/tag if detected. Return false otherwise. + bool get(const TokenID id, const Flag!"first" first, ref string target) @safe + { + if(scanner_.front.id != id){return false;} + invalidMarks = false; + const token = scanner_.front; + scanner_.popFront(); + if(first){startMark = token.startMark;} + if(id == TokenID.tag) + { + tagMark = token.startMark; + tagHandleEnd = token.valueDivider; + } + endMark = token.endMark; + target = token.value.idup; + return true; + } + + //Anchor and/or tag can be in any order. + if(get(TokenID.anchor, Yes.first, anchor)){get(TokenID.tag, No.first, tag);} + else if(get(TokenID.tag, Yes.first, tag)) {get(TokenID.anchor, No.first, anchor);} + + if(tag !is null){tag = processTag(tag, tagHandleEnd, startMark, tagMark);} + + if(invalidMarks) + { + startMark = endMark = scanner_.front.startMark; + } + + bool implicit = (tag is null || tag == "!"); + + if(indentlessSequence && scanner_.front.id == TokenID.blockEntry) + { + state_ = &parseIndentlessSequenceEntry; + return sequenceStartEvent + (startMark, scanner_.front.endMark, anchor, + tag, implicit, CollectionStyle.block); + } + + if(scanner_.front.id == TokenID.scalar) + { + auto token = scanner_.front; + scanner_.popFront(); + auto value = token.style == ScalarStyle.doubleQuoted + ? handleDoubleQuotedScalarEscapes(token.value) + : cast(string)token.value; + + implicit = (token.style == ScalarStyle.plain && tag is null) || tag == "!"; + state_ = popState(); + return scalarEvent(startMark, token.endMark, anchor, tag, + implicit, value, token.style); + } + + if(scanner_.front.id == TokenID.flowSequenceStart) + { + endMark = scanner_.front.endMark; + state_ = &parseFlowSequenceEntry!(Yes.first); + return sequenceStartEvent(startMark, endMark, anchor, tag, + implicit, CollectionStyle.flow); + } + + if(scanner_.front.id == TokenID.flowMappingStart) + { + endMark = scanner_.front.endMark; + state_ = &parseFlowMappingKey!(Yes.first); + return mappingStartEvent(startMark, endMark, anchor, tag, + implicit, CollectionStyle.flow); + } + + if(block && scanner_.front.id == TokenID.blockSequenceStart) + { + endMark = scanner_.front.endMark; + state_ = &parseBlockSequenceEntry!(Yes.first); + return sequenceStartEvent(startMark, endMark, anchor, tag, + implicit, CollectionStyle.block); + } + + if(block && scanner_.front.id == TokenID.blockMappingStart) + { + endMark = scanner_.front.endMark; + state_ = &parseBlockMappingKey!(Yes.first); + return mappingStartEvent(startMark, endMark, anchor, tag, + implicit, CollectionStyle.block); + } + + if(anchor !is null || tag !is null) + { + state_ = popState(); + + //PyYAML uses a tuple(implicit, false) for the second last arg here, + //but the second bool is never used after that - so we don't use it. 
+
+                //Empty scalars are allowed even if a tag or an anchor is specified.
+                return scalarEvent(startMark, endMark, anchor, tag,
+                                   implicit, "");
+            }
+
+            const token = scanner_.front;
+            throw new ParserException("While parsing a " ~ (block ? "block" : "flow") ~ " node",
+                                      startMark, "expected node content, but found: "
+                                      ~ token.idString, token.startMark);
+        }
+
+        /// Handle escape sequences in a double quoted scalar.
+        ///
+        /// Moved here from scanner as it can't always be done in-place with slices.
+        string handleDoubleQuotedScalarEscapes(const(char)[] tokenValue) const @safe
+        {
+            string notInPlace;
+            bool inEscape;
+            auto appender = appender!(string)();
+            for(const(char)[] oldValue = tokenValue; !oldValue.empty();)
+            {
+                const dchar c = oldValue.front();
+                oldValue.popFront();
+
+                if(!inEscape)
+                {
+                    if(c != '\\')
+                    {
+                        if(notInPlace is null) { appender.put(c); }
+                        else { notInPlace ~= c; }
+                        continue;
+                    }
+                    // Escape sequence starts with a '\'
+                    inEscape = true;
+                    continue;
+                }
+
+                import dyaml.escapes;
+                scope(exit) { inEscape = false; }
+
+                // 'Normal' escape sequence.
+                if(c.among!(escapes))
+                {
+                    if(notInPlace is null)
+                    {
+                        // \L and \P can't be handled in place as they expand into
+                        // multi-byte Unicode chars
+                        if(c != 'L' && c != 'P')
+                        {
+                            appender.put(dyaml.escapes.fromEscape(c));
+                            continue;
+                        }
+                        // Need to duplicate as we won't fit into
+                        // token.value - which is what appender uses
+                        notInPlace = appender.data.dup;
+                        notInPlace ~= dyaml.escapes.fromEscape(c);
+                        continue;
+                    }
+                    notInPlace ~= dyaml.escapes.fromEscape(c);
+                    continue;
+                }
+
+                // Unicode char written in hexadecimal in an escape sequence.
+                if(c.among!(escapeHexCodeList))
+                {
+                    // Scanner has already checked that the hex string is valid.
+
+                    const hexLength = dyaml.escapes.escapeHexLength(c);
+                    // Any hex digits are 1-byte so this works.
+                    const(char)[] hex = oldValue[0 .. hexLength];
+                    oldValue = oldValue[hexLength .. $];
+                    import std.ascii : isHexDigit;
+                    assert(!hex.canFind!(d => !d.isHexDigit),
+                           "Scanner must ensure the hex string is valid");
+
+                    const decoded = cast(dchar)parse!int(hex, 16u);
+                    if(notInPlace is null) { appender.put(decoded); }
+                    else { notInPlace ~= decoded; }
+                    continue;
+                }
+
+                assert(false, "Scanner must handle unsupported escapes");
+            }
+
+            return notInPlace is null ? appender.data : notInPlace;
+        }
+
+        /**
+         * Process a tag string retrieved from a tag token.
+         *
+         * Params: tag       = Tag before processing.
+         *         handleEnd = Index in tag where tag handle ends and tag suffix
+         *                     starts.
+         *         startMark = Position of the node the tag belongs to.
+         *         tagMark   = Position of the tag.
+         */
+        string processTag(const string tag, const uint handleEnd,
+                          const Mark startMark, const Mark tagMark)
+            const @safe
+        {
+            const handle = tag[0 .. handleEnd];
+            const suffix = tag[handleEnd .. $];
+
+            if(handle.length > 0)
+            {
+                string replacement;
+                foreach(ref pair; tagDirectives_)
+                {
+                    if(pair.handle == handle)
+                    {
+                        replacement = pair.prefix;
+                        break;
+                    }
+                }
+                //handle must be in tagDirectives_
+                enforce(replacement !is null,
+                        new ParserException("While parsing a node", startMark,
+                                            "found undefined tag handle: " ~ handle, tagMark));
+                return replacement ~ suffix;
+            }
+            return suffix;
+        }
+
+        ///Wrappers to parse nodes.
+ Event parseBlockNode() @safe {return parseNode(Yes.block);} + Event parseFlowNode() @safe {return parseNode(No.block);} + Event parseBlockNodeOrIndentlessSequence() @safe {return parseNode(Yes.block, Yes.indentlessSequence);} + + ///block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END + + ///Parse an entry of a block sequence. If first is true, this is the first entry. + Event parseBlockSequenceEntry(Flag!"first" first)() @safe + { + static if(first) + { + pushMark(scanner_.front.startMark); + scanner_.popFront(); + } + + if(scanner_.front.id == TokenID.blockEntry) + { + const token = scanner_.front; + scanner_.popFront(); + if(!scanner_.front.id.among!(TokenID.blockEntry, TokenID.blockEnd)) + { + pushState(&parseBlockSequenceEntry!(No.first)); + return parseBlockNode(); + } + + state_ = &parseBlockSequenceEntry!(No.first); + return processEmptyScalar(token.endMark); + } + + if(scanner_.front.id != TokenID.blockEnd) + { + const token = scanner_.front; + throw new ParserException("While parsing a block collection", marks_.data.back, + "expected block end, but found " ~ token.idString, + token.startMark); + } + + state_ = popState(); + popMark(); + const token = scanner_.front; + scanner_.popFront(); + return sequenceEndEvent(token.startMark, token.endMark); + } + + ///indentless_sequence ::= (BLOCK-ENTRY block_node?)+ + + ///Parse an entry of an indentless sequence. + Event parseIndentlessSequenceEntry() @safe + { + if(scanner_.front.id == TokenID.blockEntry) + { + const token = scanner_.front; + scanner_.popFront(); + + if(!scanner_.front.id.among!(TokenID.blockEntry, TokenID.key, + TokenID.value, TokenID.blockEnd)) + { + pushState(&parseIndentlessSequenceEntry); + return parseBlockNode(); + } + + state_ = &parseIndentlessSequenceEntry; + return processEmptyScalar(token.endMark); + } + + state_ = popState(); + const token = scanner_.front; + return sequenceEndEvent(token.startMark, token.endMark); + } + + /** + * block_mapping ::= BLOCK-MAPPING_START + * ((KEY block_node_or_indentless_sequence?)? + * (VALUE block_node_or_indentless_sequence?)?)* + * BLOCK-END + */ + + ///Parse a key in a block mapping. If first is true, this is the first key. + Event parseBlockMappingKey(Flag!"first" first)() @safe + { + static if(first) + { + pushMark(scanner_.front.startMark); + scanner_.popFront(); + } + + if(scanner_.front.id == TokenID.key) + { + const token = scanner_.front; + scanner_.popFront(); + + if(!scanner_.front.id.among!(TokenID.key, TokenID.value, TokenID.blockEnd)) + { + pushState(&parseBlockMappingValue); + return parseBlockNodeOrIndentlessSequence(); + } + + state_ = &parseBlockMappingValue; + return processEmptyScalar(token.endMark); + } + + if(scanner_.front.id != TokenID.blockEnd) + { + const token = scanner_.front; + throw new ParserException("While parsing a block mapping", marks_.data.back, + "expected block end, but found: " ~ token.idString, + token.startMark); + } + + state_ = popState(); + popMark(); + const token = scanner_.front; + scanner_.popFront(); + return mappingEndEvent(token.startMark, token.endMark); + } + + ///Parse a value in a block mapping. 
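+        /// (The value part of a `key: value` pair; an omitted value is reported
+        /// as an empty scalar.)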
+        Event parseBlockMappingValue() @safe
+        {
+            if(scanner_.front.id == TokenID.value)
+            {
+                const token = scanner_.front;
+                scanner_.popFront();
+
+                if(!scanner_.front.id.among!(TokenID.key, TokenID.value, TokenID.blockEnd))
+                {
+                    pushState(&parseBlockMappingKey!(No.first));
+                    return parseBlockNodeOrIndentlessSequence();
+                }
+
+                state_ = &parseBlockMappingKey!(No.first);
+                return processEmptyScalar(token.endMark);
+            }
+
+            state_ = &parseBlockMappingKey!(No.first);
+            return processEmptyScalar(scanner_.front.startMark);
+        }
+
+        /**
+         * flow_sequence     ::= FLOW-SEQUENCE-START
+         *                       (flow_sequence_entry FLOW-ENTRY)*
+         *                       flow_sequence_entry?
+         *                       FLOW-SEQUENCE-END
+         * flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+         *
+         * Note that while production rules for both flow_sequence_entry and
+         * flow_mapping_entry are equal, their interpretations are different.
+         * For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
+         * generates an inline mapping (set syntax).
+         */
+
+        ///Parse an entry in a flow sequence. If first is true, this is the first entry.
+        Event parseFlowSequenceEntry(Flag!"first" first)() @safe
+        {
+            static if(first)
+            {
+                pushMark(scanner_.front.startMark);
+                scanner_.popFront();
+            }
+
+            if(scanner_.front.id != TokenID.flowSequenceEnd)
+            {
+                static if(!first)
+                {
+                    if(scanner_.front.id == TokenID.flowEntry)
+                    {
+                        scanner_.popFront();
+                    }
+                    else
+                    {
+                        const token = scanner_.front;
+                        throw new ParserException("While parsing a flow sequence", marks_.data.back,
+                                                  "expected ',' or ']', but got: " ~
+                                                  token.idString, token.startMark);
+                    }
+                }
+
+                if(scanner_.front.id == TokenID.key)
+                {
+                    const token = scanner_.front;
+                    state_ = &parseFlowSequenceEntryMappingKey;
+                    return mappingStartEvent(token.startMark, token.endMark,
+                                             null, null, true, CollectionStyle.flow);
+                }
+                else if(scanner_.front.id != TokenID.flowSequenceEnd)
+                {
+                    pushState(&parseFlowSequenceEntry!(No.first));
+                    return parseFlowNode();
+                }
+            }
+
+            const token = scanner_.front;
+            scanner_.popFront();
+            state_ = popState();
+            popMark();
+            return sequenceEndEvent(token.startMark, token.endMark);
+        }
+
+        ///Parse a key in flow context.
+        Event parseFlowKey(Event delegate() @safe nextState) @safe
+        {
+            const token = scanner_.front;
+            scanner_.popFront();
+
+            if(!scanner_.front.id.among!(TokenID.value, TokenID.flowEntry,
+                                         TokenID.flowSequenceEnd))
+            {
+                pushState(nextState);
+                return parseFlowNode();
+            }
+
+            state_ = nextState;
+            return processEmptyScalar(token.endMark);
+        }
+
+        ///Parse a mapping key in an entry in a flow sequence.
+        Event parseFlowSequenceEntryMappingKey() @safe
+        {
+            return parseFlowKey(&parseFlowSequenceEntryMappingValue);
+        }
+
+        ///Parse a mapping value in a flow context.
+        Event parseFlowValue(TokenID checkId, Event delegate() @safe nextState)
+            @safe
+        {
+            if(scanner_.front.id == TokenID.value)
+            {
+                const token = scanner_.front;
+                scanner_.popFront();
+                if(!scanner_.front.id.among(TokenID.flowEntry, checkId))
+                {
+                    pushState(nextState);
+                    return parseFlowNode();
+                }
+
+                state_ = nextState;
+                return processEmptyScalar(token.endMark);
+            }
+
+            state_ = nextState;
+            return processEmptyScalar(scanner_.front.startMark);
+        }
+
+        ///Parse a mapping value in an entry in a flow sequence.
+        Event parseFlowSequenceEntryMappingValue() @safe
+        {
+            return parseFlowValue(TokenID.flowSequenceEnd,
+                                  &parseFlowSequenceEntryMappingEnd);
+        }
+
+        ///Parse end of a mapping in a flow sequence entry.
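+        /// (E.g. the end of the implicit single-pair mapping in `[key: value]`.)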
+        Event parseFlowSequenceEntryMappingEnd() @safe
+        {
+            state_ = &parseFlowSequenceEntry!(No.first);
+            const token = scanner_.front;
+            return mappingEndEvent(token.startMark, token.startMark);
+        }
+
+        /**
+         * flow_mapping  ::= FLOW-MAPPING-START
+         *                   (flow_mapping_entry FLOW-ENTRY)*
+         *                   flow_mapping_entry?
+         *                   FLOW-MAPPING-END
+         * flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+         */
+
+        ///Parse a key in a flow mapping.
+        Event parseFlowMappingKey(Flag!"first" first)() @safe
+        {
+            static if(first)
+            {
+                pushMark(scanner_.front.startMark);
+                scanner_.popFront();
+            }
+
+            if(scanner_.front.id != TokenID.flowMappingEnd)
+            {
+                static if(!first)
+                {
+                    if(scanner_.front.id == TokenID.flowEntry)
+                    {
+                        scanner_.popFront();
+                    }
+                    else
+                    {
+                        const token = scanner_.front;
+                        throw new ParserException("While parsing a flow mapping", marks_.data.back,
+                                                  "expected ',' or '}', but got: " ~
+                                                  token.idString, token.startMark);
+                    }
+                }
+
+                if(scanner_.front.id == TokenID.key)
+                {
+                    return parseFlowKey(&parseFlowMappingValue);
+                }
+
+                if(scanner_.front.id != TokenID.flowMappingEnd)
+                {
+                    pushState(&parseFlowMappingEmptyValue);
+                    return parseFlowNode();
+                }
+            }
+
+            const token = scanner_.front;
+            scanner_.popFront();
+            state_ = popState();
+            popMark();
+            return mappingEndEvent(token.startMark, token.endMark);
+        }
+
+        ///Parse a value in a flow mapping.
+        Event parseFlowMappingValue() @safe
+        {
+            return parseFlowValue(TokenID.flowMappingEnd, &parseFlowMappingKey!(No.first));
+        }
+
+        ///Parse an empty value in a flow mapping.
+        Event parseFlowMappingEmptyValue() @safe
+        {
+            state_ = &parseFlowMappingKey!(No.first);
+            return processEmptyScalar(scanner_.front.startMark);
+        }
+
+        ///Return an empty scalar.
+        Event processEmptyScalar(const Mark mark) @safe pure nothrow const @nogc
+        {
+            return scalarEvent(mark, mark, null, null, true, "");
+        }
+}
diff --git a/source/dyaml/queue.d b/source/dyaml/queue.d
new file mode 100644
index 0000000..57b0d34
--- /dev/null
+++ b/source/dyaml/queue.d
@@ -0,0 +1,272 @@
+
+// Copyright Ferdinand Majerech 2011-2014.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+module dyaml.queue;
+
+
+import std.traits : hasMember, hasIndirections;
+
+package:
+
+/// Simple queue implemented as a singly linked list with a tail pointer.
+///
+/// Needed in some D:YAML code that needs a queue-like structure without the
+/// reallocation overhead that comes with an array.
+///
+/// Allocations are non-GC and are kept down by a free list built from the nodes
+/// that are removed. Note that the elements' lifetime must be managed
+/// outside of the queue.
+struct Queue(T)
+if (!hasMember!(T, "__xdtor"))
+{
+
+private:
+
+    // Linked list node containing one element and pointer to the next node.
+    struct Node
+    {
+        T payload_;
+        Node* next_;
+    }
+
+    // Start of the linked list - first element added in time (the front of the
+    // queue, i.e. the next element returned by pop()/peek()).
+    Node* first_;
+    // Last element of the linked list - last element added in time (the back of
+    // the queue, where push() appends).
+    Node* last_;
+    // free-list of removed nodes available for reuse
+    Node* stock;
+
+    // Length of the queue.
+    size_t length_;
+
+    // allocate a new node or recycle one from the stock.
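+    // (Recycling nodes through the free list avoids one allocation per push
+    // when the queue repeatedly grows and shrinks.)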
+    Node* makeNewNode(T thePayload, Node* theNext = null) @trusted nothrow @nogc
+    {
+        import std.experimental.allocator : make;
+        import std.experimental.allocator.mallocator : Mallocator;
+
+        Node* result;
+        if (stock !is null)
+        {
+            result = stock;
+            stock = result.next_;
+            result.payload_ = thePayload;
+            result.next_ = theNext;
+        }
+        else
+        {
+            result = Mallocator.instance.make!(Node)(thePayload, theNext);
+            // The GC can collect T's managed members if it thinks they are not
+            // used, since it doesn't scan malloc'd memory by default...
+            static if (hasIndirections!T)
+            {
+                import core.memory : GC;
+                GC.addRange(result, Node.sizeof);
+            }
+        }
+        return result;
+    }
+
+    // free the stock of available free nodes.
+    void freeStock() @trusted @nogc nothrow
+    {
+        import std.experimental.allocator.mallocator : Mallocator;
+
+        while (stock !is null)
+        {
+            Node* toFree = stock;
+            stock = stock.next_;
+            static if (hasIndirections!T)
+            {
+                import core.memory : GC;
+                GC.removeRange(toFree);
+            }
+            Mallocator.instance.deallocate((cast(ubyte*) toFree)[0 .. Node.sizeof]);
+        }
+    }
+
+public:
+
+    @disable void opAssign(ref Queue);
+    @disable bool opEquals(ref Queue);
+    @disable int opCmp(ref Queue);
+
+    this(this) @safe nothrow @nogc
+    {
+        auto node = first_;
+        first_ = null;
+        last_ = null;
+        while (node !is null)
+        {
+            Node* newLast = makeNewNode(node.payload_);
+            if (last_ !is null)
+                last_.next_ = newLast;
+            if (first_ is null)
+                first_ = newLast;
+            last_ = newLast;
+            node = node.next_;
+        }
+    }
+
+    ~this() @safe nothrow @nogc
+    {
+        freeStock();
+        stock = first_;
+        freeStock();
+    }
+
+    /// Returns a forward range iterating over this queue.
+    auto range() @safe pure nothrow @nogc
+    {
+        static struct Result
+        {
+            private Node* cursor;
+
+            void popFront() @safe pure nothrow @nogc
+            {
+                cursor = cursor.next_;
+            }
+            ref T front() @safe pure nothrow @nogc
+            in(cursor !is null)
+            {
+                return cursor.payload_;
+            }
+            bool empty() @safe pure nothrow @nogc const
+            {
+                return cursor is null;
+            }
+        }
+        return Result(first_);
+    }
+
+    /// Push a new item to the queue.
+    void push(T item) @nogc @safe nothrow
+    {
+        Node* newLast = makeNewNode(item);
+        if (last_ !is null)
+            last_.next_ = newLast;
+        if (first_ is null)
+            first_ = newLast;
+        last_ = newLast;
+        ++length_;
+    }
+
+    /// Insert a new item putting it to specified index in the linked list.
+    void insert(T item, const size_t idx) @safe nothrow
+    in
+    {
+        assert(idx <= length_);
+    }
+    do
+    {
+        if (idx == 0)
+        {
+            first_ = makeNewNode(item, first_);
+            ++length_;
+        }
+        // Inserting at the very end of the queue, so we can just push.
+        else if (idx == length_)
+        {
+            push(item);
+        }
+        else
+        {
+            // Get the element before the one we're inserting.
+            Node* current = first_;
+            foreach (i; 1 .. idx)
+                current = current.next_;
+
+            assert(current);
+            // Insert a new node after current, and put current.next_ behind it.
+            current.next_ = makeNewNode(item, current.next_);
+            ++length_;
+        }
+    }
+
+    /// Returns: The next element in the queue, which is removed from the queue.
+    T pop() @safe nothrow
+    in
+    {
+        assert(!empty, "Trying to pop an element from an empty queue");
+    }
+    do
+    {
+        T result = peek();
+
+        Node* oldStock = stock;
+        Node* old = first_;
+        first_ = first_.next_;
+
+        // start the stock from the popped element
+        stock = old;
+        old.next_ = null;
+        // add the existing "old" stock to the new first stock element
+        if (oldStock !is null)
+            stock.next_ = oldStock;
+
+        if (--length_ == 0)
+        {
+            assert(first_ is null);
+            last_ = null;
+        }
+
+        return result;
+    }
+
+    /// Returns: The next element in the queue.
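+    /// (Unlike pop(), the element is not removed from the queue.)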
+    ref inout(T) peek() @safe pure nothrow inout @nogc
+    in
+    {
+        assert(!empty, "Trying to peek at an element in an empty queue");
+    }
+    do
+    {
+        return first_.payload_;
+    }
+
+    /// Returns: true if the queue is empty, false otherwise.
+    bool empty() @safe pure nothrow const @nogc
+    {
+        return first_ is null;
+    }
+
+    /// Returns: The number of elements in the queue.
+    size_t length() @safe pure nothrow const @nogc
+    {
+        return length_;
+    }
+}
+
+@safe nothrow unittest
+{
+    auto queue = Queue!int();
+    assert(queue.empty);
+    foreach (i; 0 .. 65)
+    {
+        queue.push(5);
+        assert(queue.pop() == 5);
+        assert(queue.empty);
+        assert(queue.length_ == 0);
+    }
+
+    int[] array = [1, -1, 2, -2, 3, -3, 4, -4, 5, -5];
+    foreach (i; array)
+    {
+        queue.push(i);
+    }
+
+    array = 42 ~ array[0 .. 3] ~ 42 ~ array[3 .. $] ~ 42;
+    queue.insert(42, 3);
+    queue.insert(42, 0);
+    queue.insert(42, queue.length);
+
+    int[] array2;
+    while (!queue.empty)
+    {
+        array2 ~= queue.pop();
+    }
+
+    assert(array == array2);
+}
diff --git a/source/dyaml/reader.d b/source/dyaml/reader.d
new file mode 100644
index 0000000..ae44c80
--- /dev/null
+++ b/source/dyaml/reader.d
@@ -0,0 +1,909 @@
+
+// Copyright Ferdinand Majerech 2011-2014.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+module dyaml.reader;
+
+
+import core.stdc.stdlib;
+import core.stdc.string;
+import core.thread;
+
+import std.algorithm;
+import std.array;
+import std.conv;
+import std.exception;
+import std.range;
+import std.string;
+import std.system;
+import std.typecons;
+import std.utf;
+
+import tinyendian;
+
+import dyaml.encoding;
+import dyaml.exception;
+
+alias isBreak = among!('\n', '\u0085', '\u2028', '\u2029');
+
+package:
+
+
+///Exception thrown at Reader errors.
+class ReaderException : YAMLException
+{
+    this(string msg, string file = __FILE__, size_t line = __LINE__)
+        @safe pure nothrow
+    {
+        super("Reader error: " ~ msg, file, line);
+    }
+}
+
+/// Provides an API to read characters from a UTF-8 buffer and build slices into that
+/// buffer to avoid allocations (see SliceBuilder).
+final class Reader
+{
+    private:
+        // Buffer of currently loaded characters.
+        char[] buffer_;
+
+        // Current position within buffer. Only data after this position can be read.
+        size_t bufferOffset_;
+
+        // Index of the current character in the buffer.
+        size_t charIndex_;
+        // Number of characters (code points) in buffer_.
+        size_t characterCount_;
+
+        // File name
+        string name_;
+        // Current line in file.
+        uint line_;
+        // Current column in file.
+        uint column_;
+
+        // Original Unicode encoding of the data.
+        Encoding encoding_;
+
+        version(unittest)
+        {
+            // Endianness of the input before it was converted (for testing)
+            Endian endian_;
+        }
+
+        // The number of consecutive ASCII characters starting at bufferOffset_.
+        //
+        // Used to minimize UTF-8 decoding.
+        size_t upcomingASCII_;
+
+        // Index to buffer_ where the last decoded character starts.
+        size_t lastDecodedBufferOffset_;
+        // Offset, relative to charIndex_, of the last decoded character,
+        // in code points, not chars.
+        size_t lastDecodedCharOffset_;
+
+    public:
+        /// Construct a Reader.
+        ///
+        /// Params:  buffer = Buffer with YAML data. This may be e.g. the entire
+        ///                   contents of a file or a string. 
$(B will) be modified by + /// the Reader and other parts of D:YAML (D:YAML tries to + /// reuse the buffer to minimize memory allocations) + /// name = File name if the buffer is the contents of a file or + /// `""` if the buffer is the contents of a string. + /// + /// Throws: ReaderException on a UTF decoding error or if there are + /// nonprintable Unicode characters illegal in YAML. + this(ubyte[] buffer, string name = "") @safe pure + { + name_ = name; + auto endianResult = fixUTFByteOrder(buffer); + if(endianResult.bytesStripped > 0) + { + throw new ReaderException("Size of UTF-16 or UTF-32 input not aligned " ~ + "to 2 or 4 bytes, respectively"); + } + + version(unittest) { endian_ = endianResult.endian; } + encoding_ = endianResult.encoding; + + auto utf8Result = toUTF8(endianResult.array, endianResult.encoding); + const msg = utf8Result.errorMessage; + if(msg !is null) + { + throw new ReaderException("Error when converting to UTF-8: " ~ msg); + } + + buffer_ = utf8Result.utf8; + + characterCount_ = utf8Result.characterCount; + // Check that all characters in buffer are printable. + enforce(isPrintableValidUTF8(buffer_), + new ReaderException("Special unicode characters are not allowed")); + + this.sliceBuilder = SliceBuilder(this); + checkASCII(); + } + + /// Get character at specified index relative to current position. + /// + /// Params: index = Index of the character to get relative to current position + /// in the buffer. Can point outside of the buffer; In that + /// case, '\0' will be returned. + /// + /// Returns: Character at specified position or '\0' if outside of the buffer. + /// + // XXX removed; search for 'risky' to find why. + // Throws: ReaderException if trying to read past the end of the buffer. + dchar peek(const size_t index) @safe pure + { + if(index < upcomingASCII_) { return buffer_[bufferOffset_ + index]; } + if(characterCount_ <= charIndex_ + index) + { + // XXX This is risky; revert this if bugs are introduced. We rely on + // the assumption that Reader only uses peek() to detect end of buffer. + // The test suite passes. + // Revert this case here and in other peek() versions if this causes + // errors. + // throw new ReaderException("Trying to read past the end of the buffer"); + return '\0'; + } + + // Optimized path for Scanner code that peeks chars in linear order to + // determine the length of some sequence. + if(index == lastDecodedCharOffset_) + { + ++lastDecodedCharOffset_; + const char b = buffer_[lastDecodedBufferOffset_]; + // ASCII + if(b < 0x80) + { + ++lastDecodedBufferOffset_; + return b; + } + return decode(buffer_, lastDecodedBufferOffset_); + } + + // 'Slow' path where we decode everything up to the requested character. + const asciiToTake = min(upcomingASCII_, index); + lastDecodedCharOffset_ = asciiToTake; + lastDecodedBufferOffset_ = bufferOffset_ + asciiToTake; + dchar d; + while(lastDecodedCharOffset_ <= index) + { + d = decodeNext(); + } + + return d; + } + + /// Optimized version of peek() for the case where peek index is 0. + dchar peek() @safe pure + { + if(upcomingASCII_ > 0) { return buffer_[bufferOffset_]; } + if(characterCount_ <= charIndex_) { return '\0'; } + + lastDecodedCharOffset_ = 0; + lastDecodedBufferOffset_ = bufferOffset_; + return decodeNext(); + } + + /// Get byte at specified index relative to current position. + /// + /// Params: index = Index of the byte to get relative to current position + /// in the buffer. Can point outside of the buffer; In that + /// case, '\0' will be returned. 
+        ///
+        /// Returns: Byte at specified position or '\0' if outside of the buffer.
+        char peekByte(const size_t index) @safe pure nothrow @nogc
+        {
+            return characterCount_ > (charIndex_ + index) ? buffer_[bufferOffset_ + index] : '\0';
+        }
+
+        /// Optimized version of peekByte() for the case where peek byte index is 0.
+        char peekByte() @safe pure nothrow @nogc
+        {
+            return characterCount_ > charIndex_ ? buffer_[bufferOffset_] : '\0';
+        }
+
+
+        /// Get specified number of characters starting at current position.
+        ///
+        /// Note: This gets only a "view" into the internal buffer, which will be
+        ///       invalidated after other Reader calls. Use SliceBuilder to build slices
+        ///       for permanent use.
+        ///
+        /// Params: length = Number of characters (code points, not bytes) to get. May
+        ///                  reach past the end of the buffer; in that case the returned
+        ///                  slice will be shorter.
+        ///
+        /// Returns: Characters starting at current position or an empty slice if out of bounds.
+        char[] prefix(const size_t length) @safe pure
+        {
+            return slice(length);
+        }
+
+        /// Get specified number of bytes, not code points, starting at current position.
+        ///
+        /// Note: This gets only a "view" into the internal buffer, which will be
+        ///       invalidated after other Reader calls. Use SliceBuilder to build slices
+        ///       for permanent use.
+        ///
+        /// Params: length = Number of bytes (not code points) to get. May NOT reach past
+        ///                  the end of the buffer; should be used with peek() to avoid
+        ///                  this.
+        ///
+        /// Returns: Bytes starting at current position.
+        char[] prefixBytes(const size_t length) @safe pure nothrow @nogc
+        in(length == 0 || bufferOffset_ + length <= buffer_.length, "prefixBytes out of bounds")
+        {
+            return buffer_[bufferOffset_ .. bufferOffset_ + length];
+        }
+
+        /// Get a slice view of the internal buffer, starting at the current position.
+        ///
+        /// Note: This gets only a "view" into the internal buffer,
+        ///       which gets invalidated after other Reader calls.
+        ///
+        /// Params: end = End of the slice relative to current position. May reach past
+        ///               the end of the buffer; in that case the returned slice will
+        ///               be shorter.
+        ///
+        /// Returns: Slice into the internal buffer or an empty slice if out of bounds.
+        char[] slice(const size_t end) @safe pure
+        {
+            // Fast path in case the caller has already peek()ed all the way to end.
+            if(end == lastDecodedCharOffset_)
+            {
+                return buffer_[bufferOffset_ .. lastDecodedBufferOffset_];
+            }
+
+            const asciiToTake = min(upcomingASCII_, end, buffer_.length);
+            lastDecodedCharOffset_ = asciiToTake;
+            lastDecodedBufferOffset_ = bufferOffset_ + asciiToTake;
+
+            // 'Slow' path - decode everything up to end.
+            while(lastDecodedCharOffset_ < end &&
+                  lastDecodedBufferOffset_ < buffer_.length)
+            {
+                decodeNext();
+            }
+
+            return buffer_[bufferOffset_ .. lastDecodedBufferOffset_];
+        }
+
+        /// Get the next character, moving buffer position beyond it.
+        ///
+        /// Returns: Next character.
+        ///
+        /// Throws: ReaderException if trying to read past the end of the buffer
+        ///         or if invalid data is read.
+        dchar get() @safe pure
+        {
+            const result = peek();
+            forward();
+            return result;
+        }
+
+        /// Get specified number of characters, moving buffer position beyond them.
+        ///
+        /// Params: length = Number of characters (code points, not bytes) to get.
+        ///
+        /// Returns: Characters starting at current position.
+        char[] get(const size_t length) @safe pure
+        {
+            auto result = slice(length);
+            forward(length);
+            return result;
+        }
+
+        /// Move current position forward.
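+        /// (Also updates the line/column counters and the upcoming-ASCII count
+        /// used to fast-path decoding.)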
+        ///
+        /// Params: length = Number of characters to move position forward.
+        void forward(size_t length) @safe pure
+        {
+            while(length > 0)
+            {
+                auto asciiToTake = min(upcomingASCII_, length);
+                charIndex_ += asciiToTake;
+                length -= asciiToTake;
+                upcomingASCII_ -= asciiToTake;
+
+                for(; asciiToTake > 0; --asciiToTake)
+                {
+                    const c = buffer_[bufferOffset_++];
+                    // c is ASCII, so we only need to check for ASCII line breaks.
+                    if(c == '\n' || (c == '\r' && buffer_[bufferOffset_] != '\n'))
+                    {
+                        ++line_;
+                        column_ = 0;
+                        continue;
+                    }
+                    ++column_;
+                }
+
+                // If we have used up all upcoming ASCII chars, the next char is
+                // non-ASCII even after this returns, so upcomingASCII_ doesn't need to
+                // be updated - it's zero.
+                if(length == 0) { break; }
+
+                assert(upcomingASCII_ == 0,
+                       "Running unicode handling code but we haven't run out of ASCII chars");
+                assert(bufferOffset_ < buffer_.length,
+                       "Attempted to decode past the end of YAML buffer");
+                assert(buffer_[bufferOffset_] >= 0x80,
+                       "ASCII must be handled by preceding code");
+
+                ++charIndex_;
+                const c = decode(buffer_, bufferOffset_);
+
+                // New line. (can compare with '\n' without decoding since it's ASCII)
+                if(c.isBreak || (c == '\r' && buffer_[bufferOffset_] != '\n'))
+                {
+                    ++line_;
+                    column_ = 0;
+                }
+                else if(c != '\uFEFF') { ++column_; }
+                --length;
+                checkASCII();
+            }
+
+            lastDecodedBufferOffset_ = bufferOffset_;
+            lastDecodedCharOffset_ = 0;
+        }
+
+        /// Move current position forward by one character.
+        void forward() @safe pure
+        {
+            ++charIndex_;
+            lastDecodedBufferOffset_ = bufferOffset_;
+            lastDecodedCharOffset_ = 0;
+
+            // ASCII
+            if(upcomingASCII_ > 0)
+            {
+                --upcomingASCII_;
+                const c = buffer_[bufferOffset_++];
+
+                if(c == '\n' || (c == '\r' && buffer_[bufferOffset_] != '\n'))
+                {
+                    ++line_;
+                    column_ = 0;
+                    return;
+                }
+                ++column_;
+                return;
+            }
+
+            // UTF-8
+            assert(bufferOffset_ < buffer_.length,
+                   "Attempted to decode past the end of YAML buffer");
+            assert(buffer_[bufferOffset_] >= 0x80,
+                   "ASCII must be handled by preceding code");
+
+            const c = decode(buffer_, bufferOffset_);
+
+            // New line. (can compare with '\n' without decoding since it's ASCII)
+            if(c.isBreak || (c == '\r' && buffer_[bufferOffset_] != '\n'))
+            {
+                ++line_;
+                column_ = 0;
+            }
+            else if(c != '\uFEFF') { ++column_; }
+
+            checkASCII();
+        }
+
+        /// Used to build slices of read data in Reader, to avoid allocations.
+        SliceBuilder sliceBuilder;
+
+        /// Get a string describing current buffer position, used for error messages.
+        Mark mark() const pure nothrow @nogc @safe { return Mark(name_, line_, column_); }
+
+        /// Get file name.
+        string name() const @safe pure nothrow @nogc { return name_; }
+
+        /// Set file name.
+        void name(string name) pure @safe nothrow @nogc { name_ = name; }
+
+        /// Get current line number.
+        uint line() const @safe pure nothrow @nogc { return line_; }
+
+        /// Get current column number.
+        uint column() const @safe pure nothrow @nogc { return column_; }
+
+        /// Get index of the current character in the buffer.
+        size_t charIndex() const @safe pure nothrow @nogc { return charIndex_; }
+
+        /// Get encoding of the input buffer.
+        Encoding encoding() const @safe pure nothrow @nogc { return encoding_; }
+
+private:
+    // Update upcomingASCII_ (should be called after forward()ing over a UTF-8
+    // sequence)
+    void checkASCII() @safe pure nothrow @nogc
+    {
+        upcomingASCII_ = countASCII(buffer_[bufferOffset_ .. $]);
+    }
+
+    // Decode the next character relative to
+    // lastDecodedCharOffset_/lastDecodedBufferOffset_ and update them.
+    //
+    // Does not advance the buffer position. Used in peek() and slice().
+    dchar decodeNext() @safe pure
+    {
+        assert(lastDecodedBufferOffset_ < buffer_.length,
+               "Attempted to decode past the end of YAML buffer");
+        const char b = buffer_[lastDecodedBufferOffset_];
+        ++lastDecodedCharOffset_;
+        // ASCII
+        if(b < 0x80)
+        {
+            ++lastDecodedBufferOffset_;
+            return b;
+        }
+
+        return decode(buffer_, lastDecodedBufferOffset_);
+    }
+}
+
+/// Used to build slices of already read data in Reader buffer, avoiding allocations.
+///
+/// Usually these slices point to unchanged Reader data, but sometimes the data is
+/// changed due to how YAML interprets certain characters/strings.
+///
+/// See begin() documentation.
+struct SliceBuilder
+{
+private:
+    // No copying by the user.
+    @disable this(this);
+    @disable void opAssign(ref SliceBuilder);
+
+    // Reader this builder works in.
+    Reader reader_;
+
+    // Start of the slice in reader_.buffer_ (size_t.max while no slice is being built)
+    size_t start_ = size_t.max;
+    // End of the slice in reader_.buffer_ (size_t.max while no slice is being built)
+    size_t end_ = size_t.max;
+
+    // Stack of slice ends to revert to (see Transaction)
+    //
+    // Very few levels as we don't want arbitrarily nested transactions.
+    size_t[4] endStack_;
+    // The number of elements currently in endStack_.
+    size_t endStackUsed_;
+
+    @safe const pure nothrow @nogc invariant()
+    {
+        if(!inProgress) { return; }
+        assert(end_ <= reader_.bufferOffset_, "Slice ends after buffer position");
+        assert(start_ <= end_, "Slice start after slice end");
+    }
+
+    // Is a slice currently being built?
+    bool inProgress() @safe const pure nothrow @nogc
+    in(start_ == size_t.max ? end_ == size_t.max : end_ != size_t.max, "start_/end_ are not consistent")
+    {
+        return start_ != size_t.max;
+    }
+
+public:
+    /// Begin building a slice.
+    ///
+    /// Only one slice can be built at any given time; before beginning a new slice,
+    /// finish the previous one (if any).
+    ///
+    /// The slice starts at the current position in the Reader buffer. It can only be
+    /// extended up to the current position in the buffer; Reader methods get() and
+    /// forward() move the position. E.g. it is valid to extend a slice by write()-ing
+    /// a string just returned by get() - but not one returned by prefix() unless the
+    /// position has changed since the prefix() call.
+    void begin() @safe pure nothrow @nogc
+    in(!inProgress, "Beginning a slice while another slice is being built")
+    in(endStackUsed_ == 0, "Slice stack not empty at slice begin")
+    {
+
+        start_ = reader_.bufferOffset_;
+        end_ = reader_.bufferOffset_;
+    }
+
+    /// Finish building a slice and return it.
+    ///
+    /// Any Transactions on the slice must be committed or destroyed before the slice
+    /// is finished.
+    ///
+    /// Returns the built slice; once a slice is finished it is guaranteed that its
+    /// contents will not be changed.
+    char[] finish() @safe pure nothrow @nogc
+    in(inProgress, "finish called without begin")
+    in(endStackUsed_ == 0, "Finishing a slice with running transactions.")
+    {
+
+        auto result = reader_.buffer_[start_ .. end_];
+        start_ = end_ = size_t.max;
+        return result;
+    }
+
+    /// Write a string to the slice being built.
+    ///
+    /// Data can only be written up to the current position in the Reader buffer.
+    ///
+    /// If str is a string returned by a Reader method, and str starts right after the
+    /// end of the slice being built, the slice is extended (trivial operation).
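+    /// Otherwise the data is copied into the Reader buffer right after the
+    /// current end of the slice.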
+    ///
+    /// See_Also: begin
+    void write(scope char[] str) @safe pure nothrow @nogc
+    {
+        assert(inProgress, "write called without begin");
+        assert(end_ <= reader_.bufferOffset_,
+               "AT START: Slice ends after buffer position");
+
+        // Nothing? Already done.
+        if (str.length == 0) { return; }
+        // If str starts at the end of the slice (is a string returned by a Reader
+        // method), just extend the slice to contain str.
+        if(&str[0] == &reader_.buffer_[end_])
+        {
+            end_ += str.length;
+        }
+        // Even if str does not start at the end of the slice, it still may be returned
+        // by a Reader method and point to buffer. So we need to memmove.
+        else
+        {
+            copy(str, reader_.buffer_[end_..end_ + str.length * char.sizeof]);
+            end_ += str.length;
+        }
+    }
+
+    /// Write a character to the slice being built.
+    ///
+    /// Data can only be written up to the current position in the Reader buffer.
+    ///
+    /// See_Also: begin
+    void write(dchar c) @safe pure
+    in(inProgress, "write called without begin")
+    {
+        if(c < 0x80)
+        {
+            reader_.buffer_[end_++] = cast(char)c;
+            return;
+        }
+
+        // We need to encode a non-ASCII dchar into UTF-8
+        char[4] encodeBuf;
+        const bytes = encode(encodeBuf, c);
+        reader_.buffer_[end_ .. end_ + bytes] = encodeBuf[0 .. bytes];
+        end_ += bytes;
+    }
+
+    /// Insert a character at a specified position in the slice.
+    ///
+    /// Enlarges the slice by 1 char. Note that the slice can only extend up to the
+    /// current position in the Reader buffer.
+    ///
+    /// Params:
+    ///
+    /// c        = The character to insert.
+    /// position = Position to insert the character at in code units, not code points.
+    ///            Must be less than slice length(); a previously returned length()
+    ///            can be used.
+    void insert(const dchar c, const size_t position) @safe pure
+    in(inProgress, "insert called without begin")
+    in(start_ + position <= end_, "Trying to insert after the end of the slice")
+    {
+
+        const point = start_ + position;
+        const movedLength = end_ - point;
+
+        // Encode c into UTF-8
+        char[4] encodeBuf;
+        if(c < 0x80) { encodeBuf[0] = cast(char)c; }
+        const size_t bytes = c < 0x80 ? 1 : encode(encodeBuf, c);
+
+        if(movedLength > 0)
+        {
+            copy(reader_.buffer_[point..point + movedLength * char.sizeof],
+                 reader_.buffer_[point + bytes..point + bytes + movedLength * char.sizeof]);
+        }
+        reader_.buffer_[point .. point + bytes] = encodeBuf[0 .. bytes];
+        end_ += bytes;
+    }
+
+    /// Get the current length of the slice.
+    size_t length() @safe const pure nothrow @nogc
+    {
+        return end_ - start_;
+    }
+
+    /// A slice building transaction.
+    ///
+    /// Can be used to save and revert back to slice state.
+    struct Transaction
+    {
+    private:
+        // The slice builder affected by the transaction.
+        SliceBuilder* builder_;
+        // Index of the return point of the transaction in SliceBuilder.endStack_.
+        size_t stackLevel_;
+        // True after commit() has been called.
+        bool committed_;
+
+    public:
+        /// Begins a transaction on a SliceBuilder object.
+        ///
+        /// The transaction must end $(B after) any transactions created within the
+        /// transaction but $(B before) the slice is finish()-ed. A transaction can be
+        /// ended either by commit()-ing or reverting through the destructor.
+        ///
+        /// Saves the current state of a slice.
+        this(SliceBuilder* builder) @safe pure nothrow @nogc
+        {
+            builder_ = builder;
+            stackLevel_ = builder_.endStackUsed_;
+            builder_.push();
+        }
+
+        /// Commit changes to the slice.
+        ///
+        /// Ends the transaction - can only be called once, and removes the possibility
+        /// to revert slice state.
+ /// + /// Does nothing for a default-initialized transaction (the transaction has not + /// been started yet). + void commit() @safe pure nothrow @nogc + in(!committed_, "Can't commit a transaction more than once") + { + + if(builder_ is null) { return; } + assert(builder_.endStackUsed_ == stackLevel_ + 1, + "Parent transactions don't fully contain child transactions"); + builder_.apply(); + committed_ = true; + } + + /// Destroy the transaction and revert it if it hasn't been committed yet. + void end() @safe pure nothrow @nogc + in(builder_ && builder_.endStackUsed_ == stackLevel_ + 1, "Parent transactions don't fully contain child transactions") + { + builder_.pop(); + builder_ = null; + } + + } + +private: + // Push the current end of the slice so we can revert to it if needed. + // + // Used by Transaction. + void push() @safe pure nothrow @nogc + in(inProgress, "push called without begin") + in(endStackUsed_ < endStack_.length, "Slice stack overflow") + { + endStack_[endStackUsed_++] = end_; + } + + // Pop the current end of endStack_ and set the end of the slice to the popped + // value, reverting changes since the old end was pushed. + // + // Used by Transaction. + void pop() @safe pure nothrow @nogc + in(inProgress, "pop called without begin") + in(endStackUsed_ > 0, "Trying to pop an empty slice stack") + { + end_ = endStack_[--endStackUsed_]; + } + + // Pop the current end of endStack_, but keep the current end of the slice, applying + // changes made since pushing the old end. + // + // Used by Transaction. + void apply() @safe pure nothrow @nogc + in(inProgress, "apply called without begin") + in(endStackUsed_ > 0, "Trying to apply an empty slice stack") + { + --endStackUsed_; + } +} + + +private: + +// Convert a UTF-8/16/32 buffer to UTF-8, in-place if possible. +// +// Params: +// +// input = Buffer with UTF-8/16/32 data to decode. May be overwritten by the +// conversion, in which case the result will be a slice of this buffer. +// encoding = Encoding of input. +// +// Returns: +// +// A struct with the following members: +// +// $(D string errorMessage) In case of an error, the error message is stored here. If +// there was no error, errorMessage is NULL. Always check +// this first. +// $(D char[] utf8) input converted to UTF-8. May be a slice of input. +// $(D size_t characterCount) Number of characters (code points) in input. +auto toUTF8(ubyte[] input, const UTFEncoding encoding) @safe pure nothrow +{ + // Documented in function ddoc. + struct Result + { + string errorMessage; + char[] utf8; + size_t characterCount; + } + + Result result; + + // Encode input_ into UTF-8 if it's encoded as UTF-16 or UTF-32. + // + // Params: + // + // buffer = The input buffer to encode. + // result = A Result struct to put encoded result and any error messages to. + // + // On error, result.errorMessage will be set. + static void encode(C)(C[] input, ref Result result) @safe pure + { + // We can do UTF-32->UTF-8 in place because all UTF-8 sequences are 4 or + // less bytes. + static if(is(C == dchar)) + { + char[4] encodeBuf; + auto utf8 = cast(char[])input; + auto length = 0; + foreach(dchar c; input) + { + ++result.characterCount; + // ASCII + if(c < 0x80) + { + utf8[length++] = cast(char)c; + continue; + } + + std.utf.encode(encodeBuf, c); + const bytes = codeLength!char(c); + utf8[length .. length + bytes] = encodeBuf[0 .. bytes]; + length += bytes; + } + result.utf8 = utf8[0 .. 
length]; + } + // Unfortunately we can't do UTF-16 in place so we just use std.conv.to + else + { + result.characterCount = std.utf.count(input); + result.utf8 = input.to!(char[]); + } + } + + try final switch(encoding) + { + case UTFEncoding.UTF_8: + result.utf8 = cast(char[])input; + result.utf8.validate(); + result.characterCount = std.utf.count(result.utf8); + break; + case UTFEncoding.UTF_16: + assert(input.length % 2 == 0, "UTF-16 buffer size must be even"); + encode(cast(wchar[])input, result); + break; + case UTFEncoding.UTF_32: + assert(input.length % 4 == 0, "UTF-32 buffer size must be a multiple of 4"); + encode(cast(dchar[])input, result); + break; + } + catch(ConvException e) { result.errorMessage = e.msg; } + catch(UTFException e) { result.errorMessage = e.msg; } + catch(Exception e) + { + assert(false, "Unexpected exception in encode(): " ~ e.msg); + } + + return result; +} + +/// Determine if all characters (code points, not bytes) in a string are printable. +bool isPrintableValidUTF8(const char[] chars) @safe pure +{ + import std.uni : isControl, isWhite; + foreach (dchar chr; chars) + { + if (!chr.isValidDchar || (chr.isControl && !chr.isWhite)) + { + return false; + } + } + return true; +} + +/// Counts the number of ASCII characters in buffer until the first UTF-8 sequence. +/// +/// Used to determine how many characters we can process without decoding. +size_t countASCII(const(char)[] buffer) @safe pure nothrow @nogc +{ + return buffer.byCodeUnit.until!(x => x > 0x7F).walkLength; +} +// Unittests. + +void testEndian(R)() +{ + void endian_test(ubyte[] data, Encoding encoding_expected, Endian endian_expected) + { + auto reader = new R(data); + assert(reader.encoding == encoding_expected); + assert(reader.endian_ == endian_expected); + } + ubyte[] little_endian_utf_16 = [0xFF, 0xFE, 0x7A, 0x00]; + ubyte[] big_endian_utf_16 = [0xFE, 0xFF, 0x00, 0x7A]; + endian_test(little_endian_utf_16, Encoding.UTF_16, Endian.littleEndian); + endian_test(big_endian_utf_16, Encoding.UTF_16, Endian.bigEndian); +} + +void testPeekPrefixForward(R)() +{ + import std.encoding; + ubyte[] data = bomTable[BOM.utf8].sequence ~ cast(ubyte[])"data"; + auto reader = new R(data); + assert(reader.peek() == 'd'); + assert(reader.peek(1) == 'a'); + assert(reader.peek(2) == 't'); + assert(reader.peek(3) == 'a'); + assert(reader.peek(4) == '\0'); + assert(reader.prefix(4) == "data"); + // assert(reader.prefix(6) == "data\0"); + reader.forward(2); + assert(reader.peek(1) == 'a'); + // assert(collectException(reader.peek(3))); +} + +void testUTF(R)() +{ + import std.encoding; + dchar[] data = cast(dchar[])"data"; + void utf_test(T)(T[] data, BOM bom) + { + ubyte[] bytes = bomTable[bom].sequence ~ + (cast(ubyte[])data)[0 .. data.length * T.sizeof]; + auto reader = new R(bytes); + assert(reader.peek() == 'd'); + assert(reader.peek(1) == 'a'); + assert(reader.peek(2) == 't'); + assert(reader.peek(3) == 'a'); + } + utf_test!char(to!(char[])(data), BOM.utf8); + utf_test!wchar(to!(wchar[])(data), endian == Endian.bigEndian ? BOM.utf16be : BOM.utf16le); + utf_test(data, endian == Endian.bigEndian ? 
BOM.utf32be : BOM.utf32le); +} + +void test1Byte(R)() +{ + ubyte[] data = [97]; + + auto reader = new R(data); + assert(reader.peek() == 'a'); + assert(reader.peek(1) == '\0'); + // assert(collectException(reader.peek(2))); +} + +@system unittest +{ + testEndian!Reader(); + testPeekPrefixForward!Reader(); + testUTF!Reader(); + test1Byte!Reader(); +} +//Issue 257 - https://github.com/dlang-community/D-YAML/issues/257 +@safe unittest +{ + import dyaml.loader : Loader; + auto yaml = "hello "; + auto root = Loader.fromString(yaml).load(); + + assert(root.isValid); +} diff --git a/source/dyaml/representer.d b/source/dyaml/representer.d new file mode 100644 index 0000000..f903b60 --- /dev/null +++ b/source/dyaml/representer.d @@ -0,0 +1,517 @@ + +// Copyright Ferdinand Majerech 2011. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +/** + * YAML node _representer. Prepares YAML nodes for output. A tutorial can be + * found $(LINK2 ../tutorials/custom_types.html, here). + * + * Code based on $(LINK2 http://www.pyyaml.org, PyYAML). + */ +module dyaml.representer; + + +import std.algorithm; +import std.array; +import std.base64; +import std.container; +import std.conv; +import std.datetime; +import std.exception; +import std.format; +import std.math; +import std.typecons; +import std.string; + +import dyaml.exception; +import dyaml.node; +import dyaml.serializer; +import dyaml.style; + +package: +///Exception thrown on Representer errors. +class RepresenterException : YAMLException +{ + mixin ExceptionCtors; +} + +/** + * Represents YAML nodes as scalar, sequence and mapping nodes ready for output. + */ +Node representData(const Node data, ScalarStyle defaultScalarStyle, CollectionStyle defaultCollectionStyle) @safe +{ + Node result; + final switch(data.type) + { + case NodeType.null_: + result = representNull(); + break; + case NodeType.merge: + break; + case NodeType.boolean: + result = representBool(data); + break; + case NodeType.integer: + result = representLong(data); + break; + case NodeType.decimal: + result = representReal(data); + break; + case NodeType.binary: + result = representBytes(data); + break; + case NodeType.timestamp: + result = representSysTime(data); + break; + case NodeType.string: + result = representString(data); + break; + case NodeType.mapping: + result = representPairs(data, defaultScalarStyle, defaultCollectionStyle); + break; + case NodeType.sequence: + result = representNodes(data, defaultScalarStyle, defaultCollectionStyle); + break; + case NodeType.invalid: + assert(0); + } + + final switch (result.nodeID) + { + case NodeID.scalar: + if (result.scalarStyle == ScalarStyle.invalid) + { + result.scalarStyle = defaultScalarStyle; + } + break; + case NodeID.sequence, NodeID.mapping: + if (defaultCollectionStyle != CollectionStyle.invalid) + { + result.collectionStyle = defaultCollectionStyle; + } + break; + case NodeID.invalid: + break; + } + + + //Override tag if specified. + if(data.tag_ !is null){result.tag_ = data.tag_;} + + //Remember style if this was loaded before. + if(data.scalarStyle != ScalarStyle.invalid) + { + result.scalarStyle = data.scalarStyle; + } + if(data.collectionStyle != CollectionStyle.invalid) + { + result.collectionStyle = data.collectionStyle; + } + return result; +} + +@safe unittest +{ + // We don't emit yaml merge nodes. 
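+ // Illustrative note: merge keys ("<<") are resolved while a document is
+ // loaded, so there is nothing left to represent here and representData
+ // maps a merge node to Node.init, as asserted below.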
+ assert(representData(Node(YAMLMerge()), ScalarStyle.invalid, CollectionStyle.invalid) == Node.init); +} + +@safe unittest +{ + assert(representData(Node(YAMLNull()), ScalarStyle.invalid, CollectionStyle.invalid) == Node("null", "tag:yaml.org,2002:null")); +} + +@safe unittest +{ + assert(representData(Node(cast(string)null), ScalarStyle.invalid, CollectionStyle.invalid) == Node("", "tag:yaml.org,2002:str")); + assert(representData(Node("Hello world!"), ScalarStyle.invalid, CollectionStyle.invalid) == Node("Hello world!", "tag:yaml.org,2002:str")); +} + +@safe unittest +{ + assert(representData(Node(64), ScalarStyle.invalid, CollectionStyle.invalid) == Node("64", "tag:yaml.org,2002:int")); +} + +@safe unittest +{ + assert(representData(Node(true), ScalarStyle.invalid, CollectionStyle.invalid) == Node("true", "tag:yaml.org,2002:bool")); + assert(representData(Node(false), ScalarStyle.invalid, CollectionStyle.invalid) == Node("false", "tag:yaml.org,2002:bool")); +} + +@safe unittest +{ + // Float comparison is pretty unreliable... + auto result = representData(Node(1.0), ScalarStyle.invalid, CollectionStyle.invalid); + assert(isClose(result.as!string.to!real, 1.0)); + assert(result.tag == "tag:yaml.org,2002:float"); + + assert(representData(Node(real.nan), ScalarStyle.invalid, CollectionStyle.invalid) == Node(".nan", "tag:yaml.org,2002:float")); + assert(representData(Node(real.infinity), ScalarStyle.invalid, CollectionStyle.invalid) == Node(".inf", "tag:yaml.org,2002:float")); + assert(representData(Node(-real.infinity), ScalarStyle.invalid, CollectionStyle.invalid) == Node("-.inf", "tag:yaml.org,2002:float")); +} + +@safe unittest +{ + assert(representData(Node(SysTime(DateTime(2000, 3, 14, 12, 34, 56), UTC())), ScalarStyle.invalid, CollectionStyle.invalid) == Node("2000-03-14T12:34:56Z", "tag:yaml.org,2002:timestamp")); +} + +@safe unittest +{ + assert(representData(Node(Node[].init, "tag:yaml.org,2002:set"), ScalarStyle.invalid, CollectionStyle.invalid) == Node(Node.Pair[].init, "tag:yaml.org,2002:set")); + assert(representData(Node(Node[].init, "tag:yaml.org,2002:seq"), ScalarStyle.invalid, CollectionStyle.invalid) == Node(Node[].init, "tag:yaml.org,2002:seq")); + { + auto nodes = [ + Node("a"), + Node("b"), + Node("c"), + ]; + assert(representData(Node(nodes, "tag:yaml.org,2002:set"), ScalarStyle.invalid, CollectionStyle.invalid) == + Node([ + Node.Pair( + Node("a", "tag:yaml.org,2002:str"), + Node("null", "tag:yaml.org,2002:null") + ), + Node.Pair( + Node("b", "tag:yaml.org,2002:str"), + Node("null", "tag:yaml.org,2002:null") + ), + Node.Pair( + Node("c", "tag:yaml.org,2002:str"), + Node("null", "tag:yaml.org,2002:null") + ) + ], "tag:yaml.org,2002:set")); + } + { + auto nodes = [ + Node("a"), + Node("b"), + Node("c"), + ]; + assert(representData(Node(nodes, "tag:yaml.org,2002:seq"), ScalarStyle.invalid, CollectionStyle.invalid) == + Node([ + Node("a", "tag:yaml.org,2002:str"), + Node("b", "tag:yaml.org,2002:str"), + Node("c", "tag:yaml.org,2002:str") + ], "tag:yaml.org,2002:seq")); + } +} + +@safe unittest +{ + assert(representData(Node(Node.Pair[].init, "tag:yaml.org,2002:omap"), ScalarStyle.invalid, CollectionStyle.invalid) == Node(Node[].init, "tag:yaml.org,2002:omap")); + assert(representData(Node(Node.Pair[].init, "tag:yaml.org,2002:pairs"), ScalarStyle.invalid, CollectionStyle.invalid) == Node(Node[].init, "tag:yaml.org,2002:pairs")); + assert(representData(Node(Node.Pair[].init, "tag:yaml.org,2002:map"), ScalarStyle.invalid, CollectionStyle.invalid) == Node(Node.Pair[].init, 
"tag:yaml.org,2002:map")); + { + auto nodes = [ + Node.Pair("a", "b"), + Node.Pair("a", "c") + ]; + assertThrown(representData(Node(nodes, "tag:yaml.org,2002:omap"), ScalarStyle.invalid, CollectionStyle.invalid)); + } + // Yeah, this gets ugly really fast. + { + auto nodes = [ + Node.Pair("a", "b"), + Node.Pair("a", "c") + ]; + assert(representData(Node(nodes, "tag:yaml.org,2002:pairs"), ScalarStyle.invalid, CollectionStyle.invalid) == + Node([ + Node( + [Node.Pair( + Node("a", "tag:yaml.org,2002:str"), + Node("b", "tag:yaml.org,2002:str") + )], + "tag:yaml.org,2002:map"), + Node( + [Node.Pair( + Node("a", "tag:yaml.org,2002:str"), + Node("c", "tag:yaml.org,2002:str") + )], + "tag:yaml.org,2002:map"), + ], "tag:yaml.org,2002:pairs")); + } + { + auto nodes = [ + Node.Pair("a", "b"), + Node.Pair("a", "c") + ]; + assertThrown(representData(Node(nodes, "tag:yaml.org,2002:map"), ScalarStyle.invalid, CollectionStyle.invalid)); + } + { + auto nodes = [ + Node.Pair("a", "b"), + Node.Pair("c", "d") + ]; + assert(representData(Node(nodes, "tag:yaml.org,2002:omap"), ScalarStyle.invalid, CollectionStyle.invalid) == + Node([ + Node([ + Node.Pair( + Node("a", "tag:yaml.org,2002:str"), + Node("b", "tag:yaml.org,2002:str") + ) + ], "tag:yaml.org,2002:map"), + Node([ + Node.Pair( + Node("c", "tag:yaml.org,2002:str"), + Node("d", "tag:yaml.org,2002:str") + ) + ], "tag:yaml.org,2002:map" + )], "tag:yaml.org,2002:omap")); + } + { + auto nodes = [ + Node.Pair("a", "b"), + Node.Pair("c", "d") + ]; + assert(representData(Node(nodes, "tag:yaml.org,2002:map"), ScalarStyle.invalid, CollectionStyle.invalid) == + Node([ + Node.Pair( + Node("a", "tag:yaml.org,2002:str"), + Node("b", "tag:yaml.org,2002:str") + ), + Node.Pair( + Node("c", "tag:yaml.org,2002:str"), + Node("d", "tag:yaml.org,2002:str") + ), + ], "tag:yaml.org,2002:map")); + } +} + +private: + +//Represent a _null _node as a _null YAML value. +Node representNull() @safe +{ + return Node("null", "tag:yaml.org,2002:null"); +} + +//Represent a string _node as a string scalar. +Node representString(const Node node) @safe +{ + string value = node.as!string; + return Node(value, "tag:yaml.org,2002:str"); +} + +//Represent a bytes _node as a binary scalar. +Node representBytes(const Node node) @safe +{ + const ubyte[] value = node.as!(ubyte[]); + if(value is null){return Node("null", "tag:yaml.org,2002:null");} + + auto newNode = Node(Base64.encode(value).idup, "tag:yaml.org,2002:binary"); + newNode.scalarStyle = ScalarStyle.literal; + return newNode; +} + +//Represent a bool _node as a bool scalar. +Node representBool(const Node node) @safe +{ + return Node(node.as!bool ? "true" : "false", "tag:yaml.org,2002:bool"); +} + +//Represent a long _node as an integer scalar. +Node representLong(const Node node) @safe +{ + return Node(node.as!long.to!string, "tag:yaml.org,2002:int"); +} + +//Represent a real _node as a floating point scalar. +Node representReal(const Node node) @safe +{ + real f = node.as!real; + string value = isNaN(f) ? ".nan": + f == real.infinity ? ".inf": + f == -1.0 * real.infinity ? "-.inf": + {auto a = appender!string(); + formattedWrite(a, "%12f", f); + return a.data.strip();}(); + + return Node(value, "tag:yaml.org,2002:float"); +} + +//Represent a SysTime _node as a timestamp. +Node representSysTime(const Node node) @safe +{ + return Node(node.as!SysTime.toISOExtString(), "tag:yaml.org,2002:timestamp"); +} + +//Represent a sequence _node as sequence/set. 
+Node representNodes(const Node node, ScalarStyle defaultScalarStyle, CollectionStyle defaultCollectionStyle) @safe +{ + auto nodes = node.as!(Node[]); + if(node.tag_ == "tag:yaml.org,2002:set") + { + //YAML sets are mapping with null values. + Node.Pair[] pairs; + pairs.length = nodes.length; + + foreach(idx, key; nodes) + { + pairs[idx] = Node.Pair(key, Node("null", "tag:yaml.org,2002:null")); + } + Node.Pair[] value; + value.length = pairs.length; + + auto bestStyle = CollectionStyle.flow; + foreach(idx, pair; pairs) + { + value[idx] = Node.Pair(representData(pair.key, defaultScalarStyle, defaultCollectionStyle), representData(pair.value, defaultScalarStyle, defaultCollectionStyle)); + if(value[idx].shouldUseBlockStyle) + { + bestStyle = CollectionStyle.block; + } + } + + auto newNode = Node(value, node.tag_); + newNode.collectionStyle = bestStyle; + return newNode; + } + else + { + Node[] value; + value.length = nodes.length; + + auto bestStyle = CollectionStyle.flow; + foreach(idx, item; nodes) + { + value[idx] = representData(item, defaultScalarStyle, defaultCollectionStyle); + const isScalar = value[idx].nodeID == NodeID.scalar; + const s = value[idx].scalarStyle; + if(!isScalar || (s != ScalarStyle.invalid && s != ScalarStyle.plain)) + { + bestStyle = CollectionStyle.block; + } + } + + auto newNode = Node(value, "tag:yaml.org,2002:seq"); + newNode.collectionStyle = bestStyle; + return newNode; + } +} + +bool shouldUseBlockStyle(const Node value) @safe +{ + const isScalar = value.nodeID == NodeID.scalar; + const s = value.scalarStyle; + return (!isScalar || (s != ScalarStyle.invalid && s != ScalarStyle.plain)); +} +bool shouldUseBlockStyle(const Node.Pair value) @safe +{ + const keyScalar = value.key.nodeID == NodeID.scalar; + const valScalar = value.value.nodeID == NodeID.scalar; + const keyStyle = value.key.scalarStyle; + const valStyle = value.value.scalarStyle; + if(!keyScalar || + (keyStyle != ScalarStyle.invalid && keyStyle != ScalarStyle.plain)) + { + return true; + } + if(!valScalar || + (valStyle != ScalarStyle.invalid && valStyle != ScalarStyle.plain)) + { + return true; + } + return false; +} + +//Represent a mapping _node as map/ordered map/pairs. +Node representPairs(const Node node, ScalarStyle defaultScalarStyle, CollectionStyle defaultCollectionStyle) @safe +{ + auto pairs = node.as!(Node.Pair[]); + + bool hasDuplicates(const Node.Pair[] pairs) @safe + { + //TODO this should be replaced by something with deterministic memory allocation. + auto keys = redBlackTree!Node(); + foreach(pair; pairs) + { + if(pair.key in keys){return true;} + keys.insert(pair.key); + } + return false; + } + + Node[] mapToSequence(const Node.Pair[] pairs) @safe + { + Node[] nodes; + nodes.length = pairs.length; + foreach(idx, pair; pairs) + { + Node.Pair value; + + auto bestStyle = value.shouldUseBlockStyle ? 
CollectionStyle.block : CollectionStyle.flow; + value = Node.Pair(representData(pair.key, defaultScalarStyle, defaultCollectionStyle), representData(pair.value, defaultScalarStyle, defaultCollectionStyle)); + + auto newNode = Node([value], "tag:yaml.org,2002:map"); + newNode.collectionStyle = bestStyle; + nodes[idx] = newNode; + } + return nodes; + } + + if(node.tag_ == "tag:yaml.org,2002:omap") + { + enforce(!hasDuplicates(pairs), + new RepresenterException("Duplicate entry in an ordered map")); + auto sequence = mapToSequence(pairs); + Node[] value; + value.length = sequence.length; + + auto bestStyle = CollectionStyle.flow; + foreach(idx, item; sequence) + { + value[idx] = representData(item, defaultScalarStyle, defaultCollectionStyle); + if(value[idx].shouldUseBlockStyle) + { + bestStyle = CollectionStyle.block; + } + } + + auto newNode = Node(value, node.tag_); + newNode.collectionStyle = bestStyle; + return newNode; + } + else if(node.tag_ == "tag:yaml.org,2002:pairs") + { + auto sequence = mapToSequence(pairs); + Node[] value; + value.length = sequence.length; + + auto bestStyle = CollectionStyle.flow; + foreach(idx, item; sequence) + { + value[idx] = representData(item, defaultScalarStyle, defaultCollectionStyle); + if(value[idx].shouldUseBlockStyle) + { + bestStyle = CollectionStyle.block; + } + } + + auto newNode = Node(value, node.tag_); + newNode.collectionStyle = bestStyle; + return newNode; + } + else + { + enforce(!hasDuplicates(pairs), + new RepresenterException("Duplicate entry in an unordered map")); + Node.Pair[] value; + value.length = pairs.length; + + auto bestStyle = CollectionStyle.flow; + foreach(idx, pair; pairs) + { + value[idx] = Node.Pair(representData(pair.key, defaultScalarStyle, defaultCollectionStyle), representData(pair.value, defaultScalarStyle, defaultCollectionStyle)); + if(value[idx].shouldUseBlockStyle) + { + bestStyle = CollectionStyle.block; + } + } + + auto newNode = Node(value, "tag:yaml.org,2002:map"); + newNode.collectionStyle = bestStyle; + return newNode; + } +} diff --git a/source/dyaml/resolver.d b/source/dyaml/resolver.d new file mode 100644 index 0000000..16d8419 --- /dev/null +++ b/source/dyaml/resolver.d @@ -0,0 +1,260 @@ + +// Copyright Ferdinand Majerech 2011. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +/** + * Implements a class that resolves YAML tags. This can be used to implicitly + * resolve tags for custom data types, removing the need to explicitly + * specify tags in YAML. A tutorial can be found + * $(LINK2 ../tutorials/custom_types.html, here). + * + * Code based on $(LINK2 http://www.pyyaml.org, PyYAML). + */ +module dyaml.resolver; + + +import std.conv; +import std.regex; +import std.typecons; +import std.utf; + +import dyaml.node; +import dyaml.exception; + + +/// Type of `regexes` +private alias RegexType = Tuple!(string, "tag", const Regex!char, "regexp", string, "chars"); + +private immutable RegexType[] regexes = [ + RegexType("tag:yaml.org,2002:bool", + regex(r"^(?:yes|Yes|YES|no|No|NO|true|True|TRUE" ~ + "|false|False|FALSE|on|On|ON|off|Off|OFF)$"), + "yYnNtTfFoO"), + RegexType("tag:yaml.org,2002:float", + regex(r"^(?:[-+]?([0-9][0-9_]*)\\.[0-9_]*" ~ + "(?:[eE][-+][0-9]+)?|[-+]?(?:[0-9][0-9_]" ~ + "*)?\\.[0-9_]+(?:[eE][-+][0-9]+)?|[-+]?" ~ + "[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]" ~ + "*|[-+]?\\.(?:inf|Inf|INF)|\\." 
~ + "(?:nan|NaN|NAN))$"), + "-+0123456789."), + RegexType("tag:yaml.org,2002:int", + regex(r"^(?:[-+]?0b[0-1_]+" ~ + "|[-+]?0[0-7_]+" ~ + "|[-+]?(?:0|[1-9][0-9_]*)" ~ + "|[-+]?0x[0-9a-fA-F_]+" ~ + "|[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$"), + "-+0123456789"), + RegexType("tag:yaml.org,2002:merge", regex(r"^<<$"), "<"), + RegexType("tag:yaml.org,2002:null", + regex(r"^$|^(?:~|null|Null|NULL)$"), "~nN\0"), + RegexType("tag:yaml.org,2002:timestamp", + regex(r"^[0-9][0-9][0-9][0-9]-[0-9][0-9]-" ~ + "[0-9][0-9]|[0-9][0-9][0-9][0-9]-[0-9]" ~ + "[0-9]?-[0-9][0-9]?[Tt]|[ \t]+[0-9]" ~ + "[0-9]?:[0-9][0-9]:[0-9][0-9]" ~ + "(?:\\.[0-9]*)?(?:[ \t]*Z|[-+][0-9]" ~ + "[0-9]?(?::[0-9][0-9])?)?$"), + "0123456789"), + RegexType("tag:yaml.org,2002:value", regex(r"^=$"), "="), + + //The following resolver is only for documentation purposes. It cannot work + //because plain scalars cannot start with '!', '&', or '*'. + RegexType("tag:yaml.org,2002:yaml", regex(r"^(?:!|&|\*)$"), "!&*"), +]; + +/** + * Resolves YAML tags (data types). + * + * Can be used to implicitly resolve custom data types of scalar values. + */ +struct Resolver +{ + private: + // Default tag to use for scalars. + string defaultScalarTag_ = "tag:yaml.org,2002:str"; + // Default tag to use for sequences. + string defaultSequenceTag_ = "tag:yaml.org,2002:seq"; + // Default tag to use for mappings. + string defaultMappingTag_ = "tag:yaml.org,2002:map"; + + /* + * Arrays of scalar resolver tuples indexed by starting character of a scalar. + * + * Each tuple stores regular expression the scalar must match, + * and tag to assign to it if it matches. + */ + Tuple!(string, const Regex!char)[][dchar] yamlImplicitResolvers_; + + package: + static auto withDefaultResolvers() @safe + { + Resolver resolver; + foreach(pair; regexes) + { + resolver.addImplicitResolver(pair.tag, pair.regexp, pair.chars); + } + return resolver; + } + + public: + @disable bool opEquals(ref Resolver); + @disable int opCmp(ref Resolver); + + /** + * Add an implicit scalar resolver. + * + * If a scalar matches regexp and starts with any character in first, + * its _tag is set to tag. If it matches more than one resolver _regexp + * resolvers added _first override ones added later. Default resolvers + * override any user specified resolvers, but they can be disabled in + * Resolver constructor. + * + * If a scalar is not resolved to anything, it is assigned the default + * YAML _tag for strings. + * + * Params: tag = Tag to resolve to. + * regexp = Regular expression the scalar must match to have this _tag. + * first = String of possible starting characters of the scalar. + * + */ + void addImplicitResolver(string tag, const Regex!char regexp, string first) + pure @safe + { + foreach(const dchar c; first) + { + if((c in yamlImplicitResolvers_) is null) + { + yamlImplicitResolvers_[c] = []; + } + yamlImplicitResolvers_[c] ~= tuple(tag, regexp); + } + } + /// Resolve scalars starting with 'A' to !_tag + @safe unittest + { + import std.file : write; + import std.regex : regex; + import dyaml.loader : Loader; + import dyaml.resolver : Resolver; + + write("example.yaml", "A"); + + auto loader = Loader.fromFile("example.yaml"); + loader.resolver.addImplicitResolver("!tag", regex("A.*"), "A"); + + auto node = loader.load(); + assert(node.tag == "!tag"); + } + + package: + /** + * Resolve tag of a node. + * + * Params: kind = Type of the node. + * tag = Explicit tag of the node, if any. + * value = Value of the node, if any. + * implicit = Should the node be implicitly resolved? 
+ * + * If the tag is already specified and not non-specific, that tag will + * be returned. + * + * Returns: Resolved tag. + */ + string resolve(const NodeID kind, const string tag, scope string value, + const bool implicit) @safe + { + import std.array : empty, front; + if((tag !is null) && (tag != "!")) + { + return tag; + } + + final switch (kind) + { + case NodeID.scalar: + if(!implicit) + { + return defaultScalarTag_; + } + + //Get the first char of the value. + const dchar first = value.empty ? '\0' : value.front; + + auto resolvers = (first in yamlImplicitResolvers_) is null ? + [] : yamlImplicitResolvers_[first]; + + //If regexp matches, return tag. + foreach(resolver; resolvers) + { + // source/dyaml/resolver.d(192,35): Error: scope variable `__tmpfordtorXXX` + // assigned to non-scope parameter `this` calling + // `std.regex.RegexMatch!string.RegexMatch.~this` + bool isEmpty = () @trusted { + return match(value, resolver[1]).empty; + }(); + if(!isEmpty) + { + return resolver[0]; + } + } + return defaultScalarTag_; + case NodeID.sequence: + return defaultSequenceTag_; + case NodeID.mapping: + return defaultMappingTag_; + case NodeID.invalid: + assert(false, "Cannot resolve an invalid node"); + } + } + @safe unittest + { + auto resolver = Resolver.withDefaultResolvers; + + bool tagMatch(string tag, string[] values) @safe + { + const string expected = tag; + foreach(value; values) + { + const string resolved = resolver.resolve(NodeID.scalar, null, value, true); + if(expected != resolved) + { + return false; + } + } + return true; + } + + assert(tagMatch("tag:yaml.org,2002:bool", + ["yes", "NO", "True", "on"])); + assert(tagMatch("tag:yaml.org,2002:float", + ["6.8523015e+5", "685.230_15e+03", "685_230.15", + "190:20:30.15", "-.inf", ".NaN"])); + assert(tagMatch("tag:yaml.org,2002:int", + ["685230", "+685_230", "02472256", "0x_0A_74_AE", + "0b1010_0111_0100_1010_1110", "190:20:30"])); + assert(tagMatch("tag:yaml.org,2002:merge", ["<<"])); + assert(tagMatch("tag:yaml.org,2002:null", ["~", "null", ""])); + assert(tagMatch("tag:yaml.org,2002:str", + ["abcd", "9a8b", "9.1adsf"])); + assert(tagMatch("tag:yaml.org,2002:timestamp", + ["2001-12-15T02:59:43.1Z", + "2001-12-14t21:59:43.10-05:00", + "2001-12-14 21:59:43.10 -5", + "2001-12-15 2:59:43.10", + "2002-12-14"])); + assert(tagMatch("tag:yaml.org,2002:value", ["="])); + assert(tagMatch("tag:yaml.org,2002:yaml", ["!", "&", "*"])); + } + + ///Returns: Default scalar tag. + @property string defaultScalarTag() const pure @safe nothrow {return defaultScalarTag_;} + + ///Returns: Default sequence tag. + @property string defaultSequenceTag() const pure @safe nothrow {return defaultSequenceTag_;} + + ///Returns: Default mapping tag. + @property string defaultMappingTag() const pure @safe nothrow {return defaultMappingTag_;} +} diff --git a/source/dyaml/scanner.d b/source/dyaml/scanner.d new file mode 100644 index 0000000..17893d1 --- /dev/null +++ b/source/dyaml/scanner.d @@ -0,0 +1,1809 @@ + +// Copyright Ferdinand Majerech 2011-2014. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +/// YAML scanner. 
+/// Code based on PyYAML: http://www.pyyaml.org +module dyaml.scanner; + + +import core.stdc.string; + +import std.algorithm; +import std.array; +import std.conv; +import std.ascii : isAlphaNum, isDigit, isHexDigit; +import std.exception; +import std.string; +import std.typecons; +import std.traits : Unqual; +import std.utf; + +import dyaml.escapes; +import dyaml.exception; +import dyaml.queue; +import dyaml.reader; +import dyaml.style; +import dyaml.token; + +package: +/// Scanner produces tokens of the following types: +/// STREAM-START +/// STREAM-END +/// DIRECTIVE(name, value) +/// DOCUMENT-START +/// DOCUMENT-END +/// BLOCK-SEQUENCE-START +/// BLOCK-MAPPING-START +/// BLOCK-END +/// FLOW-SEQUENCE-START +/// FLOW-MAPPING-START +/// FLOW-SEQUENCE-END +/// FLOW-MAPPING-END +/// BLOCK-ENTRY +/// FLOW-ENTRY +/// KEY +/// VALUE +/// ALIAS(value) +/// ANCHOR(value) +/// TAG(value) +/// SCALAR(value, plain, style) + +alias isBreak = among!('\0', '\n', '\r', '\u0085', '\u2028', '\u2029'); + +alias isBreakOrSpace = among!(' ', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'); + +alias isWhiteSpace = among!(' ', '\t', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'); + +alias isNonLinebreakWhitespace = among!(' ', '\t'); + +alias isNonScalarStartCharacter = among!('-', '?', ':', ',', '[', ']', '{', '}', + '#', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`', ' ', '\t', '\0', '\n', + '\r', '\u0085', '\u2028', '\u2029'); + +alias isURIChar = among!('-', ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', + '_', '.', '!', '~', '*', '\'', '(', ')', '[', ']', '%'); + +alias isNSChar = among!(' ', '\n', '\r', '\u0085', '\u2028', '\u2029'); + +alias isBChar = among!('\n', '\r', '\u0085', '\u2028', '\u2029'); + +alias isFlowScalarBreakSpace = among!(' ', '\t', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029', '\'', '"', '\\'); + +alias isNSAnchorName = c => !c.isWhiteSpace && !c.among!('[', ']', '{', '}', ',', '\uFEFF'); + +/// Marked exception thrown at scanner errors. +/// +/// See_Also: MarkedYAMLException +class ScannerException : MarkedYAMLException +{ + mixin MarkedExceptionCtors; +} + +/// Generates tokens from data provided by a Reader. +struct Scanner +{ + private: + /// A simple key is a key that is not denoted by the '?' indicator. + /// For example: + /// --- + /// block simple key: value + /// ? not a simple key: + /// : { flow simple key: value } + /// We emit the KEY token before all keys, so when we find a potential simple + /// key, we try to locate the corresponding ':' indicator. Simple keys should be + /// limited to a single line and 1024 characters. + /// + /// 16 bytes on 64-bit. + static struct SimpleKey + { + /// Character index in reader where the key starts. + uint charIndex = uint.max; + /// Index of the key token from start (first token scanned being 0). + uint tokenIndex; + /// Line the key starts at. + uint line; + /// Column the key starts at. + ushort column; + /// Is this required to be a simple key? + bool required; + /// Is this struct "null" (invalid)?. + bool isNull; + } + + /// Block chomping types. + enum Chomping + { + /// Strip all trailing line breaks. '-' indicator. + strip, + /// Line break of the last line is preserved, others discarded. Default. + clip, + /// All trailing line breaks are preserved. '+' indicator. + keep + } + + /// Reader used to read from a file/stream. + Reader reader_; + /// Are we done scanning? + bool done_; + + /// Level of nesting in flow context. If 0, we're in block context. + uint flowLevel_; + /// Current indentation level. 
+ int indent_ = -1;
+ /// Past indentation levels. Used as a stack.
+ Appender!(int[]) indents_;
+
+ /// Processed tokens not yet emitted. Used as a queue.
+ Queue!Token tokens_;
+
+ /// Number of tokens emitted through popFront.
+ uint tokensTaken_;
+
+ /// Can a simple key start at the current position? A simple key may start:
+ /// - at the beginning of the line, not counting indentation spaces
+ /// (in block context),
+ /// - after '{', '[', ',' (in the flow context),
+ /// - after '?', ':', '-' (in the block context).
+ /// In the block context, this flag also signifies if a block collection
+ /// may start at the current position.
+ bool allowSimpleKey_ = true;
+
+ /// Possible simple keys indexed by flow levels.
+ SimpleKey[] possibleSimpleKeys_;
+
+ public:
+ /// Construct a Scanner using specified Reader.
+ this(Reader reader) @safe nothrow
+ {
+ reader_ = reader;
+ fetchStreamStart();
+ }
+
+ /// Advance to the next token.
+ void popFront() @safe
+ {
+ ++tokensTaken_;
+ tokens_.pop();
+ }
+
+ /// Return the current token without removing it from the queue.
+ const(Token) front() @safe
+ {
+ enforce(!empty, "No token left to peek");
+ return tokens_.peek();
+ }
+
+ /// Return whether there are any more tokens left.
+ bool empty() @safe
+ {
+ while (needMoreTokens())
+ {
+ fetchToken();
+ }
+ return tokens_.empty;
+ }
+
+ /// Set file name.
+ void name(string name) @safe pure nothrow @nogc
+ {
+ reader_.name = name;
+ }
+
+ private:
+ /// Most scanning error messages have the same format, so build them with this
+ /// function.
+ string expected(T)(string expected, T found)
+ {
+ return text("expected ", expected, ", but found ", found);
+ }
+
+ /// Determine whether or not we need to fetch more tokens before peeking/getting a token.
+ bool needMoreTokens() @safe pure
+ {
+ if(done_) { return false; }
+ if(tokens_.empty) { return true; }
+
+ /// The current token may be a potential simple key, so we need to look further.
+ stalePossibleSimpleKeys();
+ return nextPossibleSimpleKey() == tokensTaken_;
+ }
+
+ /// Fetch a token, adding it to tokens_.
+ void fetchToken() @safe
+ {
+ // Eat whitespace and comments until we reach the next token.
+ scanToNextToken();
+
+ // Remove obsolete possible simple keys.
+ stalePossibleSimpleKeys();
+
+ // Compare current indentation and column. It may add some tokens
+ // and decrease the current indentation level.
+ unwindIndent(reader_.column);
+
+ // Get the next character.
+ const dchar c = reader_.peekByte();
+
+ // Fetch the token.
+ if(c == '\0') { return fetchStreamEnd(); }
+ if(checkDirective()) { return fetchDirective(); }
+ if(checkDocumentStart()) { return fetchDocumentStart(); }
+ if(checkDocumentEnd()) { return fetchDocumentEnd(); }
+ // Order of the following checks is NOT significant.
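+ // Illustrative (not exhaustive): for the input "- [a, b]", '-' dispatches
+ // to fetchBlockEntry, '[' to fetchFlowSequenceStart and ',' to
+ // fetchFlowEntry, while 'a' and 'b' fall through to the plain scalar
+ // check in the default case.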
+ switch(c) + { + case '[': return fetchFlowSequenceStart(); + case '{': return fetchFlowMappingStart(); + case ']': return fetchFlowSequenceEnd(); + case '}': return fetchFlowMappingEnd(); + case ',': return fetchFlowEntry(); + case '!': return fetchTag(); + case '\'': return fetchSingle(); + case '\"': return fetchDouble(); + case '*': return fetchAlias(); + case '&': return fetchAnchor(); + case '?': if(checkKey()) { return fetchKey(); } goto default; + case ':': if(checkValue()) { return fetchValue(); } goto default; + case '-': if(checkBlockEntry()) { return fetchBlockEntry(); } goto default; + case '|': if(flowLevel_ == 0) { return fetchLiteral(); } break; + case '>': if(flowLevel_ == 0) { return fetchFolded(); } break; + default: if(checkPlain()) { return fetchPlain(); } + } + + throw new ScannerException("While scanning for the next token, found character " ~ + "\'%s\', index %s that cannot start any token" + .format(c, to!int(c)), reader_.mark); + } + + + /// Return the token number of the nearest possible simple key. + uint nextPossibleSimpleKey() @safe pure nothrow @nogc + { + uint minTokenNumber = uint.max; + foreach(k, ref simpleKey; possibleSimpleKeys_) + { + if(simpleKey.isNull) { continue; } + minTokenNumber = min(minTokenNumber, simpleKey.tokenIndex); + } + return minTokenNumber; + } + + /// Remove entries that are no longer possible simple keys. + /// + /// According to the YAML specification, simple keys + /// - should be limited to a single line, + /// - should be no longer than 1024 characters. + /// Disabling this will allow simple keys of any length and + /// height (may cause problems if indentation is broken though). + void stalePossibleSimpleKeys() @safe pure + { + foreach(level, ref key; possibleSimpleKeys_) + { + if(key.isNull) { continue; } + if(key.line != reader_.line || reader_.charIndex - key.charIndex > 1024) + { + enforce(!key.required, + new ScannerException("While scanning a simple key", + Mark(reader_.name, key.line, key.column), + "could not find expected ':'", reader_.mark)); + key.isNull = true; + } + } + } + + /// Check if the next token starts a possible simple key and if so, save its position. + /// + /// This function is called for ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'. + void savePossibleSimpleKey() @safe pure + { + // Check if a simple key is required at the current position. + const required = (flowLevel_ == 0 && indent_ == reader_.column); + assert(allowSimpleKey_ || !required, "A simple key is required only if it is " ~ + "the first token in the current line. Therefore it is always allowed."); + + if(!allowSimpleKey_) { return; } + + // The next token might be a simple key, so save its number and position. + removePossibleSimpleKey(); + const tokenCount = tokensTaken_ + cast(uint)tokens_.length; + + const line = reader_.line; + const column = reader_.column; + const key = SimpleKey(cast(uint)reader_.charIndex, tokenCount, line, + cast(ushort)min(column, ushort.max), required); + + if(possibleSimpleKeys_.length <= flowLevel_) + { + const oldLength = possibleSimpleKeys_.length; + possibleSimpleKeys_.length = flowLevel_ + 1; + //No need to initialize the last element, it's already done in the next line. + possibleSimpleKeys_[oldLength .. flowLevel_] = SimpleKey.init; + } + possibleSimpleKeys_[flowLevel_] = key; + } + + /// Remove the saved possible key position at the current flow level. 
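+ ///
+ /// Throws: ScannerException if the removed key was required, since a
+ /// required simple key without a matching ':' is a scanner error.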
+ void removePossibleSimpleKey() @safe pure
+ {
+ if(possibleSimpleKeys_.length <= flowLevel_) { return; }
+
+ if(!possibleSimpleKeys_[flowLevel_].isNull)
+ {
+ const key = possibleSimpleKeys_[flowLevel_];
+ enforce(!key.required,
+ new ScannerException("While scanning a simple key",
+ Mark(reader_.name, key.line, key.column),
+ "could not find expected ':'", reader_.mark));
+ possibleSimpleKeys_[flowLevel_].isNull = true;
+ }
+ }
+
+ /// Decrease indentation, removing entries in indents_.
+ ///
+ /// Params: column = Current column in the file/stream.
+ void unwindIndent(const int column) @safe
+ {
+ if(flowLevel_ > 0)
+ {
+ // In flow context, tokens should respect indentation.
+ // The condition should be `indent >= column` according to the spec.
+ // But this condition will prohibit intuitively correct
+ // constructions such as
+ // key : {
+ // }
+
+ // In the flow context, indentation is ignored. We make the scanner less
+ // restrictive than what the specification requires.
+ // if(pedantic_ && flowLevel_ > 0 && indent_ > column)
+ // {
+ // throw new ScannerException("Invalid indentation or unclosed '[' or '{'",
+ // reader_.mark)
+ // }
+ return;
+ }
+
+ // In block context, we may need to issue the BLOCK-END tokens.
+ while(indent_ > column)
+ {
+ indent_ = indents_.data.back;
+ assert(indents_.data.length);
+ indents_.shrinkTo(indents_.data.length - 1);
+ tokens_.push(blockEndToken(reader_.mark, reader_.mark));
+ }
+ }
+
+ /// Increase indentation if needed.
+ ///
+ /// Params: column = Current column in the file/stream.
+ ///
+ /// Returns: true if the indentation was increased, false otherwise.
+ bool addIndent(int column) @safe
+ {
+ if(indent_ >= column){return false;}
+ indents_ ~= indent_;
+ indent_ = column;
+ return true;
+ }
+
+
+ /// Add STREAM-START token.
+ void fetchStreamStart() @safe nothrow
+ {
+ tokens_.push(streamStartToken(reader_.mark, reader_.mark, reader_.encoding));
+ }
+
+ /// Add STREAM-END token.
+ void fetchStreamEnd() @safe
+ {
+ // Set indentation to -1.
+ unwindIndent(-1);
+ removePossibleSimpleKey();
+ allowSimpleKey_ = false;
+ possibleSimpleKeys_.destroy;
+
+ tokens_.push(streamEndToken(reader_.mark, reader_.mark));
+ done_ = true;
+ }
+
+ /// Add DIRECTIVE token.
+ void fetchDirective() @safe
+ {
+ // Set indentation to -1.
+ unwindIndent(-1);
+ // Reset simple keys.
+ removePossibleSimpleKey();
+ allowSimpleKey_ = false;
+
+ auto directive = scanDirective();
+ tokens_.push(directive);
+ }
+
+ /// Add DOCUMENT-START or DOCUMENT-END token.
+ void fetchDocumentIndicator(TokenID id)()
+ if(id == TokenID.documentStart || id == TokenID.documentEnd)
+ {
+ // Set indentation to -1.
+ unwindIndent(-1);
+ // Reset simple keys. Note that there can't be a block collection after '---'.
+ removePossibleSimpleKey();
+ allowSimpleKey_ = false;
+
+ Mark startMark = reader_.mark;
+ reader_.forward(3);
+ tokens_.push(simpleToken!id(startMark, reader_.mark));
+ }
+
+ /// Aliases to add DOCUMENT-START or DOCUMENT-END token.
+ alias fetchDocumentStart = fetchDocumentIndicator!(TokenID.documentStart);
+ alias fetchDocumentEnd = fetchDocumentIndicator!(TokenID.documentEnd);
+
+ /// Add FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
+ void fetchFlowCollectionStart(TokenID id)() @safe
+ {
+ // '[' and '{' may start a simple key.
+ savePossibleSimpleKey();
+ // Simple keys are allowed after '[' and '{'.
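+ // (e.g. in "{ key: value }", "key" right after '{' is a valid simple key)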
+ allowSimpleKey_ = true;
+ ++flowLevel_;
+
+ Mark startMark = reader_.mark;
+ reader_.forward();
+ tokens_.push(simpleToken!id(startMark, reader_.mark));
+ }
+
+ /// Aliases to add FLOW-SEQUENCE-START or FLOW-MAPPING-START token.
+ alias fetchFlowSequenceStart = fetchFlowCollectionStart!(TokenID.flowSequenceStart);
+ alias fetchFlowMappingStart = fetchFlowCollectionStart!(TokenID.flowMappingStart);
+
+ /// Add FLOW-SEQUENCE-END or FLOW-MAPPING-END token.
+ void fetchFlowCollectionEnd(TokenID id)()
+ {
+ // Reset possible simple key on the current level.
+ removePossibleSimpleKey();
+ // No simple keys after ']' and '}'.
+ allowSimpleKey_ = false;
+ --flowLevel_;
+
+ Mark startMark = reader_.mark;
+ reader_.forward();
+ tokens_.push(simpleToken!id(startMark, reader_.mark));
+ }
+
+ /// Aliases to add FLOW-SEQUENCE-END or FLOW-MAPPING-END token.
+ alias fetchFlowSequenceEnd = fetchFlowCollectionEnd!(TokenID.flowSequenceEnd);
+ alias fetchFlowMappingEnd = fetchFlowCollectionEnd!(TokenID.flowMappingEnd);
+
+ /// Add FLOW-ENTRY token.
+ void fetchFlowEntry() @safe
+ {
+ // Reset possible simple key on the current level.
+ removePossibleSimpleKey();
+ // Simple keys are allowed after ','.
+ allowSimpleKey_ = true;
+
+ Mark startMark = reader_.mark;
+ reader_.forward();
+ tokens_.push(flowEntryToken(startMark, reader_.mark));
+ }
+
+ /// Additional checks used in block context in fetchBlockEntry and fetchKey.
+ ///
+ /// Params: type = String representing the token type we might need to add.
+ /// id = Token type we might need to add.
+ void blockChecks(string type, TokenID id)()
+ {
+ enum context = type ~ " keys are not allowed here";
+ // Are we allowed to start a key (not necessarily a simple one)?
+ enforce(allowSimpleKey_, new ScannerException(context, reader_.mark));
+
+ if(addIndent(reader_.column))
+ {
+ tokens_.push(simpleToken!id(reader_.mark, reader_.mark));
+ }
+ }
+
+ /// Add BLOCK-ENTRY token. Might add BLOCK-SEQUENCE-START in the process.
+ void fetchBlockEntry() @safe
+ {
+ if(flowLevel_ == 0) { blockChecks!("Sequence", TokenID.blockSequenceStart)(); }
+
+ // It's an error for the block entry to occur in the flow context,
+ // but we let the parser detect this.
+
+ // Reset possible simple key on the current level.
+ removePossibleSimpleKey();
+ // Simple keys are allowed after '-'.
+ allowSimpleKey_ = true;
+
+ Mark startMark = reader_.mark;
+ reader_.forward();
+ tokens_.push(blockEntryToken(startMark, reader_.mark));
+ }
+
+ /// Add KEY token. Might add BLOCK-MAPPING-START in the process.
+ void fetchKey() @safe
+ {
+ if(flowLevel_ == 0) { blockChecks!("Mapping", TokenID.blockMappingStart)(); }
+
+ // Reset possible simple key on the current level.
+ removePossibleSimpleKey();
+ // Simple keys are allowed after '?' in the block context.
+ allowSimpleKey_ = (flowLevel_ == 0);
+
+ Mark startMark = reader_.mark;
+ reader_.forward();
+ tokens_.push(keyToken(startMark, reader_.mark));
+ }
+
+ /// Add VALUE token. Might add KEY and/or BLOCK-MAPPING-START in the process.
+ void fetchValue() @safe
+ {
+ // Do we have a possible simple key to complete?
+ if(possibleSimpleKeys_.length > flowLevel_ &&
+ !possibleSimpleKeys_[flowLevel_].isNull)
+ {
+ const key = possibleSimpleKeys_[flowLevel_];
+ possibleSimpleKeys_[flowLevel_].isNull = true;
+ Mark keyMark = Mark(reader_.name, key.line, key.column);
+ const idx = key.tokenIndex - tokensTaken_;
+
+ assert(idx >= 0);
+
+ // Add KEY.
+ // Manually inserting since tokens are immutable (need linked list).
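+ // Illustrative: for "a: b" the scalar "a" has already been queued, so on
+ // reaching ':' we retroactively insert KEY (and, if this opens a block
+ // mapping, BLOCK-MAPPING-START) in front of it, yielding e.g.
+ // BLOCK-MAPPING-START, KEY, SCALAR("a"), VALUE, SCALAR("b").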
+ tokens_.insert(keyToken(keyMark, keyMark), idx);
+
+ // If this key starts a new block mapping, we need to add BLOCK-MAPPING-START.
+ if(flowLevel_ == 0 && addIndent(key.column))
+ {
+ tokens_.insert(blockMappingStartToken(keyMark, keyMark), idx);
+ }
+
+ // There cannot be two simple keys in a row.
+ allowSimpleKey_ = false;
+ }
+ // Part of a complex key
+ else
+ {
+ // We can start a complex value if and only if we can start a simple key.
+ enforce(flowLevel_ > 0 || allowSimpleKey_,
+ new ScannerException("Mapping values are not allowed here", reader_.mark));
+
+ // If this value starts a new block mapping, we need to add
+ // BLOCK-MAPPING-START. It'll be detected as an error later by the parser.
+ if(flowLevel_ == 0 && addIndent(reader_.column))
+ {
+ tokens_.push(blockMappingStartToken(reader_.mark, reader_.mark));
+ }
+
+ // Reset possible simple key on the current level.
+ removePossibleSimpleKey();
+ // Simple keys are allowed after ':' in the block context.
+ allowSimpleKey_ = (flowLevel_ == 0);
+ }
+
+ // Add VALUE.
+ Mark startMark = reader_.mark;
+ reader_.forward();
+ tokens_.push(valueToken(startMark, reader_.mark));
+ }
+
+ /// Add ALIAS or ANCHOR token.
+ void fetchAnchor_(TokenID id)() @safe
+ if(id == TokenID.alias_ || id == TokenID.anchor)
+ {
+ // ALIAS/ANCHOR could be a simple key.
+ savePossibleSimpleKey();
+ // No simple keys after ALIAS/ANCHOR.
+ allowSimpleKey_ = false;
+
+ auto anchor = scanAnchor(id);
+ tokens_.push(anchor);
+ }
+
+ /// Aliases to add ALIAS or ANCHOR token.
+ alias fetchAlias = fetchAnchor_!(TokenID.alias_);
+ alias fetchAnchor = fetchAnchor_!(TokenID.anchor);
+
+ /// Add TAG token.
+ void fetchTag() @safe
+ {
+ // TAG could start a simple key.
+ savePossibleSimpleKey();
+ // No simple keys after TAG.
+ allowSimpleKey_ = false;
+
+ tokens_.push(scanTag());
+ }
+
+ /// Add block SCALAR token.
+ void fetchBlockScalar(ScalarStyle style)() @safe
+ if(style == ScalarStyle.literal || style == ScalarStyle.folded)
+ {
+ // Reset possible simple key on the current level.
+ removePossibleSimpleKey();
+ // A simple key may follow a block scalar.
+ allowSimpleKey_ = true;
+
+ auto blockScalar = scanBlockScalar(style);
+ tokens_.push(blockScalar);
+ }
+
+ /// Aliases to add literal or folded block scalar.
+ alias fetchLiteral = fetchBlockScalar!(ScalarStyle.literal);
+ alias fetchFolded = fetchBlockScalar!(ScalarStyle.folded);
+
+ /// Add quoted flow SCALAR token.
+ void fetchFlowScalar(ScalarStyle quotes)()
+ {
+ // A flow scalar could be a simple key.
+ savePossibleSimpleKey();
+ // No simple keys after flow scalars.
+ allowSimpleKey_ = false;
+
+ // Scan and add SCALAR.
+ auto scalar = scanFlowScalar(quotes);
+ tokens_.push(scalar);
+ }
+
+ /// Aliases to add single or double quoted flow scalar.
+ alias fetchSingle = fetchFlowScalar!(ScalarStyle.singleQuoted);
+ alias fetchDouble = fetchFlowScalar!(ScalarStyle.doubleQuoted);
+
+ /// Add plain SCALAR token.
+ void fetchPlain() @safe
+ {
+ // A plain scalar could be a simple key.
+ savePossibleSimpleKey();
+ // No simple keys after plain scalars. But note that scanPlain() will
+ // change this flag if the scan is finished at the beginning of the line.
+ allowSimpleKey_ = false;
+
+ // Scan and add SCALAR. scanPlain() may change allowSimpleKey_.
+ auto plain = scanPlain();
+ tokens_.push(plain);
+ }
+
+ pure:
+
+ /// Check if the next token is DIRECTIVE: ^ '%' ...
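+ ///
+ /// (e.g. a line such as "%YAML 1.1" or "%TAG !e! tag:example.com,2000:"
+ /// starting at column 0)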
+ bool checkDirective() @safe + { + return reader_.peekByte() == '%' && reader_.column == 0; + } + + /// Check if the next token is DOCUMENT-START: ^ '---' (' '|'\n') + bool checkDocumentStart() @safe + { + // Check one char first, then all 3, to prevent reading outside the buffer. + return reader_.column == 0 && + reader_.peekByte() == '-' && + reader_.prefix(3) == "---" && + reader_.peek(3).isWhiteSpace; + } + + /// Check if the next token is DOCUMENT-END: ^ '...' (' '|'\n') + bool checkDocumentEnd() @safe + { + // Check one char first, then all 3, to prevent reading outside the buffer. + return reader_.column == 0 && + reader_.peekByte() == '.' && + reader_.prefix(3) == "..." && + reader_.peek(3).isWhiteSpace; + } + + /// Check if the next token is BLOCK-ENTRY: '-' (' '|'\n') + bool checkBlockEntry() @safe + { + return !!reader_.peek(1).isWhiteSpace; + } + + /// Check if the next token is KEY(flow context): '?' + /// + /// or KEY(block context): '?' (' '|'\n') + bool checkKey() @safe + { + return (flowLevel_ > 0 || reader_.peek(1).isWhiteSpace); + } + + /// Check if the next token is VALUE(flow context): ':' + /// + /// or VALUE(block context): ':' (' '|'\n') + bool checkValue() @safe + { + return flowLevel_ > 0 || reader_.peek(1).isWhiteSpace; + } + + /// Check if the next token is a plain scalar. + /// + /// A plain scalar may start with any non-space character except: + /// '-', '?', ':', ',', '[', ']', '{', '}', + /// '#', '&', '*', '!', '|', '>', '\'', '\"', + /// '%', '@', '`'. + /// + /// It may also start with + /// '-', '?', ':' + /// if it is followed by a non-space character. + /// + /// Note that we limit the last rule to the block context (except the + /// '-' character) because we want the flow context to be space + /// independent. + bool checkPlain() @safe + { + const c = reader_.peek(); + if(!c.isNonScalarStartCharacter) + { + return true; + } + return !reader_.peek(1).isWhiteSpace && + (c == '-' || (flowLevel_ == 0 && (c == '?' || c == ':'))); + } + + /// Move to the next non-space character. + void findNextNonSpace() @safe + { + while(reader_.peekByte() == ' ') { reader_.forward(); } + } + + /// Scan a string of alphanumeric or "-_" characters. + /// + /// Assumes that the caller is building a slice in Reader, and puts the scanned + /// characters into that slice. + void scanAlphaNumericToSlice(string name)(const Mark startMark) + { + size_t length; + dchar c = reader_.peek(); + while(c.isAlphaNum || c.among!('-', '_')) { c = reader_.peek(++length); } + + enforce(length > 0, new ScannerException("While scanning " ~ name, + startMark, expected("alphanumeric, '-' or '_'", c), reader_.mark)); + + reader_.sliceBuilder.write(reader_.get(length)); + } + + /// Scan a string. + /// + /// Assumes that the caller is building a slice in Reader, and puts the scanned + /// characters into that slice. + void scanAnchorAliasToSlice(const Mark startMark) @safe + { + size_t length; + dchar c = reader_.peek(); + while (c.isNSAnchorName) + { + c = reader_.peek(++length); + } + + enforce(length > 0, new ScannerException("While scanning an anchor or alias", + startMark, expected("a printable character besides '[', ']', '{', '}' and ','", c), reader_.mark)); + + reader_.sliceBuilder.write(reader_.get(length)); + } + + /// Scan and throw away all characters until next line break. + void scanToNextBreak() @safe + { + while(!reader_.peek().isBreak) { reader_.forward(); } + } + + /// Scan all characters until next line break. 
+ ///
+ /// Assumes that the caller is building a slice in Reader, and puts the scanned
+ /// characters into that slice.
+ void scanToNextBreakToSlice() @safe
+ {
+ uint length;
+ while(!reader_.peek(length).isBreak)
+ {
+ ++length;
+ }
+ reader_.sliceBuilder.write(reader_.get(length));
+ }
+
+
+ /// Move to next token in the file/stream.
+ ///
+ /// We ignore spaces, line breaks and comments.
+ /// If we find a line break in the block context, we set
+ /// `allowSimpleKey_` on.
+ ///
+ /// We do not yet support BOM inside the stream as the
+ /// specification requires. Any such mark will be considered as a part
+ /// of the document.
+ void scanToNextToken() @safe
+ {
+ // TODO(PyYAML): We need to make tab handling rules more sane. A good rule is:
+ // Tabs cannot precede tokens
+ // BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END,
+ // KEY(block), VALUE(block), BLOCK-ENTRY
+ // So the checking code is
+ // if <TAB>:
+ // allowSimpleKey_ = false
+ // We also need to add the check for `allowSimpleKey_ == true` to
+ // `unwindIndent` before issuing BLOCK-END.
+ // Scanners for block, flow, and plain scalars need to be modified.
+
+ for(;;)
+ {
+ // All whitespace in flow context is ignored, even whitespace
+ // not allowed in other contexts.
+ if (flowLevel_ > 0)
+ {
+ while(reader_.peekByte().isNonLinebreakWhitespace) { reader_.forward(); }
+ }
+ else
+ {
+ findNextNonSpace();
+ }
+ if(reader_.peekByte() == '#') { scanToNextBreak(); }
+ if(scanLineBreak() != '\0')
+ {
+ if(flowLevel_ == 0) { allowSimpleKey_ = true; }
+ }
+ else
+ {
+ break;
+ }
+ }
+ }
+
+ /// Scan directive token.
+ Token scanDirective() @safe
+ {
+ Mark startMark = reader_.mark;
+ // Skip the '%'.
+ reader_.forward();
+
+ // Scan directive name.
+ reader_.sliceBuilder.begin();
+ scanDirectiveNameToSlice(startMark);
+ const name = reader_.sliceBuilder.finish();
+
+ reader_.sliceBuilder.begin();
+
+ // Index where tag handle ends and suffix starts in a tag directive value.
+ uint tagHandleEnd = uint.max;
+ if(name == "YAML") { scanYAMLDirectiveValueToSlice(startMark); }
+ else if(name == "TAG") { tagHandleEnd = scanTagDirectiveValueToSlice(startMark); }
+ char[] value = reader_.sliceBuilder.finish();
+
+ Mark endMark = reader_.mark;
+
+ DirectiveType directive;
+ if(name == "YAML") { directive = DirectiveType.yaml; }
+ else if(name == "TAG") { directive = DirectiveType.tag; }
+ else
+ {
+ directive = DirectiveType.reserved;
+ scanToNextBreak();
+ }
+
+ scanDirectiveIgnoredLine(startMark);
+
+ return directiveToken(startMark, endMark, value, directive, tagHandleEnd);
+ }
+
+ /// Scan name of a directive token.
+ ///
+ /// Assumes that the caller is building a slice in Reader, and puts the scanned
+ /// characters into that slice.
+ void scanDirectiveNameToSlice(const Mark startMark) @safe
+ {
+ // Scan directive name.
+ scanAlphaNumericToSlice!"a directive"(startMark);
+
+ enforce(reader_.peek().among!(' ', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'),
+ new ScannerException("While scanning a directive", startMark,
+ expected("alphanumeric, '-' or '_'", reader_.peek()), reader_.mark));
+ }
+
+ /// Scan value of a YAML directive token. Returns major, minor version separated by '.'.
+ ///
+ /// Assumes that the caller is building a slice in Reader, and puts the scanned
+ /// characters into that slice.
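+ ///
+ /// For example (illustrative), for the directive "%YAML 1.1" this scans and
+ /// writes "1.1" to the slice being built.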
+ void scanYAMLDirectiveValueToSlice(const Mark startMark) @safe
+ {
+ findNextNonSpace();
+
+ scanYAMLDirectiveNumberToSlice(startMark);
+
+ enforce(reader_.peekByte() == '.',
+ new ScannerException("While scanning a directive", startMark,
+ expected("digit or '.'", reader_.peek()), reader_.mark));
+ // Skip the '.'.
+ reader_.forward();
+
+ reader_.sliceBuilder.write('.');
+ scanYAMLDirectiveNumberToSlice(startMark);
+
+ enforce(reader_.peek().among!(' ', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'),
+ new ScannerException("While scanning a directive", startMark,
+ expected("digit or '.'", reader_.peek()), reader_.mark));
+ }
+
+ /// Scan a number from a YAML directive.
+ ///
+ /// Assumes that the caller is building a slice in Reader, and puts the scanned
+ /// characters into that slice.
+ void scanYAMLDirectiveNumberToSlice(const Mark startMark) @safe
+ {
+ enforce(isDigit(reader_.peek()),
+ new ScannerException("While scanning a directive", startMark,
+ expected("digit", reader_.peek()), reader_.mark));
+
+ // Already found the first digit in the enforce(), so set length to 1.
+ uint length = 1;
+ while(reader_.peek(length).isDigit) { ++length; }
+
+ reader_.sliceBuilder.write(reader_.get(length));
+ }
+
+ /// Scan value of a tag directive.
+ ///
+ /// Assumes that the caller is building a slice in Reader, and puts the scanned
+ /// characters into that slice.
+ ///
+ /// Returns: Length of tag handle (which is before tag prefix) in scanned data
+ uint scanTagDirectiveValueToSlice(const Mark startMark) @safe
+ {
+ findNextNonSpace();
+ const startLength = reader_.sliceBuilder.length;
+ scanTagDirectiveHandleToSlice(startMark);
+ const handleLength = cast(uint)(reader_.sliceBuilder.length - startLength);
+ findNextNonSpace();
+ scanTagDirectivePrefixToSlice(startMark);
+
+ return handleLength;
+ }
+
+ /// Scan handle of a tag directive.
+ ///
+ /// Assumes that the caller is building a slice in Reader, and puts the scanned
+ /// characters into that slice.
+ void scanTagDirectiveHandleToSlice(const Mark startMark) @safe
+ {
+ scanTagHandleToSlice!"directive"(startMark);
+ enforce(reader_.peekByte() == ' ',
+ new ScannerException("While scanning a directive handle", startMark,
+ expected("' '", reader_.peek()), reader_.mark));
+ }
+
+ /// Scan prefix of a tag directive.
+ ///
+ /// Assumes that the caller is building a slice in Reader, and puts the scanned
+ /// characters into that slice.
+ void scanTagDirectivePrefixToSlice(const Mark startMark) @safe
+ {
+ scanTagURIToSlice!"directive"(startMark);
+ enforce(reader_.peek().among!(' ', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'),
+ new ScannerException("While scanning a directive prefix", startMark,
+ expected("' '", reader_.peek()), reader_.mark));
+ }
+
+ /// Scan (and ignore) ignored line after a directive.
+ void scanDirectiveIgnoredLine(const Mark startMark) @safe
+ {
+ findNextNonSpace();
+ if(reader_.peekByte() == '#') { scanToNextBreak(); }
+ enforce(reader_.peek().isBreak,
+ new ScannerException("While scanning a directive", startMark,
+ expected("comment or a line break", reader_.peek()), reader_.mark));
+ scanLineBreak();
+ }
+
+
+ /// Scan an alias or an anchor.
+ ///
+ /// The specification does not restrict characters for anchors and
+ /// aliases. This may lead to problems, for instance, the document:
+ /// [ *alias, value ]
+ /// can be interpreted in two ways, as
+ /// [ "value" ]
+ /// and
+ /// [ *alias , "value" ]
+ /// Therefore we restrict anchors and aliases to printable characters,
+ /// excluding '[', ']', '{', '}' and ','.
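+ ///
+ /// Example (illustrative): in the document "- &id foo\n- *id", "&id" is
+ /// scanned as an ANCHOR token and "*id" as an ALIAS token referring to it.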
+ Token scanAnchor(const TokenID id) @safe + { + const startMark = reader_.mark; + reader_.forward(); // The */& character was only peeked, so we drop it now + + reader_.sliceBuilder.begin(); + scanAnchorAliasToSlice(startMark); + // On error, value is discarded as we return immediately + char[] value = reader_.sliceBuilder.finish(); + + assert(!reader_.peek().isNSAnchorName, "Anchor/alias name not fully scanned"); + + if(id == TokenID.alias_) + { + return aliasToken(startMark, reader_.mark, value); + } + if(id == TokenID.anchor) + { + return anchorToken(startMark, reader_.mark, value); + } + assert(false, "This code should never be reached"); + } + + /// Scan a tag token. + Token scanTag() @safe + { + const startMark = reader_.mark; + dchar c = reader_.peek(1); + + reader_.sliceBuilder.begin(); + scope(failure) { reader_.sliceBuilder.finish(); } + // Index where tag handle ends and tag suffix starts in the tag value + // (slice) we will produce. + uint handleEnd; + + if(c == '<') + { + reader_.forward(2); + + handleEnd = 0; + scanTagURIToSlice!"tag"(startMark); + enforce(reader_.peekByte() == '>', + new ScannerException("While scanning a tag", startMark, + expected("'>'", reader_.peek()), reader_.mark)); + reader_.forward(); + } + else if(c.isWhiteSpace) + { + reader_.forward(); + handleEnd = 0; + reader_.sliceBuilder.write('!'); + } + else + { + uint length = 1; + bool useHandle; + + while(!c.isBreakOrSpace) + { + if(c == '!') + { + useHandle = true; + break; + } + ++length; + c = reader_.peek(length); + } + + if(useHandle) + { + scanTagHandleToSlice!"tag"(startMark); + handleEnd = cast(uint)reader_.sliceBuilder.length; + } + else + { + reader_.forward(); + reader_.sliceBuilder.write('!'); + handleEnd = cast(uint)reader_.sliceBuilder.length; + } + + scanTagURIToSlice!"tag"(startMark); + } + + enforce(reader_.peek().isBreakOrSpace, + new ScannerException("While scanning a tag", startMark, expected("' '", reader_.peek()), + reader_.mark)); + + char[] slice = reader_.sliceBuilder.finish(); + return tagToken(startMark, reader_.mark, slice, handleEnd); + } + + /// Scan a block scalar token with specified style. + Token scanBlockScalar(const ScalarStyle style) @safe + { + const startMark = reader_.mark; + + // Scan the header. + reader_.forward(); + + const indicators = scanBlockScalarIndicators(startMark); + + const chomping = indicators[0]; + const increment = indicators[1]; + scanBlockScalarIgnoredLine(startMark); + + // Determine the indentation level and go to the first non-empty line. + Mark endMark; + uint indent = max(1, indent_ + 1); + + reader_.sliceBuilder.begin(); + alias Transaction = SliceBuilder.Transaction; + // Used to strip the last line breaks written to the slice at the end of the + // scalar, which may be needed based on chomping. + Transaction breaksTransaction = Transaction(&reader_.sliceBuilder); + // Read the first indentation/line breaks before the scalar. + size_t startLen = reader_.sliceBuilder.length; + if(increment == int.min) + { + auto indentation = scanBlockScalarIndentationToSlice(); + endMark = indentation[1]; + indent = max(indent, indentation[0]); + } + else + { + indent += increment - 1; + endMark = scanBlockScalarBreaksToSlice(indent); + } + + // int.max means there's no line break (int.max is outside UTF-32). + dchar lineBreak = cast(dchar)int.max; + + // Scan the inner part of the block scalar. 
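+ // Illustrative: for "|\n a\n b" (literal) the break after "a" is kept,
+ // while for ">\n a\n b" (folded) it is folded into a single space,
+ // following the folding rules handled below.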
+ while(reader_.column == indent && reader_.peekByte() != '\0')
+ {
+ breaksTransaction.commit();
+ const bool leadingNonSpace = !reader_.peekByte().among!(' ', '\t');
+ // This is where the 'interesting' non-whitespace data gets read.
+ scanToNextBreakToSlice();
+ lineBreak = scanLineBreak();
+
+
+ // This transaction serves to rollback data read in the
+ // scanBlockScalarBreaksToSlice() call.
+ breaksTransaction = Transaction(&reader_.sliceBuilder);
+ startLen = reader_.sliceBuilder.length;
+ // The line breaks should actually be written _after_ the if() block
+ // below. We work around that by scanning the breaks first and then
+ // insert()ing the line break before them in the slice where needed.
+ endMark = scanBlockScalarBreaksToSlice(indent);
+
+ // This will not run during the last iteration (see the if() vs the
+ // while()), hence breaksTransaction rollback (which happens after this
+ // loop) will never roll back data written in this if() block.
+ if(reader_.column == indent && reader_.peekByte() != '\0')
+ {
+ // Unfortunately, folding rules are ambiguous.
+
+ // This is the folding according to the specification:
+ if(style == ScalarStyle.folded && lineBreak == '\n' &&
+ leadingNonSpace && !reader_.peekByte().among!(' ', '\t'))
+ {
+ // No breaks were scanned; no need to insert the space in the
+ // middle of slice.
+ if(startLen == reader_.sliceBuilder.length)
+ {
+ reader_.sliceBuilder.write(' ');
+ }
+ }
+ else
+ {
+ // We need to insert in the middle of the slice in case any line
+ // breaks were scanned.
+ reader_.sliceBuilder.insert(lineBreak, startLen);
+ }
+
+ ////this is Clark Evans's interpretation (also in the spec
+ ////examples):
+ //
+ //if(style == ScalarStyle.folded && lineBreak == '\n')
+ //{
+ // if(startLen == endLen)
+ // {
+ // if(!" \t"d.canFind(reader_.peekByte()))
+ // {
+ // reader_.sliceBuilder.write(' ');
+ // }
+ // else
+ // {
+ // chunks ~= lineBreak;
+ // }
+ // }
+ //}
+ //else
+ //{
+ // reader_.sliceBuilder.insertBack(lineBreak, endLen - startLen);
+ //}
+ }
+ else
+ {
+ break;
+ }
+ }
+
+ // If chomping is Keep, we keep (commit) the last scanned line breaks
+ // (which are at the end of the scalar). Otherwise we remove them (end the
+ // transaction).
+ if(chomping == Chomping.keep) { breaksTransaction.commit(); }
+ else { breaksTransaction.end(); }
+ if(chomping != Chomping.strip && lineBreak != int.max)
+ {
+ // If chomping is Keep, we keep the line break but the first line break
+ // that isn't stripped (since chomping isn't Strip in this branch) must
+ // be inserted _before_ the other line breaks.
+ if(chomping == Chomping.keep)
+ {
+ reader_.sliceBuilder.insert(lineBreak, startLen);
+ }
+ // If chomping is not Keep, breaksTransaction was cancelled so we can
+ // directly write the first line break (as it isn't stripped - chomping
+ // is not Strip)
+ else
+ {
+ reader_.sliceBuilder.write(lineBreak);
+ }
+ }
+
+ char[] slice = reader_.sliceBuilder.finish();
+ return scalarToken(startMark, endMark, slice, style);
+ }
+
+ /// Scan chomping and indentation indicators of a scalar token.
+ Tuple!(Chomping, int) scanBlockScalarIndicators(const Mark startMark) @safe
+ {
+ auto chomping = Chomping.clip;
+ int increment = int.min;
+ dchar c = reader_.peek();
+
+ /// Indicators can be in any order.
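+ // Illustrative examples (not from the original source):
+ //   "|+2" -> Chomping.keep,  increment 2
+ //   ">-"  -> Chomping.strip, no increment (stays int.min)
+ //   "|2+" -> increment 2, Chomping.keep (order is free)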
+ if(getChomping(c, chomping))
+ {
+ getIncrement(c, increment, startMark);
+ }
+ else
+ {
+ const gotIncrement = getIncrement(c, increment, startMark);
+ if(gotIncrement) { getChomping(c, chomping); }
+ }
+
+ enforce(c.among!(' ', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'),
+ new ScannerException("While scanning a block scalar", startMark,
+ expected("chomping or indentation indicator", c), reader_.mark));
+
+ return tuple(chomping, increment);
+ }
+
+ /// Get chomping indicator, if detected. Return false otherwise.
+ ///
+ /// Used in scanBlockScalarIndicators.
+ ///
+ /// Params:
+ ///
+ /// c = The character that may be a chomping indicator.
+ /// chomping = Write the chomping value here, if detected.
+ bool getChomping(ref dchar c, ref Chomping chomping) @safe
+ {
+ if(!c.among!('+', '-')) { return false; }
+ chomping = c == '+' ? Chomping.keep : Chomping.strip;
+ reader_.forward();
+ c = reader_.peek();
+ return true;
+ }
+
+ /// Get increment indicator, if detected. Return false otherwise.
+ ///
+ /// Used in scanBlockScalarIndicators.
+ ///
+ /// Params:
+ ///
+ /// c = The character that may be an increment indicator.
+ /// If an increment indicator is detected, this will be updated to
+ /// the next character in the Reader.
+ /// increment = Write the increment value here, if detected.
+ /// startMark = Mark for error messages.
+ bool getIncrement(ref dchar c, ref int increment, const Mark startMark) @safe
+ {
+ if(!c.isDigit) { return false; }
+ // Convert a digit to integer.
+ increment = c - '0';
+ assert(increment < 10 && increment >= 0, "Digit has invalid value");
+
+ enforce(increment > 0,
+ new ScannerException("While scanning a block scalar", startMark,
+ expected("indentation indicator in range 1-9", "0"), reader_.mark));
+
+ reader_.forward();
+ c = reader_.peek();
+ return true;
+ }
+
+ /// Scan (and ignore) ignored line in a block scalar.
+ void scanBlockScalarIgnoredLine(const Mark startMark) @safe
+ {
+ findNextNonSpace();
+ if(reader_.peekByte() == '#') { scanToNextBreak(); }
+
+ enforce(reader_.peek().isBreak,
+ new ScannerException("While scanning a block scalar", startMark,
+ expected("comment or line break", reader_.peek()), reader_.mark));
+
+ scanLineBreak();
+ }
+
+ /// Scan indentation in a block scalar, returning line breaks, max indent and end mark.
+ ///
+ /// Assumes that the caller is building a slice in Reader, and puts the scanned
+ /// characters into that slice.
+ Tuple!(uint, Mark) scanBlockScalarIndentationToSlice() @safe
+ {
+ uint maxIndent;
+ Mark endMark = reader_.mark;
+
+ while(reader_.peek().among!(' ', '\n', '\r', '\u0085', '\u2028', '\u2029'))
+ {
+ if(reader_.peekByte() != ' ')
+ {
+ reader_.sliceBuilder.write(scanLineBreak());
+ endMark = reader_.mark;
+ continue;
+ }
+ reader_.forward();
+ maxIndent = max(reader_.column, maxIndent);
+ }
+
+ return tuple(maxIndent, endMark);
+ }
+
+ /// Scan line breaks at lower or specified indentation in a block scalar.
+ ///
+ /// Assumes that the caller is building a slice in Reader, and puts the scanned
+ /// characters into that slice.
+ Mark scanBlockScalarBreaksToSlice(const uint indent) @safe
+ {
+ Mark endMark = reader_.mark;
+
+ for(;;)
+ {
+ while(reader_.column < indent && reader_.peekByte() == ' ') { reader_.forward(); }
+ if(!reader_.peek().among!('\n', '\r', '\u0085', '\u2028', '\u2029')) { break; }
+ reader_.sliceBuilder.write(scanLineBreak());
+ endMark = reader_.mark;
+ }
+
+ return endMark;
+ }
+
+ /// Scan a quoted flow scalar token with specified quotes.
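+ ///
+ /// Illustrative example (not from the original source): the input `'it''s'`
+ /// yields a single-quoted scalar with value `it's`; in `"a\x21b"` the escape
+ /// is copied as-is into the slice and resolved later in the Parser.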
+ Token scanFlowScalar(const ScalarStyle quotes) @safe + { + const startMark = reader_.mark; + const quote = reader_.get(); + + reader_.sliceBuilder.begin(); + + scanFlowScalarNonSpacesToSlice(quotes, startMark); + + while(reader_.peek() != quote) + { + scanFlowScalarSpacesToSlice(startMark); + scanFlowScalarNonSpacesToSlice(quotes, startMark); + } + reader_.forward(); + + auto slice = reader_.sliceBuilder.finish(); + return scalarToken(startMark, reader_.mark, slice, quotes); + } + + /// Scan nonspace characters in a flow scalar. + /// + /// Assumes that the caller is building a slice in Reader, and puts the scanned + /// characters into that slice. + void scanFlowScalarNonSpacesToSlice(const ScalarStyle quotes, const Mark startMark) + @safe + { + for(;;) + { + dchar c = reader_.peek(); + + size_t numCodePoints; + while(!reader_.peek(numCodePoints).isFlowScalarBreakSpace) { ++numCodePoints; } + + if (numCodePoints > 0) { reader_.sliceBuilder.write(reader_.get(numCodePoints)); } + + c = reader_.peek(); + if(quotes == ScalarStyle.singleQuoted && c == '\'' && reader_.peek(1) == '\'') + { + reader_.forward(2); + reader_.sliceBuilder.write('\''); + } + else if((quotes == ScalarStyle.doubleQuoted && c == '\'') || + (quotes == ScalarStyle.singleQuoted && c.among!('"', '\\'))) + { + reader_.forward(); + reader_.sliceBuilder.write(c); + } + else if(quotes == ScalarStyle.doubleQuoted && c == '\\') + { + reader_.forward(); + c = reader_.peek(); + if(c.among!(escapes)) + { + reader_.forward(); + // Escaping has been moved to Parser as it can't be done in + // place (in a slice) in case of '\P' and '\L' (very uncommon, + // but we don't want to break the spec) + char[2] escapeSequence = ['\\', cast(char)c]; + reader_.sliceBuilder.write(escapeSequence); + } + else if(c.among!(escapeHexCodeList)) + { + const hexLength = dyaml.escapes.escapeHexLength(c); + reader_.forward(); + + foreach(i; 0 .. hexLength) { + enforce(reader_.peek(i).isHexDigit, + new ScannerException("While scanning a double quoted scalar", startMark, + expected("escape sequence of hexadecimal numbers", + reader_.peek(i)), reader_.mark)); + } + char[] hex = reader_.get(hexLength); + + enforce((hex.length > 0) && (hex.length <= 8), + new ScannerException("While scanning a double quoted scalar", startMark, + "overflow when parsing an escape sequence of " ~ + "hexadecimal numbers.", reader_.mark)); + + char[2] escapeStart = ['\\', cast(char) c]; + reader_.sliceBuilder.write(escapeStart); + reader_.sliceBuilder.write(hex); + + } + else if(c.among!('\n', '\r', '\u0085', '\u2028', '\u2029')) + { + scanLineBreak(); + scanFlowScalarBreaksToSlice(startMark); + } + else + { + throw new ScannerException("While scanning a double quoted scalar", startMark, + text("found unsupported escape character ", c), + reader_.mark); + } + } + else { return; } + } + } + + /// Scan space characters in a flow scalar. + /// + /// Assumes that the caller is building a slice in Reader, and puts the scanned + /// spaces into that slice. + void scanFlowScalarSpacesToSlice(const Mark startMark) @safe + { + // Increase length as long as we see whitespace. + size_t length; + while(reader_.peekByte(length).among!(' ', '\t')) { ++length; } + auto whitespaces = reader_.prefixBytes(length); + + // Can check the last byte without striding because '\0' is ASCII + const c = reader_.peek(length); + enforce(c != '\0', + new ScannerException("While scanning a quoted scalar", startMark, + "found unexpected end of buffer", reader_.mark)); + + // Spaces not followed by a line break. 
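+ // Illustrative example (not from the original source): in `"a   b"` the
+ // three spaces take this branch and are copied verbatim; only spaces
+ // followed by a line break take part in the folding handled below.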
+ if(!c.among!('\n', '\r', '\u0085', '\u2028', '\u2029'))
+ {
+ reader_.forward(length);
+ reader_.sliceBuilder.write(whitespaces);
+ return;
+ }
+
+ // There's a line break after the spaces.
+ reader_.forward(length);
+ const lineBreak = scanLineBreak();
+
+ if(lineBreak != '\n') { reader_.sliceBuilder.write(lineBreak); }
+
+ // If we have extra line breaks after the first, scan them into the
+ // slice.
+ const bool extraBreaks = scanFlowScalarBreaksToSlice(startMark);
+
+ // No extra breaks, one normal line break. Replace it with a space.
+ if(lineBreak == '\n' && !extraBreaks) { reader_.sliceBuilder.write(' '); }
+ }
+
+ /// Scan line breaks in a flow scalar.
+ ///
+ /// Assumes that the caller is building a slice in Reader, and puts the scanned
+ /// line breaks into that slice.
+ bool scanFlowScalarBreaksToSlice(const Mark startMark) @safe
+ {
+ // True if at least one line break was found.
+ bool anyBreaks;
+ for(;;)
+ {
+ // Instead of checking indentation, we check for document separators.
+ const prefix = reader_.prefix(3);
+ enforce(!(prefix == "---" || prefix == "...") ||
+ !reader_.peek(3).isWhiteSpace,
+ new ScannerException("While scanning a quoted scalar", startMark,
+ "found unexpected document separator", reader_.mark));
+
+ // Skip any whitespaces.
+ while(reader_.peekByte().among!(' ', '\t')) { reader_.forward(); }
+
+ // Encountered a non-whitespace non-linebreak character, so we're done.
+ if(!reader_.peek().among!(' ', '\n', '\r', '\u0085', '\u2028', '\u2029')) { break; }
+
+ const lineBreak = scanLineBreak();
+ anyBreaks = true;
+ reader_.sliceBuilder.write(lineBreak);
+ }
+ return anyBreaks;
+ }
+
+ /// Scan plain scalar token (no block, no quotes).
+ Token scanPlain() @safe
+ {
+ // We keep track of the allowSimpleKey_ flag here.
+ // Indentation rules are loosened for the flow context
+ const startMark = reader_.mark;
+ Mark endMark = startMark;
+ const indent = indent_ + 1;
+
+ // We allow zero indentation for scalars, but then we need to check for
+ // document separators at the beginning of the line.
+ // if(indent == 0) { indent = 1; }
+
+ reader_.sliceBuilder.begin();
+
+ alias Transaction = SliceBuilder.Transaction;
+ Transaction spacesTransaction;
+ // Stop at a comment.
+ while(reader_.peekByte() != '#')
+ {
+ // Scan the entire plain scalar.
+ size_t length;
+ dchar c = reader_.peek(length);
+ for(;;)
+ {
+ const cNext = reader_.peek(length + 1);
+ if(c.isWhiteSpace ||
+ (flowLevel_ == 0 && c == ':' && cNext.isWhiteSpace) ||
+ (flowLevel_ > 0 && c.among!(',', ':', '?', '[', ']', '{', '}')))
+ {
+ break;
+ }
+ ++length;
+ c = cNext;
+ }
+
+ // It's not clear what we should do with ':' in the flow context.
+ enforce(flowLevel_ == 0 || c != ':' ||
+ reader_.peek(length + 1).isWhiteSpace ||
+ reader_.peek(length + 1).among!(',', '[', ']', '{', '}'),
+ new ScannerException("While scanning a plain scalar", startMark,
+ "found unexpected ':'. 
Please check " ~ + "http://pyyaml.org/wiki/YAMLColonInFlowContext for details.", + reader_.mark)); + + if(length == 0) { break; } + + allowSimpleKey_ = false; + + reader_.sliceBuilder.write(reader_.get(length)); + + endMark = reader_.mark; + + spacesTransaction.commit(); + spacesTransaction = Transaction(&reader_.sliceBuilder); + + const startLength = reader_.sliceBuilder.length; + scanPlainSpacesToSlice(); + if(startLength == reader_.sliceBuilder.length || + (flowLevel_ == 0 && reader_.column < indent)) + { + break; + } + } + + spacesTransaction.end(); + char[] slice = reader_.sliceBuilder.finish(); + + return scalarToken(startMark, endMark, slice, ScalarStyle.plain); + } + + /// Scan spaces in a plain scalar. + /// + /// Assumes that the caller is building a slice in Reader, and puts the spaces + /// into that slice. + void scanPlainSpacesToSlice() @safe + { + // The specification is really confusing about tabs in plain scalars. + // We just forbid them completely. Do not use tabs in YAML! + + // Get as many plain spaces as there are. + size_t length; + while(reader_.peekByte(length) == ' ') { ++length; } + char[] whitespaces = reader_.prefixBytes(length); + reader_.forward(length); + + const dchar c = reader_.peek(); + if(!c.isNSChar) + { + // We have spaces, but no newline. + if(whitespaces.length > 0) { reader_.sliceBuilder.write(whitespaces); } + return; + } + + // Newline after the spaces (if any) + const lineBreak = scanLineBreak(); + allowSimpleKey_ = true; + + static bool end(Reader reader_) @safe pure + { + const prefix = reader_.prefix(3); + return ("---" == prefix || "..." == prefix) + && reader_.peek(3).among!(' ', '\t', '\0', '\n', '\r', '\u0085', '\u2028', '\u2029'); + } + + if(end(reader_)) { return; } + + bool extraBreaks; + + alias Transaction = SliceBuilder.Transaction; + auto transaction = Transaction(&reader_.sliceBuilder); + if(lineBreak != '\n') { reader_.sliceBuilder.write(lineBreak); } + while(reader_.peek().isNSChar) + { + if(reader_.peekByte() == ' ') { reader_.forward(); } + else + { + const lBreak = scanLineBreak(); + extraBreaks = true; + reader_.sliceBuilder.write(lBreak); + + if(end(reader_)) { return; } + } + } + transaction.commit(); + + // No line breaks, only a space. + if(lineBreak == '\n' && !extraBreaks) { reader_.sliceBuilder.write(' '); } + } + + /// Scan handle of a tag token. + /// + /// Assumes that the caller is building a slice in Reader, and puts the scanned + /// characters into that slice. + void scanTagHandleToSlice(string name)(const Mark startMark) + { + dchar c = reader_.peek(); + enum contextMsg = "While scanning a " ~ name; + enforce(c == '!', + new ScannerException(contextMsg, startMark, expected("'!'", c), reader_.mark)); + + uint length = 1; + c = reader_.peek(length); + if(c != ' ') + { + while(c.isAlphaNum || c.among!('-', '_')) + { + ++length; + c = reader_.peek(length); + } + enforce(c == '!', + new ScannerException(contextMsg, startMark, expected("'!'", c), reader_.mark)); + ++length; + } + + reader_.sliceBuilder.write(reader_.get(length)); + } + + /// Scan URI in a tag token. + /// + /// Assumes that the caller is building a slice in Reader, and puts the scanned + /// characters into that slice. + void scanTagURIToSlice(string name)(const Mark startMark) + { + // Note: we do not check if URI is well-formed. 
+ dchar c = reader_.peek();
+ const startLen = reader_.sliceBuilder.length;
+ {
+ uint length;
+ while(c.isAlphaNum || c.isURIChar)
+ {
+ if(c == '%')
+ {
+ auto chars = reader_.get(length);
+ reader_.sliceBuilder.write(chars);
+ length = 0;
+ scanURIEscapesToSlice!name(startMark);
+ }
+ else { ++length; }
+ c = reader_.peek(length);
+ }
+ if(length > 0)
+ {
+ auto chars = reader_.get(length);
+ reader_.sliceBuilder.write(chars);
+ length = 0;
+ }
+ }
+ // OK if we scanned something, error otherwise.
+ enum contextMsg = "While parsing a " ~ name;
+ enforce(reader_.sliceBuilder.length > startLen,
+ new ScannerException(contextMsg, startMark, expected("URI", c), reader_.mark));
+ }
+
+ // Not @nogc yet because std.utf.decode is not @nogc
+ /// Scan URI escape sequences.
+ ///
+ /// Assumes that the caller is building a slice in Reader, and puts the scanned
+ /// characters into that slice.
+ void scanURIEscapesToSlice(string name)(const Mark startMark)
+ {
+ import core.exception : UnicodeException;
+ // URI escapes encode a UTF-8 string. We store UTF-8 code units here for
+ // decoding into UTF-32.
+ Appender!string buffer;
+
+
+ enum contextMsg = "While scanning a " ~ name;
+ while(reader_.peekByte() == '%')
+ {
+ reader_.forward();
+ char[2] nextByte = [reader_.peekByte(), reader_.peekByte(1)];
+
+ enforce(nextByte[0].isHexDigit && nextByte[1].isHexDigit,
+ new ScannerException(contextMsg, startMark,
+ expected("URI escape sequence of 2 hexadecimal " ~
+ "numbers", nextByte), reader_.mark));
+
+ buffer ~= nextByte[].to!ubyte(16);
+
+ reader_.forward(2);
+ }
+ try
+ {
+ foreach (dchar chr; buffer.data)
+ {
+ reader_.sliceBuilder.write(chr);
+ }
+ }
+ catch (UnicodeException)
+ {
+ throw new ScannerException(contextMsg, startMark,
+ "Invalid UTF-8 data encoded in URI escape sequence",
+ reader_.mark);
+ }
+ }
+
+
+ /// Scan a line break, if any.
+ ///
+ /// Transforms:
+ /// '\r\n' : '\n'
+ /// '\r' : '\n'
+ /// '\n' : '\n'
+ /// '\u0085' : '\n'
+ /// '\u2028' : '\u2028'
+ /// '\u2029' : '\u2029'
+ /// no break : '\0'
+ dchar scanLineBreak() @safe
+ {
+ // Fast path for ASCII line breaks.
+ const b = reader_.peekByte();
+ if(b < 0x80)
+ {
+ if(b == '\n' || b == '\r')
+ {
+ if(reader_.prefix(2) == "\r\n") { reader_.forward(2); }
+ else { reader_.forward(); }
+ return '\n';
+ }
+ return '\0';
+ }
+
+ const c = reader_.peek();
+ if(c == '\x85')
+ {
+ reader_.forward();
+ return '\n';
+ }
+ if(c == '\u2028' || c == '\u2029')
+ {
+ reader_.forward();
+ return c;
+ }
+ return '\0';
+ }
+}
diff --git a/source/dyaml/serializer.d b/source/dyaml/serializer.d
new file mode 100644
index 0000000..cbaef63
--- /dev/null
+++ b/source/dyaml/serializer.d
@@ -0,0 +1,322 @@
+
+// Copyright Ferdinand Majerech 2011.
+// Distributed under the Boost Software License, Version 1.0.
+// (See accompanying file LICENSE_1_0.txt or copy at
+// http://www.boost.org/LICENSE_1_0.txt)
+
+/**
+ * YAML serializer.
+ * Code based on PyYAML: http://www.pyyaml.org
+ */
+module dyaml.serializer;
+
+
+import std.array;
+import std.format;
+import std.typecons;
+
+import dyaml.emitter;
+import dyaml.event;
+import dyaml.exception;
+import dyaml.node;
+import dyaml.resolver;
+import dyaml.tagdirective;
+import dyaml.token;
+
+
+package:
+
+///Serializes represented YAML nodes, generating events which are then emitted by Emitter.
+struct Serializer
+{
+ private:
+ ///Resolver used to determine which tags are automatically resolvable.
+ Resolver resolver_;
+
+ ///Do all document starts have to be specified explicitly?
+ Flag!"explicitStart" explicitStart_; + ///Do all document ends have to be specified explicitly? + Flag!"explicitEnd" explicitEnd_; + ///YAML version string. + string YAMLVersion_; + + ///Tag directives to emit. + TagDirective[] tagDirectives_; + + //TODO Use something with more deterministic memory usage. + ///Nodes with assigned anchors. + string[Node] anchors_; + ///Nodes with assigned anchors that are already serialized. + bool[Node] serializedNodes_; + ///ID of the last anchor generated. + uint lastAnchorID_ = 0; + + public: + /** + * Construct a Serializer. + * + * Params: + * resolver = Resolver used to determine which tags are automaticaly resolvable. + * explicitStart = Do all document starts have to be specified explicitly? + * explicitEnd = Do all document ends have to be specified explicitly? + * YAMLVersion = YAML version string. + * tagDirectives = Tag directives to emit. + */ + this(Resolver resolver, + const Flag!"explicitStart" explicitStart, + const Flag!"explicitEnd" explicitEnd, string YAMLVersion, + TagDirective[] tagDirectives) @safe + { + resolver_ = resolver; + explicitStart_ = explicitStart; + explicitEnd_ = explicitEnd; + YAMLVersion_ = YAMLVersion; + tagDirectives_ = tagDirectives; + } + + ///Begin the stream. + void startStream(EmitterT)(ref EmitterT emitter) @safe + { + emitter.emit(streamStartEvent(Mark(), Mark())); + } + + ///End the stream. + void endStream(EmitterT)(ref EmitterT emitter) @safe + { + emitter.emit(streamEndEvent(Mark(), Mark())); + } + + ///Serialize a node, emitting it in the process. + void serialize(EmitterT)(ref EmitterT emitter, ref Node node) @safe + { + emitter.emit(documentStartEvent(Mark(), Mark(), explicitStart_, + YAMLVersion_, tagDirectives_)); + anchorNode(node); + serializeNode(emitter, node); + emitter.emit(documentEndEvent(Mark(), Mark(), explicitEnd_)); + serializedNodes_.destroy(); + anchors_.destroy(); + string[Node] emptyAnchors; + anchors_ = emptyAnchors; + lastAnchorID_ = 0; + } + + private: + /** + * Determine if it's a good idea to add an anchor to a node. + * + * Used to prevent associating every single repeating scalar with an + * anchor/alias - only nodes long enough can use anchors. + * + * Params: node = Node to check for anchorability. + * + * Returns: True if the node is anchorable, false otherwise. + */ + static bool anchorable(ref Node node) @safe + { + if(node.nodeID == NodeID.scalar) + { + return (node.type == NodeType.string) ? node.as!string.length > 64 : + (node.type == NodeType.binary) ? node.as!(ubyte[]).length > 64 : + false; + } + return node.length > 2; + } + + @safe unittest + { + import std.string : representation; + auto shortString = "not much"; + auto longString = "A fairly long string that would be a good idea to add an anchor to"; + auto node1 = Node(shortString); + auto node2 = Node(shortString.representation.dup); + auto node3 = Node(longString); + auto node4 = Node(longString.representation.dup); + auto node5 = Node([node1]); + auto node6 = Node([node1, node2, node3, node4]); + assert(!anchorable(node1)); + assert(!anchorable(node2)); + assert(anchorable(node3)); + assert(anchorable(node4)); + assert(!anchorable(node5)); + assert(anchorable(node6)); + } + + ///Add an anchor to the node if it's anchorable and not anchored yet. 
+ void anchorNode(ref Node node) @safe
+ {
+ if(!anchorable(node)){return;}
+
+ if((node in anchors_) !is null)
+ {
+ if(anchors_[node] is null)
+ {
+ anchors_[node] = generateAnchor();
+ }
+ return;
+ }
+
+ // Register the node on the first visit; a repeated visit above
+ // upgrades this null entry to a generated anchor.
+ anchors_[node] = null;
+ final switch (node.nodeID)
+ {
+ case NodeID.mapping:
+ foreach(ref Node key, ref Node value; node)
+ {
+ anchorNode(key);
+ anchorNode(value);
+ }
+ break;
+ case NodeID.sequence:
+ foreach(ref Node item; node)
+ {
+ anchorNode(item);
+ }
+ break;
+ case NodeID.invalid:
+ assert(0);
+ case NodeID.scalar:
+ }
+ }
+
+ ///Generate and return a new anchor.
+ string generateAnchor() @safe
+ {
+ ++lastAnchorID_;
+ auto appender = appender!string();
+ formattedWrite(appender, "id%03d", lastAnchorID_);
+ return appender.data;
+ }
+
+ ///Serialize a node and all its subnodes.
+ void serializeNode(EmitterT)(ref EmitterT emitter, ref Node node) @safe
+ {
+ //If the node has an anchor, emit it with its anchor on the first
+ //occurrence, save it in serializedNodes_, and emit an aliasEvent
+ //if it reappears.
+ string aliased;
+ if(anchorable(node) && (node in anchors_) !is null)
+ {
+ aliased = anchors_[node];
+ if((node in serializedNodes_) !is null)
+ {
+ emitter.emit(aliasEvent(Mark(), Mark(), aliased));
+ return;
+ }
+ serializedNodes_[node] = true;
+ }
+ final switch (node.nodeID)
+ {
+ case NodeID.mapping:
+ const defaultTag = resolver_.defaultMappingTag;
+ const implicit = node.tag_ == defaultTag;
+ emitter.emit(mappingStartEvent(Mark(), Mark(), aliased, node.tag_,
+ implicit, node.collectionStyle));
+ foreach(ref Node key, ref Node value; node)
+ {
+ serializeNode(emitter, key);
+ serializeNode(emitter, value);
+ }
+ emitter.emit(mappingEndEvent(Mark(), Mark()));
+ return;
+ case NodeID.sequence:
+ const defaultTag = resolver_.defaultSequenceTag;
+ const implicit = node.tag_ == defaultTag;
+ emitter.emit(sequenceStartEvent(Mark(), Mark(), aliased, node.tag_,
+ implicit, node.collectionStyle));
+ foreach(ref Node item; node)
+ {
+ serializeNode(emitter, item);
+ }
+ emitter.emit(sequenceEndEvent(Mark(), Mark()));
+ return;
+ case NodeID.scalar:
+ assert(node.type == NodeType.string, "Scalar node type must be string before serialized");
+ auto value = node.as!string;
+ const detectedTag = resolver_.resolve(NodeID.scalar, null, value, true);
+ const bool isDetected = node.tag_ == detectedTag;
+
+ emitter.emit(scalarEvent(Mark(), Mark(), aliased, node.tag_,
+ isDetected, value.idup, node.scalarStyle));
+ return;
+ case NodeID.invalid:
+ assert(0);
+ }
+ }
+}
+
+// Issue #244
+@safe unittest
+{
+ import dyaml.dumper : dumper;
+ auto node = Node([
+ Node.Pair(
+ Node(""),
+ Node([
+ Node([
+ Node.Pair(
+ Node("d"),
+ Node([
+ Node([
+ Node.Pair(
+ Node("c"),
+ Node("")
+ ),
+ Node.Pair(
+ Node("b"),
+ Node("")
+ ),
+ Node.Pair(
+ Node(""),
+ Node("")
+ )
+ ])
+ ])
+ ),
+ ]),
+ Node([
+ Node.Pair(
+ Node("d"),
+ Node([
+ Node(""),
+ Node(""),
+ Node([
+ Node.Pair(
+ Node("c"),
+ Node("")
+ ),
+ Node.Pair(
+ Node("b"),
+ Node("")
+ ),
+ Node.Pair(
+ Node(""),
+ Node("")
+ )
+ ])
+ ])
+ ),
+ Node.Pair(
+ Node("z"),
+ Node("")
+ ),
+ Node.Pair(
+ Node(""),
+ Node("")
+ )
+ ]),
+ Node("")
+ ])
+ ),
+ Node.Pair(
+ Node("g"),
+ Node("")
+ ),
+ Node.Pair(
+ Node("h"),
+ Node("")
+ ),
+ ]);
+
+ auto stream = appender!string();
+ dumper().dump(stream, node);
+}
diff --git a/source/dyaml/stdsumtype.d b/source/dyaml/stdsumtype.d
new file mode 100644
index 0000000..3dac4dd
--- /dev/null
+++ b/source/dyaml/stdsumtype.d
@@ -0,0 +1,2627 @@
+/++
+ This module was copied from Phobos
at commit 87c6e7e35 (2022-07-06).
+ This is necessary to include https://github.com/dlang/phobos/pull/8501
+ which is a fix needed for DIP1000 compatibility. A couple of minor changes
+ were also required to deal with `package(std)` imports.
+
+[SumType] is a generic discriminated union implementation that uses
+design-by-introspection to generate safe and efficient code. Its features
+include:
+
+* [Pattern matching.][match]
+* Support for self-referential types.
+* Full attribute correctness (`pure`, `@safe`, `@nogc`, and `nothrow` are
+ inferred whenever possible).
+* A type-safe and memory-safe API compatible with DIP 1000 (`scope`).
+* No dependency on runtime type information (`TypeInfo`).
+* Compatibility with BetterC.
+
+License: Boost License 1.0
+Authors: Paul Backus
+Source: $(PHOBOSSRC std/sumtype.d)
++/
+module dyaml.stdsumtype;
+
+/// $(DIVID basic-usage,$(H3 Basic usage))
+version (D_BetterC) {} else
+@safe unittest
+{
+ import std.math.operations : isClose;
+
+ struct Fahrenheit { double degrees; }
+ struct Celsius { double degrees; }
+ struct Kelvin { double degrees; }
+
+ alias Temperature = SumType!(Fahrenheit, Celsius, Kelvin);
+
+ // Construct from any of the member types.
+ Temperature t1 = Fahrenheit(98.6);
+ Temperature t2 = Celsius(100);
+ Temperature t3 = Kelvin(273);
+
+ // Use pattern matching to access the value.
+ Fahrenheit toFahrenheit(Temperature t)
+ {
+ return Fahrenheit(
+ t.match!(
+ (Fahrenheit f) => f.degrees,
+ (Celsius c) => c.degrees * 9.0/5 + 32,
+ (Kelvin k) => k.degrees * 9.0/5 - 459.4
+ )
+ );
+ }
+
+ assert(toFahrenheit(t1).degrees.isClose(98.6));
+ assert(toFahrenheit(t2).degrees.isClose(212));
+ assert(toFahrenheit(t3).degrees.isClose(32));
+
+ // Use ref to modify the value in place.
+ void freeze(ref Temperature t)
+ {
+ t.match!(
+ (ref Fahrenheit f) => f.degrees = 32,
+ (ref Celsius c) => c.degrees = 0,
+ (ref Kelvin k) => k.degrees = 273
+ );
+ }
+
+ freeze(t1);
+ assert(toFahrenheit(t1).degrees.isClose(32));
+
+ // Use a catch-all handler to give a default result.
+ bool isFahrenheit(Temperature t)
+ {
+ return t.match!(
+ (Fahrenheit f) => true,
+ _ => false
+ );
+ }
+
+ assert(isFahrenheit(t1));
+ assert(!isFahrenheit(t2));
+ assert(!isFahrenheit(t3));
+}
+
+/** $(DIVID introspection-based-matching, $(H3 Introspection-based matching))
+ *
+ * In the `length` and `horiz` functions below, the handlers for `match` do not
+ * specify the types of their arguments. Instead, matching is done based on how
+ * the argument is used in the body of the handler: any type with `x` and `y`
+ * properties will be matched by the `rect` handlers, and any type with `r` and
+ * `theta` properties will be matched by the `polar` handlers.
+ */ +version (D_BetterC) {} else +@safe unittest +{ + import std.math.operations : isClose; + import std.math.trigonometry : cos; + import std.math.constants : PI; + import std.math.algebraic : sqrt; + + struct Rectangular { double x, y; } + struct Polar { double r, theta; } + alias Vector = SumType!(Rectangular, Polar); + + double length(Vector v) + { + return v.match!( + rect => sqrt(rect.x^^2 + rect.y^^2), + polar => polar.r + ); + } + + double horiz(Vector v) + { + return v.match!( + rect => rect.x, + polar => polar.r * cos(polar.theta) + ); + } + + Vector u = Rectangular(1, 1); + Vector v = Polar(1, PI/4); + + assert(length(u).isClose(sqrt(2.0))); + assert(length(v).isClose(1)); + assert(horiz(u).isClose(1)); + assert(horiz(v).isClose(sqrt(0.5))); +} + +/** $(DIVID arithmetic-expression-evaluator, $(H3 Arithmetic expression evaluator)) + * + * This example makes use of the special placeholder type `This` to define a + * [recursive data type](https://en.wikipedia.org/wiki/Recursive_data_type): an + * [abstract syntax tree](https://en.wikipedia.org/wiki/Abstract_syntax_tree) for + * representing simple arithmetic expressions. + */ +version (D_BetterC) {} else +@system unittest +{ + import std.functional : partial; + import std.traits : EnumMembers; + import std.typecons : Tuple; + + enum Op : string + { + Plus = "+", + Minus = "-", + Times = "*", + Div = "/" + } + + // An expression is either + // - a number, + // - a variable, or + // - a binary operation combining two sub-expressions. + alias Expr = SumType!( + double, + string, + Tuple!(Op, "op", This*, "lhs", This*, "rhs") + ); + + // Shorthand for Tuple!(Op, "op", Expr*, "lhs", Expr*, "rhs"), + // the Tuple type above with Expr substituted for This. + alias BinOp = Expr.Types[2]; + + // Factory function for number expressions + Expr* num(double value) + { + return new Expr(value); + } + + // Factory function for variable expressions + Expr* var(string name) + { + return new Expr(name); + } + + // Factory function for binary operation expressions + Expr* binOp(Op op, Expr* lhs, Expr* rhs) + { + return new Expr(BinOp(op, lhs, rhs)); + } + + // Convenience wrappers for creating BinOp expressions + alias sum = partial!(binOp, Op.Plus); + alias diff = partial!(binOp, Op.Minus); + alias prod = partial!(binOp, Op.Times); + alias quot = partial!(binOp, Op.Div); + + // Evaluate expr, looking up variables in env + double eval(Expr expr, double[string] env) + { + return expr.match!( + (double num) => num, + (string var) => env[var], + (BinOp bop) + { + double lhs = eval(*bop.lhs, env); + double rhs = eval(*bop.rhs, env); + final switch (bop.op) + { + static foreach (op; EnumMembers!Op) + { + case op: + return mixin("lhs" ~ op ~ "rhs"); + } + } + } + ); + } + + // Return a "pretty-printed" representation of expr + string pprint(Expr expr) + { + import std.format : format; + + return expr.match!( + (double num) => "%g".format(num), + (string var) => var, + (BinOp bop) => "(%s %s %s)".format( + pprint(*bop.lhs), + cast(string) bop.op, + pprint(*bop.rhs) + ) + ); + } + + Expr* myExpr = sum(var("a"), prod(num(2), var("b"))); + double[string] myEnv = ["a":3, "b":4, "c":7]; + + assert(eval(*myExpr, myEnv) == 11); + assert(pprint(*myExpr) == "(a + (2 * b))"); +} + +import std.format.spec : FormatSpec, singleSpec; +import std.meta : AliasSeq, Filter, IndexOf = staticIndexOf, Map = staticMap; +import std.meta : NoDuplicates; +import std.meta : anySatisfy, allSatisfy; +import std.traits : hasElaborateCopyConstructor, hasElaborateDestructor; +import 
std.traits : isAssignable, isCopyable, isStaticArray, isRvalueAssignable;
+import std.traits : ConstOf, ImmutableOf, InoutOf, TemplateArgsOf;
+
+// FIXME: std.sumtype : `std.traits : DeducedParameterType` and `std.conv : toCtString`
+// are `package(std)` but trivial, hence copied below
+import std.traits : CommonType, /*DeducedParameterType*/ Unqual;
+private template DeducedParameterType(T)
+{
+ static if (is(T == U*, U) || is(T == U[], U))
+ alias DeducedParameterType = Unqual!T;
+ else
+ alias DeducedParameterType = T;
+}
+
+import std.typecons : ReplaceTypeUnless;
+import std.typecons : Flag;
+//import std.conv : toCtString;
+private enum toCtString(ulong n) = n.stringof[0 .. $ - "LU".length];
+
+/// Placeholder used to refer to the enclosing [SumType].
+struct This {}
+
+// True if a variable of type T can appear on the lhs of an assignment
+private enum isAssignableTo(T) =
+ isAssignable!T || (!isCopyable!T && isRvalueAssignable!T);
+
+// toHash is required by the language spec to be nothrow and @safe
+private enum isHashable(T) = __traits(compiles,
+ () nothrow @safe { hashOf(T.init); }
+);
+
+private enum hasPostblit(T) = __traits(hasPostblit, T);
+
+private enum isInout(T) = is(T == inout);
+
+/**
+ * A [tagged union](https://en.wikipedia.org/wiki/Tagged_union) that can hold a
+ * single value from any of a specified set of types.
+ *
+ * The value in a `SumType` can be operated on using [pattern matching][match].
+ *
+ * To avoid ambiguity, duplicate types are not allowed (but see the
+ * ["basic usage" example](#basic-usage) for a workaround).
+ *
+ * The special type `This` can be used as a placeholder to create
+ * self-referential types, just like with `Algebraic`. See the
+ * ["Arithmetic expression evaluator" example](#arithmetic-expression-evaluator) for
+ * usage.
+ *
+ * A `SumType` is initialized by default to hold the `.init` value of its
+ * first member type, just like a regular union. The version identifier
+ * `SumTypeNoDefaultCtor` can be used to disable this behavior.
+ *
+ * See_Also: $(REF Algebraic, std,variant)
+ */
+struct SumType(Types...)
+if (is(NoDuplicates!Types == Types) && Types.length > 0)
+{
+ /// The types a `SumType` can hold.
+ alias Types = AliasSeq!(
+ ReplaceTypeUnless!(isSumTypeInstance, This, typeof(this), TemplateArgsOf!SumType)
+ );
+
+private:
+
+ enum bool canHoldTag(T) = Types.length <= T.max;
+ alias unsignedInts = AliasSeq!(ubyte, ushort, uint, ulong);
+
+ alias Tag = Filter!(canHoldTag, unsignedInts)[0];
+
+ union Storage
+ {
+ // Workaround for https://issues.dlang.org/show_bug.cgi?id=20068
+ template memberName(T)
+ if (IndexOf!(T, Types) >= 0)
+ {
+ enum tid = IndexOf!(T, Types);
+ mixin("enum memberName = `values_", toCtString!tid, "`;");
+ }
+
+ static foreach (T; Types)
+ {
+ mixin("T ", memberName!T, ";");
+ }
+ }
+
+ Storage storage;
+ Tag tag;
+
+ /* Accesses the value stored in a SumType.
+ *
+ * This method is memory-safe, provided that:
+ *
+ * 1. A SumType's tag is always accurate.
+ * 2. A SumType cannot be assigned to in @safe code if that assignment
+ * could cause unsafe aliasing.
+ *
+ * All code that accesses a SumType's tag or storage directly, including
+ * @safe code in this module, must be manually checked to ensure that it
+ * does not violate either of the above requirements.
+ */ + @trusted + ref inout(T) get(T)() inout + if (IndexOf!(T, Types) >= 0) + { + enum tid = IndexOf!(T, Types); + assert(tag == tid, + "This `" ~ SumType.stringof ~ + "` does not contain a(n) `" ~ T.stringof ~ "`" + ); + return __traits(getMember, storage, Storage.memberName!T); + } + +public: + + // Workaround for https://issues.dlang.org/show_bug.cgi?id=21399 + version (StdDdoc) + { + // Dummy type to stand in for loop variable + private struct T; + + /// Constructs a `SumType` holding a specific value. + this(T value); + + /// ditto + this(const(T) value) const; + + /// ditto + this(immutable(T) value) immutable; + + /// ditto + this(Value)(Value value) inout + if (is(Value == DeducedParameterType!(inout(T)))); + } + + static foreach (tid, T; Types) + { + /// Constructs a `SumType` holding a specific value. + this(T value) + { + import core.lifetime : forward; + + static if (isCopyable!T) + { + // Workaround for https://issues.dlang.org/show_bug.cgi?id=21542 + __traits(getMember, storage, Storage.memberName!T) = __ctfe ? value : forward!value; + } + else + { + __traits(getMember, storage, Storage.memberName!T) = forward!value; + } + + tag = tid; + } + + static if (isCopyable!(const(T))) + { + static if (IndexOf!(const(T), Map!(ConstOf, Types)) == tid) + { + /// ditto + this(const(T) value) const + { + __traits(getMember, storage, Storage.memberName!T) = value; + tag = tid; + } + } + } + else + { + @disable this(const(T) value) const; + } + + static if (isCopyable!(immutable(T))) + { + static if (IndexOf!(immutable(T), Map!(ImmutableOf, Types)) == tid) + { + /// ditto + this(immutable(T) value) immutable + { + __traits(getMember, storage, Storage.memberName!T) = value; + tag = tid; + } + } + } + else + { + @disable this(immutable(T) value) immutable; + } + + static if (isCopyable!(inout(T))) + { + static if (IndexOf!(inout(T), Map!(InoutOf, Types)) == tid) + { + /// ditto + this(Value)(Value value) inout + if (is(Value == DeducedParameterType!(inout(T)))) + { + __traits(getMember, storage, Storage.memberName!T) = value; + tag = tid; + } + } + } + else + { + @disable this(Value)(Value value) inout + if (is(Value == DeducedParameterType!(inout(T)))); + } + } + + static if (anySatisfy!(hasElaborateCopyConstructor, Types)) + { + static if + ( + allSatisfy!(isCopyable, Map!(InoutOf, Types)) + && !anySatisfy!(hasPostblit, Map!(InoutOf, Types)) + && allSatisfy!(isInout, Map!(InoutOf, Types)) + ) + { + /// Constructs a `SumType` that's a copy of another `SumType`. 
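+ /// (Illustrative note, not from the original source: `SumType!S a; auto b = a;`
+ /// goes through this overload when a member type defines an elaborate copy
+ /// constructor; it copies the active member and the tag.)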
+ this(ref inout(SumType) other) inout + { + storage = other.match!((ref value) { + alias OtherTypes = Map!(InoutOf, Types); + enum tid = IndexOf!(typeof(value), OtherTypes); + alias T = Types[tid]; + + mixin("inout(Storage) newStorage = { ", + Storage.memberName!T, ": value", + " };"); + + return newStorage; + }); + + tag = other.tag; + } + } + else + { + static if (allSatisfy!(isCopyable, Types)) + { + /// ditto + this(ref SumType other) + { + storage = other.match!((ref value) { + alias T = typeof(value); + + mixin("Storage newStorage = { ", + Storage.memberName!T, ": value", + " };"); + + return newStorage; + }); + + tag = other.tag; + } + } + else + { + @disable this(ref SumType other); + } + + static if (allSatisfy!(isCopyable, Map!(ConstOf, Types))) + { + /// ditto + this(ref const(SumType) other) const + { + storage = other.match!((ref value) { + alias OtherTypes = Map!(ConstOf, Types); + enum tid = IndexOf!(typeof(value), OtherTypes); + alias T = Types[tid]; + + mixin("const(Storage) newStorage = { ", + Storage.memberName!T, ": value", + " };"); + + return newStorage; + }); + + tag = other.tag; + } + } + else + { + @disable this(ref const(SumType) other) const; + } + + static if (allSatisfy!(isCopyable, Map!(ImmutableOf, Types))) + { + /// ditto + this(ref immutable(SumType) other) immutable + { + storage = other.match!((ref value) { + alias OtherTypes = Map!(ImmutableOf, Types); + enum tid = IndexOf!(typeof(value), OtherTypes); + alias T = Types[tid]; + + mixin("immutable(Storage) newStorage = { ", + Storage.memberName!T, ": value", + " };"); + + return newStorage; + }); + + tag = other.tag; + } + } + else + { + @disable this(ref immutable(SumType) other) immutable; + } + } + } + + version (SumTypeNoDefaultCtor) + { + @disable this(); + } + + // Workaround for https://issues.dlang.org/show_bug.cgi?id=21399 + version (StdDdoc) + { + // Dummy type to stand in for loop variable + private struct T; + + /** + * Assigns a value to a `SumType`. + * + * If any of the `SumType`'s members other than the one being assigned + * to contain pointers or references, it is possible for the assignment + * to cause memory corruption (see the + * ["Memory corruption" example](#memory-corruption) below for an + * illustration of how). Therefore, such assignments are considered + * `@system`. + * + * An individual assignment can be `@trusted` if the caller can + * guarantee that there are no outstanding references to any `SumType` + * members that contain pointers or references at the time the + * assignment occurs. + * + * Examples: + * + * $(DIVID memory-corruption, $(H3 Memory corruption)) + * + * This example shows how assignment to a `SumType` can be used to + * cause memory corruption in `@system` code. In `@safe` code, the + * assignment `s = 123` would not be allowed. + * + * --- + * SumType!(int*, int) s = new int; + * s.tryMatch!( + * (ref int* p) { + * s = 123; // overwrites `p` + * return *p; // undefined behavior + * } + * ); + * --- + */ + ref SumType opAssign(T rhs); + } + + static foreach (tid, T; Types) + { + static if (isAssignableTo!T) + { + /** + * Assigns a value to a `SumType`. + * + * If any of the `SumType`'s members other than the one being assigned + * to contain pointers or references, it is possible for the assignment + * to cause memory corruption (see the + * ["Memory corruption" example](#memory-corruption) below for an + * illustration of how). Therefore, such assignments are considered + * `@system`. 
+ * + * An individual assignment can be `@trusted` if the caller can + * guarantee that there are no outstanding references to any `SumType` + * members that contain pointers or references at the time the + * assignment occurs. + * + * Examples: + * + * $(DIVID memory-corruption, $(H3 Memory corruption)) + * + * This example shows how assignment to a `SumType` can be used to + * cause memory corruption in `@system` code. In `@safe` code, the + * assignment `s = 123` would not be allowed. + * + * --- + * SumType!(int*, int) s = new int; + * s.tryMatch!( + * (ref int* p) { + * s = 123; // overwrites `p` + * return *p; // undefined behavior + * } + * ); + * --- + */ + ref SumType opAssign(T rhs) + { + import core.lifetime : forward; + import std.traits : hasIndirections, hasNested; + import std.meta : AliasSeq, Or = templateOr; + + alias OtherTypes = + AliasSeq!(Types[0 .. tid], Types[tid + 1 .. $]); + enum unsafeToOverwrite = + anySatisfy!(Or!(hasIndirections, hasNested), OtherTypes); + + static if (unsafeToOverwrite) + { + cast(void) () @system {}(); + } + + this.match!destroyIfOwner; + + static if (isCopyable!T) + { + // Workaround for https://issues.dlang.org/show_bug.cgi?id=21542 + mixin("Storage newStorage = { ", + Storage.memberName!T, ": __ctfe ? rhs : forward!rhs", + " };"); + } + else + { + mixin("Storage newStorage = { ", + Storage.memberName!T, ": forward!rhs", + " };"); + } + + storage = newStorage; + tag = tid; + + return this; + } + } + } + + static if (allSatisfy!(isAssignableTo, Types)) + { + static if (allSatisfy!(isCopyable, Types)) + { + /** + * Copies the value from another `SumType` into this one. + * + * See the value-assignment overload for details on `@safe`ty. + * + * Copy assignment is `@disable`d if any of `Types` is non-copyable. + */ + ref SumType opAssign(ref SumType rhs) + { + rhs.match!((ref value) { this = value; }); + return this; + } + } + else + { + @disable ref SumType opAssign(ref SumType rhs); + } + + /** + * Moves the value from another `SumType` into this one. + * + * See the value-assignment overload for details on `@safe`ty. + */ + ref SumType opAssign(SumType rhs) + { + import core.lifetime : move; + + rhs.match!((ref value) { + static if (isCopyable!(typeof(value))) + { + // Workaround for https://issues.dlang.org/show_bug.cgi?id=21542 + this = __ctfe ? value : move(value); + } + else + { + this = move(value); + } + }); + return this; + } + } + + /** + * Compares two `SumType`s for equality. + * + * Two `SumType`s are equal if they are the same kind of `SumType`, they + * contain values of the same type, and those values are equal. 
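+ *
+ * Illustrative example (not from the original source):
+ * ---
+ * assert(SumType!(int, float)(123) == SumType!(int, float)(123));
+ * // Held types differ (int vs float), so this compares unequal:
+ * assert(SumType!(int, float)(123) != SumType!(int, float)(123.0));
+ * ---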
+ */ + bool opEquals(this This, Rhs)(auto ref Rhs rhs) + if (!is(CommonType!(This, Rhs) == void)) + { + static if (is(This == Rhs)) + { + return AliasSeq!(this, rhs).match!((ref value, ref rhsValue) { + static if (is(typeof(value) == typeof(rhsValue))) + { + return value == rhsValue; + } + else + { + return false; + } + }); + } + else + { + alias CommonSumType = CommonType!(This, Rhs); + return cast(CommonSumType) this == cast(CommonSumType) rhs; + } + } + + // Workaround for https://issues.dlang.org/show_bug.cgi?id=19407 + static if (__traits(compiles, anySatisfy!(hasElaborateDestructor, Types))) + { + // If possible, include the destructor only when it's needed + private enum includeDtor = anySatisfy!(hasElaborateDestructor, Types); + } + else + { + // If we can't tell, always include it, even when it does nothing + private enum includeDtor = true; + } + + static if (includeDtor) + { + /// Calls the destructor of the `SumType`'s current value. + ~this() + { + this.match!destroyIfOwner; + } + } + + invariant + { + this.match!((ref value) { + static if (is(typeof(value) == class)) + { + if (value !is null) + { + assert(value); + } + } + else static if (is(typeof(value) == struct)) + { + assert(&value); + } + }); + } + + // Workaround for https://issues.dlang.org/show_bug.cgi?id=21400 + version (StdDdoc) + { + /** + * Returns a string representation of the `SumType`'s current value. + * + * Not available when compiled with `-betterC`. + */ + string toString(this This)(); + + /** + * Handles formatted writing of the `SumType`'s current value. + * + * Not available when compiled with `-betterC`. + * + * Params: + * sink = Output range to write to. + * fmt = Format specifier to use. + * + * See_Also: $(REF formatValue, std,format) + */ + void toString(this This, Sink, Char)(ref Sink sink, const ref FormatSpec!Char fmt); + } + + version (D_BetterC) {} else + /** + * Returns a string representation of the `SumType`'s current value. + * + * Not available when compiled with `-betterC`. + */ + string toString(this This)() + { + import std.conv : to; + + return this.match!(to!string); + } + + version (D_BetterC) {} else + /** + * Handles formatted writing of the `SumType`'s current value. + * + * Not available when compiled with `-betterC`. + * + * Params: + * sink = Output range to write to. + * fmt = Format specifier to use. + * + * See_Also: $(REF formatValue, std,format) + */ + void toString(this This, Sink, Char)(ref Sink sink, const ref FormatSpec!Char fmt) + { + import std.format.write : formatValue; + + this.match!((ref value) { + formatValue(sink, value, fmt); + }); + } + + static if (allSatisfy!(isHashable, Map!(ConstOf, Types))) + { + // Workaround for https://issues.dlang.org/show_bug.cgi?id=21400 + version (StdDdoc) + { + /** + * Returns the hash of the `SumType`'s current value. + * + * Not available when compiled with `-betterC`. + */ + size_t toHash() const; + } + + // Workaround for https://issues.dlang.org/show_bug.cgi?id=20095 + version (D_BetterC) {} else + /** + * Returns the hash of the `SumType`'s current value. + * + * Not available when compiled with `-betterC`. 
+ */ + size_t toHash() const + { + return this.match!hashOf; + } + } +} + +// Construction +@safe unittest +{ + alias MySum = SumType!(int, float); + + MySum x = MySum(42); + MySum y = MySum(3.14); +} + +// Assignment +@safe unittest +{ + alias MySum = SumType!(int, float); + + MySum x = MySum(42); + x = 3.14; +} + +// Self assignment +@safe unittest +{ + alias MySum = SumType!(int, float); + + MySum x = MySum(42); + MySum y = MySum(3.14); + y = x; +} + +// Equality +@safe unittest +{ + alias MySum = SumType!(int, float); + + assert(MySum(123) == MySum(123)); + assert(MySum(123) != MySum(456)); + assert(MySum(123) != MySum(123.0)); + assert(MySum(123) != MySum(456.0)); + +} + +// Equality of differently-qualified SumTypes +// Disabled in BetterC due to use of dynamic arrays +version (D_BetterC) {} else +@safe unittest +{ + alias SumA = SumType!(int, float); + alias SumB = SumType!(const(int[]), int[]); + alias SumC = SumType!(int[], const(int[])); + + int[] ma = [1, 2, 3]; + const(int[]) ca = [1, 2, 3]; + + assert(const(SumA)(123) == SumA(123)); + assert(const(SumB)(ma[]) == SumB(ca[])); + assert(const(SumC)(ma[]) == SumC(ca[])); +} + +// Imported types +@safe unittest +{ + import std.typecons : Tuple; + + alias MySum = SumType!(Tuple!(int, int)); +} + +// const and immutable types +@safe unittest +{ + alias MySum = SumType!(const(int[]), immutable(float[])); +} + +// Recursive types +@safe unittest +{ + alias MySum = SumType!(This*); + assert(is(MySum.Types[0] == MySum*)); +} + +// Allowed types +@safe unittest +{ + import std.meta : AliasSeq; + + alias MySum = SumType!(int, float, This*); + + assert(is(MySum.Types == AliasSeq!(int, float, MySum*))); +} + +// Types with destructors and postblits +@system unittest +{ + int copies; + + static struct Test + { + bool initialized = false; + int* copiesPtr; + + this(this) { (*copiesPtr)++; } + ~this() { if (initialized) (*copiesPtr)--; } + } + + alias MySum = SumType!(int, Test); + + Test t = Test(true, &copies); + + { + MySum x = t; + assert(copies == 1); + } + assert(copies == 0); + + { + MySum x = 456; + assert(copies == 0); + } + assert(copies == 0); + + { + MySum x = t; + assert(copies == 1); + x = 456; + assert(copies == 0); + } + + { + MySum x = 456; + assert(copies == 0); + x = t; + assert(copies == 1); + } + + { + MySum x = t; + MySum y = x; + assert(copies == 2); + } + + { + MySum x = t; + MySum y; + y = x; + assert(copies == 2); + } +} + +// Doesn't destroy reference types +// Disabled in BetterC due to use of classes +version (D_BetterC) {} else +@system unittest +{ + bool destroyed; + + class C + { + ~this() + { + destroyed = true; + } + } + + struct S + { + ~this() {} + } + + alias MySum = SumType!(S, C); + + C c = new C(); + { + MySum x = c; + destroyed = false; + } + assert(!destroyed); + + { + MySum x = c; + destroyed = false; + x = S(); + assert(!destroyed); + } +} + +// Types with @disable this() +@safe unittest +{ + static struct NoInit + { + @disable this(); + } + + alias MySum = SumType!(NoInit, int); + + assert(!__traits(compiles, MySum())); + auto _ = MySum(42); +} + +// const SumTypes +version (D_BetterC) {} else // not @nogc, https://issues.dlang.org/show_bug.cgi?id=22117 +@safe unittest +{ + auto _ = const(SumType!(int[]))([1, 2, 3]); +} + +// Equality of const SumTypes +@safe unittest +{ + alias MySum = SumType!int; + + auto _ = const(MySum)(123) == const(MySum)(456); +} + +// Compares reference types using value equality +@safe unittest +{ + import std.array : staticArray; + + static struct Field {} + static struct 
Struct { Field[] fields; } + alias MySum = SumType!Struct; + + static arr1 = staticArray([Field()]); + static arr2 = staticArray([Field()]); + + auto a = MySum(Struct(arr1[])); + auto b = MySum(Struct(arr2[])); + + assert(a == b); +} + +// toString +// Disabled in BetterC due to use of std.conv.text +version (D_BetterC) {} else +@safe unittest +{ + import std.conv : text; + + static struct Int { int i; } + static struct Double { double d; } + alias Sum = SumType!(Int, Double); + + assert(Sum(Int(42)).text == Int(42).text, Sum(Int(42)).text); + assert(Sum(Double(33.3)).text == Double(33.3).text, Sum(Double(33.3)).text); + assert((const(Sum)(Int(42))).text == (const(Int)(42)).text, (const(Sum)(Int(42))).text); +} + +// string formatting +// Disabled in BetterC due to use of std.format.format +version (D_BetterC) {} else +@safe unittest +{ + import std.format : format; + + SumType!int x = 123; + + assert(format!"%s"(x) == format!"%s"(123)); + assert(format!"%x"(x) == format!"%x"(123)); +} + +// string formatting of qualified SumTypes +// Disabled in BetterC due to use of std.format.format and dynamic arrays +version (D_BetterC) {} else +@safe unittest +{ + import std.format : format; + + int[] a = [1, 2, 3]; + const(SumType!(int[])) x = a; + + assert(format!"%(%d, %)"(x) == format!"%(%s, %)"(a)); +} + +// Github issue #16 +// Disabled in BetterC due to use of dynamic arrays +version (D_BetterC) {} else +@safe unittest +{ + alias Node = SumType!(This[], string); + + // override inference of @system attribute for cyclic functions + assert((() @trusted => + Node([Node([Node("x")])]) + == + Node([Node([Node("x")])]) + )()); +} + +// Github issue #16 with const +// Disabled in BetterC due to use of dynamic arrays +version (D_BetterC) {} else +@safe unittest +{ + alias Node = SumType!(const(This)[], string); + + // override inference of @system attribute for cyclic functions + assert((() @trusted => + Node([Node([Node("x")])]) + == + Node([Node([Node("x")])]) + )()); +} + +// Stale pointers +// Disabled in BetterC due to use of dynamic arrays +version (D_BetterC) {} else +@system unittest +{ + alias MySum = SumType!(ubyte, void*[2]); + + MySum x = [null, cast(void*) 0x12345678]; + void** p = &x.get!(void*[2])[1]; + x = ubyte(123); + + assert(*p != cast(void*) 0x12345678); +} + +// Exception-safe assignment +// Disabled in BetterC due to use of exceptions +version (D_BetterC) {} else +@safe unittest +{ + static struct A + { + int value = 123; + } + + static struct B + { + int value = 456; + this(this) { throw new Exception("oops"); } + } + + alias MySum = SumType!(A, B); + + MySum x; + try + { + x = B(); + } + catch (Exception e) {} + + assert( + (x.tag == 0 && x.get!A.value == 123) || + (x.tag == 1 && x.get!B.value == 456) + ); +} + +// Types with @disable this(this) +@safe unittest +{ + import core.lifetime : move; + + static struct NoCopy + { + @disable this(this); + } + + alias MySum = SumType!NoCopy; + + NoCopy lval = NoCopy(); + + MySum x = NoCopy(); + MySum y = NoCopy(); + + + assert(!__traits(compiles, SumType!NoCopy(lval))); + + y = NoCopy(); + y = move(x); + assert(!__traits(compiles, y = lval)); + assert(!__traits(compiles, y = x)); + + bool b = x == y; +} + +// Github issue #22 +// Disabled in BetterC due to use of std.typecons.Nullable +version (D_BetterC) {} else +@safe unittest +{ + import std.typecons; + + static struct A + { + SumType!(Nullable!int) a = Nullable!int.init; + } +} + +// Static arrays of structs with postblits +// Disabled in BetterC due to use of dynamic arrays 
+version (D_BetterC) {} else +@safe unittest +{ + static struct S + { + int n; + this(this) { n++; } + } + + SumType!(S[1]) x = [S(0)]; + SumType!(S[1]) y = x; + + auto xval = x.get!(S[1])[0].n; + auto yval = y.get!(S[1])[0].n; + + assert(xval != yval); +} + +// Replacement does not happen inside SumType +// Disabled in BetterC due to use of associative arrays +version (D_BetterC) {} else +@safe unittest +{ + import std.typecons : Tuple, ReplaceTypeUnless; + alias A = Tuple!(This*,SumType!(This*))[SumType!(This*,string)[This]]; + alias TR = ReplaceTypeUnless!(isSumTypeInstance, This, int, A); + static assert(is(TR == Tuple!(int*,SumType!(This*))[SumType!(This*, string)[int]])); +} + +// Supports nested self-referential SumTypes +@safe unittest +{ + import std.typecons : Tuple, Flag; + alias Nat = SumType!(Flag!"0", Tuple!(This*)); + alias Inner = SumType!Nat; + alias Outer = SumType!(Nat*, Tuple!(This*, This*)); +} + +// Self-referential SumTypes inside Algebraic +// Disabled in BetterC due to use of std.variant.Algebraic +version (D_BetterC) {} else +@safe unittest +{ + import std.variant : Algebraic; + + alias T = Algebraic!(SumType!(This*)); + + assert(is(T.AllowedTypes[0].Types[0] == T.AllowedTypes[0]*)); +} + +// Doesn't call @system postblits in @safe code +@safe unittest +{ + static struct SystemCopy { @system this(this) {} } + SystemCopy original; + + assert(!__traits(compiles, () @safe + { + SumType!SystemCopy copy = original; + })); + + assert(!__traits(compiles, () @safe + { + SumType!SystemCopy copy; copy = original; + })); +} + +// Doesn't overwrite pointers in @safe code +@safe unittest +{ + alias MySum = SumType!(int*, int); + + MySum x; + + assert(!__traits(compiles, () @safe + { + x = 123; + })); + + assert(!__traits(compiles, () @safe + { + x = MySum(123); + })); +} + +// Types with invariants +// Disabled in BetterC due to use of exceptions +version (D_BetterC) {} else +version (D_Invariants) +@system unittest +{ + import std.exception : assertThrown; + import core.exception : AssertError; + + struct S + { + int i; + invariant { assert(i >= 0); } + } + + class C + { + int i; + invariant { assert(i >= 0); } + } + + SumType!S x; + x.match!((ref v) { v.i = -1; }); + assertThrown!AssertError(assert(&x)); + + SumType!C y = new C(); + y.match!((ref v) { v.i = -1; }); + assertThrown!AssertError(assert(&y)); +} + +// Calls value postblit on self-assignment +@safe unittest +{ + static struct S + { + int n; + this(this) { n++; } + } + + SumType!S x = S(); + SumType!S y; + y = x; + + auto xval = x.get!S.n; + auto yval = y.get!S.n; + + assert(xval != yval); +} + +// Github issue #29 +@safe unittest +{ + alias A = SumType!string; + + @safe A createA(string arg) + { + return A(arg); + } + + @safe void test() + { + A a = createA(""); + } +} + +// SumTypes as associative array keys +// Disabled in BetterC due to use of associative arrays +version (D_BetterC) {} else +@safe unittest +{ + int[SumType!(int, string)] aa; +} + +// toString with non-copyable types +// Disabled in BetterC due to use of std.conv.to (in toString) +version (D_BetterC) {} else +@safe unittest +{ + struct NoCopy + { + @disable this(this); + } + + SumType!NoCopy x; + + auto _ = x.toString(); +} + +// Can use the result of assignment +@safe unittest +{ + alias MySum = SumType!(int, float); + + MySum a = MySum(123); + MySum b = MySum(3.14); + + assert((a = b) == b); + assert((a = MySum(123)) == MySum(123)); + assert((a = 3.14) == MySum(3.14)); + assert(((a = b) = MySum(123)) == MySum(123)); +} + +// Types with copy 
constructors +@safe unittest +{ + static struct S + { + int n; + + this(ref return scope inout S other) inout + { + n = other.n + 1; + } + } + + SumType!S x = S(); + SumType!S y = x; + + auto xval = x.get!S.n; + auto yval = y.get!S.n; + + assert(xval != yval); +} + +// Copyable by generated copy constructors +@safe unittest +{ + static struct Inner + { + ref this(ref inout Inner other) {} + } + + static struct Outer + { + SumType!Inner inner; + } + + Outer x; + Outer y = x; +} + +// Types with qualified copy constructors +@safe unittest +{ + static struct ConstCopy + { + int n; + this(inout int n) inout { this.n = n; } + this(ref const typeof(this) other) const { this.n = other.n; } + } + + static struct ImmutableCopy + { + int n; + this(inout int n) inout { this.n = n; } + this(ref immutable typeof(this) other) immutable { this.n = other.n; } + } + + const SumType!ConstCopy x = const(ConstCopy)(1); + immutable SumType!ImmutableCopy y = immutable(ImmutableCopy)(1); +} + +// Types with disabled opEquals +@safe unittest +{ + static struct S + { + @disable bool opEquals(const S rhs) const; + } + + auto _ = SumType!S(S()); +} + +// Types with non-const opEquals +@safe unittest +{ + static struct S + { + int i; + bool opEquals(S rhs) { return i == rhs.i; } + } + + auto _ = SumType!S(S(123)); +} + +// Incomparability of different SumTypes +@safe unittest +{ + SumType!(int, string) x = 123; + SumType!(string, int) y = 123; + + assert(!__traits(compiles, x != y)); +} + +// Self-reference in return/parameter type of function pointer member +// Disabled in BetterC due to use of delegates +version (D_BetterC) {} else +@safe unittest +{ + alias T = SumType!(int, This delegate(This)); +} + +// Construction and assignment from implicitly-convertible lvalue +@safe unittest +{ + alias MySum = SumType!bool; + + const(bool) b = true; + + MySum x = b; + MySum y; y = b; +} + +// @safe assignment to the only pointer type in a SumType +@safe unittest +{ + SumType!(string, int) sm = 123; + sm = "this should be @safe"; +} + +// Immutable member type with copy constructor +// https://issues.dlang.org/show_bug.cgi?id=22572 +@safe unittest +{ + static struct CopyConstruct + { + this(ref inout CopyConstruct other) inout {} + } + + static immutable struct Value + { + CopyConstruct c; + } + + SumType!Value s; +} + +// Construction of inout-qualified SumTypes +// https://issues.dlang.org/show_bug.cgi?id=22901 +@safe unittest +{ + static inout(SumType!(int[])) example(inout(int[]) arr) + { + return inout(SumType!(int[]))(arr); + } +} + +// Assignment of struct with overloaded opAssign in CTFE +// https://issues.dlang.org/show_bug.cgi?id=23182 +@safe unittest +{ + static struct HasOpAssign + { + void opAssign(HasOpAssign rhs) {} + } + + static SumType!HasOpAssign test() + { + SumType!HasOpAssign s; + // Test both overloads + s = HasOpAssign(); + s = SumType!HasOpAssign(); + return s; + } + + // Force CTFE + enum result = test(); +} + +/// True if `T` is an instance of the `SumType` template, otherwise false. +private enum bool isSumTypeInstance(T) = is(T == SumType!Args, Args...); + +@safe unittest +{ + static struct Wrapper + { + SumType!int s; + alias s this; + } + + assert(isSumTypeInstance!(SumType!int)); + assert(!isSumTypeInstance!Wrapper); +} + +/// True if `T` is a [SumType] or implicitly converts to one, otherwise false. 
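+/// This also covers types that wrap a [SumType] via `alias this`, as the
+/// example below demonstrates.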
+enum bool isSumType(T) = is(T : SumType!Args, Args...); + +/// +@safe unittest +{ + static struct ConvertsToSumType + { + SumType!int payload; + alias payload this; + } + + static struct ContainsSumType + { + SumType!int payload; + } + + assert(isSumType!(SumType!int)); + assert(isSumType!ConvertsToSumType); + assert(!isSumType!ContainsSumType); +} + +/** + * Calls a type-appropriate function with the value held in a [SumType]. + * + * For each possible type the [SumType] can hold, the given handlers are + * checked, in order, to see whether they accept a single argument of that type. + * The first one that does is chosen as the match for that type. (Note that the + * first match may not always be the most exact match. + * See ["Avoiding unintentional matches"](#avoiding-unintentional-matches) for + * one common pitfall.) + * + * Every type must have a matching handler, and every handler must match at + * least one type. This is enforced at compile time. + * + * Handlers may be functions, delegates, or objects with `opCall` overloads. If + * a function with more than one overload is given as a handler, all of the + * overloads are considered as potential matches. + * + * Templated handlers are also accepted, and will match any type for which they + * can be [implicitly instantiated](https://dlang.org/glossary.html#ifti). See + * ["Introspection-based matching"](#introspection-based-matching) for an + * example of templated handler usage. + * + * If multiple [SumType]s are passed to match, their values are passed to the + * handlers as separate arguments, and matching is done for each possible + * combination of value types. See ["Multiple dispatch"](#multiple-dispatch) for + * an example. + * + * Returns: + * The value returned from the handler that matches the currently-held type. + * + * See_Also: $(REF visit, std,variant) + */ +template match(handlers...) +{ + import std.typecons : Yes; + + /** + * The actual `match` function. + * + * Params: + * args = One or more [SumType] objects. + */ + auto ref match(SumTypes...)(auto ref SumTypes args) + if (allSatisfy!(isSumType, SumTypes) && args.length > 0) + { + return matchImpl!(Yes.exhaustive, handlers)(args); + } +} + +/** $(DIVID avoiding-unintentional-matches, $(H3 Avoiding unintentional matches)) + * + * Sometimes, implicit conversions may cause a handler to match more types than + * intended. The example below shows two solutions to this problem. + */ +@safe unittest +{ + alias Number = SumType!(double, int); + + Number x; + + // Problem: because int implicitly converts to double, the double + // handler is used for both types, and the int handler never matches. + assert(!__traits(compiles, + x.match!( + (double d) => "got double", + (int n) => "got int" + ) + )); + + // Solution 1: put the handler for the "more specialized" type (in this + // case, int) before the handler for the type it converts to. + assert(__traits(compiles, + x.match!( + (int n) => "got int", + (double d) => "got double" + ) + )); + + // Solution 2: use a template that only accepts the exact type it's + // supposed to match, instead of any type that implicitly converts to it. + alias exactly(T, alias fun) = function (arg) + { + static assert(is(typeof(arg) == T)); + return fun(arg); + }; + + // Now, even if we put the double handler first, it will only be used for + // doubles, not ints. 
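+    // (The static assert inside exactly rejects the implicit int-to-double
+    // conversion that caused the problem above.)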
+    assert(__traits(compiles,
+        x.match!(
+            exactly!(double, d => "got double"),
+            exactly!(int, n => "got int")
+        )
+    ));
+}
+
+/** $(DIVID multiple-dispatch, $(H3 Multiple dispatch))
+ *
+ * Pattern matching can be performed on multiple `SumType`s at once by passing
+ * handlers with multiple arguments. This usually leads to more concise code
+ * than using nested calls to `match`, as shown below.
+ */
+@safe unittest
+{
+    struct Point2D { double x, y; }
+    struct Point3D { double x, y, z; }
+
+    alias Point = SumType!(Point2D, Point3D);
+
+    version (none)
+    {
+        // This function works, but the code is ugly and repetitive.
+        // It uses three separate calls to match!
+        @safe pure nothrow @nogc
+        bool sameDimensions(Point p1, Point p2)
+        {
+            return p1.match!(
+                (Point2D _) => p2.match!(
+                    (Point2D _) => true,
+                    _ => false
+                ),
+                (Point3D _) => p2.match!(
+                    (Point3D _) => true,
+                    _ => false
+                )
+            );
+        }
+    }
+
+    // This version is much nicer.
+    @safe pure nothrow @nogc
+    bool sameDimensions(Point p1, Point p2)
+    {
+        alias doMatch = match!(
+            (Point2D _1, Point2D _2) => true,
+            (Point3D _1, Point3D _2) => true,
+            (_1, _2) => false
+        );
+
+        return doMatch(p1, p2);
+    }
+
+    Point a = Point2D(1, 2);
+    Point b = Point2D(3, 4);
+    Point c = Point3D(5, 6, 7);
+    Point d = Point3D(8, 9, 0);
+
+    assert( sameDimensions(a, b));
+    assert( sameDimensions(c, d));
+    assert(!sameDimensions(a, c));
+    assert(!sameDimensions(d, b));
+}
+
+/**
+ * Attempts to call a type-appropriate function with the value held in a
+ * [SumType], and throws on failure.
+ *
+ * Matches are chosen using the same rules as [match], but are not required to
+ * be exhaustive—in other words, a type (or combination of types) is allowed to
+ * have no matching handler. If a type without a handler is encountered at
+ * runtime, a [MatchException] is thrown.
+ *
+ * Not available when compiled with `-betterC`.
+ *
+ * Returns:
+ *   The value returned from the handler that matches the currently-held type,
+ *   if a handler was given for that type.
+ *
+ * Throws:
+ *   [MatchException], if the currently-held type has no matching handler.
+ *
+ * See_Also: $(REF tryVisit, std,variant)
+ */
+version (D_Exceptions)
+template tryMatch(handlers...)
+{
+    import std.typecons : No;
+
+    /**
+     * The actual `tryMatch` function.
+     *
+     * Params:
+     *   args = One or more [SumType] objects.
+     */
+    auto ref tryMatch(SumTypes...)(auto ref SumTypes args)
+    if (allSatisfy!(isSumType, SumTypes) && args.length > 0)
+    {
+        return matchImpl!(No.exhaustive, handlers)(args);
+    }
+}
+
+/**
+ * Thrown by [tryMatch] when an unhandled type is encountered.
+ *
+ * Not available when compiled with `-betterC`.
+ */
+version (D_Exceptions)
+class MatchException : Exception
+{
+    ///
+    pure @safe @nogc nothrow
+    this(string msg, string file = __FILE__, size_t line = __LINE__)
+    {
+        super(msg, file, line);
+    }
+}
+
+/**
+ * True if `handler` is a potential match for `Ts`, otherwise false.
+ *
+ * See the documentation for [match] for a full explanation of how matches are
+ * chosen.
+ */
+template canMatch(alias handler, Ts...)
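+// (A handler "can match" Ts if it is callable with ref lvalues of those types;
+// see the body below.)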
+if (Ts.length > 0) +{ + enum canMatch = is(typeof((ref Ts args) => handler(args))); +} + +/// +@safe unittest +{ + alias handleInt = (int i) => "got an int"; + + assert( canMatch!(handleInt, int)); + assert(!canMatch!(handleInt, string)); +} + +// Includes all overloads of the given handler +@safe unittest +{ + static struct OverloadSet + { + static void fun(int n) {} + static void fun(double d) {} + } + + assert(canMatch!(OverloadSet.fun, int)); + assert(canMatch!(OverloadSet.fun, double)); +} + +// Like aliasSeqOf!(iota(n)), but works in BetterC +private template Iota(size_t n) +{ + static if (n == 0) + { + alias Iota = AliasSeq!(); + } + else + { + alias Iota = AliasSeq!(Iota!(n - 1), n - 1); + } +} + +@safe unittest +{ + assert(is(Iota!0 == AliasSeq!())); + assert(Iota!1 == AliasSeq!(0)); + assert(Iota!3 == AliasSeq!(0, 1, 2)); +} + +/* The number that the dim-th argument's tag is multiplied by when + * converting TagTuples to and from case indices ("caseIds"). + * + * Named by analogy to the stride that the dim-th index into a + * multidimensional static array is multiplied by to calculate the + * offset of a specific element. + */ +private size_t stride(size_t dim, lengths...)() +{ + import core.checkedint : mulu; + + size_t result = 1; + bool overflow = false; + + static foreach (i; 0 .. dim) + { + result = mulu(result, lengths[i], overflow); + } + + /* The largest number matchImpl uses, numCases, is calculated with + * stride!(SumTypes.length), so as long as this overflow check + * passes, we don't need to check for overflow anywhere else. + */ + assert(!overflow, "Integer overflow"); + return result; +} + +private template matchImpl(Flag!"exhaustive" exhaustive, handlers...) +{ + auto ref matchImpl(SumTypes...)(auto ref SumTypes args) + if (allSatisfy!(isSumType, SumTypes) && args.length > 0) + { + alias stride(size_t i) = .stride!(i, Map!(typeCount, SumTypes)); + alias TagTuple = .TagTuple!(SumTypes); + + /* + * A list of arguments to be passed to a handler needed for the case + * labeled with `caseId`. + */ + template handlerArgs(size_t caseId) + { + enum tags = TagTuple.fromCaseId(caseId); + enum argsFrom(size_t i : tags.length) = ""; + enum argsFrom(size_t i) = "args[" ~ toCtString!i ~ "].get!(SumTypes[" ~ toCtString!i ~ "]" ~ + ".Types[" ~ toCtString!(tags[i]) ~ "])(), " ~ argsFrom!(i + 1); + enum handlerArgs = argsFrom!0; + } + + /* An AliasSeq of the types of the member values in the argument list + * returned by `handlerArgs!caseId`. + * + * Note that these are the actual (that is, qualified) types of the + * member values, which may not be the same as the types listed in + * the arguments' `.Types` properties. + */ + template valueTypes(size_t caseId) + { + enum tags = TagTuple.fromCaseId(caseId); + + template getType(size_t i) + { + enum tid = tags[i]; + alias T = SumTypes[i].Types[tid]; + alias getType = typeof(args[i].get!T()); + } + + alias valueTypes = Map!(getType, Iota!(tags.length)); + } + + /* The total number of cases is + * + * Π SumTypes[i].Types.length for 0 ≤ i < SumTypes.length + * + * Or, equivalently, + * + * ubyte[SumTypes[0].Types.length]...[SumTypes[$-1].Types.length].sizeof + * + * Conveniently, this is equal to stride!(SumTypes.length), so we can + * use that function to compute it. + */ + enum numCases = stride!(SumTypes.length); + + /* Guaranteed to never be a valid handler index, since + * handlers.length <= size_t.max. + */ + enum noMatch = size_t.max; + + // An array that maps caseIds to handler indices ("hids"). 
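+        // Filled in at compile time: each case keeps the first handler (lowest
+        // hid) whose signature can match that case's value types.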
+ enum matches = () + { + size_t[numCases] matches; + + // Workaround for https://issues.dlang.org/show_bug.cgi?id=19561 + foreach (ref match; matches) + { + match = noMatch; + } + + static foreach (caseId; 0 .. numCases) + { + static foreach (hid, handler; handlers) + { + static if (canMatch!(handler, valueTypes!caseId)) + { + if (matches[caseId] == noMatch) + { + matches[caseId] = hid; + } + } + } + } + + return matches; + }(); + + import std.algorithm.searching : canFind; + + // Check for unreachable handlers + static foreach (hid, handler; handlers) + { + static assert(matches[].canFind(hid), + "`handlers[" ~ toCtString!hid ~ "]` " ~ + "of type `" ~ ( __traits(isTemplate, handler) + ? "template" + : typeof(handler).stringof + ) ~ "` " ~ + "never matches" + ); + } + + // Workaround for https://issues.dlang.org/show_bug.cgi?id=19993 + enum handlerName(size_t hid) = "handler" ~ toCtString!hid; + + static foreach (size_t hid, handler; handlers) + { + mixin("alias ", handlerName!hid, " = handler;"); + } + + immutable argsId = TagTuple(args).toCaseId; + + final switch (argsId) + { + static foreach (caseId; 0 .. numCases) + { + case caseId: + static if (matches[caseId] != noMatch) + { + return mixin(handlerName!(matches[caseId]), "(", handlerArgs!caseId, ")"); + } + else + { + static if (exhaustive) + { + static assert(false, + "No matching handler for types `" ~ valueTypes!caseId.stringof ~ "`"); + } + else + { + throw new MatchException( + "No matching handler for types `" ~ valueTypes!caseId.stringof ~ "`"); + } + } + } + } + + assert(false, "unreachable"); + } +} + +private enum typeCount(SumType) = SumType.Types.length; + +/* A TagTuple represents a single possible set of tags that `args` + * could have at runtime. + * + * Because D does not allow a struct to be the controlling expression + * of a switch statement, we cannot dispatch on the TagTuple directly. + * Instead, we must map each TagTuple to a unique integer and generate + * a case label for each of those integers. + * + * This mapping is implemented in `fromCaseId` and `toCaseId`. It uses + * the same technique that's used to map index tuples to memory offsets + * in a multidimensional static array. + * + * For example, when `args` consists of two SumTypes with two member + * types each, the TagTuples corresponding to each case label are: + * + * case 0: TagTuple([0, 0]) + * case 1: TagTuple([1, 0]) + * case 2: TagTuple([0, 1]) + * case 3: TagTuple([1, 1]) + * + * When there is only one argument, the caseId is equal to that + * argument's tag. + */ +private struct TagTuple(SumTypes...) +{ + size_t[SumTypes.length] tags; + alias tags this; + + alias stride(size_t i) = .stride!(i, Map!(typeCount, SumTypes)); + + invariant + { + static foreach (i; 0 .. tags.length) + { + assert(tags[i] < SumTypes[i].Types.length, "Invalid tag"); + } + } + + this(ref const(SumTypes) args) + { + static foreach (i; 0 .. tags.length) + { + tags[i] = args[i].tag; + } + } + + static TagTuple fromCaseId(size_t caseId) + { + TagTuple result; + + // Most-significant to least-significant + static foreach_reverse (i; 0 .. result.length) + { + result[i] = caseId / stride!i; + caseId %= stride!i; + } + + return result; + } + + size_t toCaseId() + { + size_t result; + + static foreach (i; 0 .. 
tags.length) + { + result += tags[i] * stride!i; + } + + return result; + } +} + +// Matching +@safe unittest +{ + alias MySum = SumType!(int, float); + + MySum x = MySum(42); + MySum y = MySum(3.14); + + assert(x.match!((int v) => true, (float v) => false)); + assert(y.match!((int v) => false, (float v) => true)); +} + +// Missing handlers +@safe unittest +{ + alias MySum = SumType!(int, float); + + MySum x = MySum(42); + + assert(!__traits(compiles, x.match!((int x) => true))); + assert(!__traits(compiles, x.match!())); +} + +// Handlers with qualified parameters +// Disabled in BetterC due to use of dynamic arrays +version (D_BetterC) {} else +@safe unittest +{ + alias MySum = SumType!(int[], float[]); + + MySum x = MySum([1, 2, 3]); + MySum y = MySum([1.0, 2.0, 3.0]); + + assert(x.match!((const(int[]) v) => true, (const(float[]) v) => false)); + assert(y.match!((const(int[]) v) => false, (const(float[]) v) => true)); +} + +// Handlers for qualified types +// Disabled in BetterC due to use of dynamic arrays +version (D_BetterC) {} else +@safe unittest +{ + alias MySum = SumType!(immutable(int[]), immutable(float[])); + + MySum x = MySum([1, 2, 3]); + + assert(x.match!((immutable(int[]) v) => true, (immutable(float[]) v) => false)); + assert(x.match!((const(int[]) v) => true, (const(float[]) v) => false)); + // Tail-qualified parameters + assert(x.match!((immutable(int)[] v) => true, (immutable(float)[] v) => false)); + assert(x.match!((const(int)[] v) => true, (const(float)[] v) => false)); + // Generic parameters + assert(x.match!((immutable v) => true)); + assert(x.match!((const v) => true)); + // Unqualified parameters + assert(!__traits(compiles, + x.match!((int[] v) => true, (float[] v) => false) + )); +} + +// Delegate handlers +// Disabled in BetterC due to use of closures +version (D_BetterC) {} else +@safe unittest +{ + alias MySum = SumType!(int, float); + + int answer = 42; + MySum x = MySum(42); + MySum y = MySum(3.14); + + assert(x.match!((int v) => v == answer, (float v) => v == answer)); + assert(!y.match!((int v) => v == answer, (float v) => v == answer)); +} + +version (unittest) +{ + version (D_BetterC) + { + // std.math.isClose depends on core.runtime.math, so use a + // libc-based version for testing with -betterC + @safe pure @nogc nothrow + private bool isClose(double lhs, double rhs) + { + import core.stdc.math : fabs; + + return fabs(lhs - rhs) < 1e-5; + } + } + else + { + import std.math.operations : isClose; + } +} + +// Generic handler +@safe unittest +{ + alias MySum = SumType!(int, float); + + MySum x = MySum(42); + MySum y = MySum(3.14); + + assert(x.match!(v => v*2) == 84); + assert(y.match!(v => v*2).isClose(6.28)); +} + +// Fallback to generic handler +// Disabled in BetterC due to use of std.conv.to +version (D_BetterC) {} else +@safe unittest +{ + import std.conv : to; + + alias MySum = SumType!(int, float, string); + + MySum x = MySum(42); + MySum y = MySum("42"); + + assert(x.match!((string v) => v.to!int, v => v*2) == 84); + assert(y.match!((string v) => v.to!int, v => v*2) == 42); +} + +// Multiple non-overlapping generic handlers +@safe unittest +{ + import std.array : staticArray; + + alias MySum = SumType!(int, float, int[], char[]); + + static ints = staticArray([1, 2, 3]); + static chars = staticArray(['a', 'b', 'c']); + + MySum x = MySum(42); + MySum y = MySum(3.14); + MySum z = MySum(ints[]); + MySum w = MySum(chars[]); + + assert(x.match!(v => v*2, v => v.length) == 84); + assert(y.match!(v => v*2, v => v.length).isClose(6.28)); + 
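+    // For the array members, only the length-based handler can be
+    // instantiated, so both w and z report a length of 3.
+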
assert(w.match!(v => v*2, v => v.length) == 3); + assert(z.match!(v => v*2, v => v.length) == 3); +} + +// Structural matching +@safe unittest +{ + static struct S1 { int x; } + static struct S2 { int y; } + alias MySum = SumType!(S1, S2); + + MySum a = MySum(S1(0)); + MySum b = MySum(S2(0)); + + assert(a.match!(s1 => s1.x + 1, s2 => s2.y - 1) == 1); + assert(b.match!(s1 => s1.x + 1, s2 => s2.y - 1) == -1); +} + +// Separate opCall handlers +@safe unittest +{ + static struct IntHandler + { + bool opCall(int arg) + { + return true; + } + } + + static struct FloatHandler + { + bool opCall(float arg) + { + return false; + } + } + + alias MySum = SumType!(int, float); + + MySum x = MySum(42); + MySum y = MySum(3.14); + + assert(x.match!(IntHandler.init, FloatHandler.init)); + assert(!y.match!(IntHandler.init, FloatHandler.init)); +} + +// Compound opCall handler +@safe unittest +{ + static struct CompoundHandler + { + bool opCall(int arg) + { + return true; + } + + bool opCall(float arg) + { + return false; + } + } + + alias MySum = SumType!(int, float); + + MySum x = MySum(42); + MySum y = MySum(3.14); + + assert(x.match!(CompoundHandler.init)); + assert(!y.match!(CompoundHandler.init)); +} + +// Ordered matching +@safe unittest +{ + alias MySum = SumType!(int, float); + + MySum x = MySum(42); + + assert(x.match!((int v) => true, v => false)); +} + +// Non-exhaustive matching +version (D_Exceptions) +@system unittest +{ + import std.exception : assertThrown, assertNotThrown; + + alias MySum = SumType!(int, float); + + MySum x = MySum(42); + MySum y = MySum(3.14); + + assertNotThrown!MatchException(x.tryMatch!((int n) => true)); + assertThrown!MatchException(y.tryMatch!((int n) => true)); +} + +// Non-exhaustive matching in @safe code +version (D_Exceptions) +@safe unittest +{ + SumType!(int, float) x; + + auto _ = x.tryMatch!( + (int n) => n + 1, + ); +} + +// Handlers with ref parameters +@safe unittest +{ + alias Value = SumType!(long, double); + + auto value = Value(3.14); + + value.match!( + (long) {}, + (ref double d) { d *= 2; } + ); + + assert(value.get!double.isClose(6.28)); +} + +// Unreachable handlers +@safe unittest +{ + alias MySum = SumType!(int, string); + + MySum s; + + assert(!__traits(compiles, + s.match!( + (int _) => 0, + (string _) => 1, + (double _) => 2 + ) + )); + + assert(!__traits(compiles, + s.match!( + _ => 0, + (int _) => 1 + ) + )); +} + +// Unsafe handlers +@system unittest +{ + SumType!int x; + alias unsafeHandler = (int x) @system { return; }; + + assert(!__traits(compiles, () @safe + { + x.match!unsafeHandler; + })); + + auto test() @system + { + return x.match!unsafeHandler; + } +} + +// Overloaded handlers +@safe unittest +{ + static struct OverloadSet + { + static string fun(int i) { return "int"; } + static string fun(double d) { return "double"; } + } + + alias MySum = SumType!(int, double); + + MySum a = 42; + MySum b = 3.14; + + assert(a.match!(OverloadSet.fun) == "int"); + assert(b.match!(OverloadSet.fun) == "double"); +} + +// Overload sets that include SumType arguments +@safe unittest +{ + alias Inner = SumType!(int, double); + alias Outer = SumType!(Inner, string); + + static struct OverloadSet + { + @safe: + static string fun(int i) { return "int"; } + static string fun(double d) { return "double"; } + static string fun(string s) { return "string"; } + static string fun(Inner i) { return i.match!fun; } + static string fun(Outer o) { return o.match!fun; } + } + + Outer a = Inner(42); + Outer b = Inner(3.14); + Outer c = "foo"; + + 
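+    // fun(Outer) recurses through match!fun, unwrapping both SumType layers
+    // until an overload for the stored leaf value is selected.
+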
assert(OverloadSet.fun(a) == "int"); + assert(OverloadSet.fun(b) == "double"); + assert(OverloadSet.fun(c) == "string"); +} + +// Overload sets with ref arguments +@safe unittest +{ + static struct OverloadSet + { + static void fun(ref int i) { i = 42; } + static void fun(ref double d) { d = 3.14; } + } + + alias MySum = SumType!(int, double); + + MySum x = 0; + MySum y = 0.0; + + x.match!(OverloadSet.fun); + y.match!(OverloadSet.fun); + + assert(x.match!((value) => is(typeof(value) == int) && value == 42)); + assert(y.match!((value) => is(typeof(value) == double) && value == 3.14)); +} + +// Overload sets with templates +@safe unittest +{ + import std.traits : isNumeric; + + static struct OverloadSet + { + static string fun(string arg) + { + return "string"; + } + + static string fun(T)(T arg) + if (isNumeric!T) + { + return "numeric"; + } + } + + alias MySum = SumType!(int, string); + + MySum x = 123; + MySum y = "hello"; + + assert(x.match!(OverloadSet.fun) == "numeric"); + assert(y.match!(OverloadSet.fun) == "string"); +} + +// Github issue #24 +@safe unittest +{ + void test() @nogc + { + int acc = 0; + SumType!int(1).match!((int x) => acc += x); + } +} + +// Github issue #31 +@safe unittest +{ + void test() @nogc + { + int acc = 0; + + SumType!(int, string)(1).match!( + (int x) => acc += x, + (string _) => 0, + ); + } +} + +// Types that `alias this` a SumType +@safe unittest +{ + static struct A {} + static struct B {} + static struct D { SumType!(A, B) value; alias value this; } + + auto _ = D().match!(_ => true); +} + +// Multiple dispatch +@safe unittest +{ + alias MySum = SumType!(int, string); + + static int fun(MySum x, MySum y) + { + import std.meta : Args = AliasSeq; + + return Args!(x, y).match!( + (int xv, int yv) => 0, + (string xv, int yv) => 1, + (int xv, string yv) => 2, + (string xv, string yv) => 3 + ); + } + + assert(fun(MySum(0), MySum(0)) == 0); + assert(fun(MySum(""), MySum(0)) == 1); + assert(fun(MySum(0), MySum("")) == 2); + assert(fun(MySum(""), MySum("")) == 3); +} + +// inout SumTypes +@safe unittest +{ + inout(int[]) fun(inout(SumType!(int[])) x) + { + return x.match!((inout(int[]) a) => a); + } +} + +private void destroyIfOwner(T)(ref T value) +{ + static if (hasElaborateDestructor!T) + { + destroy(value); + } +} diff --git a/source/dyaml/style.d b/source/dyaml/style.d new file mode 100644 index 0000000..319592c --- /dev/null +++ b/source/dyaml/style.d @@ -0,0 +1,37 @@ + +// Copyright Ferdinand Majerech 2011. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +///YAML node formatting styles. +module dyaml.style; + + +///Scalar styles. +enum ScalarStyle : ubyte +{ + /// Invalid (uninitialized) style + invalid = 0, + /// `|` (Literal block style) + literal, + /// `>` (Folded block style) + folded, + /// Plain scalar + plain, + /// Single quoted scalar + singleQuoted, + /// Double quoted scalar + doubleQuoted +} + +///Collection styles. +enum CollectionStyle : ubyte +{ + /// Invalid (uninitialized) style + invalid = 0, + /// Block style. + block, + /// Flow style. + flow +} diff --git a/source/dyaml/tagdirective.d b/source/dyaml/tagdirective.d new file mode 100644 index 0000000..54687fe --- /dev/null +++ b/source/dyaml/tagdirective.d @@ -0,0 +1,15 @@ + +// Copyright Ferdinand Majerech 2011. +// Distributed under the Boost Software License, Version 1.0. 
+// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +///Tag directives. +module dyaml.tagdirective; + +///Single tag directive. handle is the shortcut, prefix is the prefix that replaces it. +struct TagDirective +{ + string handle; + string prefix; +} diff --git a/source/dyaml/token.d b/source/dyaml/token.d new file mode 100644 index 0000000..5400a3f --- /dev/null +++ b/source/dyaml/token.d @@ -0,0 +1,172 @@ + +// Copyright Ferdinand Majerech 2011-2014. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +/// YAML tokens. +/// Code based on PyYAML: http://www.pyyaml.org +module dyaml.token; + + +import std.conv; + +import dyaml.encoding; +import dyaml.exception; +import dyaml.reader; +import dyaml.style; + + +package: + +/// Token types. +enum TokenID : ubyte +{ + // Invalid (uninitialized) token + invalid = 0, + directive, + documentStart, + documentEnd, + streamStart, + streamEnd, + blockSequenceStart, + blockMappingStart, + blockEnd, + flowSequenceStart, + flowMappingStart, + flowSequenceEnd, + flowMappingEnd, + key, + value, + blockEntry, + flowEntry, + alias_, + anchor, + tag, + scalar +} + +/// Specifies the type of a tag directive token. +enum DirectiveType : ubyte +{ + // YAML version directive. + yaml, + // Tag directive. + tag, + // Any other directive is "reserved" for future YAML versions. + reserved +} + +/// Token produced by scanner. +/// +/// 32 bytes on 64-bit. +struct Token +{ + @disable int opCmp(ref Token); + + // 16B + /// Value of the token, if any. + /// + /// Values are char[] instead of string, as Parser may still change them in a few + /// cases. Parser casts values to strings when producing Events. + char[] value; + // 4B + /// Start position of the token in file/stream. + Mark startMark; + // 4B + /// End position of the token in file/stream. + Mark endMark; + // 1B + /// Token type. + TokenID id; + // 1B + /// Style of scalar token, if this is a scalar token. + ScalarStyle style; + // 1B + /// Encoding, if this is a stream start token. + Encoding encoding; + // 1B + /// Type of directive for directiveToken. + DirectiveType directive; + // 4B + /// Used to split value into 2 substrings for tokens that need 2 values (tagToken) + uint valueDivider; + + /// Get string representation of the token ID. + @property string idString() @safe pure const {return id.to!string;} +} + +/// Construct a directive token. +/// +/// Params: start = Start position of the token. +/// end = End position of the token. +/// value = Value of the token. +/// directive = Directive type (YAML or TAG in YAML 1.1). +/// nameEnd = Position of the end of the name +Token directiveToken(const Mark start, const Mark end, char[] value, + DirectiveType directive, const uint nameEnd) @safe pure nothrow @nogc +{ + return Token(value, start, end, TokenID.directive, ScalarStyle.init, Encoding.init, + directive, nameEnd); +} + +/// Construct a simple (no value) token with specified type. +/// +/// Params: id = Type of the token. +/// start = Start position of the token. +/// end = End position of the token. +Token simpleToken(TokenID id)(const Mark start, const Mark end) +{ + return Token(null, start, end, id); +} + +/// Construct a stream start token. +/// +/// Params: start = Start position of the token. +/// end = End position of the token. +/// encoding = Encoding of the stream. 
+Token streamStartToken(const Mark start, const Mark end, const Encoding encoding) @safe pure nothrow @nogc +{ + return Token(null, start, end, TokenID.streamStart, ScalarStyle.invalid, encoding); +} + +/// Aliases for construction of simple token types. +alias streamEndToken = simpleToken!(TokenID.streamEnd); +alias blockSequenceStartToken = simpleToken!(TokenID.blockSequenceStart); +alias blockMappingStartToken = simpleToken!(TokenID.blockMappingStart); +alias blockEndToken = simpleToken!(TokenID.blockEnd); +alias keyToken = simpleToken!(TokenID.key); +alias valueToken = simpleToken!(TokenID.value); +alias blockEntryToken = simpleToken!(TokenID.blockEntry); +alias flowEntryToken = simpleToken!(TokenID.flowEntry); + +/// Construct a simple token with value with specified type. +/// +/// Params: id = Type of the token. +/// start = Start position of the token. +/// end = End position of the token. +/// value = Value of the token. +/// valueDivider = A hack for TagToken to store 2 values in value; the first +/// value goes up to valueDivider, the second after it. +Token simpleValueToken(TokenID id)(const Mark start, const Mark end, char[] value, + const uint valueDivider = uint.max) +{ + return Token(value, start, end, id, ScalarStyle.invalid, Encoding.init, + DirectiveType.init, valueDivider); +} + +/// Alias for construction of tag token. +alias tagToken = simpleValueToken!(TokenID.tag); +alias aliasToken = simpleValueToken!(TokenID.alias_); +alias anchorToken = simpleValueToken!(TokenID.anchor); + +/// Construct a scalar token. +/// +/// Params: start = Start position of the token. +/// end = End position of the token. +/// value = Value of the token. +/// style = Style of the token. +Token scalarToken(const Mark start, const Mark end, char[] value, const ScalarStyle style) @safe pure nothrow @nogc +{ + return Token(value, start, end, TokenID.scalar, style); +} diff --git a/source/tinyendian.d b/source/tinyendian.d new file mode 100644 index 0000000..731b048 --- /dev/null +++ b/source/tinyendian.d @@ -0,0 +1,213 @@ +// Copyright Ferdinand Majerech 2014. +// Distributed under the Boost Software License, Version 1.0. +// (See accompanying file LICENSE_1_0.txt or copy at +// http://www.boost.org/LICENSE_1_0.txt) + +/// A minimal library providing functionality for changing the endianness of data. +module tinyendian; + +import std.system : Endian, endian; + +/// Unicode UTF encodings. +enum UTFEncoding : ubyte +{ + UTF_8, + UTF_16, + UTF_32 +} +/// +@safe unittest +{ + const ints = [314, -101]; + int[2] intsSwapBuffer = ints; + swapByteOrder(intsSwapBuffer[]); + swapByteOrder(intsSwapBuffer[]); + assert(ints == intsSwapBuffer, "Lost information when swapping byte order"); + + const floats = [3.14f, 10.1f]; + float[2] floatsSwapBuffer = floats; + swapByteOrder(floatsSwapBuffer[]); + swapByteOrder(floatsSwapBuffer[]); + assert(floats == floatsSwapBuffer, "Lost information when swapping byte order"); +} + +/** Swap byte order of items in an array in place. + * + * Params: + * + * T = Item type. Must be either 2 or 4 bytes long. + * array = Buffer with values to fix byte order of. + */ +void swapByteOrder(T)(T[] array) @trusted @nogc pure nothrow +if (T.sizeof == 2 || T.sizeof == 4) +{ + // Swap the byte order of all read characters. 
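+    // 2-byte items have their two bytes swapped directly; 4-byte items are
+    // reversed with core.bitop.bswap.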
+ foreach (ref item; array) + { + static if (T.sizeof == 2) + { + import std.algorithm.mutation : swap; + swap(*cast(ubyte*)&item, *(cast(ubyte*)&item + 1)); + } + else static if (T.sizeof == 4) + { + import core.bitop : bswap; + const swapped = bswap(*cast(uint*)&item); + item = *cast(const(T)*)&swapped; + } + else static assert(false, "Unsupported T: " ~ T.stringof); + } +} + +/// See fixUTFByteOrder. +struct FixUTFByteOrderResult +{ + ubyte[] array; + UTFEncoding encoding; + Endian endian; + uint bytesStripped = 0; +} + +/** Convert byte order of an array encoded in UTF(8/16/32) to system endianness in place. + * + * Uses the UTF byte-order-mark (BOM) to determine UTF encoding. If there is no BOM + * at the beginning of array, UTF-8 is assumed (this is compatible with ASCII). The + * BOM, if any, will be removed from the buffer. + * + * If the encoding is determined to be UTF-16 or UTF-32 and there aren't enough bytes + * for the last code unit (i.e. if array.length is odd for UTF-16 or not divisible by + * 4 for UTF-32), the extra bytes (1 for UTF-16, 1-3 for UTF-32) are stripped. + * + * Note that this function does $(B not) check if the array is a valid UTF string. It + * only works with the BOM and 1,2 or 4-byte items. + * + * Params: + * + * array = The array with UTF-data. + * + * Returns: + * + * A struct with the following members: + * + * $(D ubyte[] array) A slice of the input array containing data in correct + * byte order, without BOM and in case of UTF-16/UTF-32, + * without stripped bytes, if any. + * $(D UTFEncoding encoding) Encoding of the result (UTF-8, UTF-16 or UTF-32) + * $(D std.system.Endian endian) Endianness of the original array. + * $(D uint bytesStripped) Number of bytes stripped from a UTF-16/UTF-32 array, if + * any. This is non-zero only if array.length was not + * divisible by 2 or 4 for UTF-16 and UTF-32, respectively. + * + * Complexity: (BIGOH array.length) + */ +auto fixUTFByteOrder(ubyte[] array) @safe @nogc pure nothrow +{ + // Enumerates UTF BOMs, matching indices to byteOrderMarks/bomEndian. + enum BOM: ubyte + { + UTF_8 = 0, + UTF_16_LE = 1, + UTF_16_BE = 2, + UTF_32_LE = 3, + UTF_32_BE = 4, + None = ubyte.max + } + + // These 2 are from std.stream + static immutable ubyte[][5] byteOrderMarks = [ [0xEF, 0xBB, 0xBF], + [0xFF, 0xFE], + [0xFE, 0xFF], + [0xFF, 0xFE, 0x00, 0x00], + [0x00, 0x00, 0xFE, 0xFF] ]; + static immutable Endian[5] bomEndian = [ endian, + Endian.littleEndian, + Endian.bigEndian, + Endian.littleEndian, + Endian.bigEndian ]; + + // Documented in function ddoc. + + FixUTFByteOrderResult result; + + // Detect BOM, if any, in the bytes we've read. -1 means no BOM. + // Need the last match: First 2 bytes of UTF-32LE BOM match the UTF-16LE BOM. If we + // used the first match, UTF-16LE would be detected when we have a UTF-32LE BOM. + import std.algorithm.searching : startsWith; + BOM bomId = BOM.None; + foreach (i, bom; byteOrderMarks) + if (array.startsWith(bom)) + bomId = cast(BOM)i; + + result.endian = (bomId != BOM.None) ? bomEndian[bomId] : Endian.init; + + // Start of UTF data (after BOM, if any) + size_t start = 0; + // If we've read more than just the BOM, put the rest into the array. 
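+    // Each case records the detected encoding, how many BOM bytes to skip, and
+    // (for UTF-16/32) how many trailing bytes of an incomplete code unit to strip.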
+ with(BOM) final switch(bomId) + { + case None: result.encoding = UTFEncoding.UTF_8; break; + case UTF_8: + start = 3; + result.encoding = UTFEncoding.UTF_8; + break; + case UTF_16_LE, UTF_16_BE: + result.bytesStripped = array.length % 2; + start = 2; + result.encoding = UTFEncoding.UTF_16; + break; + case UTF_32_LE, UTF_32_BE: + result.bytesStripped = array.length % 4; + start = 4; + result.encoding = UTFEncoding.UTF_32; + break; + } + + // If there's a BOM, we need to move data back to ensure it starts at array[0] + if (start != 0) + { + array = array[start .. $ - result.bytesStripped]; + } + + // We enforce above that array.length is divisible by 2/4 for UTF-16/32 + if (endian != result.endian) + { + if (result.encoding == UTFEncoding.UTF_16) + swapByteOrder(cast(wchar[])array); + else if (result.encoding == UTFEncoding.UTF_32) + swapByteOrder(cast(dchar[])array); + } + + result.array = array; + return result; +} +/// +@safe unittest +{ + { + ubyte[] s = [0xEF, 0xBB, 0xBF, 'a']; + FixUTFByteOrderResult r = fixUTFByteOrder(s); + assert(r.encoding == UTFEncoding.UTF_8); + assert(r.array.length == 1); + assert(r.array == ['a']); + assert(r.endian == Endian.littleEndian); + } + + { + ubyte[] s = ['a']; + FixUTFByteOrderResult r = fixUTFByteOrder(s); + assert(r.encoding == UTFEncoding.UTF_8); + assert(r.array.length == 1); + assert(r.array == ['a']); + assert(r.endian == Endian.bigEndian); + } + + { + // strip 'a' b/c not complete unit + ubyte[] s = [0xFE, 0xFF, 'a']; + FixUTFByteOrderResult r = fixUTFByteOrder(s); + assert(r.encoding == UTFEncoding.UTF_16); + assert(r.array.length == 0); + assert(r.endian == Endian.bigEndian); + } + +} diff --git a/test/.gitignore b/test/.gitignore index 7170394..cfe8d61 100644 --- a/test/.gitignore +++ b/test/.gitignore @@ -19,3 +19,51 @@ /test_registry /issue_2051_running_unittests_from_dub_single_file_packages_fails /run-unittest + +test.log +custom-source-main-bug487/custom-source-main-bug487 +3-copyFiles/bin/ +ignore-hidden-1/ignore-hidden-1 +ignore-hidden-2/ignore-hidden-2 +expected-import-path-output +expected-string-import-path-output +expected-describe-data-1-list-output +expected-describe-data-2-dmd-output +expected-issue616-output +describe-project/dummy.dat +describe-project/dummy-dep1.dat +*/main/main +*/*test-library +*/*test-application +*/exec-simple +issue1474/ext/fortytwo.d +issue2452/ext/fortytwo.d + +cov-ctfe/test +issue1003-check-empty-ld-flags/issue1003-empty-ld-flags +issue1053-extra-files-visuald/LICENSE.txt +issue1053-extra-files-visuald/README.txt +issue1053-extra-files-visuald/extra_files.sln +issue1117-extra-dependency-files/test +issue1396-pre-post-run-commands/test +issue1396-pre-post-run-commands/test.txt +issue1477-subpackage-visuald-paths/library_subpackage_a.sln +issue1477-subpackage-visuald-paths/sub/subpackage_a/subpackage_a.sln +issue1504-envvar-in-path/test +issue1773-lint/report.json +issue2085-target-none-visuald/root.sln +issue2086-copyfiles-subpackage-targetpath/sub/sub +issue2086-copyfiles-subpackage-targetpath/sub/to_be_deployed.txt +issue2262-exact-cached-version-match/testproj +issue361-optional-deps/cmp.tmp +issue838-custom-cache-paths/test +issue97-targettype-none/a/issue97-targettype-none_a +issue97-targettype-none/b/issue97-targettype-none_b +issue990-download-optional-selected/b +output-1504.txt +version-filters-diamond/version-filters-diamond +version-filters-none/version-filters-none +version-filters-source-dep/version-filters-source-dep +version-filters/version-filters 
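+# foo-test-application binaries built by the version-spec tests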
+version-spec/newfoo/foo-test-application +version-spec/oldfoo/foo-test-application diff --git a/test/1-dynLib-simple/.no_build_ldc2 b/test/1-dynLib-simple/.no_build_ldc2 deleted file mode 100644 index e69de29..0000000 --- a/test/1-dynLib-simple/.no_build_ldc2 +++ /dev/null diff --git a/test/1-dynLib-simple/dub.json b/test/1-dynLib-simple/dub.json index 71da415..c7747c3 100644 --- a/test/1-dynLib-simple/dub.json +++ b/test/1-dynLib-simple/dub.json @@ -1,5 +1,4 @@ { "name": "dynlib-simple", - "targetType": "dynamicLibrary", - "dflags-ldc": ["-link-defaultlib-shared"] + "targetType": "dynamicLibrary" } diff --git a/test/2-dynLib-dep/dub.json b/test/2-dynLib-dep/dub.json index 1d3d5cf..47abf4e 100644 --- a/test/2-dynLib-dep/dub.json +++ b/test/2-dynLib-dep/dub.json @@ -3,5 +3,7 @@ "dependencies": { "dynlib-simple": { "path": "../1-dynLib-simple/" } }, + "lflags-linux": ["-rpath", "$$ORIGIN"], + "lflags-darwin": ["-rpath", "@executable_path"], "dflags-ldc": ["-link-defaultlib-shared"] } diff --git a/test/2-dynLib-with-staticLib-dep/.no_build_ldc2 b/test/2-dynLib-with-staticLib-dep/.no_build_ldc2 deleted file mode 100644 index e69de29..0000000 --- a/test/2-dynLib-with-staticLib-dep/.no_build_ldc2 +++ /dev/null diff --git a/test/4-describe-data-1-list.sh b/test/4-describe-data-1-list.sh index 630b609..58f5293 100755 --- a/test/4-describe-data-1-list.sh +++ b/test/4-describe-data-1-list.sh @@ -122,16 +122,18 @@ # --data=requirements echo "allowWarnings" >> "$expected_file" echo "disallowInlining" >> "$expected_file" -#echo "requireContracts" >> "$expected_file" # Not sure if this (from a sourceLib dependency) should be missing from the result +echo "requireContracts" >> "$expected_file" echo >> "$expected_file" # --data=options echo "debugMode" >> "$expected_file" -echo "releaseMode" >> "$expected_file" +# releaseMode is not included, even though it's specified, because the requireContracts requirement drops it echo "debugInfo" >> "$expected_file" +echo "stackStomping" >> "$expected_file" echo "warnings" >> "$expected_file" -#echo "stackStomping" >> "$expected_file" # Not sure if this (from a sourceLib dependency) should be missing from the result if ! diff "$expected_file" "$temp_file"; then + echo "Result:" + cat "$temp_file" die $LINENO 'The project data did not match the expected output!' fi diff --git a/test/4-describe-data-2-dmd.sh b/test/4-describe-data-2-dmd.sh index bf3c1da..7d994dc 100755 --- a/test/4-describe-data-2-dmd.sh +++ b/test/4-describe-data-2-dmd.sh @@ -72,10 +72,10 @@ echo -n "'$CURR_DIR/describe-dependency-2/some-path/dummy.d' " >> "$expected_file" # --data=options echo -n "-debug " >> "$expected_file" -echo -n "-release " >> "$expected_file" +# releaseMode is not included, even though it's specified, because the requireContracts requirement drops it echo -n "-g " >> "$expected_file" +echo -n "-gx " >> "$expected_file" echo -n "-wi" >> "$expected_file" -#echo -n "-gx " >> "$expected_file" # Not sure if this (from a sourceLib dependency) should be missing from the result echo "" >> "$expected_file" if ! 
diff "$expected_file" "$temp_file"; then diff --git a/test/cache-generated-test-config.sh b/test/cache-generated-test-config.sh index f4e0e27..a8ce0cc 100755 --- a/test/cache-generated-test-config.sh +++ b/test/cache-generated-test-config.sh @@ -12,15 +12,15 @@ EXECUTABLE_TIME="$(${STAT} cache-generated-test-config-test-library)" [ -z "$EXECUTABLE_TIME" ] && die $LINENO 'no EXECUTABLE_TIME was found' -MAIN_TIME="$(${STAT} "$(ls .dub/code/*dub_test_root.d)")" +MAIN_TIME="$(${STAT} "$(ls .dub/code/*/dub_test_root.d)")" [ -z "$MAIN_TIME" ] && die $LINENO 'no MAIN_TIME was found' ${DUB} test --compiler=${DC} -MAIN_FILES_COUNT=$(ls .dub/code/*dub_test_root.d | wc -l) +MAIN_FILES_COUNT=$(ls .dub/code/*/dub_test_root.d | wc -l) [ $MAIN_FILES_COUNT -ne 1 ] && die $LINENO 'DUB generated more then one main file' [ "$EXECUTABLE_TIME" != "$(${STAT} cache-generated-test-config-test-library)" ] && die $LINENO 'The executable has been rebuilt' -[ "$MAIN_TIME" != "$(${STAT} "$(ls .dub/code/*dub_test_root.d | head -n1)")" ] && die $LINENO 'The test main file has been rebuilt' +[ "$MAIN_TIME" != "$(${STAT} "$(ls .dub/code/*/dub_test_root.d | head -n1)")" ] && die $LINENO 'The test main file has been rebuilt' ## test with empty DFLAGS environment variable DFLAGS="" ${DUB} test --compiler=${DC} @@ -30,15 +30,15 @@ EXECUTABLE_TIME="$(${STAT} cache-generated-test-config-test-library)" [ -z "$EXECUTABLE_TIME" ] && die $LINENO 'no EXECUTABLE_TIME was found' -MAIN_TIME="$(${STAT} "$(ls .dub/code/*-\$DFLAGS-*dub_test_root.d)")" +MAIN_TIME="$(${STAT} "$(ls .dub/code/*-\$DFLAGS-*/dub_test_root.d)")" [ -z "$MAIN_TIME" ] && die $LINENO 'no MAIN_TIME was found' DFLAGS="" ${DUB} test --compiler=${DC} -MAIN_FILES_COUNT=$(ls .dub/code/*-\$DFLAGS-*dub_test_root.d | wc -l) +MAIN_FILES_COUNT=$(ls .dub/code/*-\$DFLAGS-*/dub_test_root.d | wc -l) [ $MAIN_FILES_COUNT -ne 1 ] && die $LINENO 'DUB generated more then one main file' [ "$EXECUTABLE_TIME" != "$(${STAT} cache-generated-test-config-test-library)" ] && die $LINENO 'The executable has been rebuilt' -[ "$MAIN_TIME" != "$(${STAT} "$(ls .dub/code/*-\$DFLAGS-*dub_test_root.d | head -n1)")" ] && die $LINENO 'The test main file has been rebuilt' +[ "$MAIN_TIME" != "$(${STAT} "$(ls .dub/code/*-\$DFLAGS-*/dub_test_root.d | head -n1)")" ] && die $LINENO 'The test main file has been rebuilt' ## test with DFLAGS environment variable DFLAGS="-g" ${DUB} test --compiler=${DC} @@ -48,15 +48,15 @@ EXECUTABLE_TIME="$(${STAT} cache-generated-test-config-test-library)" [ -z "$EXECUTABLE_TIME" ] && die $LINENO 'no EXECUTABLE_TIME was found' -MAIN_TIME="$(${STAT} "$(ls .dub/code/*-\$DFLAGS-*dub_test_root.d)")" +MAIN_TIME="$(${STAT} "$(ls .dub/code/*-\$DFLAGS-*/dub_test_root.d)")" [ -z "$MAIN_TIME" ] && die $LINENO 'no MAIN_TIME was found' DFLAGS="-g" ${DUB} test --compiler=${DC} -MAIN_FILES_COUNT=$(ls .dub/code/*-\$DFLAGS-*dub_test_root.d | wc -l) +MAIN_FILES_COUNT=$(ls .dub/code/*-\$DFLAGS-*/dub_test_root.d | wc -l) [ $MAIN_FILES_COUNT -ne 1 ] && die $LINENO 'DUB generated more then one main file' [ "$EXECUTABLE_TIME" != "$(${STAT} cache-generated-test-config-test-library)" ] && die $LINENO 'The executable has been rebuilt' -[ "$MAIN_TIME" != "$(${STAT} "$(ls .dub/code/*-\$DFLAGS-*dub_test_root.d | head -n1)")" ] && die $LINENO 'The test main file has been rebuilt' +[ "$MAIN_TIME" != "$(${STAT} "$(ls .dub/code/*-\$DFLAGS-*/dub_test_root.d | head -n1)")" ] && die $LINENO 'The test main file has been rebuilt' diff --git a/test/colored-output.sh b/test/colored-output.sh new file mode 100755 
index 0000000..84d87f4
--- /dev/null
+++ b/test/colored-output.sh
@@ -0,0 +1,28 @@
+#!/usr/bin/env bash
+
+. $(dirname "${BASH_SOURCE[0]}")/common.sh
+
+cd ${CURR_DIR}/1-exec-simple
+
+# Test that --color=off disables colors correctly
+${DUB} build --color=off --compiler=${DC} 2>&1 | { ! \grep $'^\x1b\[' -c; }
+
+# Test that --color=automatic detects no TTY correctly
+${DUB} build --color=automatic --compiler=${DC} 2>&1 | { ! \grep $'^\x1b\[' -c; }
+
+# Test that no --color= has the same behaviour as --color=automatic
+${DUB} build --compiler=${DC} 2>&1 | { ! \grep $'^\x1b\[' -c; }
+
+# Test that --color=on enables colors in any case
+${DUB} build --color=on --compiler=${DC} 2>&1 | \grep $'^\x1b\[' -c
+
+# Test forwarding of the -color flag to dmd
+
+# Test that --color=on sets the dmd flag -color
+${DUB} build -v --color=on --compiler=${DC} -f 2>&1 | \grep '\-color' -c
+
+# Test that --color=off sets no dmd flag
+${DUB} build -v --color=off --compiler=${DC} -f 2>&1 | { ! \grep '\-color' -c; }
+
+# Test that --color=automatic sets no dmd flag
+${DUB} build -v --color=automatic --compiler=${DC} -f 2>&1 | { ! \grep '\-color' -c; }
diff --git a/test/cov-ctfe.sh b/test/cov-ctfe.sh
new file mode 100755
index 0000000..5b733c4
--- /dev/null
+++ b/test/cov-ctfe.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+DIR=$(dirname "${BASH_SOURCE[0]}")
+. "$DIR"/common.sh
+"$DUB" run --root "$DIR"/cov-ctfe --build=cov-ctfe
diff --git a/test/cov-ctfe/.no_build b/test/cov-ctfe/.no_build
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/cov-ctfe/.no_build
diff --git a/test/cov-ctfe/.no_run b/test/cov-ctfe/.no_run
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/cov-ctfe/.no_run
diff --git a/test/cov-ctfe/.no_test b/test/cov-ctfe/.no_test
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/cov-ctfe/.no_test
diff --git a/test/cov-ctfe/dub.sdl b/test/cov-ctfe/dub.sdl
new file mode 100644
index 0000000..51da666
--- /dev/null
+++ b/test/cov-ctfe/dub.sdl
@@ -0,0 +1,5 @@
+name "test"
+version "1.0.0"
+targetType "executable"
+dflags "-cov=100"
+mainSourceFile "test.d"
diff --git a/test/cov-ctfe/test.d b/test/cov-ctfe/test.d
new file mode 100644
index 0000000..d1056be
--- /dev/null
+++ b/test/cov-ctfe/test.d
@@ -0,0 +1,17 @@
+int f(int x)
+{
+    return x + 1;
+}
+
+int g(int x)
+{
+    return x * 2;
+}
+
+enum gResult = g(12); // execute g() at compile-time
+
+int main(string[] args)
+{
+    assert(f(11) + gResult == 36);
+    return 0;
+}
diff --git a/test/dpath-variable.sh b/test/dpath-variable.sh
new file mode 100755
index 0000000..7e68020
--- /dev/null
+++ b/test/dpath-variable.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+
+. $(dirname "${BASH_SOURCE[0]}")/common.sh
+export DPATH="${CURR_DIR}/dpath-variable/dpath"
+rm -rf "$DPATH"
+cd "${CURR_DIR}/dpath-variable"
+"${DUB}" upgrade
+
+if [[ ! -f "$DPATH/dub/packages/gitcompatibledubpackage-1.0.1/gitcompatibledubpackage/dub.json" ]]; then
+    die $LINENO 'Did not get dependencies installed into $DPATH.'
+fi
+
+# just for making this shell script easier to write, copy the variable
+DPATH_ALIAS="$DPATH"
+# unset the variable so DUB doesn't pick it up though
+unset DPATH
+rm -rf "$DPATH_ALIAS"
+echo '{"dubHome":"'"$DPATH_ALIAS"/dub2'"}' > "${CURR_DIR}/dpath-variable/dub.settings.json"
+
+function cleanup {
+    rm "${CURR_DIR}/dpath-variable/dub.settings.json"
+}
+trap cleanup EXIT
+
+"${DUB}" upgrade
+
+if [[ !
-f "$DPATH_ALIAS/dub2/packages/gitcompatibledubpackage-1.0.1/gitcompatibledubpackage/dub.json" ]]; then + die $LINENO 'Did not get dependencies installed into dubHome (set from config).' +fi diff --git a/test/dpath-variable/.gitignore b/test/dpath-variable/.gitignore new file mode 100644 index 0000000..9cfa21e --- /dev/null +++ b/test/dpath-variable/.gitignore @@ -0,0 +1 @@ +dpath diff --git a/test/dpath-variable/dub.json b/test/dpath-variable/dub.json new file mode 100644 index 0000000..0d25e30 --- /dev/null +++ b/test/dpath-variable/dub.json @@ -0,0 +1,6 @@ +{ + "name": "dpath-variable", + "dependencies": { + "gitcompatibledubpackage": "1.0.1" + } +} diff --git a/test/dpath-variable/source/app.d b/test/dpath-variable/source/app.d new file mode 100644 index 0000000..9198103 --- /dev/null +++ b/test/dpath-variable/source/app.d @@ -0,0 +1,3 @@ +void main() +{ +} diff --git a/test/dub_test_root.sh b/test/dub_test_root.sh new file mode 100755 index 0000000..c5ce710 --- /dev/null +++ b/test/dub_test_root.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +# Make sure the auto-generated 'dub_test_root' module is importable for +# non-all-at-once compilations too. + +set -euo pipefail + +TMPDIR=$(mktemp -d "$(basename "$0").XXXXXX") + +function cleanup { + rm -rf "$TMPDIR" +} +trap cleanup EXIT + +cd "$TMPDIR" + +echo 'name "foo"' > dub.sdl + +mkdir -p source +echo 'import dub_test_root : allModules;' > source/foo.d + +$DUB test --build-mode=singleFile diff --git a/test/expected-issue1037-output b/test/expected-issue1037-output index e03b110..2f19647 100644 --- a/test/expected-issue1037-output +++ b/test/expected-issue1037-output @@ -1,3 +1,3 @@ -Unresolvable dependencies to package gitcompatibledubpackage: - b >=0.0.0 @DIR/b depends on gitcompatibledubpackage ~>1.0.2 +Error Unresolvable dependencies to package gitcompatibledubpackage: + b @DIR/b depends on gitcompatibledubpackage ~>1.0.2 issue1037-better-dependency-messages ~master depends on gitcompatibledubpackage 1.0.1 diff --git a/test/feat663-search.sh b/test/feat663-search.sh index 21cea3e..77289a6 100755 --- a/test/feat663-search.sh +++ b/test/feat663-search.sh @@ -10,6 +10,6 @@ if ! OUTPUT=$(${DUB} search '"dub-registry"' -v 2>&1); then die $LINENO '`dub search "dub-registry"` failed' "$OUTPUT" fi -if ! grep -q '^dub-registry (.*)\s'<<<"$OUTPUT"; then - die $LINENO '`grep -q '"'"'^dub-registry (.*)\s'"'"'` failed' "$OUTPUT" +if ! 
grep -q '^\s\sdub-registry (.*)\s'<<<"$OUTPUT"; then
+    die $LINENO '`grep -q '"'"'^\s\sdub-registry (.*)\s'"'"'` failed' "$OUTPUT"
 fi
diff --git a/test/interactive-remove.sh b/test/interactive-remove.sh
index fd65ab8..ee5f5a3 100755
--- a/test/interactive-remove.sh
+++ b/test/interactive-remove.sh
@@ -32,7 +32,7 @@
 $DUB fetch dub@1.9.0 && [ -d $HOME/.dub/packages/dub-1.9.0/dub ]
 $DUB fetch dub@1.10.0 && [ -d $HOME/.dub/packages/dub-1.10.0/dub ]
 
-# is non-interactive with `--version=`
+# is non-interactive with a version specifier
 $DUB remove dub@1.9.0
 $DUB remove dub@1.10.0
 if [ -d $HOME/.dub/packages/dub-1.9.0/dub ] || [ -d $HOME/.dub/packages/dub-1.10.0/dub ]; then
diff --git a/test/issue1040-run-with-ver.sh b/test/issue1040-run-with-ver.sh
index db3ea12..10b1004 100755
--- a/test/issue1040-run-with-ver.sh
+++ b/test/issue1040-run-with-ver.sh
@@ -15,16 +15,16 @@
 cd ${CURR_DIR}/issue1040-tmpdir
 
-$DUB fetch dub@1.17.0 --cache=local
-$DUB fetch dub@1.18.0 --cache=local
-$DUB fetch dub@1.19.0 --cache=local
+$DUB fetch dub@1.27.0 --cache=local
+$DUB fetch dub@1.28.0 --cache=local
+$DUB fetch dub@1.29.0 --cache=local
 
-if { $DUB fetch dub@1.18.0 --cache=local || true; } | grep -cF 'Fetching' > /dev/null; then
+if { $DUB fetch dub@1.28.0 --cache=local || true; } | grep -cF 'Fetching' > /dev/null; then
     die $LINENO 'Test for doubly fetch of the specified version has failed.'
 fi
-if ! { $DUB run dub -q --cache=local -- --version || true; } | grep -cF 'DUB version 1.19.0' > /dev/null; then
+if ! { $DUB run dub -q --cache=local -- --version || true; } | grep -cF 'DUB version 1.29.0' > /dev/null; then
     die $LINENO 'Test for selection of the latest fetched version has failed.'
 fi
-if ! { $DUB run dub@1.18.0 -q --cache=local -- --version || true; } | grep -cF 'DUB version 1.18.0' > /dev/null; then
+if ! { $DUB run dub@1.28.0 -q --cache=local -- --version || true; } | grep -cF 'DUB version 1.28.0' > /dev/null; then
     die $LINENO 'Test for selection of the specified version has failed.'
 fi
diff --git a/test/issue1194-warn-wrong-subconfig.sh b/test/issue1194-warn-wrong-subconfig.sh
index 414bfe2..2bb7e54 100755
--- a/test/issue1194-warn-wrong-subconfig.sh
+++ b/test/issue1194-warn-wrong-subconfig.sh
@@ -3,12 +3,14 @@
 OUTPUT=`${DUB} build --root ${CURR_DIR}/issue1194-warn-wrong-subconfig 2>&1 || true`
 
+trap 'printf "%s" "Failing received output:\n$OUTPUT" | hexdump -C' ERR
+
 # make sure the proper errors occur in the output
-echo $OUTPUT | grep -c "sub configuration directive \"bar\" -> \"baz\" references a package that is not specified as a dependency" > /dev/null
-echo $OUTPUT | grep -c "sub configuration directive \"staticlib-simple\" -> \"foo\" references a configuration that does not exist" > /dev/null
-! echo $OUTPUT | grep -c "sub configuration directive \"sourcelib-simple\" -> \"library\" references a package that is not specified as a dependency" > /dev/null
-! echo $OUTPUT | grep -c "sub configuration directive \"sourcelib-simple\" -> \"library\" references a configuration that does not exist" > /dev/null
+echo "$OUTPUT" | fgrep -c 'sub configuration directive "bar" -> [baz] references a package that is not specified as a dependency' > /dev/null
+echo $OUTPUT | fgrep -c 'sub configuration directive "staticlib-simple" -> [foo] references a configuration that does not exist' > /dev/null
+! echo $OUTPUT | fgrep -c 'sub configuration directive "sourcelib-simple" -> [library] references a package that is not specified as a dependency' > /dev/null
+!
echo $OUTPUT | fgrep -c 'sub configuration directive "sourcelib-simple" -> [library] references a configuration that does not exist' > /dev/null # make sure no bogs warnings are issued for packages with no sub configuration directives OUTPUT=`${DUB} build --root ${CURR_DIR}/1-exec-simple 2>&1` -! echo $OUTPUT | grep -c "sub configuration directive.*references" > /dev/null +! echo $OUTPUT | grep -c 'sub configuration directive.*references' > /dev/null diff --git a/test/issue1474-generate-source.script.d b/test/issue1474-generate-source.script.d new file mode 100644 index 0000000..d345f58 --- /dev/null +++ b/test/issue1474-generate-source.script.d @@ -0,0 +1,29 @@ +/+ dub.sdl: + name "issue1474-generate-source" ++/ + +module issue1474_generate_source; + +import std.process; +import std.stdio; +import std.algorithm; +import std.path; + +int main() +{ + const dub = environment.get("DUB", buildPath(__FILE_FULL_PATH__.dirName.dirName, "bin", "dub")); + const curr_dir = environment.get("CURR_DIR", buildPath(__FILE_FULL_PATH__.dirName)); + const dc = environment.get("DC", "dmd"); + const cmd = [dub, "build", "--compiler", dc]; + const result = execute(cmd, null, Config.none, size_t.max, curr_dir.buildPath("issue1474")); + if (result.status || result.output.canFind("Failed")) + { + writefln("\n> %-(%s %)", cmd); + writeln("==========================================================="); + writeln(result.output); + writeln("==========================================================="); + writeln("Last command failed with exit code ", result.status, '\n'); + return 1; + } + return 0; +} diff --git a/test/issue1474/.no_build b/test/issue1474/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue1474/.no_build diff --git a/test/issue1474/.no_run b/test/issue1474/.no_run deleted file mode 100644 index e69de29..0000000 --- a/test/issue1474/.no_run +++ /dev/null diff --git a/test/issue1474/.no_test b/test/issue1474/.no_test deleted file mode 100644 index e69de29..0000000 --- a/test/issue1474/.no_test +++ /dev/null diff --git a/test/issue1474/dub.json b/test/issue1474/dub.json index 80d9635..f4dec04 100644 --- a/test/issue1474/dub.json +++ b/test/issue1474/dub.json @@ -2,7 +2,10 @@ "name": "generated-sources", "description": "Example of using pre generate commands to generate source code.", "sourceFiles": ["ext/*.d"], - "preGenerateCommands": [ + "preGenerateCommands-posix": [ "echo 'extern(C) int fun42 () { return 42; }' > ext/fortytwo.d" + ], + "preGenerateCommands-windows": [ + "echo extern(C) int fun42 () { return 42; }> ext/fortytwo.d" ] } diff --git a/test/issue1505-single-file-package-dynamic-library.sh b/test/issue1505-single-file-package-dynamic-library.sh index a08bef6..0a78b20 100755 --- a/test/issue1505-single-file-package-dynamic-library.sh +++ b/test/issue1505-single-file-package-dynamic-library.sh @@ -1,11 +1,16 @@ #!/usr/bin/env bash +set -eux -o pipefail + . $(dirname "${BASH_SOURCE[0]}")/common.sh cd ${CURR_DIR} + rm -f libsingle-file-test-dynamic-library.{so,dylib} rm -f single-file-test-dynamic-library.dll ${DUB} build --single issue1505-single-file-package-dynamic-library.d -if [ ! -f libsingle-file-test-dynamic-library.{so,dylib} ] && [ ! -f single-file-test-dynamic-library.dll ]; then +if [[ ! -f libsingle-file-test-dynamic-library.so ]] \ +&& [[ ! -f libsingle-file-test-dynamic-library.dylib ]] \ +&& [[ ! 
-f single-file-test-dynamic-library.dll ]]; then die $LINENO 'Normal invocation did not produce a dynamic library in the current directory' fi rm -f libsingle-file-test-dynamic-library.{so,dylib} diff --git a/test/issue1856-build-unittest.sh b/test/issue1856-build-unittest.sh new file mode 100755 index 0000000..2819b31 --- /dev/null +++ b/test/issue1856-build-unittest.sh @@ -0,0 +1,69 @@ +#!/usr/bin/env bash + +set -euo pipefail + +TMPDIR=$(mktemp -d "$(basename "$0").XXXXXX") + +function cleanup { + rm -rf "$TMPDIR" +} +trap cleanup EXIT + +# no unittest config +cat > "$TMPDIR/no_ut.d" < "$TMPDIR/partial_ut.d" < "$TMPDIR/partial_ut2.d" < "$TMPDIR/full_ut.d" < %-(%s %)", cmd); + writeln("==========================================================="); + writeln(result.output); + writeln("==========================================================="); + writeln("Last command failed with exit code ", result.status, '\n'); + return 1; + } + + foreach (deployed; dirEntries(deployment_dir, "*", SpanMode.depth).filter!isFile) + if (!isWritable(deployed)) + { + writeln(deployed, " is expected to be writable, but it is not."); + return 1; + } + + return 0; +} + +void makeReadOnly(string name) +{ + version (Windows) + { + import core.sys.windows.windows; + + name.setAttributes(name.getAttributes() | FILE_ATTRIBUTE_READONLY); + } + else version (Posix) + { + import core.sys.posix.sys.stat; + + name.setAttributes(name.getAttributes() & ~(S_IWUSR | S_IWGRP | S_IWOTH)); + } + else + static assert(false, "Needs implementation."); + + import std.exception; + import std.stdio; + assertThrown!ErrnoException(File(name, "w")); +} + +void makeWritable(string name) +{ + version (Windows) + { + import core.sys.windows.windows; + + name.setAttributes(name.getAttributes() & ~FILE_ATTRIBUTE_READONLY); + } + else version (Posix) + { + import core.sys.posix.sys.stat; + + name.setAttributes(name.getAttributes() | S_IWUSR); + } + else + static assert(false, "Needs implementation."); + + import std.exception; + import std.stdio; + assertNotThrown!ErrnoException(File(name, "w")); +} + +bool isWritable(string name) +{ + version (Windows) + { + import core.sys.windows.windows; + + return (name.getAttributes() & FILE_ATTRIBUTE_READONLY) == 0; + } + else version (Posix) + { + import core.sys.posix.sys.stat; + + return (name.getAttributes() & S_IWUSR) != 0; + } + else + static assert(false, "Needs implementation."); +} diff --git a/test/issue2234-copy-read-only-files/.gitignore b/test/issue2234-copy-read-only-files/.gitignore new file mode 100644 index 0000000..ba077a4 --- /dev/null +++ b/test/issue2234-copy-read-only-files/.gitignore @@ -0,0 +1 @@ +bin diff --git a/test/issue2234-copy-read-only-files/dub.json b/test/issue2234-copy-read-only-files/dub.json new file mode 100644 index 0000000..f911f1d --- /dev/null +++ b/test/issue2234-copy-read-only-files/dub.json @@ -0,0 +1,8 @@ +{ + "name": "issue2234_copy_read_only_files", + "copyFiles": [ + "files/to_be_deployed.bin", + "files/images" + ], + "targetPath": "bin" +} diff --git a/test/issue2234-copy-read-only-files/files/images/to_be_deployed.img b/test/issue2234-copy-read-only-files/files/images/to_be_deployed.img new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue2234-copy-read-only-files/files/images/to_be_deployed.img diff --git a/test/issue2234-copy-read-only-files/files/to_be_deployed.bin b/test/issue2234-copy-read-only-files/files/to_be_deployed.bin new file mode 100644 index 0000000..e69de29 --- /dev/null +++ 
b/test/issue2234-copy-read-only-files/files/to_be_deployed.bin diff --git a/test/issue2234-copy-read-only-files/source/app.d b/test/issue2234-copy-read-only-files/source/app.d new file mode 100644 index 0000000..ab73b3a --- /dev/null +++ b/test/issue2234-copy-read-only-files/source/app.d @@ -0,0 +1 @@ +void main() {} diff --git a/test/issue2258-dynLib-exe-dep/.no_build_dmd b/test/issue2258-dynLib-exe-dep/.no_build_dmd new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue2258-dynLib-exe-dep/.no_build_dmd diff --git a/test/issue2258-dynLib-exe-dep/.no_build_gdc b/test/issue2258-dynLib-exe-dep/.no_build_gdc new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue2258-dynLib-exe-dep/.no_build_gdc diff --git a/test/issue2258-dynLib-exe-dep/dub.json b/test/issue2258-dynLib-exe-dep/dub.json new file mode 100644 index 0000000..30a9f4b --- /dev/null +++ b/test/issue2258-dynLib-exe-dep/dub.json @@ -0,0 +1,11 @@ +{ + "name": "dynlib-exe-dep", + "targetType": "executable", + "dependencies": { + "dynlib-simple": { "path": "../1-dynLib-simple/" } + }, + "lflags-linux": ["-rpath", "$$ORIGIN"], + "lflags-darwin": ["-rpath", "@executable_path"], + "dflags-ldc": ["-link-defaultlib-shared"] + +} diff --git a/test/issue2258-dynLib-exe-dep/source/app.d b/test/issue2258-dynLib-exe-dep/source/app.d new file mode 100644 index 0000000..52022fb --- /dev/null +++ b/test/issue2258-dynLib-exe-dep/source/app.d @@ -0,0 +1,7 @@ +module app; +import dynlib.app; + +extern(C) void main() +{ + entry(); +} diff --git a/test/issue2348-postbuildcommands.script.d b/test/issue2348-postbuildcommands.script.d new file mode 100644 index 0000000..a65a280 --- /dev/null +++ b/test/issue2348-postbuildcommands.script.d @@ -0,0 +1,30 @@ +/+ dub.sdl: +name "issue2348" +buildType "test" { + buildOptions "syntaxOnly" + postBuildCommands "echo xxx" +} ++/ +module issue2348; + +import std.process; +import std.stdio; +import std.algorithm; +import std.path; + +int main() +{ + const dub = environment.get("DUB", buildPath(__FILE_FULL_PATH__.dirName.dirName, "bin", "dub.exe")); + const cmd = [dub, "build", "--build=test", "--single", __FILE_FULL_PATH__]; + const result = execute(cmd, null, Config.none, size_t.max, __FILE_FULL_PATH__.dirName); + if (result.status || result.output.canFind("Failed")) + { + writefln("\n> %-(%s %)", cmd); + writeln("==========================================================="); + writeln(result.output); + writeln("==========================================================="); + writeln("Last command failed with exit code ", result.status, '\n'); + return 1; + } + return 0; +} diff --git a/test/issue2377-dynLib-dep-extra-files.script.d b/test/issue2377-dynLib-dep-extra-files.script.d new file mode 100644 index 0000000..4eb90ec --- /dev/null +++ b/test/issue2377-dynLib-dep-extra-files.script.d @@ -0,0 +1,135 @@ +/+ dub.sdl: +name "issue2377_dynlib_dep_extra_files" ++/ + +module issue2377_dynlib_dep_extra_files.script; + +import std.exception : enforce; +import std.file; +import std.path; + +version (DigitalMars) version (Windows) version = DMD_Windows; +version (DMD_Windows) { + void main() { + import std.stdio; + writeln("WARNING: skipping test '" ~ __FILE_FULL_PATH__.baseName ~ "' with DMD on Windows."); + } +} else: + +void main() { + import std.process : environment; + + version (Windows) enum exeExt = ".exe"; + else enum exeExt = ""; + const dub = environment.get("DUB", buildPath(__FILE_FULL_PATH__.dirName.dirName, "bin", "dub"~exeExt)); + + enum testDir = 
buildPath(__FILE_FULL_PATH__.dirName, "issue2377-dynLib-dep-extra-files"); + + // 1. `parent` as root package (depending on dynamic/static dep1, which depends on dynamic/static dep2) + chdir(buildPath(testDir, "parent")); + if (exists("output")) + rmdirRecurse("output"); + + // 1.1 dynlib config + run(dub ~ " build -c dynlib"); + chdir("output/dynlib"); + assertDynLibExists("parent"); + assertDynLibExists("dep1"); + assertDynLibExists("dep2"); + version (Windows) { + assertFileExists("parent.pdb"); + assertFileExists("parent.lib"); + assertFileExists("parent.exp"); + assertFileExists("dep1.pdb"); + assertFileExists("dep1.lib"); + assertFileExists("dep1.exp"); + assertFileExists("dep2.pdb"); + assertFileExists("dep2.lib"); + assertFileExists("dep2.exp"); + } + chdir("../.."); + + // 1.2 dynlib_static config + run(dub ~ " build -c dynlib_static"); + chdir("output/dynlib_static"); + assertDynLibExists("parent"); + version (Windows) { + assertFileExists("parent.pdb"); + assertFileExists("parent.lib"); + assertFileExists("parent.exp"); + } + enforce(!canFindFiles("*dep*"), "unexpected dependency files in statically linked dynlib output dir"); + chdir("../.."); + + // 1.3 exe_static config + run(dub ~ " build -c exe_static"); + chdir("output/exe_static"); + version (Windows) run(`.\parent.exe`); + else run("./parent"); + version (Windows) { + assertFileExists("parent.pdb"); + enforce(!exists("parent.lib"), "unexpected import .lib for executable"); + enforce(!exists("parent.exp"), "unexpected .exp file for executable"); + } + enforce(!canFindFiles("*dep*"), "unexpected dependency files in statically linked executable output dir"); + chdir("../.."); + + // 1.4 exe_dynamic config + run(dub ~ " build -c exe_dynamic"); + chdir("output/exe_dynamic"); + version (Windows) run(`.\parent.exe`); + else run(`LD_LIBRARY_PATH=".:${LD_LIBRARY_PATH:-}" ./parent`); + assertDynLibExists("dep1"); + assertDynLibExists("dep2"); + version (Windows) { + assertFileExists("dep1.pdb"); + assertFileExists("dep2.pdb"); + enforce(!canFindFiles("*.lib"), "unexpected import libs in dynamically linked executable output dir"); + enforce(!canFindFiles("*.exp"), "unexpected .exp files in dynamically linked executable output dir"); + } + chdir("../.."); + + // 2. 
`framework` as root package (targetType `none`) + chdir(buildPath(testDir, "framework")); + run(dub ~ " build"); + assertDynLibExists("dep1"); + assertDynLibExists("dep2"); + version (Windows) { + assertFileExists("dep1.pdb"); + assertFileExists("dep1.lib"); + assertFileExists("dep1.exp"); + assertFileExists("dep2.pdb"); + assertFileExists("dep2.lib"); + assertFileExists("dep2.exp"); + } +} + +void run(string command) { + import std.process; + const status = spawnShell(command).wait(); + enforce(status == 0, "command '" ~ command ~ "' failed"); +} + +void assertFileExists(string path) { + enforce(exists(path), "expected file '" ~ path ~ "' not found"); +} + +void assertDynLibExists(string name) { + version (Windows) { + enum prefix = ""; + enum suffix = ".dll"; + } else version (OSX) { + enum prefix = "lib"; + enum suffix = ".dylib"; + } else { + enum prefix = "lib"; + enum suffix = ".so"; + } + + assertFileExists(prefix ~ name ~ suffix); +} + +bool canFindFiles(string pattern) { + auto entries = dirEntries(".", pattern, SpanMode.shallow); + return !entries.empty(); +} diff --git a/test/issue2377-dynLib-dep-extra-files/.gitignore b/test/issue2377-dynLib-dep-extra-files/.gitignore new file mode 100644 index 0000000..5d658ec --- /dev/null +++ b/test/issue2377-dynLib-dep-extra-files/.gitignore @@ -0,0 +1 @@ +/parent/output/ diff --git a/test/issue2377-dynLib-dep-extra-files/.no_build b/test/issue2377-dynLib-dep-extra-files/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue2377-dynLib-dep-extra-files/.no_build diff --git a/test/issue2377-dynLib-dep-extra-files/dep1/dub.sdl b/test/issue2377-dynLib-dep-extra-files/dep1/dub.sdl new file mode 100644 index 0000000..19d1989 --- /dev/null +++ b/test/issue2377-dynLib-dep-extra-files/dep1/dub.sdl @@ -0,0 +1,14 @@ +name "dep1" +dependency "dep2" path="../dep2" + +configuration "library" { + targetType "staticLibrary" + targetPath "output/library" + subConfiguration "dep2" "library" +} + +configuration "dynlib" { + targetType "dynamicLibrary" + targetPath "output/dynlib" + subConfiguration "dep2" "dynlib" +} diff --git a/test/issue2377-dynLib-dep-extra-files/dep1/source/dep1.d b/test/issue2377-dynLib-dep-extra-files/dep1/source/dep1.d new file mode 100644 index 0000000..80e536c --- /dev/null +++ b/test/issue2377-dynLib-dep-extra-files/dep1/source/dep1.d @@ -0,0 +1,6 @@ +module dep1; + +void foo1() { + import dep2; + foo2(); +} diff --git a/test/issue2377-dynLib-dep-extra-files/dep2/dub.sdl b/test/issue2377-dynLib-dep-extra-files/dep2/dub.sdl new file mode 100644 index 0000000..e323ee7 --- /dev/null +++ b/test/issue2377-dynLib-dep-extra-files/dep2/dub.sdl @@ -0,0 +1,11 @@ +name "dep2" + +configuration "library" { + targetType "staticLibrary" + targetPath "output/library" +} + +configuration "dynlib" { + targetType "dynamicLibrary" + targetPath "output/dynlib" +} diff --git a/test/issue2377-dynLib-dep-extra-files/dep2/source/dep2.d b/test/issue2377-dynLib-dep-extra-files/dep2/source/dep2.d new file mode 100644 index 0000000..c5e04d6 --- /dev/null +++ b/test/issue2377-dynLib-dep-extra-files/dep2/source/dep2.d @@ -0,0 +1,3 @@ +module dep2; + +void foo2() {} diff --git a/test/issue2377-dynLib-dep-extra-files/framework/dub.sdl b/test/issue2377-dynLib-dep-extra-files/framework/dub.sdl new file mode 100644 index 0000000..dfbb79b --- /dev/null +++ b/test/issue2377-dynLib-dep-extra-files/framework/dub.sdl @@ -0,0 +1,5 @@ +name "framework" +targetType "none" + +dependency "dep1" path="../dep1" +subConfiguration "dep1" "dynlib" diff 
--git a/test/issue2377-dynLib-dep-extra-files/parent/dub.sdl b/test/issue2377-dynLib-dep-extra-files/parent/dub.sdl new file mode 100644 index 0000000..dd43bf6 --- /dev/null +++ b/test/issue2377-dynLib-dep-extra-files/parent/dub.sdl @@ -0,0 +1,28 @@ +name "parent" +dependency "dep1" path="../dep1" + +configuration "dynlib" { + targetType "dynamicLibrary" + targetPath "output/dynlib" + subConfiguration "dep1" "dynlib" +} + +configuration "dynlib_static" { + targetType "dynamicLibrary" + targetPath "output/dynlib_static" + subConfiguration "dep1" "library" +} + +configuration "exe_static" { + targetType "executable" + targetPath "output/exe_static" + subConfiguration "dep1" "library" +} + +configuration "exe_dynamic" { + targetType "executable" + targetPath "output/exe_dynamic" + subConfiguration "dep1" "dynlib" + dflags "-link-defaultlib-shared" platform="ldc" + dflags "-defaultlib=libphobos2.so" platform="linux-dmd" +} diff --git a/test/issue2377-dynLib-dep-extra-files/parent/source/app.d b/test/issue2377-dynLib-dep-extra-files/parent/source/app.d new file mode 100644 index 0000000..d7fa2b1 --- /dev/null +++ b/test/issue2377-dynLib-dep-extra-files/parent/source/app.d @@ -0,0 +1,11 @@ +module app; + +// Add a dummy export to enforce creation of import .lib and .exp file for the (Windows) executable. +// They shouldn't be copied to the output dir though. +export void dummy() {} + +void main() { + import parent; + parent_bar(); + dummy(); +} diff --git a/test/issue2377-dynLib-dep-extra-files/parent/source/parent.d b/test/issue2377-dynLib-dep-extra-files/parent/source/parent.d new file mode 100644 index 0000000..3c35529 --- /dev/null +++ b/test/issue2377-dynLib-dep-extra-files/parent/source/parent.d @@ -0,0 +1,6 @@ +module parent; + +void parent_bar() { + import dep1; + foo1(); +} diff --git a/test/issue2448/dub.json b/test/issue2448/dub.json new file mode 100644 index 0000000..76c561c --- /dev/null +++ b/test/issue2448/dub.json @@ -0,0 +1,5 @@ +{ + "name": "use-source-files", + "description": "Example of using source files.", + "sourceFiles": ["ext/*.d"] +} diff --git a/test/issue2448/ext/kekw.d b/test/issue2448/ext/kekw.d new file mode 100644 index 0000000..f1585d3 --- /dev/null +++ b/test/issue2448/ext/kekw.d @@ -0,0 +1 @@ +extern(C) string funkekw () { return "KEKW";} diff --git a/test/issue2448/source/app.d b/test/issue2448/source/app.d new file mode 100644 index 0000000..b327028 --- /dev/null +++ b/test/issue2448/source/app.d @@ -0,0 +1,8 @@ +import std.stdio; + +extern(C) string funkekw (); + +void main() +{ + writefln("Juan: %s", funkekw()); +} diff --git a/test/issue2452/dub.json b/test/issue2452/dub.json new file mode 100644 index 0000000..40f2fd0 --- /dev/null +++ b/test/issue2452/dub.json @@ -0,0 +1,9 @@ +{ + "name": "generated-sources-and-source-files-without-glob", + "description": "Example of using pre generate commands and sourceFiles without glob.", + "sourceFiles": ["ext/fortytwo.d"], + "preGenerateCommands": [ + "mkdir -p ext", + "echo 'extern(C) int fun42 () { return 42; }' > ext/fortytwo.d" + ] +} diff --git a/test/issue2452/source/app.d b/test/issue2452/source/app.d new file mode 100644 index 0000000..6c0c6fa --- /dev/null +++ b/test/issue2452/source/app.d @@ -0,0 +1,8 @@ +import std.stdio; + +import fortytwo; + +void main() +{ + writefln("ShouldBe42: %s", fun42()); +} diff --git a/test/issue877-auto-fetch-package-on-run.sh b/test/issue877-auto-fetch-package-on-run.sh index b350722..ddaea8a 100755 --- a/test/issue877-auto-fetch-package-on-run.sh +++ 
b/test/issue877-auto-fetch-package-on-run.sh @@ -2,7 +2,7 @@ set -eu -o pipefail set -x -$DUB remove --version="*" gitcompatibledubpackage || true +$DUB remove 'gitcompatibledubpackage@*' || true # check whether the interactive run mode works echo "y" | $DUB run gitcompatibledubpackage | grep "Hello DUB" diff --git a/test/test-upgrade-subpackages.sh b/test/test-upgrade-subpackages.sh new file mode 100755 index 0000000..860e686 --- /dev/null +++ b/test/test-upgrade-subpackages.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash + +. $(dirname ${BASH_SOURCE[0]})/common.sh + +PACK_PATH="$CURR_DIR"/path-subpackage-ref + +# make sure that there are no left-over selections files +rm -f $PACK_PATH/dub.selections.json $PACK_PATH/subpack/dub.selections.json + +# first upgrade only the root package +if ! ${DUB} upgrade --root $PACK_PATH; then + die $LINENO 'The upgrade command failed.' +fi +if [ ! -f $PACK_PATH/dub.selections.json ] || [ -f $PACK_PATH/subpack/dub.selections.json ]; then + die $LINENO 'The upgrade command did not generate the right set of dub.selections.json files.' +fi + +rm -f $PACK_PATH/dub.selections.json + +# now upgrade with all sub packages +if ! ${DUB} upgrade -s --root $PACK_PATH; then + die $LINENO 'The upgrade command failed with -s.' +fi +if [ ! -f $PACK_PATH/dub.selections.json ] || [ ! -f $PACK_PATH/subpack/dub.selections.json ]; then + die $LINENO 'The upgrade command did not generate all dub.selections.json files.' +fi + +# clean up +rm -f $PACK_PATH/dub.selections.json $PACK_PATH/subpack/dub.selections.json diff --git a/test/timeout.sh b/test/timeout.sh index 4e7f7db..746b0aa 100755 --- a/test/timeout.sh +++ b/test/timeout.sh @@ -31,10 +31,10 @@ \r {}') for i in $(seq 0 $((${#res} - 1))); do - echo -n "${res:$i:1}" + echo -n "${res:$i:1}" || true sleep 1 done -} | nc -l $PORT >/dev/null & +} | tail -n +1 | nc -l $PORT >/dev/null & PID=$! if timeout 10s time $DUB fetch dub --skip-registry=all --registry=http://localhost:$PORT; then die $LINENO 'Fetching from too slow registry should fail.' diff --git a/test/unittest-cov-ctfe.sh b/test/unittest-cov-ctfe.sh new file mode 100755 index 0000000..a97a262 --- /dev/null +++ b/test/unittest-cov-ctfe.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +DIR=$(dirname "${BASH_SOURCE[0]}") +. 
"$DIR"/common.sh +"$DUB" test --root "$DIR"/unittest-cov-ctfe --build=unittest-cov-ctfe diff --git a/test/unittest-cov-ctfe/.no_build b/test/unittest-cov-ctfe/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/unittest-cov-ctfe/.no_build diff --git a/test/unittest-cov-ctfe/.no_run b/test/unittest-cov-ctfe/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/unittest-cov-ctfe/.no_run diff --git a/test/unittest-cov-ctfe/.no_test b/test/unittest-cov-ctfe/.no_test new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/unittest-cov-ctfe/.no_test diff --git a/test/unittest-cov-ctfe/dub.sdl b/test/unittest-cov-ctfe/dub.sdl new file mode 100644 index 0000000..de485d0 --- /dev/null +++ b/test/unittest-cov-ctfe/dub.sdl @@ -0,0 +1,4 @@ +name "test" +version "1.0.0" +targetType "library" +dflags "-cov=100" diff --git a/test/unittest-cov-ctfe/source/mod.d b/test/unittest-cov-ctfe/source/mod.d new file mode 100644 index 0000000..649655a --- /dev/null +++ b/test/unittest-cov-ctfe/source/mod.d @@ -0,0 +1,18 @@ +module mod; + +int f(int x) +{ + return x + 1; +} + +int g(int x) +{ + return x * 2; +} + +enum gResult = g(12); // execute g() at compile-time + +unittest +{ + assert(f(11) + gResult == 36); +} diff --git a/test/version-spec.sh b/test/version-spec.sh index 8d0677f..498d59f 100755 --- a/test/version-spec.sh +++ b/test/version-spec.sh @@ -16,21 +16,21 @@ [[ $($DUB describe foo@'*' | grep path | head -n 1) == *"/newfoo/"* ]] [[ $($DUB describe foo@'>0.0.1 <2.0.0' | grep path | head -n 1) == *"/newfoo/"* ]] -[[ $($DUB test foo | head -n 1) == *"/newfoo/" ]] -[[ $($DUB test foo@1.0.0 | head -n 1) == *"/newfoo/" ]] -[[ $($DUB test foo@0.1.0 | head -n 1) == *"/oldfoo/" ]] +[[ $($DUB test foo | tail -n +1 | head -n 1) == *"/newfoo/" ]] +[[ $($DUB test foo@1.0.0 | tail -n +1 | head -n 1) == *"/newfoo/" ]] +[[ $($DUB test foo@0.1.0 | tail -n +1 | head -n 1) == *"/oldfoo/" ]] [[ $($DUB lint foo | tail -n 1) == *"/newfoo/" ]] [[ $($DUB lint foo@1.0.0 | tail -n 1) == *"/newfoo/" ]] [[ $($DUB lint foo@0.1.0 | tail -n 1) == *"/oldfoo/" ]] -[[ $($DUB generate cmake foo | head -n 1) == *"/newfoo/" ]] -[[ $($DUB generate cmake foo@1.0.0 | head -n 1) == *"/newfoo/" ]] -[[ $($DUB generate cmake foo@0.1.0 | head -n 1) == *"/oldfoo/" ]] +[[ $($DUB generate cmake foo | tail -n +1 | head -n 1) == *"/newfoo/" ]] +[[ $($DUB generate cmake foo@1.0.0 | tail -n +1 | head -n 1) == *"/newfoo/" ]] +[[ $($DUB generate cmake foo@0.1.0 | tail -n +1 | head -n 1) == *"/oldfoo/" ]] -[[ $($DUB build -n foo | head -n 1) == *"/newfoo/" ]] -[[ $($DUB build -n foo@1.0.0 | head -n 1) == *"/newfoo/" ]] -[[ $($DUB build -n foo@0.1.0 | head -n 1) == *"/oldfoo/" ]] +[[ $($DUB build -n foo | tail -n +1 | head -n 1) == *"/newfoo/" ]] +[[ $($DUB build -n foo@1.0.0 | tail -n +1 | head -n 1) == *"/newfoo/" ]] +[[ $($DUB build -n foo@0.1.0 | tail -n +1 | head -n 1) == *"/oldfoo/" ]] [[ $($DUB run -n foo | tail -n 1) == 'new-foo' ]] [[ $($DUB run -n foo@1.0.0 | tail -n 1) == 'new-foo' ]]