diff --git a/.codecov.yml b/.codecov.yml index b012da1..f7cb59e 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -29,6 +29,6 @@ changes: off patch: default: - threshold: 5 + informational: true comment: false diff --git a/.travis.yml b/.travis.yml index d527954..2efa367 100644 --- a/.travis.yml +++ b/.travis.yml @@ -21,7 +21,13 @@ - d: gdc include: - stage: test - d: dmd-2.078.0 + d: dmd-2.081.1 + env: [FRONTEND=2.081] + - d: dmd-2.080.1 + env: [FRONTEND=2.080] + - d: dmd-2.079.1 + env: [FRONTEND=2.078] + - d: dmd-2.078.1 env: [FRONTEND=2.078] - d: dmd-2.077.1 env: [FRONTEND=2.077, COVERAGE=true] @@ -35,14 +41,12 @@ env: [FRONTEND=2.073] - d: dmd-2.072.2 env: [FRONTEND=2.072] - - d: dmd-2.071.2 - env: [FRONTEND=2.071] - - d: dmd-2.070.2 - env: [FRONTEND=2.070] - - d: dmd-2.069.2 - env: [FRONTEND=2.069] - - d: dmd-2.068.2 - env: [FRONTEND=2.068] + - d: ldc-1.10.0 + env: [FRONTEND=2.080] + - d: ldc-1.9.0 + env: [FRONTEND=2.079] + - d: ldc-1.8.0 + env: [FRONTEND=2.078] - d: ldc-1.7.0 env: [FRONTEND=2.077] - d: ldc-1.6.0 @@ -53,16 +57,6 @@ env: [FRONTEND=2.074] - d: ldc-1.3.0 env: [FRONTEND=2.073] - - d: ldc-1.2.0 - env: [FRONTEND=2.072] - - d: ldc-1.1.0 - env: [FRONTEND=2.071] - - d: ldc-1.0.0 - env: [FRONTEND=2.070] - - d: gdc - env: [FRONTEND=2.068] - - d: gdc-4.8.5 - env: [FRONTEND=2.068] - stage: deploy d: ldc os: osx @@ -115,6 +109,6 @@ if: type = pull_request or (type = push and branch = master) # Until deployment of the release binaries is fixed, always build them #- name: deploy - #if: type = push and tag =~ ^v + #if: type = push and tag =~ ^v\d+\.\d+\.\d+[^-]*\$ # not a pre-release tag - name: update-latest - if: type = push and tag =~ ^v + if: type = push and tag =~ ^v\d+\.\d+\.\d+[^-]*\$ # not a pre-release tag diff --git a/changelog/buildSettingsVars.dd b/changelog/buildSettingsVars.dd new file mode 100644 index 0000000..9e82917 --- /dev/null +++ b/changelog/buildSettingsVars.dd @@ -0,0 +1,21 @@ +Variables such as `$ARCH` or `$PLATFORM` are now supported in 
the build settings. + +JSON lines such as +------- + "lflags-posix-x86": [ "-L$PACKAGE_DIR/lib/posix-x86" ], + "lflags-posix-x86_64": [ "-L$PACKAGE_DIR/lib/posix-x86_64" ], +------- +can be turned into +------- + "lflags-posix": [ "-L$PACKAGE_DIR/lib/posix-$ARCH" ], +------- + +Both `$VAR` and `${VAR}` syntaxes are supported. +The supported variables are as follows: +$(TABLE + $(TR $(TH name) $(TH values)) + $(TR $(TD `$ARCH`) $(TD $(D_INLINECODE "x86", "x86_64"))) + $(TR $(TD `$PLATFORM`) $(TD $(D_INLINECODE "linux", "windows", ...))) + $(TR $(TD `$PLATFORM_POSIX`) $(TD $(D_INLINECODE "posix", "windows", ...))) + $(TR $(TD `$BUILD_TYPE`) $(TD $(D_INLINECODE "debug", "release", ...))) +) diff --git a/changelog/buildTypeSyntax.dd b/changelog/buildTypeSyntax.dd new file mode 100644 index 0000000..1afaa67 --- /dev/null +++ b/changelog/buildTypeSyntax.dd @@ -0,0 +1,3 @@ +DUB supports build type "syntax" + +With this release a new build type "syntax" is added. This build type is useful for IDEs to check the syntax of D code without generating binaries. \ No newline at end of file diff --git a/changelog/env-var-replacement.dd b/changelog/env-var-replacement.dd new file mode 100644 index 0000000..3fb9f55 --- /dev/null +++ b/changelog/env-var-replacement.dd @@ -0,0 +1,8 @@ +Environment variable expansion was improved + +Environment variable expansion now supports the braced `${MY_VAR}` expansion syntax: e.g. for `${PACKAGE_PATH}_suffix`. + +Moreover, `$PACKAGE_PATH`, `$ROOT_PACKAGE_PATH`, and `$DEP_PACKAGE_PATH` no longer end + with a `/` or `\` to support clean concatenation, e.g. `${PACKAGE_PATH}/subpath`. + +Learn more about the details at $(LINK2 https://github.com/dlang/dub/pull/1392, #1392). 
diff --git a/changelog/recursive_dependecy_resolution.dd b/changelog/recursive_dependecy_resolution.dd new file mode 100644 index 0000000..b8aa7fa --- /dev/null +++ b/changelog/recursive_dependecy_resolution.dd @@ -0,0 +1,10 @@ +Dependency resolution has been reimplemented using a recursive algorithm + +The new algorithm minimizes the search space while descending the dependency +graph. Compared to the old approach, it is now much less likely to run into +pathological cases that result in exponential run time ("The dependency +resolution algorithm is taking too long"). + +Furthermore, the error message in case of unsatisfiable dependencies is more +precise, usually making it straightforward to debug issues in the dependency +graph of a failing package. diff --git a/changelog/stdinForSingleFilePackages.dd b/changelog/stdinForSingleFilePackages.dd new file mode 100644 index 0000000..a9f4b57 --- /dev/null +++ b/changelog/stdinForSingleFilePackages.dd @@ -0,0 +1,6 @@ +DUB accepts single file packages on STDIN + +You can pass single file packages to dub on STDIN using a dash as the first argument to DUB. +All arguments after the dash will be passed as runtime arguments to the application. + +Example: `cat app.d | dub - --foo=bar` \ No newline at end of file diff --git a/changelog/upgrade_check.dd b/changelog/upgrade_check.dd new file mode 100644 index 0000000..6b59134 --- /dev/null +++ b/changelog/upgrade_check.dd @@ -0,0 +1,8 @@ +The regular upgrade check has been removed + +Previously dub would regularly (once a day) check for possible package upgrades before building a package. +This led to unexpected build failures, e.g. when internet connectivity was down or dependency resolution failed, and caused unnecessary delays. + +The build flag `--nodeps` now only suppresses resolution of missing dependencies. + +The new upgrade flag `--dry-run` was added to explicitly check for upgradable packages without actually upgrading anything. 
diff --git a/dub.selections.json b/dub.selections.json index 52e7c92..486c146 100644 --- a/dub.selections.json +++ b/dub.selections.json @@ -10,7 +10,7 @@ "libevent": "2.0.2+2.0.16", "memutils": "0.4.9", "openssl": "1.1.6+1.0.1g", - "stdx-allocator": "2.77.0", + "stdx-allocator": "2.77.2", "taggedalgebraic": "0.10.8", "vibe-core": "1.4.0-alpha.1", "vibe-d": "0.8.3-alpha.3" diff --git a/scripts/fish-completion/dub.fish b/scripts/fish-completion/dub.fish index f55f345..c8cd222 100644 --- a/scripts/fish-completion/dub.fish +++ b/scripts/fish-completion/dub.fish @@ -49,7 +49,7 @@ complete -c dub -n "contains '$cmd' (commandline -poc)" -s a -l arch -r -d "Force architecture" complete -c dub -n "contains '$cmd' (commandline -poc)" -s d -l debug -r -d "Debug identifier" complete -c dub -n "contains '$cmd' (commandline -poc)" -l nodeps -d "No dependency check" - complete -c dub -n "contains '$cmd' (commandline -poc)" -s b -l build -u -x -d "Build type" -a "debug plain release release-debug release-nobounds unittest profile profile-gc docs ddox cov unittest-cov" + complete -c dub -n "contains '$cmd' (commandline -poc)" -s b -l build -u -x -d "Build type" -a "debug plain release release-debug release-nobounds unittest profile profile-gc docs ddox cov unittest-cov syntax" complete -c dub -n "contains '$cmd' (commandline -poc)" -l build-mode -x -d "How compiler & linker are invoked" -a "separate allAtOnce singleFile" complete -c dub -n "contains '$cmd' (commandline -poc)" -l compiler -x -d "Compiler binary" -a "dmd gdc ldc gdmd ldmd" end diff --git a/semaphore-ci.sh b/semaphore-ci.sh new file mode 100755 index 0000000..a415466 --- /dev/null +++ b/semaphore-ci.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +set -euo pipefail +set -x + +if [ "${D_VERSION:-dmd}" == "gdc" ] ; then + + # Use the dub-updating fork of the installer script until https://github.com/dlang/installer/pull/301 is merged + wget https://raw.githubusercontent.com/wilzbach/installer-dub/master/script/install.sh -O 
install.dub.sh + bash install.dub.sh -a dub + dub_path_activate="$(find $HOME/dlang/*/activate | head -1)" + rm "${dub_path_activate}" + dub_path="$(dirname "$dub_path_activate")" + sudo ln -s "${dub_path}/dub" /usr/bin/dub + + export DMD=gdmd + export DC=gdc + # It's technically ~"2.076", but Ternary doesn't seem to have been ported and Vibe.d seems to depend on this. + # Ternary was added in 2.072: https://dlang.org/phobos/std_typecons.html#.Ternary + # However, the nonet tests is done only for > 2.072 + export FRONTEND=2.072 + + sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test + sudo apt-get update + sudo apt-get install -y gdc-8 + # fetch the dmd-like wrapper + sudo wget https://raw.githubusercontent.com/D-Programming-GDC/GDMD/master/dmd-script -O /usr/bin/gdmd + sudo chmod +x /usr/bin/gdmd + # DUB requires gdmd + sudo ln -s /usr/bin/gdc-8 /usr/bin/gdc + # fake install script and create a fake 'activate' script + mkdir -p ~/dlang/gdc-8 + echo "deactivate(){ echo;}" > ~/dlang/gdc-8/activate + +else + curl --connect-timeout 5 --max-time 10 --retry 5 --retry-delay 1 --retry-max-time 60 https://dlang.org/install.sh | bash -s "$D_VERSION" +fi + +./travis-ci.sh diff --git a/source/dub/commandline.d b/source/dub/commandline.d index 6f545b0..bd947f4 100644 --- a/source/dub/commandline.d +++ b/source/dub/commandline.d @@ -97,6 +97,16 @@ environment["TEMP"] = environment["TEMP"].replace("/", "\\"); } + // special stdin syntax + if (args.length >= 2 && args[1] == "-") + { + import dub.internal.utils: getTempFile; + + auto path = getTempFile("app", ".d"); + stdin.byChunk(4096).joiner.toFile(path.toNativeString()); + args = args[0] ~ [path.toNativeString()] ~ args[2..$]; + } + // special single-file package shebang syntax if (args.length >= 2 && args[1].endsWith(".d")) { args = args[0] ~ ["run", "-q", "--temp-build", "--single", args[1], "--"] ~ args[2 ..$]; @@ -245,7 +255,7 @@ // make the CWD package available so that for example sub packages can reference their // 
parent package. try dub.packageManager.getOrLoadPackage(NativePath(options.root_path)); - catch (Exception e) { logDiagnostic("No package found in current working directory."); } + catch (Exception e) { logDiagnostic("No valid package found in current working directory: %s", e.msg); } } } @@ -280,7 +290,11 @@ { args.getopt("h|help", &help, ["Display general or command specific help"]); args.getopt("root", &root_path, ["Path to operate in instead of the current working dir"]); - args.getopt("registry", ®istry_urls, ["Search the given DUB registry URL first when resolving dependencies. Can be specified multiple times."]); + args.getopt("registry", ®istry_urls, [ + "Search the given registry URL first when resolving dependencies. Can be specified multiple times. Available registry types:", + " DUB: URL to DUB registry (default)", + " Maven: URL to Maven repository + group id containing dub packages as artifacts. E.g. mvn+http://localhost:8040/maven/libs-release/dubpackages", + ]); args.getopt("skip-registry", &skipRegistry, [ "Sets a mode for skipping the search on certain package registry types:", " none: Search all configured or default registries (default)", @@ -595,7 +609,7 @@ args.getopt("b|build", &m_buildType, [ "Specifies the type of build to perform. Note that setting the DFLAGS environment variable will override the build type with custom flags.", "Possible names:", - " debug (default), plain, release, release-debug, release-nobounds, unittest, profile, profile-gc, docs, ddox, cov, unittest-cov and custom types" + " debug (default), plain, release, release-debug, release-nobounds, unittest, profile, profile-gc, docs, ddox, cov, unittest-cov, syntax and custom types" ]); args.getopt("c|config", &m_buildConfig, [ "Builds the specified configuration. 
Configurations can be defined in dub.json" @@ -616,7 +630,7 @@ "Define the specified debug version identifier when building - can be used multiple times" ]); args.getopt("nodeps", &m_nodeps, [ - "Do not check/update dependencies before building" + "Do not resolve missing dependencies before building" ]); args.getopt("build-mode", &m_buildMode, [ "Specifies the way the compiler and linker are invoked. Valid values:", @@ -656,8 +670,6 @@ } if (!m_nodeps) { - // TODO: only upgrade(select) if necessary, only upgrade(upgrade) every now and then - // retrieve missing packages dub.project.reinit(); if (!dub.project.hasAllDependencies) { @@ -665,11 +677,6 @@ if (m_single) dub.upgrade(UpgradeOptions.select | UpgradeOptions.noSaveSelections); else dub.upgrade(UpgradeOptions.select); } - - if (!m_single) { - logDiagnostic("Checking for upgrades."); - dub.upgrade(UpgradeOptions.upgrade|UpgradeOptions.printUpgradesOnly|UpgradeOptions.useCachedResult); - } } dub.project.validate(); @@ -1127,6 +1134,7 @@ bool m_forceRemove = false; bool m_missingOnly = false; bool m_verify = false; + bool m_dryRun = false; } this() @@ -1151,6 +1159,9 @@ args.getopt("verify", &m_verify, [ "Updates the project and performs a build. If successful, rewrites the selected versions file ." ]); + args.getopt("dry-run", &m_dryRun, [ + "Only print what would be upgraded, but don't actually upgrade anything." + ]); args.getopt("missing-only", &m_missingOnly, [ "Performs an upgrade only for dependencies that don't yet have a version selected. This is also done automatically before each build." 
]); @@ -1169,6 +1180,7 @@ auto options = UpgradeOptions.upgrade|UpgradeOptions.select; if (m_missingOnly) options &= ~UpgradeOptions.upgrade; if (m_prerelease) options |= UpgradeOptions.preRelease; + if (m_dryRun) options |= UpgradeOptions.dryRun; dub.upgrade(options, free_args); return 0; } diff --git a/source/dub/compilers/utils.d b/source/dub/compilers/utils.d index 999a69e..6e57867 100644 --- a/source/dub/compilers/utils.d +++ b/source/dub/compilers/utils.d @@ -192,6 +192,7 @@ {[BuildOption.optimize], "Call DUB with --build=release"}, {[BuildOption.profile], "Call DUB with --build=profile"}, {[BuildOption.unittests], "Call DUB with --build=unittest"}, + {[BuildOption.syntaxOnly], "Call DUB with --build=syntax"}, {[BuildOption.warnings, BuildOption.warningsAsErrors], "Use \"buildRequirements\" to control the warning level"}, {[BuildOption.ignoreDeprecations, BuildOption.deprecationWarnings, BuildOption.deprecationErrors], "Use \"buildRequirements\" to control the deprecation warning level"}, {[BuildOption.property], "This flag is deprecated and has no effect"} diff --git a/source/dub/dependencyresolver.d b/source/dub/dependencyresolver.d index 1662cd3..3b61758 100644 --- a/source/dub/dependencyresolver.d +++ b/source/dub/dependencyresolver.d @@ -1,7 +1,7 @@ /** Dependency configuration/version resolution algorithm. - Copyright: © 2014 rejectedsoftware e.K. + Copyright: © 2014-2018 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ @@ -18,7 +18,23 @@ import std.string : format, indexOf, lastIndexOf; +/** Resolves dependency graph with multiple configurations per package. + + The term "configuration" can mean any kind of alternative dependency + configuration of a package. In particular, it can mean different + versions of a package. + + `CONFIG` is an abstract type denoting a single configuration of a certain + package, whereas `CONFIGS` denotes a set of configurations. 
The + representation of both can be freely chosen, so that `CONFIGS` for example + can be defined in terms of a version range. +*/ class DependencyResolver(CONFIGS, CONFIG) { + /** Encapsulates a list of outgoing edges in the dependency graph. + + A value of this type represents a single dependency with multiple + possible configurations for the target package. + */ static struct TreeNodes { string pack; CONFIGS configs; @@ -37,6 +53,10 @@ } } + /** A single node in the dependency graph. + + Nodes are a combination of a package and a single package configuration. + */ static struct TreeNode { string pack; CONFIG config; @@ -54,230 +74,33 @@ } } - private static struct PackageConfigs - { - static struct Depender - { - TreeNode origin; - TreeNodes dependency; - } - - // all possible configurations to test for this package - CONFIG[] allConfigs; - - // determines whether this package has any dependencies, may be - // different from allConfigs.length > 0 after certain configurations - // have been filtered out - bool anyConfig; - - Depender[] origins; - } - CONFIG[string] resolve(TreeNode root, bool throw_on_failure = true) { - auto root_base_pack = basePackage(root.pack); - - // find all possible configurations of each possible dependency - size_t[string] package_indices; - string[size_t] package_names; - PackageConfigs[] configs; - bool[string] maybe_optional_deps; - bool[TreeNode] visited; - - void findConfigsRec(TreeNode parent, bool parent_unique) - { - if (parent in visited) return; - visited[parent] = true; - - foreach (ch; getChildren(parent)) { - auto basepack = basePackage(ch.pack); - auto pidx = configs.length; - - if (ch.depType != DependencyType.required) maybe_optional_deps[ch.pack] = true; - - PackageConfigs config; - if (auto pi = basepack in package_indices) { - pidx = *pi; - config = configs[*pi]; - } else { - if (basepack == root_base_pack) config.allConfigs = [root.config]; - else config.allConfigs = getAllConfigs(basepack); - configs ~= config; - 
package_indices[basepack] = pidx; - package_names[pidx] = basepack; - } - - foreach (c; getSpecificConfigs(basepack, ch)) - if (!config.allConfigs.canFind(c)) - config.allConfigs = c ~ config.allConfigs; - - if (config.allConfigs.length > 0) - config.anyConfig = true; - - // store package depending on this for better error messages - config.origins ~= PackageConfigs.Depender(parent, ch); - - // eliminate configurations from which we know that they can't satisfy - // the uniquely defined root dependencies (==version or ~branch style dependencies) - if (parent_unique) config.allConfigs = config.allConfigs.filter!(c => matches(ch.configs, c)).array; - - configs[pidx] = config; - - foreach (v; config.allConfigs) - findConfigsRec(TreeNode(ch.pack, v), parent_unique && config.allConfigs.length == 1); - } - } - findConfigsRec(root, true); - - // append an invalid configuration to denote an unchosen dependency - // this is used to properly support optional dependencies (when - // getChildren() returns no configurations for an optional dependency, - // but getAllConfigs() has already provided an existing list of configs) - foreach (i, ref cfgs; configs) - if (cfgs.allConfigs.length == 0 || package_names[i] in maybe_optional_deps) - cfgs.allConfigs = cfgs.allConfigs ~ CONFIG.invalid; - - logDebug("Configurations used for dependency resolution:"); - foreach (n, i; package_indices) logDebug(" %s (%s%s): %s", n, i, n in maybe_optional_deps ? 
", maybe optional" : ", required", configs[i]); - - auto config_indices = new size_t[configs.length]; - config_indices[] = 0; - - visited = null; - sizediff_t validateConfigs(TreeNode parent, ref ConflictError error) - { - import std.algorithm : max; - - if (parent in visited) return -1; - - visited[parent] = true; - sizediff_t maxcpi = -1; - sizediff_t parentidx = package_indices.get(basePackage(parent.pack), -1); - auto parentbase = basePackage(parent.pack); - - // loop over all dependencies - foreach (ch; getChildren(parent)) { - auto basepack = basePackage(ch.pack); - assert(basepack in package_indices, format("%s not in packages %s", basepack, package_indices)); - - // get the current config/version of the current dependency - sizediff_t childidx = package_indices[basepack]; - auto child = configs[childidx]; - - if (child.allConfigs.length == 1 && child.allConfigs[0] == CONFIG.invalid) { - // ignore invalid optional dependencies - if (ch.depType != DependencyType.required) - continue; - - if (parentbase == root_base_pack) { - import std.uni : toLower; - auto lp = ch.pack.toLower(); - if (lp != ch.pack) { - logError("Dependency \"%s\" of %s contains upper case letters, but must be lower case.", ch.pack, parent.pack); - if (getAllConfigs(lp).length) logError("Did you mean \"%s\"?", lp); - } - if (child.anyConfig) - throw new Exception(format("Root package %s reference %s %s cannot be satisfied.\nPackages causing the conflict:\n\t%s", - parent.pack, ch.pack, ch.configs, - child.origins.map!(a => a.origin.pack ~ " depends on " ~ a.dependency.configs.to!string).join("\n\t"))); - else - throw new Exception(format("Root package %s references unknown package %s", parent.pack, ch.pack)); - } - // choose another parent config to avoid the invalid child - if (parentidx > maxcpi) { - error = ConflictError(ConflictError.Kind.invalidDependency, parent, ch, CONFIG.invalid); - logDiagnostic("%s (ci=%s)", error, parentidx); - maxcpi = parentidx; - } - } else { - auto config = 
child.allConfigs[config_indices[childidx]]; - auto chnode = TreeNode(ch.pack, config); - - if (config == CONFIG.invalid || !matches(ch.configs, config)) { - // ignore missing optional dependencies - if (config == CONFIG.invalid && ch.depType != DependencyType.required) - continue; - - // if we are at the root level, we can safely skip the maxcpi computation and instead choose another childidx config - if (parentbase == root_base_pack) { - error = ConflictError(ConflictError.Kind.noRootMatch, parent, ch, config); - return childidx; - } - - if (childidx > maxcpi) { - maxcpi = max(childidx, parentidx); - error = ConflictError(ConflictError.Kind.childMismatch, parent, ch, config); - logDebug("%s (ci=%s)", error, maxcpi); - } - - // we know that either the child or the parent needs to be switched - // to another configuration, no need to continue with other children - if (config == CONFIG.invalid) break; - } - - maxcpi = max(maxcpi, validateConfigs(chnode, error)); - } - } - return maxcpi; - } - - Nullable!ConflictError first_error; - size_t loop_counter = 0; - // Leave the possibility to opt-out from the loop limit import std.process : environment; bool no_loop_limit = environment.get("DUB_NO_RESOLVE_LIMIT") !is null; - while (true) { - assert(no_loop_limit || loop_counter++ < 1_000_000, - "The dependency resolution process is taking too long. The" - ~ " dependency graph is likely hitting a pathological case in" - ~ " the resolution algorithm. 
Please file a bug report at" - ~ " https://github.com/dlang/dub/issues and mention the package" - ~ " recipe that reproduces this error."); + auto rootbase = root.pack.basePackageName; - // check if the current combination of configurations works out - visited = null; - ConflictError error; - auto conflict_index = validateConfigs(root, error); - if (first_error.isNull) first_error = error; + // build up the dependency graph, eliminating as many configurations/ + // versions as possible + ResolveContext context; + context.configs[rootbase] = [ResolveConfig(root.config, true)]; + long loop_counter = no_loop_limit ? long.max : 1_000_000; + constrain(root, context, loop_counter); - // print out current iteration state - logDebug("Interation (ci=%s) %s", conflict_index, { - import std.array : join; - auto cs = new string[configs.length]; - foreach (p, i; package_indices) { - if (configs[i].allConfigs.length) - cs[i] = p~" "~configs[i].allConfigs[config_indices[i]].to!string~(i >= 0 && i >= conflict_index ? 
" (C)" : ""); - else cs[i] = p ~ " [no config]"; - } - return cs.join(", "); - }()); + // remove any non-default optional dependencies + purgeOptionalDependencies(root, context.result); - if (conflict_index < 0) { - CONFIG[string] ret; - foreach (p, i; package_indices) - if (configs[i].allConfigs.length) { - auto cfg = configs[i].allConfigs[config_indices[i]]; - if (cfg != CONFIG.invalid) ret[p] = cfg; - } - logDebug("Resolved dependencies before optional-purge: %s", ret.byKey.map!(k => k~" "~ret[k].to!string)); - purgeOptionalDependencies(root, ret); - logDebug("Resolved dependencies after optional-purge: %s", ret.byKey.map!(k => k~" "~ret[k].to!string)); - return ret; - } + // the root package is implied by the `root` argument and will not be + // returned explicitly + context.result.remove(rootbase); - // find the next combination of configurations - foreach_reverse (pi, ref i; config_indices) { - if (pi > conflict_index) i = 0; - else if (++i >= configs[pi].allConfigs.length) i = 0; - else break; - } - if (config_indices.all!"a==0") { - if (throw_on_failure) throw new Exception(format("Could not find a valid dependency tree configuration: %s", first_error.get)); - else return null; - } - } + logDiagnostic("Dependency resolution result:"); + foreach (d; context.result.keys.sort()) + logDiagnostic(" %s: %s", d, context.result[d]); + + return context.result; } protected abstract CONFIG[] getAllConfigs(string pack); @@ -285,6 +108,154 @@ protected abstract TreeNodes[] getChildren(TreeNode node); protected abstract bool matches(CONFIGS configs, CONFIG config); + private static struct ResolveConfig { + CONFIG config; + bool included; + } + + private static struct ResolveContext { + /** Contains all packages visited by the resolution process so far. 
+ + The key is the qualified name of the package (base + sub) + */ + void[0][string] visited; + + /// The finally chosen configurations for each package + CONFIG[string] result; + + /// The set of available configurations for each package + ResolveConfig[][string] configs; + + /// Determines if a certain package has already been processed + bool isVisited(string package_) const { return (package_ in visited) !is null; } + + /// Marks a package as processed + void setVisited(string package_) { visited[package_] = (void[0]).init; } + + /// Returns a deep clone + ResolveContext clone() + { + ResolveContext ret; + ret.visited = this.visited.dup; + ret.result = this.result.dup; + foreach (pack, cfgs; this.configs) { + ret.configs[pack] = cfgs.dup; + } + return ret; + } + } + + + /** Starting with a single node, fills `context` with a minimized set of + configurations that form valid solutions. + */ + private void constrain(TreeNode n, ref ResolveContext context, ref long max_iterations) + { + auto base = n.pack.basePackageName; + assert(base in context.configs); + if (context.isVisited(n.pack)) return; + context.setVisited(n.pack); + context.result[base] = n.config; + foreach (j, ref sc; context.configs[base]) + sc.included = sc.config == n.config; + + auto dependencies = getChildren(n); + + foreach (dep; dependencies) { + // lazily load all dependency configurations + auto depbase = dep.pack.basePackageName; + auto di = depbase in context.configs; + if (!di) { + context.configs[depbase] = + getAllConfigs(depbase) + .map!(c => ResolveConfig(c, true)) + .array; + di = depbase in context.configs; + } + + // add any dependee defined dependency configurations + foreach (sc; getSpecificConfigs(n.pack, dep)) + if (!(*di).canFind!(c => c.config == sc)) + *di = ResolveConfig(sc, true) ~ *di; + + // restrain the configurations to the current dependency spec + bool any_config = false; + foreach (i, ref c; *di) + if (c.included) { + if (!matches(dep.configs, c.config)) + 
c.included = false; + else any_config = true; + } + + if (!any_config && dep.depType == DependencyType.required) { + if ((*di).length) + throw new ResolveException(n, dep, context); + else throw new DependencyLoadException(n, dep); + } + } + + constrainDependencies(n, dependencies, 0, context, max_iterations); + } + + /** Recurses back into `constrain` while recursively going through `n`'s + dependencies. + + This attempts to constrain each dependency, while keeping each of them + in a nested stack frame. This allows any errors to properly back + propagate. + */ + private void constrainDependencies(TreeNode n, TreeNodes[] dependencies, size_t depidx, + ref ResolveContext context, ref long max_iterations) + { + if (depidx >= dependencies.length) return; + + assert (--max_iterations > 0, + "The dependency resolution process is taking too long. The" + ~ " dependency graph is likely hitting a pathological case in" + ~ " the resolution algorithm. Please file a bug report at" + ~ " https://github.com/dlang/dub/issues and mention the package" + ~ " recipe that reproduces this error."); + + auto dep = &dependencies[depidx]; + auto depbase = dep.pack.basePackageName; + auto depconfigs = context.configs[depbase]; + + Exception first_err; + + // try each configuration/version of the current dependency + foreach (i, c; depconfigs) { + if (c.included) { + try { + // try the configuration on a cloned context + auto subcontext = context.clone; + constrain(TreeNode(dep.pack, c.config), subcontext, max_iterations); + constrainDependencies(n, dependencies, depidx+1, subcontext, max_iterations); + // if a branch succeeded, replace the current context + // with the one from the branch and return + context = subcontext; + return; + } catch (Exception e) { + if (!first_err) first_err = e; + } + } + } + + // ignore unsatisfiable optional dependencies + if (dep.depType != DependencyType.required) { + auto subcontext = context.clone; + constrainDependencies(n, dependencies, depidx+1, 
subcontext, max_iterations); + context = subcontext; + return; + } + + // report the first error encountered to the user + if (first_err) throw first_err; + + // should have thrown in constrainRec before reaching this + assert(false, format("Got no configuration for dependency %s %s of %s %s!?", + dep.pack, dep.configs, n.pack, n.config)); + } + private void purgeOptionalDependencies(TreeNode root, ref CONFIG[string] configs) { bool[string] required; @@ -294,9 +265,9 @@ { if (node.pack in visited) return; visited[node.pack] = true; - required[basePackage(node.pack)] = true; + required[node.pack.basePackageName] = true; foreach (dep; getChildren(node).filter!(dep => dep.depType != DependencyType.optional)) - if (auto dp = basePackage(dep.pack) in configs) + if (auto dp = dep.pack.basePackageName in configs) markRecursively(TreeNode(dep.pack, *dp)); } @@ -309,35 +280,56 @@ configs.remove(p); } - private struct ConflictError { - enum Kind { - none, - noRootMatch, - childMismatch, - invalidDependency - } + final class ResolveException : Exception { + import std.range : chain, only; + import std.typecons : tuple; - Kind kind; - TreeNode parent; - TreeNodes child; - CONFIG config; + string failedNode; - string toString() - const { - final switch (kind) { - case Kind.none: return "no error"; - case Kind.noRootMatch: - return "No match for dependency %s %s of %s" - .format(child.pack, child.configs, parent.pack); - case Kind.childMismatch: - return "Dependency %s -> %s %s mismatches with selected version %s" - .format(parent.pack, child.pack, child.configs, config); - case Kind.invalidDependency: - return "Package %s contains invalid dependency %s (no version candidates)" - .format(parent.pack, child.pack); + this(TreeNode parent, TreeNodes dep, in ref ResolveContext context, string file = __FILE__, size_t line = __LINE__) + { + auto m = format("Unresolvable dependencies to package %s:", dep.pack.basePackageName); + super(m, file, line); + + this.failedNode = dep.pack; + + 
auto failbase = failedNode.basePackageName; + + // get the list of all dependencies to the failed package + auto deps = context.visited.byKey + .filter!(p => p.basePackageName in context.result) + .map!(p => TreeNode(p, context.result[p.basePackageName])) + .map!(n => getChildren(n) + .filter!(d => d.pack.basePackageName == failbase) + .map!(d => tuple(n, d)) + ) + .join + .sort!((a, b) => a[0].pack < b[0].pack); + + foreach (d; deps) { + // filter out trivial self-dependencies + if (d[0].pack.basePackageName == failbase + && matches(d[1].configs, d[0].config)) + continue; + msg ~= format("\n %s %s depends on %s %s", d[0].pack, d[0].config, d[1].pack, d[1].configs); } } } + + final class DependencyLoadException : Exception { + TreeNode parent; + TreeNodes dependency; + + this(TreeNode parent, TreeNodes dep) + { + auto m = format("Failed to find any versions for package %s, referenced by %s %s", + dep.pack, parent.pack, parent.config); + super(m, file, line); + + this.parent = parent; + this.dependency = dep; + } + } } enum DependencyType { @@ -346,14 +338,12 @@ optional } -private string basePackage(string p) +private string basePackageName(string p) { - auto idx = indexOf(p, ':'); - if (idx < 0) return p; - return p[0 .. 
idx]; + import std.algorithm.searching : findSplit; + return p.findSplit(":")[0]; } - unittest { static struct IntConfig { int value; @@ -457,7 +447,7 @@ assert(res.resolve(TreeNode("a", ic(0))) == ["b":ic(2)], to!string(res.resolve(TreeNode("a", ic(0))))); } - // make sure non-satisfyable dependencies are not a problem, even if non-optional in some dependencies + // make sure non-satisfiable dependencies are not a problem, even if non-optional in some dependencies with (TestResolver) { auto res = new TestResolver([ "a:0": [TreeNodes("b", ics([ic(1), ic(2)]))], @@ -467,4 +457,50 @@ ]); assert(res.resolve(TreeNode("a", ic(0))) == ["b":ic(2)], to!string(res.resolve(TreeNode("a", ic(0))))); } + + // check error message for multiple conflicting dependencies + with (TestResolver) { + auto res = new TestResolver([ + "a:0": [TreeNodes("b", ics([ic(1)])), TreeNodes("c", ics([ic(1)]))], + "b:1": [TreeNodes("d", ics([ic(1)]))], + "c:1": [TreeNodes("d", ics([ic(2)]))], + "d:1": [], + "d:2": [] + ]); + try { + res.resolve(TreeNode("a", ic(0))); + assert(false, "Expected resolve to throw."); + } catch (ResolveException e) { + assert(e.msg == + "Unresolvable dependencies to package d:" + ~ "\n b 1 depends on d [1]" + ~ "\n c 1 depends on d [2]"); + } + } + + // check error message for invalid dependency + with (TestResolver) { + auto res = new TestResolver([ + "a:0": [TreeNodes("b", ics([ic(1)]))] + ]); + try { + res.resolve(TreeNode("a", ic(0))); + assert(false, "Expected resolve to throw."); + } catch (DependencyLoadException e) { + assert(e.msg == "Failed to find any versions for package b, referenced by a 0"); + } + } + + // regression: unresolvable optional dependency skips the remaining dependencies + with (TestResolver) { + auto res = new TestResolver([ + "a:0": [ + TreeNodes("b", ics([ic(2)]), DependencyType.optional), + TreeNodes("c", ics([ic(1)])) + ], + "b:1": [], + "c:1": [] + ]); + assert(res.resolve(TreeNode("a", ic(0))) == ["c":ic(1)]); + } } diff --git 
a/source/dub/dub.d b/source/dub/dub.d index c6a79a5..544805f 100644 --- a/source/dub/dub.d +++ b/source/dub/dub.d @@ -167,6 +167,46 @@ init(); + if (skip_registry == SkipPackageSuppliers.none) + m_packageSuppliers = getPackageSuppliers(additional_package_suppliers); + else + m_packageSuppliers = getPackageSuppliers(additional_package_suppliers, skip_registry); + + m_packageManager = new PackageManager(m_dirs.localRepository, m_dirs.systemSettings); + + auto ccps = m_config.customCachePaths; + if (ccps.length) + m_packageManager.customCachePaths = ccps; + + updatePackageSearchPath(); + } + + unittest + { + scope (exit) environment.remove("DUB_REGISTRY"); + auto dub = new Dub(".", null, SkipPackageSuppliers.configured); + assert(dub.m_packageSuppliers.length == 0); + environment["DUB_REGISTRY"] = "http://example.com/"; + dub = new Dub(".", null, SkipPackageSuppliers.configured); + assert(dub.m_packageSuppliers.length == 1); + environment["DUB_REGISTRY"] = "http://example.com/;http://foo.com/"; + dub = new Dub(".", null, SkipPackageSuppliers.configured); + assert(dub.m_packageSuppliers.length == 2); + dub = new Dub(".", [new RegistryPackageSupplier(URL("http://bar.com/"))], SkipPackageSuppliers.configured); + assert(dub.m_packageSuppliers.length == 3); + } + + /** Get the list of package suppliers. + + Params: + additional_package_suppliers = A list of package suppliers to try + before the suppliers found in the configurations files and the + `defaultPackageSuppliers`. + skip_registry = Can be used to skip using the configured package + suppliers, as well as the default suppliers. 
+ */ + public PackageSupplier[] getPackageSuppliers(PackageSupplier[] additional_package_suppliers, SkipPackageSuppliers skip_registry) + { PackageSupplier[] ps = additional_package_suppliers; if (skip_registry < SkipPackageSuppliers.all) @@ -187,30 +227,31 @@ if (skip_registry < SkipPackageSuppliers.standard) ps ~= defaultPackageSuppliers(); - m_packageSuppliers = ps; - m_packageManager = new PackageManager(m_dirs.localRepository, m_dirs.systemSettings); + return ps; + } - auto ccps = m_config.customCachePaths; - if (ccps.length) - m_packageManager.customCachePaths = ccps; - - updatePackageSearchPath(); + /// ditto + public PackageSupplier[] getPackageSuppliers(PackageSupplier[] additional_package_suppliers) + { + return getPackageSuppliers(additional_package_suppliers, m_config.skipRegistry); } unittest { scope (exit) environment.remove("DUB_REGISTRY"); - auto dub = new Dub(".", null, SkipPackageSuppliers.configured); - assert(dub.m_packageSuppliers.length == 0); + auto dub = new Dub(".", null, SkipPackageSuppliers.none); + + dub.m_config = new DubConfig(Json(["skipRegistry": Json("none")]), null); + assert(dub.getPackageSuppliers(null).length == 1); + + dub.m_config = new DubConfig(Json(["skipRegistry": Json("configured")]), null); + assert(dub.getPackageSuppliers(null).length == 0); + + dub.m_config = new DubConfig(Json(["skipRegistry": Json("standard")]), null); + assert(dub.getPackageSuppliers(null).length == 0); + environment["DUB_REGISTRY"] = "http://example.com/"; - dub = new Dub(".", null, SkipPackageSuppliers.configured); - logInfo("%s", dub.m_packageSuppliers); - assert(dub.m_packageSuppliers.length == 1); - environment["DUB_REGISTRY"] = "http://example.com/;http://foo.com/"; - dub = new Dub(".", null, SkipPackageSuppliers.configured); - assert(dub.m_packageSuppliers.length == 2); - dub = new Dub(".", [new RegistryPackageSupplier(URL("http://bar.com/"))], SkipPackageSuppliers.configured); - assert(dub.m_packageSuppliers.length == 3); + 
assert(dub.getPackageSuppliers(null).length == 1); } /** Initializes the instance with a single package search path, without @@ -484,21 +525,12 @@ } Dependency[string] versions; - if ((options & UpgradeOptions.useCachedResult) && m_project.isUpgradeCacheUpToDate() && !packages_to_upgrade.length) { - logDiagnostic("Using cached upgrade results..."); - versions = m_project.getUpgradeCache(); - } else { - auto resolver = new DependencyVersionResolver(this, options); - foreach (p; packages_to_upgrade) - resolver.addPackageToUpgrade(p); - versions = resolver.resolve(m_project.rootPackage, m_project.selections); - if (options & UpgradeOptions.useCachedResult) { - logDiagnostic("Caching upgrade results..."); - m_project.setUpgradeCache(versions); - } - } + auto resolver = new DependencyVersionResolver(this, options); + foreach (p; packages_to_upgrade) + resolver.addPackageToUpgrade(p); + versions = resolver.resolve(m_project.rootPackage, m_project.selections); - if (options & UpgradeOptions.printUpgradesOnly) { + if (options & UpgradeOptions.dryRun) { bool any = false; string rootbasename = getBasePackageName(m_project.rootPackage.name); @@ -509,7 +541,7 @@ if (basename == rootbasename) continue; if (!m_project.selections.hasSelectedVersion(basename)) { - logInfo("Non-selected package %s is available with version %s.", + logInfo("Package %s would be selected with version %s.", basename, ver); any = true; continue; @@ -517,7 +549,7 @@ auto sver = m_project.selections.getSelectedVersion(basename); if (!sver.path.empty) continue; if (ver.version_ <= sver.version_) continue; - logInfo("Package %s can be upgraded from %s to %s.", + logInfo("Package %s would be upgraded from %s to %s.", basename, sver, ver); any = true; } @@ -562,7 +594,7 @@ m_project.reinit(); - if ((options & UpgradeOptions.select) && !(options & UpgradeOptions.noSaveSelections)) + if ((options & UpgradeOptions.select) && !(options & (UpgradeOptions.noSaveSelections | UpgradeOptions.dryRun))) 
m_project.saveSelections(); } @@ -601,7 +633,7 @@ auto test_config = format("%s-test-%s", m_project.rootPackage.name.replace(".", "-").replace(":", "-"), config); BuildSettings lbuildsettings = settings.buildSettings; - m_project.addBuildSettings(lbuildsettings, settings.platform, config, null, true); + m_project.addBuildSettings(lbuildsettings, settings, config, null, true); if (lbuildsettings.targetType == TargetType.none) { logInfo(`Configuration '%s' has target type "none". Skipping test.`, config); return; @@ -1340,8 +1372,9 @@ preRelease = 1<<2, /// inclde pre-release versions in upgrade forceRemove = 1<<3, /// Deprecated, does nothing. select = 1<<4, /// Update the dub.selections.json file with the upgraded versions - printUpgradesOnly = 1<<5, /// Instead of downloading new packages, just print a message to notify the user of their existence - useCachedResult = 1<<6, /// Use cached information stored with the package to determine upgrades + dryRun = 1<<5, /// Instead of downloading new packages, just print a message to notify the user of their existence + /*deprecated*/ printUpgradesOnly = dryRun, /// deprecated, use dryRun instead + /*deprecated*/ useCachedResult = 1<<6, /// deprecated, has no effect noSaveSelections = 1<<7, /// Don't store updated selections on disk } @@ -1658,6 +1691,17 @@ return ret; } + @property SkipPackageSuppliers skipRegistry() + { + if(auto pv = "skipRegistry" in m_data) + return to!SkipPackageSuppliers((*pv).get!string); + + if (m_parentConfig) + return m_parentConfig.skipRegistry; + + return SkipPackageSuppliers.none; + } + @property NativePath[] customCachePaths() { import std.algorithm.iteration : map; diff --git a/source/dub/generators/generator.d b/source/dub/generators/generator.d index 588a111..ef51351 100644 --- a/source/dub/generators/generator.d +++ b/source/dub/generators/generator.d @@ -95,7 +95,7 @@ foreach (pack; m_project.getTopologicalPackageList(true, null, configs)) { BuildSettings buildSettings; auto config = 
configs[pack.name]; - buildSettings.processVars(m_project, pack, pack.getBuildSettings(settings.platform, config), true); + buildSettings.processVars(m_project, pack, pack.getBuildSettings(settings.platform, config), settings, true); targets[pack.name] = TargetInfo(pack, [pack], config, buildSettings); prepareGeneration(pack, m_project, settings, buildSettings); @@ -112,7 +112,7 @@ foreach (pack; m_project.getTopologicalPackageList(true, null, configs)) { BuildSettings buildsettings; - buildsettings.processVars(m_project, pack, pack.getBuildSettings(settings.platform, configs[pack.name]), true); + buildsettings.processVars(m_project, pack, pack.getBuildSettings(settings.platform, configs[pack.name]), settings, true); bool generate_binary = !(buildsettings.options & BuildOption.syntaxOnly); finalizeGeneration(pack, m_project, settings, buildsettings, NativePath(bs.targetPath), generate_binary); } @@ -412,13 +412,13 @@ } // configure targets for build types such as release, or unittest-cov - private void addBuildTypeSettings(TargetInfo[string] targets, GeneratorSettings settings) + private void addBuildTypeSettings(TargetInfo[string] targets, in GeneratorSettings settings) { foreach (ref ti; targets.byValue) { ti.buildSettings.add(settings.buildSettings); // add build type settings and convert plain DFLAGS to build options - m_project.addBuildTypeSettings(ti.buildSettings, settings.platform, settings.buildType, ti.pack is m_project.rootPackage); + m_project.addBuildTypeSettings(ti.buildSettings, settings, ti.pack is m_project.rootPackage); settings.compiler.extractBuildOptions(ti.buildSettings); auto tt = ti.buildSettings.targetType; diff --git a/source/dub/generators/sublimetext.d b/source/dub/generators/sublimetext.d index 57a8012..9ac53c9 100644 --- a/source/dub/generators/sublimetext.d +++ b/source/dub/generators/sublimetext.d @@ -80,6 +80,7 @@ "profile-gc", "cov", "unittest-cov", + "syntax" ]; string fileRegex; diff --git a/source/dub/internal/utils.d 
b/source/dub/internal/utils.d index 4eb455a..3e36af7 100644 --- a/source/dub/internal/utils.d +++ b/source/dub/internal/utils.d @@ -40,8 +40,14 @@ NativePath getTempFile(string prefix, string extension = null) { import std.uuid : randomUUID; + import std.array: replace; - auto path = getTempDir() ~ (prefix ~ "-" ~ randomUUID.toString() ~ extension); + string fileName = prefix ~ "-" ~ randomUUID.toString() ~ extension; + + if (extension !is null && extension == ".d") + fileName = fileName.replace("-", "_"); + + auto path = getTempDir() ~ fileName; temporary_files ~= path; return path; } diff --git a/source/dub/internal/vibecompat/inet/url.d b/source/dub/internal/vibecompat/inet/url.d index 6016af5..720d5d0 100644 --- a/source/dub/internal/vibecompat/inet/url.d +++ b/source/dub/internal/vibecompat/inet/url.d @@ -18,6 +18,7 @@ import std.exception; import std.string; import std.uri; +import std.meta : AliasSeq; /** @@ -34,6 +35,7 @@ string m_password; string m_queryString; string m_anchor; + alias m_schemes = AliasSeq!("http", "https", "ftp", "spdy", "file", "sftp"); } /// Constructs a new URL object from its components. @@ -66,45 +68,40 @@ str = str[idx+1 .. $]; bool requires_host = false; - switch(m_schema){ - case "http": - case "https": - case "ftp": - case "spdy": - case "sftp": - case "file": - // proto://server/path style - enforce(str.startsWith("//"), "URL must start with proto://..."); - requires_host = true; - str = str[2 .. $]; - goto default; - default: - auto si = str.countUntil('/'); - if( si < 0 ) si = str.length; - auto ai = str[0 .. si].countUntil('@'); - sizediff_t hs = 0; - if( ai >= 0 ){ - hs = ai+1; - auto ci = str[0 .. ai].countUntil(':'); - if( ci >= 0 ){ - m_username = str[0 .. ci]; - m_password = str[ci+1 .. ai]; - } else m_username = str[0 .. ai]; - enforce(m_username.length > 0, "Empty user name in URL."); - } - - m_host = str[hs .. 
si]; - auto pi = m_host.countUntil(':'); - if(pi > 0) { - enforce(pi < m_host.length-1, "Empty port in URL."); - m_port = to!ushort(m_host[pi+1..$]); - m_host = m_host[0 .. pi]; - } - - enforce(!requires_host || m_schema == "file" || m_host.length > 0, - "Empty server name in URL."); - str = str[si .. $]; + auto schema_parts = m_schema.split("+"); + if (!schema_parts.empty && schema_parts.back.canFind(m_schemes)) + { + // proto://server/path style + enforce(str.startsWith("//"), "URL must start with proto://..."); + requires_host = true; + str = str[2 .. $]; } + + auto si = str.countUntil('/'); + if( si < 0 ) si = str.length; + auto ai = str[0 .. si].countUntil('@'); + sizediff_t hs = 0; + if( ai >= 0 ){ + hs = ai+1; + auto ci = str[0 .. ai].countUntil(':'); + if( ci >= 0 ){ + m_username = str[0 .. ci]; + m_password = str[ci+1 .. ai]; + } else m_username = str[0 .. ai]; + enforce(m_username.length > 0, "Empty user name in URL."); + } + + m_host = str[hs .. si]; + auto pi = m_host.countUntil(':'); + if(pi > 0) { + enforce(pi < m_host.length-1, "Empty port in URL."); + m_port = to!ushort(m_host[pi+1..$]); + m_host = m_host[0 .. pi]; + } + + enforce(!requires_host || m_schema == "file" || m_host.length > 0, + "Empty server name in URL."); + str = str[si .. $]; } this.localURI = (str == "") ? 
"/" : str; @@ -215,16 +212,10 @@ auto dst = appender!string(); dst.put(schema); dst.put(":"); - switch(schema){ - default: break; - case "file": - case "http": - case "https": - case "ftp": - case "spdy": - case "sftp": - dst.put("//"); - break; + auto schema_parts = schema.split("+"); + if (!schema_parts.empty && schema_parts.back.canFind(m_schemes)) + { + dst.put("//"); } dst.put(host); if( m_port > 0 ) formattedWrite(dst, ":%d", m_port); @@ -283,4 +274,9 @@ url = URL("http://localhost/")~NativePath("packages"); assert(url.toString() == "http://localhost/packages", url.toString()); + + url = URL.parse("dub+https://code.dlang.org/"); + assert(url.host == "code.dlang.org"); + assert(url.toString() == "dub+https://code.dlang.org/"); + assert(url.schema == "dub+https"); } diff --git a/source/dub/package_.d b/source/dub/package_.d index 8a2e6f6..bac7020 100644 --- a/source/dub/package_.d +++ b/source/dub/package_.d @@ -409,6 +409,7 @@ case "profile-gc": settings.addOptions(profileGC, debugInfo); break; case "cov": settings.addOptions(coverage, debugInfo); break; case "unittest-cov": settings.addOptions(unittests, coverage, debugMode, debugInfo); break; + case "syntax": settings.addOptions(syntaxOnly); break; } } } diff --git a/source/dub/project.d b/source/dub/project.d index 277b92b..d01fe41 100644 --- a/source/dub/project.d +++ b/source/dub/project.d @@ -614,16 +614,16 @@ * * Params: * dst = The BuildSettings struct to fill with data. - * platform = The platform to retrieve the values for. + * gsettings = The generator settings to retrieve the values for. * config = Values of the given configuration will be retrieved. * root_package = If non null, use it instead of the project's real root package. * shallow = If true, collects only build settings for the main package (including inherited settings) and doesn't stop on target type none and sourceLibrary. 
*/ - void addBuildSettings(ref BuildSettings dst, in BuildPlatform platform, string config, in Package root_package = null, bool shallow = false) + void addBuildSettings(ref BuildSettings dst, in GeneratorSettings gsettings, string config, in Package root_package = null, bool shallow = false) const { import dub.internal.utils : stripDlangSpecialChars; - auto configs = getPackageConfigs(platform, config); + auto configs = getPackageConfigs(gsettings.platform, config); foreach (pkg; this.getTopologicalPackageList(false, root_package, configs)) { auto pkg_path = pkg.path.toNativeString(); @@ -632,11 +632,11 @@ assert(pkg.name in configs, "Missing configuration for "~pkg.name); logDebug("Gathering build settings for %s (%s)", pkg.name, configs[pkg.name]); - auto psettings = pkg.getBuildSettings(platform, configs[pkg.name]); + auto psettings = pkg.getBuildSettings(gsettings.platform, configs[pkg.name]); if (psettings.targetType != TargetType.none) { if (shallow && pkg !is m_rootPackage) psettings.sourceFiles = null; - processVars(dst, this, pkg, psettings); + processVars(dst, this, pkg, psettings, gsettings); if (psettings.importPaths.empty) logWarn(`Package %s (configuration "%s") defines no import paths, use {"importPaths": [...]} or the default package directory structure to fix this.`, pkg.name, configs[pkg.name]); if (psettings.mainSourceFile.empty && pkg is m_rootPackage && psettings.targetType == TargetType.executable) @@ -651,15 +651,15 @@ dst.targetPath = psettings.targetPath; dst.targetName = psettings.targetName; if (!psettings.workingDirectory.empty) - dst.workingDirectory = processVars(psettings.workingDirectory, this, pkg, true); + dst.workingDirectory = processVars(psettings.workingDirectory, this, pkg, gsettings, true); if (psettings.mainSourceFile.length) - dst.mainSourceFile = processVars(psettings.mainSourceFile, this, pkg, true); + dst.mainSourceFile = processVars(psettings.mainSourceFile, this, pkg, gsettings, true); } } // always add all version 
identifiers of all packages foreach (pkg; this.getTopologicalPackageList(false, null, configs)) { - auto psettings = pkg.getBuildSettings(platform, configs[pkg.name]); + auto psettings = pkg.getBuildSettings(gsettings.platform, configs[pkg.name]); dst.addVersions(psettings.versions); } } @@ -668,18 +668,18 @@ Params: dst = The `BuildSettings` instance to add the build settings to - platform = Target build platform + gsettings = Target generator settings build_type = Name of the build type for_root_package = Selects if the build settings are for the root package or for one of the dependencies. Unittest flags will only be added to the root package. */ - void addBuildTypeSettings(ref BuildSettings dst, in BuildPlatform platform, string build_type, bool for_root_package = true) + void addBuildTypeSettings(ref BuildSettings dst, in GeneratorSettings gsettings, bool for_root_package = true) { bool usedefflags = !(dst.requirements & BuildRequirement.noDefaultFlags); if (usedefflags) { BuildSettings btsettings; - m_rootPackage.addBuildTypeSettings(btsettings, platform, build_type); + m_rootPackage.addBuildTypeSettings(btsettings, gsettings.platform, gsettings.buildType); if (!for_root_package) { // don't propagate unittest switch to dependencies, as dependent @@ -688,7 +688,7 @@ btsettings.removeOptions(BuildOption.unittests); } - processVars(dst, this, m_rootPackage, btsettings); + processVars(dst, this, m_rootPackage, btsettings, gsettings); } } @@ -1067,41 +1067,14 @@ m_selections.save(path); } - /** Checks if the cached upgrade information is still considered up to date. - - The cache will be considered out of date after 24 hours after the last - online check. 
- */ - bool isUpgradeCacheUpToDate() + deprecated bool isUpgradeCacheUpToDate() { - try { - auto datestr = m_packageSettings["dub"].opt!(Json[string]).get("lastUpgrade", Json("")).get!string; - if (!datestr.length) return false; - auto date = SysTime.fromISOExtString(datestr); - if ((Clock.currTime() - date) > 1.days) return false; - return true; - } catch (Exception t) { - logDebug("Failed to get the last upgrade time: %s", t.msg); - return false; - } + return false; } - /** Returns the currently cached upgrade information. - - The returned dictionary maps from dependency package name to the latest - available version that matches the dependency specifications. - */ - Dependency[string] getUpgradeCache() + deprecated Dependency[string] getUpgradeCache() { - try { - Dependency[string] ret; - foreach (string p, d; m_packageSettings["dub"].opt!(Json[string]).get("cachedUpgrades", Json.emptyObject)) - ret[p] = SelectedVersions.dependencyFromJson(d); - return ret; - } catch (Exception t) { - logDebug("Failed to get cached upgrades: %s", t.msg); - return null; - } + return null; } /** Sets a new set of versions for the upgrade cache. 
@@ -1165,75 +1138,55 @@ } void processVars(ref BuildSettings dst, in Project project, in Package pack, - BuildSettings settings, bool include_target_settings = false) + BuildSettings settings, in GeneratorSettings gsettings, bool include_target_settings = false) { - dst.addDFlags(processVars(project, pack, settings.dflags)); - dst.addLFlags(processVars(project, pack, settings.lflags)); - dst.addLibs(processVars(project, pack, settings.libs)); - dst.addSourceFiles(processVars(project, pack, settings.sourceFiles, true)); - dst.addImportFiles(processVars(project, pack, settings.importFiles, true)); - dst.addStringImportFiles(processVars(project, pack, settings.stringImportFiles, true)); - dst.addCopyFiles(processVars(project, pack, settings.copyFiles, true)); - dst.addVersions(processVars(project, pack, settings.versions)); - dst.addDebugVersions(processVars(project, pack, settings.debugVersions)); - dst.addImportPaths(processVars(project, pack, settings.importPaths, true)); - dst.addStringImportPaths(processVars(project, pack, settings.stringImportPaths, true)); - dst.addPreGenerateCommands(processVars(project, pack, settings.preGenerateCommands)); - dst.addPostGenerateCommands(processVars(project, pack, settings.postGenerateCommands)); - dst.addPreBuildCommands(processVars(project, pack, settings.preBuildCommands)); - dst.addPostBuildCommands(processVars(project, pack, settings.postBuildCommands)); + dst.addDFlags(processVars(project, pack, gsettings, settings.dflags)); + dst.addLFlags(processVars(project, pack, gsettings, settings.lflags)); + dst.addLibs(processVars(project, pack, gsettings, settings.libs)); + dst.addSourceFiles(processVars(project, pack, gsettings, settings.sourceFiles, true)); + dst.addImportFiles(processVars(project, pack, gsettings, settings.importFiles, true)); + dst.addStringImportFiles(processVars(project, pack, gsettings, settings.stringImportFiles, true)); + dst.addCopyFiles(processVars(project, pack, gsettings, settings.copyFiles, true)); 
+ dst.addVersions(processVars(project, pack, gsettings, settings.versions)); + dst.addDebugVersions(processVars(project, pack, gsettings, settings.debugVersions)); + dst.addImportPaths(processVars(project, pack, gsettings, settings.importPaths, true)); + dst.addStringImportPaths(processVars(project, pack, gsettings, settings.stringImportPaths, true)); + dst.addPreGenerateCommands(processVars(project, pack, gsettings, settings.preGenerateCommands)); + dst.addPostGenerateCommands(processVars(project, pack, gsettings, settings.postGenerateCommands)); + dst.addPreBuildCommands(processVars(project, pack, gsettings, settings.preBuildCommands)); + dst.addPostBuildCommands(processVars(project, pack, gsettings, settings.postBuildCommands)); dst.addRequirements(settings.requirements); dst.addOptions(settings.options); if (include_target_settings) { dst.targetType = settings.targetType; - dst.targetPath = processVars(settings.targetPath, project, pack, true); + dst.targetPath = processVars(settings.targetPath, project, pack, gsettings, true); dst.targetName = settings.targetName; if (!settings.workingDirectory.empty) - dst.workingDirectory = processVars(settings.workingDirectory, project, pack, true); + dst.workingDirectory = processVars(settings.workingDirectory, project, pack, gsettings, true); if (settings.mainSourceFile.length) - dst.mainSourceFile = processVars(settings.mainSourceFile, project, pack, true); + dst.mainSourceFile = processVars(settings.mainSourceFile, project, pack, gsettings, true); } } -private string[] processVars(in Project project, in Package pack, string[] vars, bool are_paths = false) +private string[] processVars(in Project project, in Package pack, in GeneratorSettings gsettings, string[] vars, bool are_paths = false) { auto ret = appender!(string[])(); - processVars(ret, project, pack, vars, are_paths); + processVars(ret, project, pack, gsettings, vars, are_paths); return ret.data; } -private void processVars(ref Appender!(string[]) dst, in 
Project project, in Package pack, string[] vars, bool are_paths = false) +private void processVars(ref Appender!(string[]) dst, in Project project, in Package pack, in GeneratorSettings gsettings, string[] vars, bool are_paths = false) { - foreach (var; vars) dst.put(processVars(var, project, pack, are_paths)); + foreach (var; vars) dst.put(processVars(var, project, pack, gsettings, are_paths)); } -private string processVars(string var, in Project project, in Package pack, bool is_path) +private string processVars(Project, Package)(string var, in Project project, in Package pack,in GeneratorSettings gsettings, bool is_path) { - auto idx = std.string.indexOf(var, '$'); - if (idx >= 0) { - auto vres = appender!string(); - while (idx >= 0) { - if (idx+1 >= var.length) break; - if (var[idx+1] == '$') { - vres.put(var[0 .. idx+1]); - var = var[idx+2 .. $]; - } else { - vres.put(var[0 .. idx]); - var = var[idx+1 .. $]; + import std.regex : regex, replaceAll; - size_t idx2 = 0; - while( idx2 < var.length && isIdentChar(var[idx2]) ) idx2++; - auto varname = var[0 .. idx2]; - var = var[idx2 .. $]; - - vres.put(getVariable(varname, project, pack)); - } - idx = std.string.indexOf(var, '$'); - } - vres.put(var); - var = vres.data; - } + auto varRE = regex(`\$([\w_]+)|\$\{([\w_]+)\}`); + var = var.replaceAll!(m => getVariable(m[1].length ? m[1] : m[2], project, pack, gsettings))(varRE); if (is_path) { auto p = NativePath(var); if (!p.absolute) { @@ -1242,19 +1195,65 @@ } else return var; } -private string getVariable(string name, in Project project, in Package pack) +// Keep the following list up-to-date if adding more build settings variables. 
+/// List of variables that can be used in build settings +package(dub) immutable buildSettingsVars = [ + "ARCH", "PLATFORM", "PLATFORM_POSIX", "BUILD_TYPE" +]; + +private string getVariable(Project, Package)(string name, in Project project, in Package pack, in GeneratorSettings gsettings) { import std.process : environment; - if (name == "PACKAGE_DIR") return pack.path.toNativeString(); - if (name == "ROOT_PACKAGE_DIR") return project.rootPackage.path.toNativeString(); + import std.uni : asUpperCase; + NativePath path; + if (name == "PACKAGE_DIR") + path = pack.path; + else if (name == "ROOT_PACKAGE_DIR") + path = project.rootPackage.path; if (name.endsWith("_PACKAGE_DIR")) { auto pname = name[0 .. $-12]; foreach (prj; project.getTopologicalPackageList()) - if (prj.name.toUpper().replace("-", "_") == pname) - return prj.path.toNativeString(); + if (prj.name.asUpperCase.map!(a => a == '-' ? '_' : a).equal(pname)) + { + path = prj.path; + break; + } } + if (!path.empty) + { + // no trailing slash for clean path concatenation (see #1392) + path.endsWithSlash = false; + return path.toNativeString(); + } + + if (name == "ARCH") { + foreach (a; gsettings.platform.architecture) + return a; + return ""; + } + + if (name == "PLATFORM") { + import std.algorithm : filter; + foreach (p; gsettings.platform.platform.filter!(p => p != "posix")) + return p; + foreach (p; gsettings.platform.platform) + return p; + return ""; + } + + if (name == "PLATFORM_POSIX") { + import std.algorithm : canFind; + if (gsettings.platform.platform.canFind("posix")) + return "posix"; + foreach (p; gsettings.platform.platform) + return p; + return ""; + } + + if (name == "BUILD_TYPE") return gsettings.buildType; + auto envvar = environment.get(name); if (envvar !is null) return envvar; @@ -1262,6 +1261,65 @@ } +unittest +{ + static struct MockPackage + { + this(string name) + { + this.name = name; + version (Posix) + path = NativePath("/pkgs/"~name); + else version (Windows) + path = 
NativePath(`C:\pkgs\`~name); + // see 4d4017c14c, #268, and #1392 for why this all package paths end on slash internally + path.endsWithSlash = true; + } + string name; + NativePath path; + } + + static struct MockProject + { + MockPackage rootPackage; + inout(MockPackage)[] getTopologicalPackageList() inout + { + return _dependencies; + } + private: + MockPackage[] _dependencies; + } + + MockProject proj = { + rootPackage: MockPackage("root"), + _dependencies: [MockPackage("dep1"), MockPackage("dep2")] + }; + auto pack = MockPackage("test"); + GeneratorSettings gsettings; + enum isPath = true; + + import std.path : dirSeparator; + + static Path woSlash(Path p) { p.endsWithSlash = false; return p; } + // basic vars + assert(processVars("Hello $PACKAGE_DIR", proj, pack, gsettings, !isPath) == "Hello "~woSlash(pack.path).toNativeString); + assert(processVars("Hello $ROOT_PACKAGE_DIR", proj, pack, gsettings, !isPath) == "Hello "~woSlash(proj.rootPackage.path).toNativeString.chomp(dirSeparator)); + assert(processVars("Hello $DEP1_PACKAGE_DIR", proj, pack, gsettings, !isPath) == "Hello "~woSlash(proj._dependencies[0].path).toNativeString); + // ${VAR} replacements + assert(processVars("Hello ${PACKAGE_DIR}"~dirSeparator~"foobar", proj, pack, gsettings, !isPath) == "Hello "~(pack.path ~ "foobar").toNativeString); + assert(processVars("Hello $PACKAGE_DIR"~dirSeparator~"foobar", proj, pack, gsettings, !isPath) == "Hello "~(pack.path ~ "foobar").toNativeString); + // test with isPath + assert(processVars("local", proj, pack, gsettings, isPath) == (pack.path ~ "local").toNativeString); + // test other env variables + import std.process : environment; + environment["MY_ENV_VAR"] = "blablabla"; + assert(processVars("$MY_ENV_VAR", proj, pack, gsettings, !isPath) == "blablabla"); + assert(processVars("${MY_ENV_VAR}suffix", proj, pack, gsettings, !isPath) == "blablablasuffix"); + assert(processVars("$MY_ENV_VAR-suffix", proj, pack, gsettings, !isPath) == "blablabla-suffix"); + 
assert(processVars("$MY_ENV_VAR:suffix", proj, pack, gsettings, !isPath) == "blablabla:suffix"); + environment.remove("MY_ENV_VAR"); +} + /** Holds and stores a set of version selections for package dependencies. This is the runtime representation of the information contained in @@ -1446,4 +1504,3 @@ m_selections[p] = Selected(dependencyFromJson(v)); } } - diff --git a/source/dub/recipe/packagerecipe.d b/source/dub/recipe/packagerecipe.d index 30873b5..7775bdb 100644 --- a/source/dub/recipe/packagerecipe.d +++ b/source/dub/recipe/packagerecipe.d @@ -188,7 +188,13 @@ auto path = NativePath(spath); if (!path.absolute) path = base_path ~ path; if (!existsFile(path) || !isDir(path.toNativeString())) { - logWarn("Invalid source/import path: %s", path.toNativeString()); + import dub.project : buildSettingsVars; + import std.algorithm : any, find; + const hasVar = buildSettingsVars.any!((string var) { + return spath.find("$"~var).length > 0 || spath.find("${"~var~"}").length > 0; + }); + if (!hasVar) + logWarn("Invalid source/import path: %s", path.toNativeString()); continue; } diff --git a/test/expected-issue1037-output b/test/expected-issue1037-output index a5d7c55..e03b110 100644 --- a/test/expected-issue1037-output +++ b/test/expected-issue1037-output @@ -1,4 +1,3 @@ -Root package issue1037-better-dependency-messages reference gitcompatibledubpackage 1.0.1 cannot be satisfied. 
-Packages causing the conflict: - issue1037-better-dependency-messages depends on 1.0.1 - b depends on ~>1.0.2 +Unresolvable dependencies to package gitcompatibledubpackage: + b >=0.0.0 @DIR/b depends on gitcompatibledubpackage ~>1.0.2 + issue1037-better-dependency-messages ~master depends on gitcompatibledubpackage 1.0.1 diff --git a/test/issue1037-better-dependency-messages.sh b/test/issue1037-better-dependency-messages.sh index 2fe3b9f..c06bdc4 100755 --- a/test/issue1037-better-dependency-messages.sh +++ b/test/issue1037-better-dependency-messages.sh @@ -4,17 +4,21 @@ cd ${CURR_DIR}/issue1037-better-dependency-messages temp_file=$(mktemp $(basename $0).XXXXXX) +temp_file2=$(mktemp $(basename $0).XXXXXX) expected_file="$CURR_DIR/expected-issue1037-output" function cleanup { - rm $temp_file + rm -f $temp_file + rm -f $temp_file2 } trap cleanup EXIT +sed "s#DIR#$CURR_DIR/issue1037-better-dependency-messages#" "$expected_file" > "$temp_file2" + $DUB upgrade 2>$temp_file && exit 1 # dub upgrade should fail -if ! diff "$expected_file" "$temp_file"; then +if ! diff "$temp_file2" "$temp_file"; then die 'output not containing conflict information' fi diff --git a/test/issue1158-stdin-for-single-files.sh b/test/issue1158-stdin-for-single-files.sh new file mode 100755 index 0000000..1d2baea --- /dev/null +++ b/test/issue1158-stdin-for-single-files.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +cd ${CURR_DIR}/issue1158-stdin-for-single-files + +if ! { cat stdin.d | ${DUB} - --value=v 2>&1 || true; } | grep -cF '["--value=v"]'; then + die $LINENO 'Stdin for single files failed.'
+fi \ No newline at end of file diff --git a/test/issue1158-stdin-for-single-files/.no_build b/test/issue1158-stdin-for-single-files/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue1158-stdin-for-single-files/.no_build diff --git a/test/issue1158-stdin-for-single-files/stdin.d b/test/issue1158-stdin-for-single-files/stdin.d new file mode 100644 index 0000000..120a14a --- /dev/null +++ b/test/issue1158-stdin-for-single-files/stdin.d @@ -0,0 +1,7 @@ +/+ dub.sdl: + name "hello" ++/ +void main(string[] args) { + import std.stdio : writeln; + writeln(args[1..$]); +} \ No newline at end of file diff --git a/test/issue1447-build-settings-vars.sh b/test/issue1447-build-settings-vars.sh new file mode 100755 index 0000000..5f60e06 --- /dev/null +++ b/test/issue1447-build-settings-vars.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +set -e + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +if [[ `uname -m` == "x86_64" ]]; then + ARCH=x86_64 +else + ARCH=x86 +fi + +rm -rf ${CURR_DIR}/issue1447-build-settings-vars/.dub +rm -rf ${CURR_DIR}/issue1447-build-settings-vars/test + +${DUB} build --root ${CURR_DIR}/issue1447-build-settings-vars --arch=$ARCH +OUTPUT=`${CURR_DIR}/issue1447-build-settings-vars/test` + +rm -rf ${CURR_DIR}/issue1447-build-settings-vars/.dub +rm -rf ${CURR_DIR}/issue1447-build-settings-vars/test + +if [[ "$OUTPUT" != "$ARCH" ]]; then die "Build settings ARCH var incorrect"; fi diff --git a/test/issue1447-build-settings-vars/.no_run b/test/issue1447-build-settings-vars/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue1447-build-settings-vars/.no_run diff --git a/test/issue1447-build-settings-vars/.no_test b/test/issue1447-build-settings-vars/.no_test new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue1447-build-settings-vars/.no_test diff --git a/test/issue1447-build-settings-vars/dub.json b/test/issue1447-build-settings-vars/dub.json new file mode 100644 index 
0000000..9f5d345 --- /dev/null +++ b/test/issue1447-build-settings-vars/dub.json @@ -0,0 +1,4 @@ +{ + "name": "test", + "stringImportPaths": ["view-$ARCH"] +} \ No newline at end of file diff --git a/test/issue1447-build-settings-vars/source/app.d b/test/issue1447-build-settings-vars/source/app.d new file mode 100644 index 0000000..3e94dfc --- /dev/null +++ b/test/issue1447-build-settings-vars/source/app.d @@ -0,0 +1,6 @@ +import std.stdio; + +void main() +{ + writeln(import("arch")); +} diff --git a/test/issue1447-build-settings-vars/view-x86/arch b/test/issue1447-build-settings-vars/view-x86/arch new file mode 100644 index 0000000..f4bad79 --- /dev/null +++ b/test/issue1447-build-settings-vars/view-x86/arch @@ -0,0 +1 @@ +x86 \ No newline at end of file diff --git a/test/issue1447-build-settings-vars/view-x86_64/arch b/test/issue1447-build-settings-vars/view-x86_64/arch new file mode 100644 index 0000000..8790996 --- /dev/null +++ b/test/issue1447-build-settings-vars/view-x86_64/arch @@ -0,0 +1 @@ +x86_64 \ No newline at end of file