diff --git a/source/dub/commandline.d b/source/dub/commandline.d
index 9c27b5e..258f710 100644
--- a/source/dub/commandline.d
+++ b/source/dub/commandline.d
@@ -364,8 +364,12 @@
 		}

 		if (!m_nodeps) {
-			logDiagnostic("Checking dependencies in '%s'", dub.projectPath.toNativeString());
-			dub.update(UpdateOptions.select);
+			// retrieve missing packages
+			logDiagnostic("Checking for missing dependencies.");
+			dub.upgrade(UpdateOptions.select);
+			// check for updates
+			logDiagnostic("Checking for upgrades.");
+			dub.upgrade(UpdateOptions.upgrade|UpdateOptions.printUpgradesOnly);
 		}
 	}
@@ -725,7 +729,7 @@
 			if (m_prerelease) options |= UpdateOptions.preRelease;
 			if (m_forceRemove) options |= UpdateOptions.forceRemove;
 			enforceUsage(app_args.length == 0, "Upgrading a specific package is not yet implemented.");
-			dub.update(options);
+			dub.upgrade(options);
 			return 0;
 		}
 	}
@@ -796,10 +800,10 @@

 		auto name = free_args[0];

-		if (m_version.length) dub.fetch(name, Dependency(m_version), location, true, false, m_forceRemove);
+		if (m_version.length) dub.fetch(name, Dependency(m_version), location, true, false, m_forceRemove, false);
 		else {
 			try {
-				dub.fetch(name, Dependency(">=0.0.0"), location, true, false, m_forceRemove);
+				dub.fetch(name, Dependency(">=0.0.0"), location, true, false, m_forceRemove, false);
 				logInfo(
 					"Please note that you need to use `dub run <pkgname>` " ~
 					"or add it to dependencies of your package to actually use/run it. " ~
@@ -808,7 +812,7 @@
 			catch(Exception e){
 				logInfo("Getting a release version failed: %s", e.msg);
 				logInfo("Retry with ~master...");
-				dub.fetch(name, Dependency("~master"), location, true, true, m_forceRemove);
+				dub.fetch(name, Dependency("~master"), location, true, true, m_forceRemove, false);
 			}
 		}
 		return 0;
diff --git a/source/dub/dependency.d b/source/dub/dependency.d
index f197381..2397f67 100644
--- a/source/dub/dependency.d
+++ b/source/dub/dependency.d
@@ -84,7 +84,11 @@
 		}
 		if (isBranch || other.isBranch) {
 			if(m_version == other.m_version) return 0;
-			else throw new Exception("Can't compare branch versions! (this: %s, other: %s)".format(this, other));
+			if (!isBranch) return 1;
+			else if (!other.isBranch) return -1;
+			if (isMaster) return 1;
+			else if (other.isMaster) return -1;
+			return this.m_version < other.m_version ? -1 : 1;
 		}
 		return compareVersions(isMaster ? MAX_VERS : m_version, other.isMaster ? MAX_VERS : other.m_version);
@@ -120,6 +124,11 @@
 		a = Version(Version.MASTER_STRING);
 		b = Version("~BRANCH");
 		assert(a != b, "a != b with a:MASTER, b:'~branch' failed");
+		assert(a > b);
+		assert(a < Version("0.0.0"));
+		assert(b < Version("0.0.0"));
+		assert(a > Version("~Z"));
+		assert(b < Version("~Z"));

 		// SemVer 2.0.0-rc.2
 		a = Version("2.0.0-rc.2");
@@ -170,6 +179,7 @@

 	// A Dependency, which matches every valid version.
 	static @property ANY() { return Dependency(ANY_IDENT); }
+	static @property INVALID() { Dependency ret; ret.m_versA = Version.HEAD; ret.m_versB = Version.RELEASE; return ret; }

 	this(string ves)
 	{
@@ -375,11 +385,10 @@
 	/// Merges to versions
 	Dependency merge(ref const(Dependency) o) const {
-		if (!valid()) return this;
-		if (!o.valid()) return o;
-
-		enforce(m_versA.isBranch == o.m_versA.isBranch, format("Conflicting versions: %s vs. %s", m_versA, o.m_versA));
-		enforce(m_versB.isBranch == o.m_versB.isBranch, format("Conflicting versions: %s vs. %s", m_versB, o.m_versB));
%s", m_versB, o.m_versB)); + if (!this.valid || !o.valid) return INVALID; + if (m_versA.isBranch != o.m_versA.isBranch) return INVALID; + if (m_versB.isBranch != o.m_versB.isBranch) return INVALID; + if (m_versA.isBranch) return m_versA == o.m_versA ? this : INVALID; Version a = m_versA > o.m_versA ? m_versA : o.m_versA; Version b = m_versB < o.m_versB ? m_versB : o.m_versB; @@ -477,8 +486,9 @@ a = Dependency(branch1); b = Dependency(branch2); - assertThrown(a.merge(b), "Shouldn't be able to merge to different branches"); - assertNotThrown(b = a.merge(a), "Should be able to merge the same branches. (?)"); + assert(!a.merge(b).valid, "Shouldn't be able to merge to different branches"); + b = a.merge(a); + assert(b.valid, "Should be able to merge the same branches. (?)"); assert(a == b); a = Dependency(branch1); @@ -547,272 +557,140 @@ logDebug("Dependency Unittest sucess."); } -/** - Stuff for a dependency lookup. -*/ -struct RequestedDependency { - this( string pkg, Dependency de) { - dependency = de; - packages[pkg] = de; + +class DependencyResolver(CONFIGS, CONFIG) { + static struct TreeNodes { + string pack; + CONFIGS configs; } - Dependency dependency; - Dependency[string] packages; + + static struct TreeNode { + string pack; + CONFIG config; + } + + static struct ChildIterationState { + TreeNode[] configs; + size_t configIndex; + } + + static struct GraphIterationState { + CONFIG[string] visited; + TreeNode[] stack; + TreeNode node; + ChildIterationState[] children; + } + + CONFIG[string] resolve(TreeNode root) + { + static string rootPackage(string p) { + auto idx = p.indexOf(":"); + if (idx < 0) return p; + return p[0 .. idx]; + } + + size_t[string] package_indices; + CONFIG[][] all_configs; + void findConfigsRec(TreeNode parent) + { + foreach (ch; getChildren(parent)) { + auto basepack = rootPackage(ch.pack); + if (basepack in package_indices) continue; + + auto pidx = all_configs.length; + auto configs = getAllConfigs(basepack); + enforce(configs.length > 0, format("Found no configurations for package %s.", basepack)); + all_configs ~= configs; + package_indices[basepack] = pidx; + + foreach (v; all_configs[pidx]) + findConfigsRec(TreeNode(ch.pack, v)); + } + } + findConfigsRec(root); + + auto config_indices = new size_t[all_configs.length]; + config_indices[] = 0; + + bool[TreeNode] visited; + bool validateConfigs(TreeNode parent) + { + if (parent in visited) return true; + visited[parent] = true; + foreach (ch; getChildren(parent)) { + auto basepack = rootPackage(ch.pack); + assert(basepack in package_indices, format("%s not in packages %s", basepack, package_indices)); + auto pidx = package_indices[basepack]; + auto config = all_configs[pidx][config_indices[pidx]]; + auto chnode = TreeNode(ch.pack, config); + if (!matches(ch.configs, config) || !validateConfigs(chnode)) + return false; + } + return true; + } + + while (true) { + // check if the current combination of configurations works out + visited = null; + if (validateConfigs(root)) { + CONFIG[string] ret; + foreach (p, i; package_indices) + ret[p] = all_configs[i][config_indices[i]]; + return ret; + } + + // find the next combination of configurations + foreach_reverse (pi, ref i; config_indices) { + if (++i >= all_configs[pi].length) i = 0; + else break; + } + enforce(config_indices.any!"a!=0", "Could not find a valid dependency tree configuration."); + } + } + + protected abstract CONFIG[] getAllConfigs(string pack); + protected abstract TreeNodes[] getChildren(TreeNode node); + protected abstract bool matches(CONFIGS 
configs, CONFIG config); } -class DependencyGraph { - this(const Package root) { - m_root = root; - m_packages[m_root.name] = root; - } - - void insert(const Package p) { - enforce(p.name != m_root.name, format("Dependency with the same name as the root package (%s) detected.", p.name)); - m_packages[p.name] = p; - } - - void remove(const Package p) { - enforce(p.name != m_root.name); - Rebindable!(const Package)* pkg = p.name in m_packages; - if( pkg ) m_packages.remove(p.name); - } - - private - { - alias Rebindable!(const Package) PkgType; - } - - void clearUnused() { - Rebindable!(const Package)[string] unused = m_packages.dup; - unused.remove(m_root.name); - forAllDependencies( (const PkgType* avail, string s, Dependency d, const Package issuer) { - if(avail && d.matches(avail.vers)) - unused.remove(avail.name); - }); - foreach(string unusedPkg, d; unused) { - logDebug("Removed unused package: "~unusedPkg); - m_packages.remove(unusedPkg); - } - } - - RequestedDependency[string] conflicted() const { - RequestedDependency[string] deps = needed(); - RequestedDependency[string] conflicts; - foreach(string pkg, d; deps) - if(!d.dependency.valid()) - conflicts[pkg] = d; - return conflicts; - } - - RequestedDependency[string] missing() const { - RequestedDependency[string] deps; - forAllDependencies( (const PkgType* avail, string pkgId, Dependency d, const Package issuer) { - if(!d.optional && (!avail || !d.matches(avail.vers))) - addDependency(deps, pkgId, d, issuer); - }); - return deps; - } - - RequestedDependency[string] needed() const { - RequestedDependency[string] deps; - forAllDependencies( (const PkgType* avail, string pkgId, Dependency d, const Package issuer) { - if(!d.optional) - addDependency(deps, pkgId, d, issuer); - }); - return deps; - } - - RequestedDependency[string] optional() const { - RequestedDependency[string] allDeps; - forAllDependencies( (const PkgType* avail, string pkgId, Dependency d, const Package issuer) { - addDependency(allDeps, pkgId, d, issuer); - }); - RequestedDependency[string] optionalDeps; - foreach(id, req; allDeps) - if(req.dependency.optional) optionalDeps[id] = req; - return optionalDeps; - } - - private void forAllDependencies(void delegate (const PkgType* avail, string pkgId, Dependency d, const Package issuer) dg) const { - foreach(string issuerPackag, issuer; m_packages) { - foreach(string depPkg, dependency; issuer.dependencies) { - auto availPkg = depPkg in m_packages; - dg(availPkg, depPkg, dependency, issuer); +unittest { + static class TestResolver : DependencyResolver!(size_t[], size_t) { + private TreeNodes[][string] m_children; + this(TreeNodes[][string] children) { m_children = children; } + protected override size_t[] getAllConfigs(string pack) { + auto ret = appender!(size_t[]); + foreach (p; m_children.byKey) { + if (p.length <= pack.length+1) continue; + if (p[0 .. pack.length] != pack || p[pack.length] != ':') continue; + auto didx = p.lastIndexOf(':'); + ret ~= p[didx+1 .. 
$].to!size_t; } + ret.data.sort!"a>b"(); + return ret.data; } - } - - private static void addDependency(ref RequestedDependency[string] deps, string packageId, Dependency d, const Package issuer) { - auto d2 = packageId in deps; - if(!d2) { - deps[packageId] = RequestedDependency(issuer.name, d); - } else { - d2.packages[issuer.name] = d; - try d2.dependency = d2.dependency.merge(d); - catch (Exception e) { - logError("Conflicting dependency %s: %s", packageId, e.msg); - foreach (p, d; d2.packages) - logError(" %s requires %s", p, d); - d2.dependency = Dependency("<=0.0.0 >=1.0.0"); - } - } - } - - private { - const Package m_root; - PkgType[string] m_packages; + protected override TreeNodes[] getChildren(TreeNode node) { return m_children.get(node.pack ~ ":" ~ node.config.to!string(), null); } + protected override bool matches(size_t[] configs, size_t config) { return configs.canFind(config); } } - unittest { - /* - R (master) -> A (master) - */ - auto R_json = parseJsonString(` - { - "name": "r", - "dependencies": { - "a": "~master", - "b": "1.0.0" - }, - "version": "~master" - } - `); - Package r_master = new Package(R_json); - auto graph = new DependencyGraph(r_master); - - assert(graph.conflicted.length == 0, "There are conflicting packages"); - - void expectA(RequestedDependency[string] requested, string name) { - assert("a" in requested, "Package a is not the "~name~" package"); - assert(requested["a"].dependency == Dependency("~master"), "Package a is not "~name~" as ~master version."); - assert("r" in requested["a"].packages, "Package r is not the issuer of "~name~" Package a(~master)."); - assert(requested["a"].packages["r"] == Dependency("~master"), "Package r is not the issuer of "~name~" Package a(~master)."); - } - void expectB(RequestedDependency[string] requested, string name) { - assert("b" in requested, "Package b is not the "~name~" package"); - assert(requested["b"].dependency == Dependency("1.0.0"), "Package b is not "~name~" as 1.0.0 version."); - assert("r" in requested["b"].packages, "Package r is not the issuer of "~name~" Package b(1.0.0)."); - assert(requested["b"].packages["r"] == Dependency("1.0.0"), "Package r is not the issuer of "~name~" Package b(1.0.0)."); - } - auto missing = graph.missing(); - assert(missing.length == 2, "Invalid count of missing items"); - expectA(missing, "missing"); - expectB(missing, "missing"); - - auto needed = graph.needed(); - assert(needed.length == 2, "Invalid count of needed packages."); - expectA(needed, "needed"); - expectB(needed, "needed"); - - assert(graph.optional.length == 0, "There are optional packages reported"); - - auto A_json = parseJsonString(` - { - "name": "a", - "dependencies": { - }, - "version": "~master" - } - `); - Package a_master = new Package(A_json); - graph.insert(a_master); - - assert(graph.conflicted.length == 0, "There are conflicting packages"); - - auto missing2 = graph.missing; - assert(missing2.length == 1, "Missing list does not contain an package."); - expectB(missing2, "missing2"); - - needed = graph.needed; - assert(needed.length == 2, "Invalid count of needed packages."); - expectA(needed, "needed"); - expectB(needed, "needed"); - - assert(graph.optional.length == 0, "There are optional packages reported"); + // properly back up if conflicts are detected along the way (d:2 vs d:1) + with (TestResolver) { + auto res = new TestResolver([ + "a:0": [TreeNodes("b", [2, 1]), TreeNodes("d", [1]), TreeNodes("e", [2, 1])], + "b:1": [TreeNodes("c", [2, 1]), TreeNodes("d", [1])], + "b:2": 
[TreeNodes("c", [3, 2]), TreeNodes("d", [2, 1])], + "c:1": [], "c:2": [], "c:3": [], + "d:1": [], "d:2": [], + "e:1": [], "e:2": [], + ]); + assert(res.resolve(TreeNode("a", 0)) == ["b":2u, "c":3u, "d":1u, "e":2u]); } - unittest { - /* - r -> r:sub - */ - auto R_json = parseJsonString(` - { - "name": "r", - "dependencies": { - "r:sub": "~master" - }, - "version": "~master", - "subPackages": [ - { - "name": "sub" - } - ] - } - `); - - Package r_master = new Package(R_json); - auto graph = new DependencyGraph(r_master); - assert(graph.missing().length == 1); - // Subpackages need to be explicitly added. - graph.insert(r_master.subPackages[0]); - assert(graph.missing().length == 0); + // handle cyclic dependencies gracefully + with (TestResolver) { + auto res = new TestResolver([ + "a:0": [TreeNodes("b", [1])], + "b:1": [TreeNodes("b", [1])] + ]); + assert(res.resolve(TreeNode("a", 0)) == ["b":1u]); } - - unittest { - /* - r -> s:sub - */ - auto R_json = parseJsonString(` - { - "name": "r", - "dependencies": { - "s:sub": "~master" - }, - "version": "~master" - } - `); - auto S_w_sub_json = parseJsonString(` - { - "name": "s", - "version": "~master", - "subPackages": [ - { - "name": "sub" - } - ] - } - `); - auto S_wout_sub_json = parseJsonString(` - { - "name": "s", - "version": "~master" - } - `); - auto sub_json = parseJsonString(` - { - "name": "sub", - "version": "~master" - } - `); - - Package r_master = new Package(R_json); - auto graph = new DependencyGraph(r_master); - assert(graph.missing().length == 1); - Package s_master = new Package(S_w_sub_json); - graph.insert(s_master); - assert(graph.missing().length == 1); - graph.insert(s_master.subPackages[0]); - assert(graph.missing().length == 0); - - graph = new DependencyGraph(r_master); - assert(graph.missing().length == 1); - s_master = new Package(S_wout_sub_json); - graph.insert(s_master); - assert(graph.missing().length == 1); - - graph = new DependencyGraph(r_master); - assert(graph.missing().length == 1); - s_master = new Package(sub_json); - graph.insert(s_master); - assert(graph.missing().length == 1); - } -} \ No newline at end of file +} diff --git a/source/dub/dub.d b/source/dub/dub.d index 183740c..023a941 100644 --- a/source/dub/dub.d +++ b/source/dub/dub.d @@ -163,57 +163,21 @@ string getDefaultConfiguration(BuildPlatform platform, bool allow_non_library_configs = true) const { return m_project.getDefaultConfiguration(platform, allow_non_library_configs); } - /// Performs retrieval and removal as necessary for - /// the application. 
- /// @param options bit combination of UpdateOptions - void update(UpdateOptions options) + void upgrade(UpdateOptions options) { - bool[string] masterVersionUpgrades; - auto selections = new SelectedVersions; - while (true) { - Action[] allActions = m_project.determineActions(m_packageSuppliers, options, selections); - Action[] actions; - foreach(a; allActions) - if(a.packageId !in masterVersionUpgrades) - actions ~= a; + auto resolver = new DependencyVersionResolver(this, options); + auto versions = resolver.resolve(m_project.mainPackage, m_project.selections); - if (actions.length == 0) break; - - logInfo("The following changes will be performed:"); - bool conflictedOrFailed = false; - foreach(Action a; actions) { - logInfo("%s %s %s, %s", capitalize(to!string(a.type)), a.packageId, a.vers, a.location); - if( a.type == Action.Type.conflict || a.type == Action.Type.failure ) { - logInfo(" -> issued by: "); - conflictedOrFailed = true; - foreach(string pkg, d; a.issuer) - logInfo(" "~pkg~": %s", d); - } - } - - enforce (!conflictedOrFailed, "Aborting package retrieval due to errors."); - - if (m_dryRun) return; - - // Remove first - foreach(Action a; actions.filter!(a => a.type == Action.Type.remove)) { - assert(a.pack !is null, "No package specified for removal."); - remove(a.pack, (options & UpdateOptions.forceRemove) != 0); - } - foreach(Action a; actions.filter!(a => a.type == Action.Type.fetch)) { - fetch(a.packageId, a.vers, a.location, (options & UpdateOptions.upgrade) != 0, (options & UpdateOptions.preRelease) != 0, (options & UpdateOptions.forceRemove) != 0); - // never update the same package more than once - masterVersionUpgrades[a.packageId] = true; - } - - m_project.selections.set(selections); - m_project.reinit(); + foreach (p, ver; versions) { + assert(!p.canFind(":"), "Resolved packages contain a sub package!?: "~p); + auto pack = m_packageManager.getBestPackage(p, ver); + if (!pack) fetch(p, ver, PlacementLocation.userWide, false, (options & UpdateOptions.preRelease) != 0, (options & UpdateOptions.forceRemove) != 0, false); + if (options & UpdateOptions.select) + m_project.selections.selectVersion(p, ver.version_); } - if (options & UpdateOptions.select) { - selections.save(m_projectPath ~ SelectedVersions.defaultFile); - logDiagnostic("Stored currently selected versions into " ~ SelectedVersions.defaultFile); - } + if (options & UpdateOptions.select) + m_project.saveSelections(); } /// Generate project files for a specified IDE. @@ -347,7 +311,7 @@ // TODO: use flags enum instead of bool parameters /// Fetches the package matching the dependency and places it in the specified location. - Package fetch(string packageId, const Dependency dep, PlacementLocation location, bool force_branch_upgrade, bool use_prerelease, bool force_remove) + Package fetch(string packageId, const Dependency dep, PlacementLocation location, bool force_branch_upgrade, bool use_prerelease, bool force_remove, bool print_only) { Json pinfo; PackageSupplier supplier; @@ -372,15 +336,22 @@ } // always upgrade branch based versions - TODO: actually check if there is a new commit available - if (auto pack = m_packageManager.getPackage(packageId, ver, placement)) { + auto existing = m_packageManager.getPackage(packageId, ver, placement); + if (print_only) { + if (existing && existing.vers != ver) + logInfo("A new version for %s is available (%s -> %s). 
Run \"dub upgrade %s\" to switch.", + packageId, existing.vers, ver, packageId); + return null; + } + if (existing) { if (!ver.startsWith("~") || !force_branch_upgrade || location == PlacementLocation.local) { // TODO: support git working trees by performing a "git pull" instead of this - logInfo("Package %s %s (%s) is already present with the latest version, skipping upgrade.", + logDiagnostic("Package %s %s (%s) is already present with the latest version, skipping upgrade.", packageId, ver, placement); - return pack; + return existing; } else { - logInfo("Removing present package of %s %s", packageId, ver); - if (!m_dryRun) m_packageManager.remove(pack, force_remove); + logInfo("Removing %s %s to prepare replacement with a new version.", packageId, ver); + if (!m_dryRun) m_packageManager.remove(existing, force_remove); } } @@ -516,7 +487,7 @@ if (!ddox_pack) ddox_pack = m_packageManager.getBestPackage("ddox", "~master"); if (!ddox_pack) { logInfo("DDOX is not present, getting it and storing user wide"); - ddox_pack = fetch("ddox", Dependency(">=0.0.0"), PlacementLocation.userWide, false, false, false); + ddox_pack = fetch("ddox", Dependency(">=0.0.0"), PlacementLocation.userWide, false, false, false, false); } version(Windows) auto ddox_exe = "ddox.exe"; @@ -610,3 +581,122 @@ } return ret.data; } + +private class DependencyVersionResolver : DependencyResolver!(Dependency, Dependency) { + private { + Dub m_dub; + UpdateOptions m_options; + Dependency[][string] m_packageVersions; + Package[string] m_remotePackages; + SelectedVersions m_selectedVersions; + Package m_rootPackage; + } + + + this(Dub dub, UpdateOptions options) + { + m_dub = dub; + m_options = options; + } + + Dependency[string] resolve(Package root, SelectedVersions selected_versions) + { + m_rootPackage = root; + m_selectedVersions = selected_versions; + return super.resolve(TreeNode(root.name, Dependency(root.ver))); + } + + protected override Dependency[] getAllConfigs(string pack) + { + logDiagnostic("Search for versions of %s (%s package suppliers)", pack, m_dub.m_packageSuppliers.length); + if (!(m_options & UpdateOptions.upgrade) && m_selectedVersions.hasSelectedVersion(pack)) + return [m_selectedVersions.selectedVersion(pack)]; + + if (auto pvers = pack in m_packageVersions) + return *pvers; + + // TODO: if no UpdateOptions.upgrade is given, query the PackageManager first + + foreach (ps; m_dub.m_packageSuppliers) { + try { + auto vers = ps.getVersions(pack).reverse; + if (!vers.length) { + logDiagnostic("No versions for %s for %s", pack, ps.description); + continue; + } + + // move pre-release versions to the back of the list if no preRelease flag is given + if (!(m_options & UpdateOptions.preRelease)) + vers = vers.filter!(v => !v.isPreRelease).array ~ vers.filter!(v => v.isPreRelease).array; + + m_packageVersions[pack] = vers.map!(v => Dependency(v)).array; + return vers.map!(v => Dependency(v)).array; + } catch (Exception e) { + logDebug("Package %s not found in %s: %s", pack, ps.description, e.msg); + logDebug("Full error: %s", e.toString().sanitize); + } + } + + logDiagnostic("Nothing found for %s", pack); + return null; + } + + protected override TreeNodes[] getChildren(TreeNode node) + { + auto ret = appender!(TreeNodes[]); + auto pack = getPackage(node.pack, node.config); + foreach (dname, dspec; pack.dependencies) { + if (m_options & UpdateOptions.upgrade || !m_selectedVersions || !m_selectedVersions.hasSelectedVersion(node.pack)) + ret ~= TreeNodes(dname, dspec); + else ret ~= TreeNodes(dname, 
+		}
+		return ret.data;
+	}
+
+	protected override bool matches(Dependency configs, Dependency config)
+	{
+		return configs.merge(config).valid;
+	}
+
+	private Package getPackage(string name, Dependency dep)
+	{
+		if (auto ret = m_dub.m_packageManager.getBestPackage(name, dep))
+			return ret;
+
+		auto key = name ~ ":" ~ dep.version_.toString();
+
+		if (auto ret = key in m_remotePackages)
+			return *ret;
+
+		auto prerelease = (m_options & UpdateOptions.preRelease) != 0;
+
+		auto rootpack = name.split(":")[0];
+
+		foreach (ps; m_dub.m_packageSuppliers) {
+			if (rootpack == name) {
+				try {
+					auto desc = ps.getPackageDescription(name, dep, prerelease);
+					auto ret = new Package(desc);
+					m_remotePackages[key] = ret;
+					return ret;
+				} catch (Exception e) {
+					logDiagnostic("Metadata for %s could not be downloaded from %s...", name, ps.description);
+				}
+			} else {
+				try {
+					m_dub.fetch(rootpack, dep, PlacementLocation.userWide, false, prerelease, (m_options & UpdateOptions.forceRemove) != 0, false);
+					auto ret = m_dub.m_packageManager.getBestPackage(name, dep);
+					if (!ret) {
+						logWarn("Package %s %s doesn't have a sub package %s", rootpack, dep.version_, name);
+						return null;
+					}
+				} catch (Exception e) {
+					logDiagnostic("Package %s could not be downloaded from %s...", rootpack, ps.description);
+				}
+			}
+		}
+
+		logWarn("Package %s was found neither locally, nor in the configured package registries.", name);
+		return null;
+	}
+}
diff --git a/source/dub/packagesupplier.d b/source/dub/packagesupplier.d
index 7713f32..822ad20 100644
--- a/source/dub/packagesupplier.d
+++ b/source/dub/packagesupplier.d
@@ -14,10 +14,15 @@
 import dub.internal.vibecompat.data.json;
 import dub.internal.vibecompat.inet.url;

-import std.file;
-import std.exception;
-import std.zip;
+import std.algorithm : filter, sort;
+import std.array : array;
 import std.conv;
+import std.exception;
+import std.file;
+import std.string : format;
+import std.zip;
+
+// TODO: drop the "best package" behavior and let retrievePackage/getPackageDescription take a Version instead of Dependency

 /// Supplies packages, this is done by supplying the latest possible version
 /// which is available.
@@ -25,6 +30,8 @@
 	/// Returns a hunman readable representation of the supplier
 	@property string description();

+	Version[] getVersions(string package_id);
+
 	/// path: absolute path to store the package (usually in a zip format)
 	void retrievePackage(Path path, string packageId, Dependency dep, bool pre_release);
@@ -41,6 +48,21 @@

 	override @property string description() { return "file repository at "~m_path.toNativeString(); }

+	Version[] getVersions(string package_id)
+	{
+		Version[] ret;
+		foreach (DirEntry d; dirEntries(m_path.toNativeString(), package_id~"*", SpanMode.shallow)) {
+			Path p = Path(d.name);
+			logDebug("Entry: %s", p);
+			enforce(to!string(p.head)[$-4..$] == ".zip");
+			auto vers = p.head.toString()[package_id.length+1..$-4];
+			logDebug("Version: %s", vers);
+			ret ~= Version(vers);
+		}
+		ret.sort();
+		return ret;
+	}
+
 	void retrievePackage(Path path, string packageId, Dependency dep, bool pre_release)
 	{
 		enforce(path.absolute);
@@ -57,31 +79,17 @@
 	}

 	private Path bestPackageFile(string packageId, Dependency dep, bool pre_release)
-	const {
-		Version bestver = Version.RELEASE;
-		foreach (DirEntry d; dirEntries(m_path.toNativeString(), packageId~"*", SpanMode.shallow)) {
-			Path p = Path(d.name);
-			logDebug("Entry: %s", p);
-			enforce(to!string(p.head)[$-4..$] == ".zip");
-			string vers = to!string(p.head)[packageId.length+1..$-4];
-			logDebug("Version string: "~vers);
-			Version cur = Version(vers);
-			if (!dep.matches(cur)) continue;
-			if (bestver == Version.RELEASE) bestver = cur;
-			else if (pre_release) {
-				if (cur > bestver) bestver = cur;
-			} else if (bestver.isPreRelease) {
-				if (!cur.isPreRelease || cur > bestver) bestver = cur;
-			} else if (!cur.isPreRelease && cur > bestver) bestver = cur;
+	{
+		Path toPath(Version ver) {
+			return m_path ~ (packageId ~ "-" ~ ver.toString() ~ ".zip");
 		}
-
-		auto fileName = m_path ~ (packageId ~ "_" ~ to!string(bestver) ~ ".zip");
-
-		if (bestver == Version.RELEASE || !existsFile(fileName))
-			throw new Exception("No matching package found");
-
-		logDiagnostic("Found best matching package: '%s'", fileName);
-		return fileName;
+		auto versions = getVersions(packageId).filter!(v => dep.matches(v)).array;
+		enforce(versions.length > 0, format("No package %s found matching %s", packageId, dep));
+		foreach_reverse (ver; versions) {
+			if (pre_release || !ver.isPreRelease)
+				return toPath(ver);
+		}
+		return toPath(versions[$-1]);
 	}
 }
@@ -99,6 +107,18 @@
 	}

 	override @property string description() { return "registry at "~m_registryUrl.toString(); }
+
+	Version[] getVersions(string package_id)
+	{
+		Version[] ret;
+		Json md = getMetadata(package_id);
+		foreach (json; md["versions"]) {
+			auto cur = Version(cast(string)json["version"]);
+			ret ~= cur;
+		}
+		ret.sort();
+		return ret;
+	}

 	void retrievePackage(Path path, string packageId, Dependency dep, bool pre_release)
 	{
diff --git a/source/dub/project.d b/source/dub/project.d
index c2d70e9..2a00754 100644
--- a/source/dub/project.d
+++ b/source/dub/project.d
@@ -176,39 +176,37 @@
 				m_rootPackage.name.toLower(), m_rootPackage.name);
 		}

-		// TODO: compute the set of mutual dependencies first
-		// (i.e. ">=0.0.1 <=0.0.5" and "<= 0.0.4" get ">=0.0.1 <=0.0.4")
-		// conflicts would then also be detected.
 		void collectDependenciesRec(Package pack)
 		{
-			logDiagnostic("Collecting dependencies for %s", pack.name);
-			foreach( name, original_vspec; pack.dependencies ){
-				Dependency vspec = original_vspec;
-				if (m_selectedVersions.hasSelectedVersion(name)) {
-					vspec = m_selectedVersions.selectedVersion(name);
-					logDiagnostic("Dependency on %s overruled by locally selected version: %s", name, vspec);
+			logDebug("Collecting dependencies for %s", pack.name);
+			foreach (name, original_vspec; pack.dependencies) {
+				if (!m_selectedVersions.hasSelectedVersion(name)) {
+					logDiagnostic("Version selection for dependency %s of %s is missing.", name, pack.name);
+					continue;
 				}
+				Dependency vspec = m_selectedVersions.selectedVersion(name);
+
 				Package p;
-				if( !vspec.path.empty ){
+				if (!vspec.path.empty) {
 					Path path = vspec.path;
-					if( !path.absolute ) path = pack.path ~ path;
+					if (!path.absolute) path = pack.path ~ path;
 					logDiagnostic("Adding local %s %s", path, vspec.version_);
 					p = m_packageManager.getTemporaryPackage(path, vspec.version_);
 				} else {
 					p = m_packageManager.getBestPackage(name, vspec);
 				}
-				if( !m_dependencies.canFind(p) ){
-					logDiagnostic("Found dependency %s %s: %s", name, vspec.toString(), p !is null);
-					if( p ){
-						m_dependencies ~= p;
-						p.warnOnSpecialCompilerFlags();
-						collectDependenciesRec(p);
-					}
+
+				if (!p) {
+					logDiagnostic("Missing dependency %s %s of %s", name, vspec, pack.name);
+					continue;
 				}
-				if( m_selectedVersions.hasSelectedVersion(name) && !pack) {
-					logError("The locally selected version was not found: " ~ name);
+
+				if (!m_dependencies.canFind(p)) {
+					logDiagnostic("Found dependency %s %s", name, vspec.toString());
+					m_dependencies ~= p;
+					p.warnOnSpecialCompilerFlags();
+					collectDependenciesRec(p);
 				}

 				m_dependees[p] ~= pack;
@@ -459,91 +457,25 @@
 		return false;
 	}

-	/// Actions which can be performed to update the application.
-	/// selectedVersions:
-	Action[] determineActions(PackageSupplier[] packageSuppliers, UpdateOptions option, SelectedVersions selected_versions = null)
+	/*bool iterateDependencies(bool delegate(Package pack, string dep_name, Dependency dep_spec) del)
 	{
-		scope(exit) writeDubJson();
+		bool all_found = true;

-		selected_versions.clean();
+		bool[string] visited;
+		void iterate(Package pack)
+		{
+			if (pack.name in visited) return;
+			visited[pack.name] = true;

-		if(!m_rootPackage) {
-			Action[] a;
-			return a;
-		}
-
-		auto graph = new DependencyGraph(m_rootPackage);
-		if(!gatherMissingDependencies(packageSuppliers, graph) || graph.missing().length > 0) {
-			// Check the conflicts first.
-			auto conflicts = graph.conflicted();
-			if(conflicts.length > 0) {
-				logError("The dependency graph could not be filled, there are conflicts.");
-				Action[] actions;
-				foreach( string pkg, dbp; graph.conflicted())
-					actions ~= Action.conflict(pkg, dbp.dependency, dbp.packages);
-
-				// Missing dependencies could have some bogus results, therefore
-				// return only the conflicts.
-				return actions;
-			}
-
-			// Then check unresolved dependencies.
-			logError("The dependency graph could not be filled, there are unresolved dependencies.");
-			Action[] actions;
-			foreach( string pkg, rdp; graph.missing())
-				actions ~= Action.failure(pkg, rdp.dependency, rdp.packages);
-
-			return actions;
-		}
-
-		// Gather retrieved
-		Package[string] retrieved;
-		retrieved[m_rootPackage.name] = m_rootPackage;
-		foreach(ref Package p; m_dependencies) {
-			auto pbase = p.basePackage;
-			auto pexist = retrieved.get(pbase.name, null);
-			if (pexist && pexist !is pbase){
-				logError("Conflicting package references found:");
-				logError("  %s %s: %s", pexist.name, pexist.vers, pexist.path.toNativeString());
-				logError("  %s %s: %s", pbase.name, pbase.vers, pbase.path.toNativeString());
-				throw new Exception("Conflicting package multi-references.");
-			}
-			retrieved[pbase.name] = pbase;
-		}
-
-		// Check against package list and add retrieval actions
-		Action[] actions;
-		void addAction(Action act) {
-			if (!actions.any!(a => a.type == act.type && a.location == act.location && a.packageId == act.packageId && a.vers == act.vers))
-				actions ~= act;
-		}
-		int[string] upgradePackages;
-		scope(failure) if (selected_versions) selected_versions.clean();
-		foreach( string pkg, d; graph.needed() ) {
-			auto basepkg = pkg.getBasePackage();
-			auto p = basepkg in retrieved;
-			// TODO: auto update to latest head revision
-			if(!p || (!d.dependency.matches(p.vers) && !d.dependency.matches(Version.MASTER))) {
-				if(!p) logDiagnostic("Triggering retrieval of required package '"~basepkg~"', which was not present.");
-				else logDiagnostic("Triggering retrieval of required package '"~basepkg~"', which doesn't match the required versionh. Required '%s', available '%s'.", d.dependency, p.vers);
-				addAction(Action.get(basepkg, PlacementLocation.userWide, d.dependency, d.packages));
-			} else {
-				if (option & UpdateOptions.upgrade) {
-					auto existing = m_packageManager.getBestPackage(basepkg, d.dependency);
-					// Only add one upgrade action for each package.
-					if(basepkg !in upgradePackages && m_packageManager.isManagedPackage(existing)) {
-						logDiagnostic("Required package '"~basepkg~"' found with version '"~p.vers~"', upgrading.");
-						upgradePackages[basepkg] = 1;
-						addAction(Action.get(basepkg, PlacementLocation.userWide, d.dependency, d.packages));
-					}
-				} else logDiagnostic("Required package '"~basepkg~"' found with version '"~p.vers~"'");
-
-				if (selected_versions) selected_versions.selectVersion(pkg, p.ver, d.packages);
+			foreach (dn, ds; pack.dependencies) {
+				auto dep = del(pack, dn, ds);
+				if (dep) iterateDependencies(dep);
+				else all_found = false;
 			}
 		}

-		return actions;
-	}
+		return all_found;
+	}*/

 	/// Outputs a JSON description of the project, including its deoendencies.
 	void describe(ref Json dst, BuildPlatform platform, string config)
@@ -563,121 +495,10 @@
 		}
 	}

-	private bool gatherMissingDependencies(PackageSupplier[] packageSuppliers, DependencyGraph graph)
+	void saveSelections()
 	{
-		RequestedDependency[string] missing = graph.missing();
-		RequestedDependency[string] oldMissing;
-		while( missing.length > 0 ) {
-			logDebug("Try to resolve %s", missing.keys);
-			if( missing.keys == oldMissing.keys ){ // FIXME: should actually compare the complete AA here
-				bool different = false;
-				foreach(string pkg, reqDep; missing) {
-					auto o = pkg in oldMissing;
-					if(o && reqDep.dependency != o.dependency) {
-						different = true;
-						break;
-					}
-				}
-				if(!different) {
-					logWarn("Could not resolve dependencies");
-					return false;
-				}
-			}
-
-			oldMissing = missing.dup;
-			logDebug("There are %s packages missing.", missing.length);
-
-			auto toLookup = missing;
-			foreach(id, dep; graph.optional()) {
-				assert(id !in toLookup, "A missing dependency in the graph seems to be optional, which is an error.");
-				toLookup[id] = dep;
-			}
-
-			foreach(string pkg, reqDep; toLookup) {
-				if(!reqDep.dependency.valid()) {
-					logDebug("Dependency to "~pkg~" is invalid. Trying to fix by modifying others.");
-					continue;
-				}
-
-				auto ppath = pkg.getSubPackagePath();
-
-				// TODO: auto update and update interval by time
-				logDebug("Adding package to graph: "~pkg);
-				Package p = m_packageManager.getBestPackage(pkg, reqDep.dependency);
-				if( p ) logDebug("Found present package %s %s", pkg, p.ver);
-
-				// Don't bother with not available optional packages.
-				if( !p && reqDep.dependency.optional ) continue;
-
-				// Try an already present package first
-				if( p && needsUpToDateCheck(p) ){
-					logInfo("Triggering update of package %s", pkg);
-					p = null;
-				}
-
-				if( !p ) p = fetchPackageMetadata(packageSuppliers, pkg, reqDep);
-				if( p ) graph.insert(p);
-			}
-			graph.clearUnused();
-
-			// As the dependencies are filled in starting from the outermost
-			// packages, resolving those conflicts won't happen (?).
-			if(graph.conflicted().length > 0) {
-				logInfo("There are conflicts in the dependency graph.");
-				return false;
-			}
-
-			missing = graph.missing();
-		}
-
-		return true;
-	}
-
-	private Package fetchPackageMetadata(PackageSupplier[] packageSuppliers, string pkg, RequestedDependency reqDep) {
-		Package p = null;
-		try {
-			logDiagnostic("Fetching package %s (%d suppliers registered)", pkg, packageSuppliers.length);
-			auto ppath = pkg.getSubPackagePath();
-			auto basepkg = pkg.getBasePackage();
-			foreach (supplier; packageSuppliers) {
-				try {
-					// Get main package.
-					auto sp = new Package(supplier.getPackageDescription(basepkg, reqDep.dependency, false));
-					// Fetch subpackage, if one was requested.
-					foreach (spn; ppath[1 .. $]) {
-						try {
-							// Some subpackages are shipped with the main package.
-							sp = sp.getSubPackage(spn);
-						} catch (Exception e) {
-							// HACK: Support for external packages. Until the registry serves the
-							// metadata of the external packages, there is no way to get to
-							// know this information.
-							//
-							// Eventually, the dependencies of all referenced packages will be
-							// fulfilled, but this is done by a hack where the metadata of the
-							// external package is inferred as having no additional dependencies.
-							// When the package is then fetched the state is re-evaluated and
-							// possible new dependencies will be resolved.
-							string hacked_info = "{\"name\": \"" ~ spn ~ "\"}";
-							auto info = parseJson(hacked_info);
-							sp = new Package(info, Path(), sp);
-						}
-					}
-					p = sp;
-					break;
-				} catch (Exception e) {
-					logDiagnostic("No metadata for %s: %s", supplier.description, e.msg);
-				}
-			}
-			enforce(p !is null, "Could not find package candidate for "~pkg~" "~reqDep.dependency.toString());
-			markUpToDate(basepkg);
-		}
-		catch(Exception e) {
-			logError("Failed to retrieve metadata for package %s: %s", pkg, e.msg);
-			logDiagnostic("Full error: %s", e.toString().sanitize());
-		}
-
-		return p;
+		assert(m_selectedVersions !is null, "Cannot save selections for non-disk based project (has no selections).");
+		m_selectedVersions.save(m_rootPackage.path ~ SelectedVersions.defaultFile);
 	}

 	private bool needsUpToDateCheck(Package pack) {
@@ -734,13 +555,14 @@
 		string packageId;
 		PlacementLocation location;
 		Dependency vers;
+		Version existingVersion;
 	}
 	const Package pack;
 	const Dependency[string] issuer;

-	static Action get(string pkg, PlacementLocation location, in Dependency dep, Dependency[string] context)
+	static Action get(string pkg, PlacementLocation location, in Dependency dep, Dependency[string] context, Version old_version = Version.UNKNOWN)
 	{
-		return Action(Type.fetch, pkg, location, dep, context);
+		return Action(Type.fetch, pkg, location, dep, context, old_version);
 	}

 	static Action remove(Package pkg, Dependency[string] context)
@@ -758,13 +580,14 @@
 		return Action(Type.failure, pkg, PlacementLocation.userWide, dep, context);
 	}

-	private this(Type id, string pkg, PlacementLocation location, in Dependency d, Dependency[string] issue)
+	private this(Type id, string pkg, PlacementLocation location, in Dependency d, Dependency[string] issue, Version existing_version = Version.UNKNOWN)
 	{
 		this.type = id;
 		this.packageId = pkg;
 		this.location = location;
 		this.vers = d;
 		this.issuer = issue;
+		this.existingVersion = existing_version;
 	}

 	private this(Type id, Package pkg, Dependency[string] issue)
@@ -785,10 +608,11 @@

 enum UpdateOptions {
 	none = 0,
-	upgrade = 1<<1,
-	preRelease = 1<<2, // inclde pre-release versions in upgrade
-	forceRemove = 1<<3,
+	upgrade = 1<<1, /// Upgrade existing packages
+	preRelease = 1<<2, /// include pre-release versions in upgrade
+	forceRemove = 1<<3, /// Force removing package folders, which contain unknown files
 	select = 1<<4, /// Update the dub.selections.json file with the upgraded versions
+	printUpgradesOnly = 1<<5, /// Instead of downloading new packages, just print a message to notify the user of their existence
 }

@@ -931,9 +755,8 @@
 		m_selectedVersions = versions.m_selectedVersions.dup;
 	}

-	void selectVersion(string packageId, Version version_, Dependency[string] issuer)
+	void selectVersion(string packageId, Version version_)
 	{
-		enforce(packageId !in m_selectedVersions, "Cannot reselect a package!");
 		m_selectedVersions[packageId] = Selected(version_/*, issuer*/);
 	}