diff --git a/CHANGELOG.md b/CHANGELOG.md index d6ac803..6789309 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -58,7 +58,7 @@ - Fixed using custom registries in the global DUB configuration file - [issue #186][issue186] - Fixed assertions triggering when `$HOME` is a relative path (by Ognjen Ivkovic) - [pull #192][issue192] - Fixed the VisualD project generator to enforce build requirements - - Fixed build requirements to also affect comipler options of the selected build + - Fixed build requirements to also affect comipler options of the selected build - Fixed configuration resolution for complex dependency graphs (it could happen that configurations were picked that can't work on the selected platform) - Fixed `dub build -b ddox` to only copy resource files from DDOX if they are newer than existing files on Posix - Fixed storing sub packages when the modified package description is written after fetching a package diff --git a/dub.json b/dub.json index 20668d3..65ff628 100644 --- a/dub.json +++ b/dub.json @@ -1,37 +1,37 @@ -{ - "name": "dub", - "description": "Package manager for D packages", - "license": "MIT", - "copyright": "Copyright © 2012-2014 rejectedsoftware e.K., Copyright © 2012-2014 Matthias Dondorff", - "authors": [ - "Matthias Dondorff", - "Sönke Ludwig" - ], - "targetPath": "bin", - "configurations": [ - { - "name": "application", - "targetType": "executable", - "mainSourceFile": "source/app.d", - "libs": ["curl"], - "copyFiles-windows": ["curllib.dll", "libeay32.dll", "openldap.dll", "ssleay32.dll"], - "versions": ["DubUseCurl"] - }, - { - "name": "library", - "targetType": "library", - "excludedSourceFiles": ["source/app.d"], - "libs": ["curl"], - "copyFiles-windows": ["curllib.dll", "libeay32.dll", "openldap.dll", "ssleay32.dll"], - "versions": ["DubUseCurl"] - }, - { - "name": "library-nonet", - "targetType": "library", - "dependencies": { - "vibe-d": {"version": "~>0.7.19-rc.4", "optional": true} - }, - "excludedSourceFiles": ["source/app.d"] - } - ] +{ + "name": "dub", + "description": "Package manager for D packages", + "license": "MIT", + "copyright": "Copyright © 2012-2014 rejectedsoftware e.K., Copyright © 2012-2014 Matthias Dondorff", + "authors": [ + "Matthias Dondorff", + "Sönke Ludwig" + ], + "targetPath": "bin", + "configurations": [ + { + "name": "application", + "targetType": "executable", + "mainSourceFile": "source/app.d", + "libs": ["curl"], + "copyFiles-windows": ["curllib.dll", "libeay32.dll", "openldap.dll", "ssleay32.dll"], + "versions": ["DubUseCurl"] + }, + { + "name": "library", + "targetType": "library", + "excludedSourceFiles": ["source/app.d"], + "libs": ["curl"], + "copyFiles-windows": ["curllib.dll", "libeay32.dll", "openldap.dll", "ssleay32.dll"], + "versions": ["DubUseCurl"] + }, + { + "name": "library-nonet", + "targetType": "library", + "dependencies": { + "vibe-d": {"version": "~>0.7.19-rc.4", "optional": true} + }, + "excludedSourceFiles": ["source/app.d"] + } + ] } \ No newline at end of file diff --git a/examples/app/source/app.d b/examples/app/source/app.d index ba308f1..1fa5bc3 100644 --- a/examples/app/source/app.d +++ b/examples/app/source/app.d @@ -1,6 +1,6 @@ import std.stdio; void main() -{ +{ writeln("Hello, World."); } diff --git a/examples/lib/source/lib.d b/examples/lib/source/lib.d index 345d027..a1bc27e 100644 --- a/examples/lib/source/lib.d +++ b/examples/lib/source/lib.d @@ -3,6 +3,6 @@ import std.stdio; void test() -{ +{ writeln("Hello, World."); } diff --git a/examples/mixed/source/app.d b/examples/mixed/source/app.d 
index c558c9e..3e95fb2 100644 --- a/examples/mixed/source/app.d +++ b/examples/mixed/source/app.d @@ -3,6 +3,6 @@ import lib; void main() -{ +{ test(); } diff --git a/installer/win/EnvVarUpdate.nsh b/installer/win/EnvVarUpdate.nsh index e684da8..b67e3ba 100644 --- a/installer/win/EnvVarUpdate.nsh +++ b/installer/win/EnvVarUpdate.nsh @@ -9,14 +9,14 @@ * ${EnvVarUpdate} "ResultVar" "EnvVarName" "Action" "RegLoc" "PathString" * * Credits: - * Version 1.0 + * Version 1.0 * * Cal Turney (turnec2) * * Amir Szekely (KiCHiK) and e-circ for developing the forerunners of this * function: AddToPath, un.RemoveFromPath, AddToEnvVar, un.RemoveFromEnvVar, * WriteEnvStr, and un.DeleteEnvStr * * Diego Pedroso (deguix) for StrTok * * Kevin English (kenglish_hi) for StrContains - * * Hendri Adriaens (Smile2Me), Diego Pedroso (deguix), and Dan Fuhry + * * Hendri Adriaens (Smile2Me), Diego Pedroso (deguix), and Dan Fuhry * (dandaman32) for StrReplace * * Version 1.1 (compatibility with StrFunc.nsh) @@ -25,8 +25,8 @@ * http://nsis.sourceforge.net/Environmental_Variables:_append%2C_prepend%2C_and_remove_entries * */ - - + + !ifndef ENVVARUPDATE_FUNCTION !define ENVVARUPDATE_FUNCTION !verbose push @@ -34,7 +34,7 @@ !include "LogicLib.nsh" !include "WinMessages.NSH" !include "StrFunc.nsh" - + ; ---- Fix for conflict if StrFunc.nsh is already includes in main file ----------------------- !macro _IncludeStrFunction StrFuncName !ifndef ${StrFuncName}_INCLUDED @@ -45,11 +45,11 @@ !endif !define un.${StrFuncName} "${Un${StrFuncName}}" !macroend - + !insertmacro _IncludeStrFunction StrTok !insertmacro _IncludeStrFunction StrStr !insertmacro _IncludeStrFunction StrRep - + ; ---------------------------------- Macro Definitions ---------------------------------------- !macro _EnvVarUpdateConstructor ResultVar EnvVarName Action Regloc PathString Push "${EnvVarName}" @@ -60,7 +60,7 @@ Pop "${ResultVar}" !macroend !define EnvVarUpdate '!insertmacro "_EnvVarUpdateConstructor"' - + !macro _unEnvVarUpdateConstructor ResultVar EnvVarName Action Regloc PathString Push "${EnvVarName}" Push "${Action}" @@ -71,15 +71,15 @@ !macroend !define un.EnvVarUpdate '!insertmacro "_unEnvVarUpdateConstructor"' ; ---------------------------------- Macro Definitions end------------------------------------- - + ;----------------------------------- EnvVarUpdate start---------------------------------------- !define hklm_all_users 'HKLM "SYSTEM\CurrentControlSet\Control\Session Manager\Environment"' !define hkcu_current_user 'HKCU "Environment"' - + !macro EnvVarUpdate UN - + Function ${UN}EnvVarUpdate - + Push $0 Exch 4 Exch $1 @@ -95,7 +95,7 @@ Push $8 Push $9 Push $R0 - + /* After this point: ------------------------- $0 = ResultVar (returned) @@ -109,7 +109,7 @@ $8 = Entry counter (temp) $9 = tempstr2 (temp) $R0 = tempChar (temp) */ - + ; Step 1: Read contents of EnvVarName from RegLoc ; ; Check for empty EnvVarName @@ -118,7 +118,7 @@ DetailPrint "ERROR: EnvVarName is blank" Goto EnvVarUpdate_Restore_Vars ${EndIf} - + ; Check for valid Action ${If} $2 != "A" ${AndIf} $2 != "P" @@ -127,7 +127,7 @@ DetailPrint "ERROR: Invalid Action - must be A, P, or R" Goto EnvVarUpdate_Restore_Vars ${EndIf} - + ${If} $3 == HKLM ReadRegStr $5 ${hklm_all_users} $1 ; Get EnvVarName from all users into $5 ${ElseIf} $3 == HKCU @@ -137,7 +137,7 @@ DetailPrint 'ERROR: Action is [$3] but must be "HKLM" or HKCU"' Goto EnvVarUpdate_Restore_Vars ${EndIf} - + ; Check for empty PathString ${If} $4 == "" SetErrors @@ -147,12 +147,12 @@ ;;khc - here check if length is going 
to be greater then max string length ;; and abort if so - also abort if original path empty - may mean - ;; it was too long as well- write message to say set it by hand + ;; it was too long as well- write message to say set it by hand Push $6 Push $7 Push $8 - StrLen $7 $4 + StrLen $7 $4 StrLen $6 $5 IntOp $8 $6 + $7 ${If} $5 == "" @@ -176,7 +176,7 @@ DetailPrint "$1 is empty - Nothing to remove" Goto EnvVarUpdate_Restore_Vars ${EndIf} - + ; Step 2: Scrub EnvVar ; StrCpy $0 $5 ; Copy the contents to $0 @@ -198,13 +198,13 @@ ${${UN}StrRep} $0 $0 "; " ";" ; Remove ';' ${Loop} ${Do} - ${${UN}StrStr} $7 $0 ";;" + ${${UN}StrStr} $7 $0 ";;" ${If} $7 == "" ${ExitDo} ${EndIf} ${${UN}StrRep} $0 $0 ";;" ";" ${Loop} - + ; Remove a leading or trailing semicolon from EnvVar StrCpy $7 $0 1 0 ${If} $7 == ";" @@ -218,27 +218,27 @@ ${EndIf} ; DetailPrint "Scrubbed $1: [$0]" ; Uncomment to debug ${EndIf} - + /* Step 3. Remove all instances of the target path/string (even if "A" or "P") $6 = bool flag (1 = found and removed PathString) $7 = a string (e.g. path) delimited by semicolon(s) $8 = entry counter starting at 0 $9 = copy of $0 $R0 = tempChar */ - + ${If} $5 != "" ; If EnvVar is not empty ... StrCpy $9 $0 StrCpy $0 "" StrCpy $8 0 StrCpy $6 0 - + ${Do} ${${UN}StrTok} $7 $9 ";" $8 "0" ; $7 = next entry, $8 = entry counter - + ${If} $7 == "" ; If we've run out of entries, ${ExitDo} ; were done ${EndIf} ; - + ; Remove leading and trailing spaces from this entry (critical step for Action=Remove) ${Do} StrCpy $R0 $7 1 @@ -263,11 +263,11 @@ ${AndIf} $0 != "" ; and this is NOT the 1st string to be added to $0, StrCpy $0 $0;$7 ; append path to $0 with a prepended semicolon ${EndIf} ; - + IntOp $8 $8 + 1 ; Bump counter ${Loop} ; Check for duplicates until we run out of paths ${EndIf} - + ; Step 4: Perform the requested Action ; ${If} $2 != "R" ; If Append or Prepend @@ -305,7 +305,7 @@ DetailPrint "$1 is now empty" ${EndIf} ${EndIf} - + ; Step 5: Update the registry at RegLoc with the updated EnvVar and announce the change ; ClearErrors @@ -314,15 +314,15 @@ ${ElseIf} $3 == HKCU WriteRegExpandStr ${hkcu_current_user} $1 $0 ; Write it to current user section ${EndIf} - + IfErrors 0 +4 MessageBox MB_OK|MB_ICONEXCLAMATION "Could not write updated $1 to $3" DetailPrint "Could not write updated $1 to $3" Goto EnvVarUpdate_Restore_Vars - + ; "Export" our change SendMessage ${HWND_BROADCAST} ${WM_WININICHANGE} 0 "STR:Environment" /TIMEOUT=1 - + EnvVarUpdate_Restore_Vars: ; ; Restore the user's variables and return ResultVar @@ -339,13 +339,13 @@ Push $0 ; Push my $0 (ResultVar) Exch Pop $0 ; Restore his $0 - + FunctionEnd - + !macroend ; EnvVarUpdate UN !insertmacro EnvVarUpdate "" !insertmacro EnvVarUpdate "un." 
;----------------------------------- EnvVarUpdate end---------------------------------------- - + !verbose pop !endif diff --git a/installer/win/installer.nsi b/installer/win/installer.nsi index 09ec0db..6d880da 100644 --- a/installer/win/installer.nsi +++ b/installer/win/installer.nsi @@ -79,17 +79,17 @@ ; This section is mandatory SectionIn RO - + SetOutPath $INSTDIR - + ; Create installation directory CreateDirectory "$INSTDIR" - + File "${DubExecPath}\dub.exe" File "${DubExecPath}\libcurl.dll" File "${DubExecPath}\libeay32.dll" File "${DubExecPath}\ssleay32.dll" - + ; Create command line batch file FileOpen $0 "$INSTDIR\dubvars.bat" w FileWrite $0 "@echo.$\n" @@ -146,7 +146,7 @@ ; Remove the uninstaller Delete $INSTDIR\uninstall.exe - + ; Remove shortcuts Delete "$SMPROGRAMS\dub\dub Command Prompt.lnk" diff --git a/source/dub/compilers/compiler.d b/source/dub/compilers/compiler.d index 94f4767..44d4e08 100644 --- a/source/dub/compilers/compiler.d +++ b/source/dub/compilers/compiler.d @@ -41,7 +41,7 @@ if (name.canFind("dmd")) return getCompiler("dmd"); if (name.canFind("gdc")) return getCompiler("gdc"); if (name.canFind("ldc")) return getCompiler("ldc"); - + throw new Exception("Unknown compiler: "~name); } @@ -265,9 +265,9 @@ /// "-windows-dmd" /// /// Params: - /// specification = The specification being matched. It must be the empty string or start with a dash. + /// specification = The specification being matched. It must be the empty string or start with a dash. /// - /// Returns: + /// Returns: /// true if the given specification matches this BuildPlatform, false otherwise. (The empty string matches) /// bool matchesSpecification(const(char)[] specification) const { @@ -329,7 +329,7 @@ return settings.targetName ~ ".dll"; else return "lib" ~ settings.targetName ~ ".so"; } -} +} bool isLinkerFile(string f) @@ -355,7 +355,7 @@ import dub.internal.utils; auto path = getTempDir() ~ "dub_platform_probe.d"; - + auto fil = openFile(path, FileMode.CreateTrunc); scope (failure) { fil.close(); @@ -467,7 +467,7 @@ BuildPlatform readPlatformProbe(string output) { import std.string; - + // work around possible additional output of the compiler auto idx1 = output.indexOf("{"); auto idx2 = output.lastIndexOf("}"); diff --git a/source/dub/compilers/ldc.d b/source/dub/compilers/ldc.d index 45b51dd..dad1cf3 100644 --- a/source/dub/compilers/ldc.d +++ b/source/dub/compilers/ldc.d @@ -87,7 +87,7 @@ // since LDC always outputs multiple object files, avoid conflicts by default settings.addDFlags("-oq", "-od=.dub/obj"); - + if (!(fields & BuildSetting.versions)) { settings.addDFlags(settings.versions.map!(s => "-d-version="~s)().array()); settings.versions = null; diff --git a/source/dub/dependency.d b/source/dub/dependency.d index 758446b..cef6935 100644 --- a/source/dub/dependency.d +++ b/source/dub/dependency.d @@ -1,598 +1,598 @@ -/** - Stuff with dependencies. - - Copyright: © 2012-2013 Matthias Dondorff - License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
- Authors: Matthias Dondorff, Sönke Ludwig -*/ -module dub.dependency; - -import dub.internal.utils; -import dub.internal.vibecompat.core.log; -import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.data.json; -import dub.internal.vibecompat.inet.url; -import dub.package_; -import dub.semver; - -import std.algorithm; -import std.array; -import std.exception; -import std.regex; -import std.string; -import std.typecons; -static import std.compiler; - - -/** - Representing a dependency, which is basically a version string and a - compare methode, e.g. '>=1.0.0 <2.0.0' (i.e. a space separates the two - version numbers) -*/ -struct Dependency { - private { - // Shortcut to create >=0.0.0 - enum ANY_IDENT = "*"; - string m_cmpA; - Version m_versA; - string m_cmpB; - Version m_versB; - Path m_path; - bool m_optional = false; - } - - // A Dependency, which matches every valid version. - static @property ANY() { return Dependency(ANY_IDENT); } - static @property INVALID() { Dependency ret; ret.m_versA = Version.HEAD; ret.m_versB = Version.RELEASE; return ret; } - - this(string ves) - { - enforce(ves.length > 0); - string orig = ves; - - if (ves == ANY_IDENT) { - // Any version is good. - ves = ">=0.0.0"; - } - - if (ves.startsWith("~>")) { - // Shortcut: "~>x.y.z" variant. Last non-zero number will indicate - // the base for this so something like this: ">=x.y.z <=", ves[0]) == -1) { - m_cmpA = ">="; - m_cmpB = "<="; - m_versA = m_versB = Version(ves); - } else { - m_cmpA = skipComp(ves); - size_t idx2 = std.string.indexOf(ves, " "); - if (idx2 == -1) { - if (m_cmpA == "<=" || m_cmpA == "<") { - m_versA = Version.RELEASE; - m_cmpB = m_cmpA; - m_cmpA = ">="; - m_versB = Version(ves); - } else if (m_cmpA == ">=" || m_cmpA == ">") { - m_versA = Version(ves); - m_versB = Version.HEAD; - m_cmpB = "<="; - } else { - // Converts "==" to ">=a&&<=a", which makes merging easier - m_versA = m_versB = Version(ves); - m_cmpA = ">="; - m_cmpB = "<="; - } - } else { - assert(ves[idx2] == ' '); - m_versA = Version(ves[0..idx2]); - string v2 = ves[idx2+1..$]; - m_cmpB = skipComp(v2); - m_versB = Version(v2); - - enforce(!m_versA.isBranch, "Partly a branch (A): %s", ves); - enforce(!m_versB.isBranch, "Partly a branch (B): %s", ves); - - if (m_versB < m_versA) { - swap(m_versA, m_versB); - swap(m_cmpA, m_cmpB); - } - enforce( m_cmpA != "==" && m_cmpB != "==", "For equality, please specify a single version."); - } - } - } - - this(in Version ver) - { - m_cmpA = ">="; - m_cmpB = "<="; - m_versA = ver; - m_versB = ver; - } - - this(Path path) - { - this(ANY_IDENT); - m_path = path; - } - - @property void path(Path value) { m_path = value; } - @property Path path() const { return m_path; } - @property bool optional() const { return m_optional; } - @property void optional(bool optional) { m_optional = optional; } - @property bool isExactVersion() const { return m_versA == m_versB; } - - @property Version version_() const { - enforce(m_versA == m_versB, "Dependency "~versionString()~" is no exact version."); - return m_versA; - } - - @property string versionString() - const { - string r; - - if( m_versA == m_versB && m_cmpA == ">=" && m_cmpB == "<=" ){ - // Special "==" case - if (m_versA == Version.MASTER ) r = "~master"; - else r = m_versA.toString(); - } else { - if( m_versA != Version.RELEASE ) r = m_cmpA ~ m_versA.toString(); - if( m_versB != Version.HEAD ) r ~= (r.length==0?"" : " ") ~ m_cmpB ~ m_versB.toString(); - if( m_versA == Version.RELEASE && m_versB == Version.HEAD ) r = ">=0.0.0"; - } - 
return r; - } - - Dependency mapToPath(Path path) - const { - if (m_path.empty || m_path.absolute) return this; - else { - Dependency ret = this; - ret.path = path ~ ret.path; - return ret; - } - } - - string toString()() - const { - auto ret = versionString; - if (optional) ret ~= " (optional)"; - if (!path.empty) ret ~= " @"~path.toNativeString(); - return ret; - } - - Json toJson() const { - Json json; - if( path.empty && !optional ){ - json = Json(versionString()); - } else { - json = Json.emptyObject; - json["version"] = versionString(); - if (!path.empty) json["path"] = path.toString(); - if (optional) json["optional"] = true; - } - return json; - } - - unittest { - Dependency d = Dependency("==1.0.0"); - assert(d.toJson() == Json("1.0.0"), "Failed: " ~ d.toJson().toPrettyString()); - d = fromJson((fromJson(d.toJson())).toJson()); - assert(d == Dependency("1.0.0")); - assert(d.toJson() == Json("1.0.0"), "Failed: " ~ d.toJson().toPrettyString()); - } - - static Dependency fromJson(Json verspec) { - Dependency dep; - if( verspec.type == Json.Type.object ){ - if( auto pp = "path" in verspec ) { - if (auto pv = "version" in verspec) - logDiagnostic("Ignoring version specification (%s) for path based dependency %s", pv.get!string, pp.get!string); - - dep = Dependency.ANY; - dep.path = Path(verspec.path.get!string()); - } else { - enforce("version" in verspec, "No version field specified!"); - auto ver = verspec["version"].get!string; - // Using the string to be able to specifiy a range of versions. - dep = Dependency(ver); - } - if( auto po = "optional" in verspec ) { - dep.optional = verspec.optional.get!bool(); - } - } else { - // canonical "package-id": "version" - dep = Dependency(verspec.get!string()); - } - return dep; - } - - unittest { - assert(fromJson(parseJsonString("\">=1.0.0 <2.0.0\"")) == Dependency(">=1.0.0 <2.0.0")); - Dependency parsed = fromJson(parseJsonString(` - { - "version": "2.0.0", - "optional": true, - "path": "path/to/package" - } - `)); - Dependency d = Dependency.ANY; // supposed to ignore the version spec - d.optional = true; - d.path = Path("path/to/package"); - assert(d == parsed); - // optional and path not checked by opEquals. - assert(d.optional == parsed.optional); - assert(d.path == parsed.path); - } - - bool opEquals(in Dependency o) - const { - // TODO(mdondorff): Check if not comparing the path is correct for all clients. - return o.m_cmpA == m_cmpA && o.m_cmpB == m_cmpB - && o.m_versA == m_versA && o.m_versB == m_versB - && o.m_optional == m_optional; - } - - int opCmp(in Dependency o) - const { - if (m_cmpA != o.m_cmpA) return m_cmpA < o.m_cmpA ? -1 : 1; - if (m_cmpB != o.m_cmpB) return m_cmpB < o.m_cmpB ? -1 : 1; - if (m_versA != o.m_versA) return m_versA < o.m_versA ? -1 : 1; - if (m_versB != o.m_versB) return m_versB < o.m_versB ? -1 : 1; - if (m_optional != o.m_optional) return m_optional ? 
-1 : 1; - return 0; - } - - hash_t toHash() const nothrow @trusted { - try { - auto strhash = &typeid(string).getHash; - auto str = this.toString(); - return strhash(&str); - } catch assert(false); - } - - bool valid() const { - return m_versA == m_versB // compare not important - || (m_versA < m_versB && doCmp(m_cmpA, m_versB, m_versA) && doCmp(m_cmpB, m_versA, m_versB)); - } - - bool matches(string vers) const { return matches(Version(vers)); } - bool matches(const(Version) v) const { return matches(v); } - bool matches(ref const(Version) v) const { - if (this == ANY) return true; - //logDebug(" try match: %s with: %s", v, this); - // Master only matches master - if(m_versA.isBranch) { - enforce(m_versA == m_versB); - return m_versA == v; - } - if(v.isBranch || m_versA.isBranch) - return m_versA == v; - if( !doCmp(m_cmpA, v, m_versA) ) - return false; - if( !doCmp(m_cmpB, v, m_versB) ) - return false; - return true; - } - - /// Merges to versions - Dependency merge(ref const(Dependency) o) - const { - if (this == ANY) return o; - if (o == ANY) return this; - if (!this.valid || !o.valid) return INVALID; - if (m_versA.isBranch != o.m_versA.isBranch) return INVALID; - if (m_versB.isBranch != o.m_versB.isBranch) return INVALID; - if (m_versA.isBranch) return m_versA == o.m_versA ? this : INVALID; - if (this.path != o.path) return INVALID; - - Version a = m_versA > o.m_versA ? m_versA : o.m_versA; - Version b = m_versB < o.m_versB ? m_versB : o.m_versB; - - Dependency d = this; - d.m_cmpA = !doCmp(m_cmpA, a,a)? m_cmpA : o.m_cmpA; - d.m_versA = a; - d.m_cmpB = !doCmp(m_cmpB, b,b)? m_cmpB : o.m_cmpB; - d.m_versB = b; - d.m_optional = m_optional && o.m_optional; - - return d; - } - - private static bool isDigit(char ch) { return ch >= '0' && ch <= '9'; } - private static string skipComp(ref string c) { - size_t idx = 0; - while (idx < c.length && !isDigit(c[idx]) && c[idx] != Version.BRANCH_IDENT) idx++; - enforce(idx < c.length, "Expected version number in version spec: "~c); - string cmp = idx==c.length-1||idx==0? 
">=" : c[0..idx]; - c = c[idx..$]; - switch(cmp) { - default: enforce(false, "No/Unknown comparision specified: '"~cmp~"'"); return ">="; - case ">=": goto case; case ">": goto case; - case "<=": goto case; case "<": goto case; - case "==": return cmp; - } - } - - private static bool doCmp(string mthd, ref const Version a, ref const Version b) { - //logDebug("Calling %s%s%s", a, mthd, b); - switch(mthd) { - default: throw new Exception("Unknown comparison operator: "~mthd); - case ">": return a>b; - case ">=": return a>=b; - case "==": return a==b; - case "<=": return a<=b; - case "<": return a=1.1.0"), b = Dependency(">=1.3.0"); - assert (a.merge(b).valid() && a.merge(b).versionString == ">=1.3.0", a.merge(b).toString()); - - a = Dependency("<=1.0.0 >=2.0.0"); - assert (!a.valid(), a.toString()); - - a = Dependency(">=1.0.0 <=5.0.0"), b = Dependency(">=2.0.0"); - assert (a.merge(b).valid() && a.merge(b).versionString == ">=2.0.0 <=5.0.0", a.merge(b).toString()); - - assertThrown(a = Dependency(">1.0.0 ==5.0.0"), "Construction is invalid"); - - a = Dependency(">1.0.0"), b = Dependency("<2.0.0"); - assert (a.merge(b).valid(), a.merge(b).toString()); - assert (a.merge(b).versionString == ">1.0.0 <2.0.0", a.merge(b).toString()); - - a = Dependency(">2.0.0"), b = Dependency("<1.0.0"); - assert (!(a.merge(b)).valid(), a.merge(b).toString()); - - a = Dependency(">=2.0.0"), b = Dependency("<=1.0.0"); - assert (!(a.merge(b)).valid(), a.merge(b).toString()); - - a = Dependency("==2.0.0"), b = Dependency("==1.0.0"); - assert (!(a.merge(b)).valid(), a.merge(b).toString()); - - a = Dependency("1.0.0"), b = Dependency("==1.0.0"); - assert (a == b); - - a = Dependency("<=2.0.0"), b = Dependency("==1.0.0"); - Dependency m = a.merge(b); - assert (m.valid(), m.toString()); - assert (m.matches(Version("1.0.0"))); - assert (!m.matches(Version("1.1.0"))); - assert (!m.matches(Version("0.0.1"))); - - - // branches / head revisions - a = Dependency(Version.MASTER_STRING); - assert(a.valid()); - assert(a.matches(Version.MASTER)); - b = Dependency(Version.MASTER_STRING); - m = a.merge(b); - assert(m.matches(Version.MASTER)); - - //assertThrown(a = Dependency(Version.MASTER_STRING ~ " <=1.0.0"), "Construction invalid"); - assertThrown(a = Dependency(">=1.0.0 " ~ Version.MASTER_STRING), "Construction invalid"); - - immutable string branch1 = Version.BRANCH_IDENT ~ "Branch1"; - immutable string branch2 = Version.BRANCH_IDENT ~ "Branch2"; - - //assertThrown(a = Dependency(branch1 ~ " " ~ branch2), "Error: '" ~ branch1 ~ " " ~ branch2 ~ "' succeeded"); - //assertThrown(a = Dependency(Version.MASTER_STRING ~ " " ~ branch1), "Error: '" ~ Version.MASTER_STRING ~ " " ~ branch1 ~ "' succeeded"); - - a = Dependency(branch1); - b = Dependency(branch2); - assert(!a.merge(b).valid, "Shouldn't be able to merge to different branches"); - b = a.merge(a); - assert(b.valid, "Should be able to merge the same branches. (?)"); - assert(a == b); - - a = Dependency(branch1); - assert(a.matches(branch1), "Dependency(branch1) does not match 'branch1'"); - assert(a.matches(Version(branch1)), "Dependency(branch1) does not match Version('branch1')"); - assert(!a.matches(Version.MASTER), "Dependency(branch1) matches Version.MASTER"); - assert(!a.matches(branch2), "Dependency(branch1) matches 'branch2'"); - assert(!a.matches(Version("1.0.0")), "Dependency(branch1) matches '1.0.0'"); - a = Dependency(">=1.0.0"); - assert(!a.matches(Version(branch1)), "Dependency(1.0.0) matches 'branch1'"); - - // Testing optional dependencies. 
- a = Dependency(">=1.0.0"); - assert(!a.optional, "Default is not optional."); - b = a; - assert(!a.merge(b).optional, "Merging two not optional dependencies wrong."); - a.optional = true; - assert(!a.merge(b).optional, "Merging optional with not optional wrong."); - b.optional = true; - assert(a.merge(b).optional, "Merging two optional dependencies wrong."); - - // SemVer's sub identifiers. - a = Dependency(">=1.0.0-beta"); - assert(!a.matches(Version("1.0.0-alpha")), "Failed: match 1.0.0-alpha with >=1.0.0-beta"); - assert(a.matches(Version("1.0.0-beta")), "Failed: match 1.0.0-beta with >=1.0.0-beta"); - assert(a.matches(Version("1.0.0")), "Failed: match 1.0.0 with >=1.0.0-beta"); - assert(a.matches(Version("1.0.0-rc")), "Failed: match 1.0.0-rc with >=1.0.0-beta"); - - // Approximate versions. - a = Dependency("~>3.0"); - b = Dependency(">=3.0.0 <4.0.0"); - assert(a == b, "Testing failed: " ~ a.toString()); - assert(a.matches(Version("3.1.146")), "Failed: Match 3.1.146 with ~>0.1.2"); - assert(!a.matches(Version("0.2.0")), "Failed: Match 0.2.0 with ~>0.1.2"); - a = Dependency("~>3.0.0"); - assert(a == Dependency(">=3.0.0 <3.1.0"), "Testing failed: " ~ a.toString()); - a = Dependency("~>3.5"); - assert(a == Dependency(">=3.5.0 <4.0.0"), "Testing failed: " ~ a.toString()); - a = Dependency("~>3.5.0"); - assert(a == Dependency(">=3.5.0 <3.6.0"), "Testing failed: " ~ a.toString()); - - a = Dependency("~>1.0.1-beta"); - b = Dependency(">=1.0.1-beta <1.1.0"); - assert(a == b, "Testing failed: " ~ a.toString()); - assert(a.matches(Version("1.0.1-beta"))); - assert(a.matches(Version("1.0.1-beta.6"))); - - a = Dependency("~d2test"); - assert(!a.optional); - assert(a.valid); - assert(a.version_ == Version("~d2test")); - - a = Dependency("==~d2test"); - assert(!a.optional); - assert(a.valid); - assert(a.version_ == Version("~d2test")); - - a = Dependency.ANY; - assert(!a.optional); - assert(a.valid); - assertThrown(a.version_); - b = Dependency(">=1.0.1"); - assert(b == a.merge(b)); - assert(b == b.merge(a)); - - logDebug("Dependency Unittest sucess."); -} - - -/** - A version in the format "major.update.bugfix-prerelease+buildmetadata" - according to Semantic Versioning Specification v2.0.0. - - (deprecated): - This also supports a format like "~master", to identify trunk, or - "~branch_name" to identify a branch. Both Version types starting with "~" - refer to the head revision of the corresponding branch. - This is subject to be removed soon. -*/ -struct Version { - private { - enum MAX_VERS = "99999.0.0"; - enum UNKNOWN_VERS = "unknown"; - string m_version; - } - - static @property RELEASE() { return Version("0.0.0"); } - static @property HEAD() { return Version(MAX_VERS); } - static @property MASTER() { return Version(MASTER_STRING); } - static @property UNKNOWN() { return Version(UNKNOWN_VERS); } - static @property MASTER_STRING() { return "~master"; } - static @property BRANCH_IDENT() { return '~'; } - - this(string vers) - { - enforce(vers.length > 1, "Version strings must not be empty."); - if (vers[0] != BRANCH_IDENT && vers != UNKNOWN_VERS) - enforce(vers.isValidVersion(), "Invalid SemVer format: " ~ vers); - m_version = vers; - } - - bool opEquals(const Version oth) const { - if (isUnknown || oth.isUnknown) { - throw new Exception("Can't compare unknown versions! (this: %s, other: %s)".format(this, oth)); - } - return m_version == oth.m_version; - } - - /// Returns true, if this version indicates a branch, which is not the trunk. 
- @property bool isBranch() const { return !m_version.empty && m_version[0] == BRANCH_IDENT; } - @property bool isMaster() const { return m_version == MASTER_STRING; } - @property bool isPreRelease() const { - if (isBranch) return true; - return isPreReleaseVersion(m_version); - } - @property bool isUnknown() const { return m_version == UNKNOWN_VERS; } - - /** - Comparing Versions is generally possible, but comparing Versions - identifying branches other than master will fail. Only equality - can be tested for these. - */ - int opCmp(ref const Version other) - const { - if (isUnknown || other.isUnknown) { - throw new Exception("Can't compare unknown versions! (this: %s, other: %s)".format(this, other)); - } - if (isBranch || other.isBranch) { - if(m_version == other.m_version) return 0; - if (!isBranch) return 1; - else if (!other.isBranch) return -1; - if (isMaster) return 1; - else if (other.isMaster) return -1; - return this.m_version < other.m_version ? -1 : 1; - } - - return compareVersions(isMaster ? MAX_VERS : m_version, other.isMaster ? MAX_VERS : other.m_version); - } - int opCmp(in Version other) const { return opCmp(other); } - - string toString() const { return m_version; } -} - -unittest { - Version a, b; - - assertNotThrown(a = Version("1.0.0"), "Constructing Version('1.0.0') failed"); - assert(!a.isBranch, "Error: '1.0.0' treated as branch"); - assert(a == a, "a == a failed"); - - assertNotThrown(a = Version(Version.MASTER_STRING), "Constructing Version("~Version.MASTER_STRING~"') failed"); - assert(a.isBranch, "Error: '"~Version.MASTER_STRING~"' treated as branch"); - assert(a.isMaster); - assert(a == Version.MASTER, "Constructed master version != default master version."); - - assertNotThrown(a = Version("~BRANCH"), "Construction of branch Version failed."); - assert(a.isBranch, "Error: '~BRANCH' not treated as branch'"); - assert(!a.isMaster); - assert(a == a, "a == a with branch failed"); - - // opCmp - a = Version("1.0.0"); - b = Version("1.0.0"); - assert(a == b, "a == b with a:'1.0.0', b:'1.0.0' failed"); - b = Version("2.0.0"); - assert(a != b, "a != b with a:'1.0.0', b:'2.0.0' failed"); - a = Version(Version.MASTER_STRING); - b = Version("~BRANCH"); - assert(a != b, "a != b with a:MASTER, b:'~branch' failed"); - assert(a > b); - assert(a < Version("0.0.0")); - assert(b < Version("0.0.0")); - assert(a > Version("~Z")); - assert(b < Version("~Z")); - - // SemVer 2.0.0-rc.2 - a = Version("2.0.0-rc.2"); - b = Version("2.0.0-rc.3"); - assert(a < b, "Failed: 2.0.0-rc.2 < 2.0.0-rc.3"); - - a = Version("2.0.0-rc.2+build-metadata"); - b = Version("2.0.0+build-metadata"); - assert(a < b, "Failed: "~a.toString()~"<"~b.toString()); - - // 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0 - Version[] versions; - versions ~= Version("1.0.0-alpha"); - versions ~= Version("1.0.0-alpha.1"); - versions ~= Version("1.0.0-beta.2"); - versions ~= Version("1.0.0-beta.11"); - versions ~= Version("1.0.0-rc.1"); - versions ~= Version("1.0.0"); - for(int i=1; i=0; --j) - assert(versions[j] < versions[i], "Failed: " ~ versions[j].toString() ~ "<" ~ versions[i].toString()); - - a = Version.UNKNOWN; - b = Version.RELEASE; - assertThrown(a == b, "Failed: compared " ~ a.toString() ~ " with " ~ b.toString() ~ ""); - - a = Version.UNKNOWN; - b = Version.UNKNOWN; - assertThrown(a == b, "Failed: UNKNOWN == UNKNOWN"); -} +/** + Stuff with dependencies. 
+ + Copyright: © 2012-2013 Matthias Dondorff + License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. + Authors: Matthias Dondorff, Sönke Ludwig +*/ +module dub.dependency; + +import dub.internal.utils; +import dub.internal.vibecompat.core.log; +import dub.internal.vibecompat.core.file; +import dub.internal.vibecompat.data.json; +import dub.internal.vibecompat.inet.url; +import dub.package_; +import dub.semver; + +import std.algorithm; +import std.array; +import std.exception; +import std.regex; +import std.string; +import std.typecons; +static import std.compiler; + + +/** + Representing a dependency, which is basically a version string and a + compare methode, e.g. '>=1.0.0 <2.0.0' (i.e. a space separates the two + version numbers) +*/ +struct Dependency { + private { + // Shortcut to create >=0.0.0 + enum ANY_IDENT = "*"; + string m_cmpA; + Version m_versA; + string m_cmpB; + Version m_versB; + Path m_path; + bool m_optional = false; + } + + // A Dependency, which matches every valid version. + static @property ANY() { return Dependency(ANY_IDENT); } + static @property INVALID() { Dependency ret; ret.m_versA = Version.HEAD; ret.m_versB = Version.RELEASE; return ret; } + + this(string ves) + { + enforce(ves.length > 0); + string orig = ves; + + if (ves == ANY_IDENT) { + // Any version is good. + ves = ">=0.0.0"; + } + + if (ves.startsWith("~>")) { + // Shortcut: "~>x.y.z" variant. Last non-zero number will indicate + // the base for this so something like this: ">=x.y.z <=", ves[0]) == -1) { + m_cmpA = ">="; + m_cmpB = "<="; + m_versA = m_versB = Version(ves); + } else { + m_cmpA = skipComp(ves); + size_t idx2 = std.string.indexOf(ves, " "); + if (idx2 == -1) { + if (m_cmpA == "<=" || m_cmpA == "<") { + m_versA = Version.RELEASE; + m_cmpB = m_cmpA; + m_cmpA = ">="; + m_versB = Version(ves); + } else if (m_cmpA == ">=" || m_cmpA == ">") { + m_versA = Version(ves); + m_versB = Version.HEAD; + m_cmpB = "<="; + } else { + // Converts "==" to ">=a&&<=a", which makes merging easier + m_versA = m_versB = Version(ves); + m_cmpA = ">="; + m_cmpB = "<="; + } + } else { + assert(ves[idx2] == ' '); + m_versA = Version(ves[0..idx2]); + string v2 = ves[idx2+1..$]; + m_cmpB = skipComp(v2); + m_versB = Version(v2); + + enforce(!m_versA.isBranch, "Partly a branch (A): %s", ves); + enforce(!m_versB.isBranch, "Partly a branch (B): %s", ves); + + if (m_versB < m_versA) { + swap(m_versA, m_versB); + swap(m_cmpA, m_cmpB); + } + enforce( m_cmpA != "==" && m_cmpB != "==", "For equality, please specify a single version."); + } + } + } + + this(in Version ver) + { + m_cmpA = ">="; + m_cmpB = "<="; + m_versA = ver; + m_versB = ver; + } + + this(Path path) + { + this(ANY_IDENT); + m_path = path; + } + + @property void path(Path value) { m_path = value; } + @property Path path() const { return m_path; } + @property bool optional() const { return m_optional; } + @property void optional(bool optional) { m_optional = optional; } + @property bool isExactVersion() const { return m_versA == m_versB; } + + @property Version version_() const { + enforce(m_versA == m_versB, "Dependency "~versionString()~" is no exact version."); + return m_versA; + } + + @property string versionString() + const { + string r; + + if( m_versA == m_versB && m_cmpA == ">=" && m_cmpB == "<=" ){ + // Special "==" case + if (m_versA == Version.MASTER ) r = "~master"; + else r = m_versA.toString(); + } else { + if( m_versA != Version.RELEASE ) r = m_cmpA ~ m_versA.toString(); + if( m_versB != Version.HEAD ) r 
~= (r.length==0?"" : " ") ~ m_cmpB ~ m_versB.toString(); + if( m_versA == Version.RELEASE && m_versB == Version.HEAD ) r = ">=0.0.0"; + } + return r; + } + + Dependency mapToPath(Path path) + const { + if (m_path.empty || m_path.absolute) return this; + else { + Dependency ret = this; + ret.path = path ~ ret.path; + return ret; + } + } + + string toString()() + const { + auto ret = versionString; + if (optional) ret ~= " (optional)"; + if (!path.empty) ret ~= " @"~path.toNativeString(); + return ret; + } + + Json toJson() const { + Json json; + if( path.empty && !optional ){ + json = Json(versionString()); + } else { + json = Json.emptyObject; + json["version"] = versionString(); + if (!path.empty) json["path"] = path.toString(); + if (optional) json["optional"] = true; + } + return json; + } + + unittest { + Dependency d = Dependency("==1.0.0"); + assert(d.toJson() == Json("1.0.0"), "Failed: " ~ d.toJson().toPrettyString()); + d = fromJson((fromJson(d.toJson())).toJson()); + assert(d == Dependency("1.0.0")); + assert(d.toJson() == Json("1.0.0"), "Failed: " ~ d.toJson().toPrettyString()); + } + + static Dependency fromJson(Json verspec) { + Dependency dep; + if( verspec.type == Json.Type.object ){ + if( auto pp = "path" in verspec ) { + if (auto pv = "version" in verspec) + logDiagnostic("Ignoring version specification (%s) for path based dependency %s", pv.get!string, pp.get!string); + + dep = Dependency.ANY; + dep.path = Path(verspec.path.get!string()); + } else { + enforce("version" in verspec, "No version field specified!"); + auto ver = verspec["version"].get!string; + // Using the string to be able to specifiy a range of versions. + dep = Dependency(ver); + } + if( auto po = "optional" in verspec ) { + dep.optional = verspec.optional.get!bool(); + } + } else { + // canonical "package-id": "version" + dep = Dependency(verspec.get!string()); + } + return dep; + } + + unittest { + assert(fromJson(parseJsonString("\">=1.0.0 <2.0.0\"")) == Dependency(">=1.0.0 <2.0.0")); + Dependency parsed = fromJson(parseJsonString(` + { + "version": "2.0.0", + "optional": true, + "path": "path/to/package" + } + `)); + Dependency d = Dependency.ANY; // supposed to ignore the version spec + d.optional = true; + d.path = Path("path/to/package"); + assert(d == parsed); + // optional and path not checked by opEquals. + assert(d.optional == parsed.optional); + assert(d.path == parsed.path); + } + + bool opEquals(in Dependency o) + const { + // TODO(mdondorff): Check if not comparing the path is correct for all clients. + return o.m_cmpA == m_cmpA && o.m_cmpB == m_cmpB + && o.m_versA == m_versA && o.m_versB == m_versB + && o.m_optional == m_optional; + } + + int opCmp(in Dependency o) + const { + if (m_cmpA != o.m_cmpA) return m_cmpA < o.m_cmpA ? -1 : 1; + if (m_cmpB != o.m_cmpB) return m_cmpB < o.m_cmpB ? -1 : 1; + if (m_versA != o.m_versA) return m_versA < o.m_versA ? -1 : 1; + if (m_versB != o.m_versB) return m_versB < o.m_versB ? -1 : 1; + if (m_optional != o.m_optional) return m_optional ? 
-1 : 1; + return 0; + } + + hash_t toHash() const nothrow @trusted { + try { + auto strhash = &typeid(string).getHash; + auto str = this.toString(); + return strhash(&str); + } catch assert(false); + } + + bool valid() const { + return m_versA == m_versB // compare not important + || (m_versA < m_versB && doCmp(m_cmpA, m_versB, m_versA) && doCmp(m_cmpB, m_versA, m_versB)); + } + + bool matches(string vers) const { return matches(Version(vers)); } + bool matches(const(Version) v) const { return matches(v); } + bool matches(ref const(Version) v) const { + if (this == ANY) return true; + //logDebug(" try match: %s with: %s", v, this); + // Master only matches master + if(m_versA.isBranch) { + enforce(m_versA == m_versB); + return m_versA == v; + } + if(v.isBranch || m_versA.isBranch) + return m_versA == v; + if( !doCmp(m_cmpA, v, m_versA) ) + return false; + if( !doCmp(m_cmpB, v, m_versB) ) + return false; + return true; + } + + /// Merges to versions + Dependency merge(ref const(Dependency) o) + const { + if (this == ANY) return o; + if (o == ANY) return this; + if (!this.valid || !o.valid) return INVALID; + if (m_versA.isBranch != o.m_versA.isBranch) return INVALID; + if (m_versB.isBranch != o.m_versB.isBranch) return INVALID; + if (m_versA.isBranch) return m_versA == o.m_versA ? this : INVALID; + if (this.path != o.path) return INVALID; + + Version a = m_versA > o.m_versA ? m_versA : o.m_versA; + Version b = m_versB < o.m_versB ? m_versB : o.m_versB; + + Dependency d = this; + d.m_cmpA = !doCmp(m_cmpA, a,a)? m_cmpA : o.m_cmpA; + d.m_versA = a; + d.m_cmpB = !doCmp(m_cmpB, b,b)? m_cmpB : o.m_cmpB; + d.m_versB = b; + d.m_optional = m_optional && o.m_optional; + + return d; + } + + private static bool isDigit(char ch) { return ch >= '0' && ch <= '9'; } + private static string skipComp(ref string c) { + size_t idx = 0; + while (idx < c.length && !isDigit(c[idx]) && c[idx] != Version.BRANCH_IDENT) idx++; + enforce(idx < c.length, "Expected version number in version spec: "~c); + string cmp = idx==c.length-1||idx==0? 
">=" : c[0..idx]; + c = c[idx..$]; + switch(cmp) { + default: enforce(false, "No/Unknown comparision specified: '"~cmp~"'"); return ">="; + case ">=": goto case; case ">": goto case; + case "<=": goto case; case "<": goto case; + case "==": return cmp; + } + } + + private static bool doCmp(string mthd, ref const Version a, ref const Version b) { + //logDebug("Calling %s%s%s", a, mthd, b); + switch(mthd) { + default: throw new Exception("Unknown comparison operator: "~mthd); + case ">": return a>b; + case ">=": return a>=b; + case "==": return a==b; + case "<=": return a<=b; + case "<": return a=1.1.0"), b = Dependency(">=1.3.0"); + assert (a.merge(b).valid() && a.merge(b).versionString == ">=1.3.0", a.merge(b).toString()); + + a = Dependency("<=1.0.0 >=2.0.0"); + assert (!a.valid(), a.toString()); + + a = Dependency(">=1.0.0 <=5.0.0"), b = Dependency(">=2.0.0"); + assert (a.merge(b).valid() && a.merge(b).versionString == ">=2.0.0 <=5.0.0", a.merge(b).toString()); + + assertThrown(a = Dependency(">1.0.0 ==5.0.0"), "Construction is invalid"); + + a = Dependency(">1.0.0"), b = Dependency("<2.0.0"); + assert (a.merge(b).valid(), a.merge(b).toString()); + assert (a.merge(b).versionString == ">1.0.0 <2.0.0", a.merge(b).toString()); + + a = Dependency(">2.0.0"), b = Dependency("<1.0.0"); + assert (!(a.merge(b)).valid(), a.merge(b).toString()); + + a = Dependency(">=2.0.0"), b = Dependency("<=1.0.0"); + assert (!(a.merge(b)).valid(), a.merge(b).toString()); + + a = Dependency("==2.0.0"), b = Dependency("==1.0.0"); + assert (!(a.merge(b)).valid(), a.merge(b).toString()); + + a = Dependency("1.0.0"), b = Dependency("==1.0.0"); + assert (a == b); + + a = Dependency("<=2.0.0"), b = Dependency("==1.0.0"); + Dependency m = a.merge(b); + assert (m.valid(), m.toString()); + assert (m.matches(Version("1.0.0"))); + assert (!m.matches(Version("1.1.0"))); + assert (!m.matches(Version("0.0.1"))); + + + // branches / head revisions + a = Dependency(Version.MASTER_STRING); + assert(a.valid()); + assert(a.matches(Version.MASTER)); + b = Dependency(Version.MASTER_STRING); + m = a.merge(b); + assert(m.matches(Version.MASTER)); + + //assertThrown(a = Dependency(Version.MASTER_STRING ~ " <=1.0.0"), "Construction invalid"); + assertThrown(a = Dependency(">=1.0.0 " ~ Version.MASTER_STRING), "Construction invalid"); + + immutable string branch1 = Version.BRANCH_IDENT ~ "Branch1"; + immutable string branch2 = Version.BRANCH_IDENT ~ "Branch2"; + + //assertThrown(a = Dependency(branch1 ~ " " ~ branch2), "Error: '" ~ branch1 ~ " " ~ branch2 ~ "' succeeded"); + //assertThrown(a = Dependency(Version.MASTER_STRING ~ " " ~ branch1), "Error: '" ~ Version.MASTER_STRING ~ " " ~ branch1 ~ "' succeeded"); + + a = Dependency(branch1); + b = Dependency(branch2); + assert(!a.merge(b).valid, "Shouldn't be able to merge to different branches"); + b = a.merge(a); + assert(b.valid, "Should be able to merge the same branches. (?)"); + assert(a == b); + + a = Dependency(branch1); + assert(a.matches(branch1), "Dependency(branch1) does not match 'branch1'"); + assert(a.matches(Version(branch1)), "Dependency(branch1) does not match Version('branch1')"); + assert(!a.matches(Version.MASTER), "Dependency(branch1) matches Version.MASTER"); + assert(!a.matches(branch2), "Dependency(branch1) matches 'branch2'"); + assert(!a.matches(Version("1.0.0")), "Dependency(branch1) matches '1.0.0'"); + a = Dependency(">=1.0.0"); + assert(!a.matches(Version(branch1)), "Dependency(1.0.0) matches 'branch1'"); + + // Testing optional dependencies. 
+ a = Dependency(">=1.0.0"); + assert(!a.optional, "Default is not optional."); + b = a; + assert(!a.merge(b).optional, "Merging two not optional dependencies wrong."); + a.optional = true; + assert(!a.merge(b).optional, "Merging optional with not optional wrong."); + b.optional = true; + assert(a.merge(b).optional, "Merging two optional dependencies wrong."); + + // SemVer's sub identifiers. + a = Dependency(">=1.0.0-beta"); + assert(!a.matches(Version("1.0.0-alpha")), "Failed: match 1.0.0-alpha with >=1.0.0-beta"); + assert(a.matches(Version("1.0.0-beta")), "Failed: match 1.0.0-beta with >=1.0.0-beta"); + assert(a.matches(Version("1.0.0")), "Failed: match 1.0.0 with >=1.0.0-beta"); + assert(a.matches(Version("1.0.0-rc")), "Failed: match 1.0.0-rc with >=1.0.0-beta"); + + // Approximate versions. + a = Dependency("~>3.0"); + b = Dependency(">=3.0.0 <4.0.0"); + assert(a == b, "Testing failed: " ~ a.toString()); + assert(a.matches(Version("3.1.146")), "Failed: Match 3.1.146 with ~>0.1.2"); + assert(!a.matches(Version("0.2.0")), "Failed: Match 0.2.0 with ~>0.1.2"); + a = Dependency("~>3.0.0"); + assert(a == Dependency(">=3.0.0 <3.1.0"), "Testing failed: " ~ a.toString()); + a = Dependency("~>3.5"); + assert(a == Dependency(">=3.5.0 <4.0.0"), "Testing failed: " ~ a.toString()); + a = Dependency("~>3.5.0"); + assert(a == Dependency(">=3.5.0 <3.6.0"), "Testing failed: " ~ a.toString()); + + a = Dependency("~>1.0.1-beta"); + b = Dependency(">=1.0.1-beta <1.1.0"); + assert(a == b, "Testing failed: " ~ a.toString()); + assert(a.matches(Version("1.0.1-beta"))); + assert(a.matches(Version("1.0.1-beta.6"))); + + a = Dependency("~d2test"); + assert(!a.optional); + assert(a.valid); + assert(a.version_ == Version("~d2test")); + + a = Dependency("==~d2test"); + assert(!a.optional); + assert(a.valid); + assert(a.version_ == Version("~d2test")); + + a = Dependency.ANY; + assert(!a.optional); + assert(a.valid); + assertThrown(a.version_); + b = Dependency(">=1.0.1"); + assert(b == a.merge(b)); + assert(b == b.merge(a)); + + logDebug("Dependency Unittest sucess."); +} + + +/** + A version in the format "major.update.bugfix-prerelease+buildmetadata" + according to Semantic Versioning Specification v2.0.0. + + (deprecated): + This also supports a format like "~master", to identify trunk, or + "~branch_name" to identify a branch. Both Version types starting with "~" + refer to the head revision of the corresponding branch. + This is subject to be removed soon. +*/ +struct Version { + private { + enum MAX_VERS = "99999.0.0"; + enum UNKNOWN_VERS = "unknown"; + string m_version; + } + + static @property RELEASE() { return Version("0.0.0"); } + static @property HEAD() { return Version(MAX_VERS); } + static @property MASTER() { return Version(MASTER_STRING); } + static @property UNKNOWN() { return Version(UNKNOWN_VERS); } + static @property MASTER_STRING() { return "~master"; } + static @property BRANCH_IDENT() { return '~'; } + + this(string vers) + { + enforce(vers.length > 1, "Version strings must not be empty."); + if (vers[0] != BRANCH_IDENT && vers != UNKNOWN_VERS) + enforce(vers.isValidVersion(), "Invalid SemVer format: " ~ vers); + m_version = vers; + } + + bool opEquals(const Version oth) const { + if (isUnknown || oth.isUnknown) { + throw new Exception("Can't compare unknown versions! (this: %s, other: %s)".format(this, oth)); + } + return m_version == oth.m_version; + } + + /// Returns true, if this version indicates a branch, which is not the trunk. 
+ @property bool isBranch() const { return !m_version.empty && m_version[0] == BRANCH_IDENT; } + @property bool isMaster() const { return m_version == MASTER_STRING; } + @property bool isPreRelease() const { + if (isBranch) return true; + return isPreReleaseVersion(m_version); + } + @property bool isUnknown() const { return m_version == UNKNOWN_VERS; } + + /** + Comparing Versions is generally possible, but comparing Versions + identifying branches other than master will fail. Only equality + can be tested for these. + */ + int opCmp(ref const Version other) + const { + if (isUnknown || other.isUnknown) { + throw new Exception("Can't compare unknown versions! (this: %s, other: %s)".format(this, other)); + } + if (isBranch || other.isBranch) { + if(m_version == other.m_version) return 0; + if (!isBranch) return 1; + else if (!other.isBranch) return -1; + if (isMaster) return 1; + else if (other.isMaster) return -1; + return this.m_version < other.m_version ? -1 : 1; + } + + return compareVersions(isMaster ? MAX_VERS : m_version, other.isMaster ? MAX_VERS : other.m_version); + } + int opCmp(in Version other) const { return opCmp(other); } + + string toString() const { return m_version; } +} + +unittest { + Version a, b; + + assertNotThrown(a = Version("1.0.0"), "Constructing Version('1.0.0') failed"); + assert(!a.isBranch, "Error: '1.0.0' treated as branch"); + assert(a == a, "a == a failed"); + + assertNotThrown(a = Version(Version.MASTER_STRING), "Constructing Version("~Version.MASTER_STRING~"') failed"); + assert(a.isBranch, "Error: '"~Version.MASTER_STRING~"' treated as branch"); + assert(a.isMaster); + assert(a == Version.MASTER, "Constructed master version != default master version."); + + assertNotThrown(a = Version("~BRANCH"), "Construction of branch Version failed."); + assert(a.isBranch, "Error: '~BRANCH' not treated as branch'"); + assert(!a.isMaster); + assert(a == a, "a == a with branch failed"); + + // opCmp + a = Version("1.0.0"); + b = Version("1.0.0"); + assert(a == b, "a == b with a:'1.0.0', b:'1.0.0' failed"); + b = Version("2.0.0"); + assert(a != b, "a != b with a:'1.0.0', b:'2.0.0' failed"); + a = Version(Version.MASTER_STRING); + b = Version("~BRANCH"); + assert(a != b, "a != b with a:MASTER, b:'~branch' failed"); + assert(a > b); + assert(a < Version("0.0.0")); + assert(b < Version("0.0.0")); + assert(a > Version("~Z")); + assert(b < Version("~Z")); + + // SemVer 2.0.0-rc.2 + a = Version("2.0.0-rc.2"); + b = Version("2.0.0-rc.3"); + assert(a < b, "Failed: 2.0.0-rc.2 < 2.0.0-rc.3"); + + a = Version("2.0.0-rc.2+build-metadata"); + b = Version("2.0.0+build-metadata"); + assert(a < b, "Failed: "~a.toString()~"<"~b.toString()); + + // 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0 + Version[] versions; + versions ~= Version("1.0.0-alpha"); + versions ~= Version("1.0.0-alpha.1"); + versions ~= Version("1.0.0-beta.2"); + versions ~= Version("1.0.0-beta.11"); + versions ~= Version("1.0.0-rc.1"); + versions ~= Version("1.0.0"); + for(int i=1; i=0; --j) + assert(versions[j] < versions[i], "Failed: " ~ versions[j].toString() ~ "<" ~ versions[i].toString()); + + a = Version.UNKNOWN; + b = Version.RELEASE; + assertThrown(a == b, "Failed: compared " ~ a.toString() ~ " with " ~ b.toString() ~ ""); + + a = Version.UNKNOWN; + b = Version.UNKNOWN; + assertThrown(a == b, "Failed: UNKNOWN == UNKNOWN"); +} diff --git a/source/dub/dependencyresolver.d b/source/dub/dependencyresolver.d index f8b5ac3..4fab2c9 100644 --- a/source/dub/dependencyresolver.d +++ 
b/source/dub/dependencyresolver.d @@ -71,7 +71,7 @@ { if (parent in visited) return; visited[parent] = true; - + foreach (ch; getChildren(parent)) { auto basepack = rootPackage(ch.pack); auto pidx = all_configs.length; diff --git a/source/dub/generators/build.d b/source/dub/generators/build.d index fbdcfcb..00bc4e6 100644 --- a/source/dub/generators/build.d +++ b/source/dub/generators/build.d @@ -1,6 +1,6 @@ /** Generator for direct compiler builds. - + Copyright: © 2013-2013 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig diff --git a/source/dub/generators/generator.d b/source/dub/generators/generator.d index 783bc13..081c67b 100644 --- a/source/dub/generators/generator.d +++ b/source/dub/generators/generator.d @@ -1,6 +1,6 @@ /** Generator for project files - + Copyright: © 2012-2013 Matthias Dondorff License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff @@ -275,7 +275,7 @@ if (generate_binary) { if (!exists(buildsettings.targetPath)) mkdirRecurse(buildsettings.targetPath); - + if (buildsettings.copyFiles.length) { logInfo("Copying files for %s...", pack); foreach (f; buildsettings.copyFiles) { diff --git a/source/dub/generators/visuald.d b/source/dub/generators/visuald.d index e28c4b1..86de079 100644 --- a/source/dub/generators/visuald.d +++ b/source/dub/generators/visuald.d @@ -1,519 +1,519 @@ -/** - Generator for VisualD project files - - Copyright: © 2012-2013 Matthias Dondorff - License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. - Authors: Matthias Dondorff -*/ -module dub.generators.visuald; - -import dub.compilers.compiler; -import dub.generators.generator; -import dub.internal.utils; -import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.core.log; -import dub.package_; -import dub.packagemanager; -import dub.project; - -import std.algorithm; -import std.array; -import std.conv; -import std.exception; -import std.format; -import std.string : format; -import std.uuid; - - -// Dubbing is developing dub... 
-//version = DUBBING; - -// TODO: handle pre/post build commands - - -class VisualDGenerator : ProjectGenerator { - private { - PackageManager m_pkgMgr; - string[string] m_projectUuids; - } - - this(Project app, PackageManager mgr) - { - super(app); - m_pkgMgr = mgr; - } - - override void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets) - { - auto bs = targets[m_project.name].buildSettings; - logDebug("About to generate projects for %s, with %s direct dependencies.", m_project.rootPackage.name, m_project.rootPackage.dependencies.length); - generateProjectFiles(settings, targets); - generateSolutionFile(settings, targets); - logInfo("VisualD project generated."); - } - - private { - void generateSolutionFile(GeneratorSettings settings, in TargetInfo[string] targets) - { - auto ret = appender!(char[])(); - auto configs = m_project.getPackageConfigs(settings.platform, settings.config); - auto some_uuid = generateUUID(); - - // Solution header - ret.put("Microsoft Visual Studio Solution File, Format Version 11.00\n"); - ret.put("# Visual Studio 2010\n"); - - bool[string] visited; - void generateSolutionEntry(string pack) { - if (pack in visited) return; - visited[pack] = true; - - auto ti = targets[pack]; - - auto uuid = guid(pack); - ret.formattedWrite("Project(\"%s\") = \"%s\", \"%s\", \"%s\"\n", - some_uuid, pack, projFileName(pack), uuid); - - if (ti.linkDependencies.length && ti.buildSettings.targetType != TargetType.staticLibrary) { - ret.put("\tProjectSection(ProjectDependencies) = postProject\n"); - foreach (d; ti.linkDependencies) - if (!isHeaderOnlyPackage(d, targets)) { - // TODO: clarify what "uuid = uuid" should mean - ret.formattedWrite("\t\t%s = %s\n", guid(d), guid(d)); - } - ret.put("\tEndProjectSection\n"); - } - - ret.put("EndProject\n"); - - foreach (d; ti.dependencies) generateSolutionEntry(d); - } - - auto mainpack = m_project.rootPackage.name; - - generateSolutionEntry(mainpack); - - // Global section contains configurations - ret.put("Global\n"); - ret.put("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n"); - ret.formattedWrite("\t\t%s|Win32 = %s|Win32\n", settings.buildType, settings.buildType); - ret.put("\tEndGlobalSection\n"); - ret.put("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n"); - - const string[] sub = ["ActiveCfg", "Build.0"]; - const string[] conf = [settings.buildType~"|Win32"]; - auto projectUuid = guid(mainpack); - foreach (t; targets.byKey) - foreach (c; conf) - foreach (s; sub) - formattedWrite(ret, "\t\t%s.%s.%s = %s\n", guid(t), c, s, c); - - // TODO: for all dependencies - ret.put("\tEndGlobalSection\n"); - - ret.put("\tGlobalSection(SolutionProperties) = preSolution\n"); - ret.put("\t\tHideSolutionNode = FALSE\n"); - ret.put("\tEndGlobalSection\n"); - ret.put("EndGlobal\n"); - - // Writing solution file - logDebug("About to write to .sln file with %s bytes", to!string(ret.data().length)); - auto sln = openFile(solutionFileName(), FileMode.CreateTrunc); - scope(exit) sln.close(); - sln.put(ret.data()); - sln.flush(); - } - - - void generateProjectFiles(GeneratorSettings settings, in TargetInfo[string] targets) - { - bool[string] visited; - void performRec(string name) { - if (name in visited) return; - visited[name] = true; - generateProjectFile(name, settings, targets); - foreach (d; targets[name].dependencies) - performRec(d); - } - - performRec(m_project.rootPackage.name); - } - - bool isHeaderOnlyPackage(string pack, in TargetInfo[string] targets) - const { - auto buildsettings = 
targets[pack].buildSettings; - if (!buildsettings.sourceFiles.any!(f => f.endsWith(".d"))()) - return true; - return false; - } - - void generateProjectFile(string packname, GeneratorSettings settings, in TargetInfo[string] targets) - { - int i = 0; - auto ret = appender!(char[])(); - - auto project_file_dir = m_project.rootPackage.path ~ projFileName(packname).parentPath; - ret.put("\n"); - ret.formattedWrite(" %s\n", guid(packname)); - - // Several configurations (debug, release, unittest) - generateProjectConfiguration(ret, packname, settings.buildType, settings, targets); - //generateProjectConfiguration(ret, packname, "release", settings, targets); - //generateProjectConfiguration(ret, packname, "unittest", settings, targets); - - // Add all files - auto files = targets[packname].buildSettings; - SourceFile[string] sourceFiles; - void addSourceFile(Path file_path, Path structure_path, bool build) - { - auto key = file_path.toString(); - auto sf = sourceFiles.get(key, SourceFile.init); - sf.filePath = file_path; - if (!sf.build) { - sf.build = build; - sf.structurePath = structure_path; - } - sourceFiles[key] = sf; - } - - void addFile(string s, bool build) { - auto sp = Path(s); - assert(sp.absolute, format("Source path in %s expected to be absolute: %s", packname, s)); - //if( !sp.absolute ) sp = pack.path ~ sp; - addSourceFile(sp.relativeTo(project_file_dir), determineStructurePath(sp, targets[packname]), build); - } - - foreach (p; targets[packname].packages) - if (!p.packageInfoFile.empty) - addFile(p.packageInfoFile.toNativeString(), false); - - if (files.targetType == TargetType.staticLibrary) - foreach(s; files.sourceFiles.filter!(s => !isLinkerFile(s))) addFile(s, true); - else - foreach(s; files.sourceFiles.filter!(s => !s.endsWith(".lib"))) addFile(s, true); - - foreach(s; files.importFiles) addFile(s, false); - foreach(s; files.stringImportFiles) addFile(s, false); - - // Create folders and files - ret.formattedWrite(" ", getPackageFileName(packname)); - Path lastFolder; - foreach(source; sortedSources(sourceFiles.values)) { - logDebug("source looking at %s", source.structurePath); - auto cur = source.structurePath[0 .. source.structurePath.length-1]; - if(lastFolder != cur) { - size_t same = 0; - foreach(idx; 0..min(lastFolder.length, cur.length)) - if(lastFolder[idx] != cur[idx]) break; - else same = idx+1; - - const decrease = lastFolder.length - min(lastFolder.length, same); - const increase = cur.length - min(cur.length, same); - - foreach(unused; 0..decrease) - ret.put("\n "); - foreach(idx; 0..increase) - ret.formattedWrite("\n ", cur[same + idx].toString()); - lastFolder = cur; - } - ret.formattedWrite("\n ", source.build ? 
"" : "tool=\"None\" ", source.filePath.toNativeString()); - } - // Finalize all open folders - foreach(unused; 0..lastFolder.length) - ret.put("\n "); - ret.put("\n \n"); - - logDebug("About to write to '%s.visualdproj' file %s bytes", getPackageFileName(packname), ret.data().length); - auto proj = openFile(projFileName(packname), FileMode.CreateTrunc); - scope(exit) proj.close(); - proj.put(ret.data()); - proj.flush(); - } - - void generateProjectConfiguration(Appender!(char[]) ret, string pack, string type, GeneratorSettings settings, in TargetInfo[string] targets) - { - auto project_file_dir = m_project.rootPackage.path ~ projFileName(pack).parentPath; - auto buildsettings = targets[pack].buildSettings.dup; - - string[] getSettings(string setting)(){ return __traits(getMember, buildsettings, setting); } - string[] getPathSettings(string setting)() - { - auto settings = getSettings!setting(); - auto ret = new string[settings.length]; - foreach (i; 0 .. settings.length) { - // \" is interpreted as an escaped " by cmd.exe, so we need to avoid that - auto p = Path(settings[i]).relativeTo(project_file_dir); - p.endsWithSlash = false; - ret[i] = '"' ~ p.toNativeString() ~ '"'; - } - return ret; - } - - foreach(architecture; settings.platform.architecture) { - string arch; - switch(architecture) { - default: logWarn("Unsupported platform('%s'), defaulting to x86", architecture); goto case; - case "x86": arch = "Win32"; break; - case "x86_64": arch = "x64"; break; - } - ret.formattedWrite(" \n", to!string(type), arch); - - // FIXME: handle compiler options in an abstract way instead of searching for DMD specific flags - - // debug and optimize setting - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.debugInfo ? "1" : "0"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.optimize ? "1" : "0"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.inline ? "1" : "0"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.releaseMode ? "1" : "0"); - - // Lib or exe? - enum - { - Executable = 0, - StaticLib = 1, - DynamicLib = 2 - } - - int output_type = StaticLib; // library - string output_ext = "lib"; - if (buildsettings.targetType == TargetType.executable) - { - output_type = Executable; - output_ext = "exe"; - } - else if (buildsettings.targetType == TargetType.dynamicLibrary) - { - output_type = DynamicLib; - output_ext = "dll"; - } - string debugSuffix = type == "debug" ? "_d" : ""; - auto bin_path = pack == m_project.rootPackage.name ? Path(buildsettings.targetPath) : Path(".dub/lib/"); - bin_path.endsWithSlash = true; - ret.formattedWrite(" %s\n", output_type); - ret.formattedWrite(" %s%s%s.%s\n", bin_path.toNativeString(), buildsettings.targetName, debugSuffix, output_ext); - - // include paths and string imports - string imports = join(getPathSettings!"importPaths"(), " "); - string stringImports = join(getPathSettings!"stringImportPaths"(), " "); - ret.formattedWrite(" %s\n", imports); - ret.formattedWrite(" %s\n", stringImports); - - ret.formattedWrite(" %s\n", "$(DMDInstallDir)windows\\bin\\dmd.exe"); // FIXME: use the actually selected compiler! - ret.formattedWrite(" %s\n", getSettings!"dflags"().join(" ")); - - // Add version identifiers - string versions = join(getSettings!"versions"(), " "); - ret.formattedWrite(" %s\n", versions); - - // Add libraries, system libs need to be suffixed by ".lib". 
- string linkLibs = join(map!(a => a~".lib")(getSettings!"libs"()), " "); - string addLinkFiles = join(getSettings!"sourceFiles"().filter!(s => s.endsWith(".lib"))(), " "); - if (arch == "x86") addLinkFiles ~= " phobos.lib"; - if (output_type != StaticLib) ret.formattedWrite(" %s %s\n", linkLibs, addLinkFiles); - - // Unittests - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.unittests ? "1" : "0"); - - // compute directory for intermediate files (need dummy/ because of how -op determines the resulting path) - size_t ndummy = 0; - foreach (f; buildsettings.sourceFiles) { - auto rpath = Path(f).relativeTo(project_file_dir); - size_t nd = 0; - foreach (i; 0 .. rpath.length) - if (rpath[i] == "..") - nd++; - if (nd > ndummy) ndummy = nd; - } - string intersubdir = replicate("dummy/", ndummy) ~ getPackageFileName(pack); - - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - int singlefilemode; - final switch (settings.buildMode) with (BuildMode) { - case separate: singlefilemode = 2; break; - case allAtOnce: singlefilemode = 0; break; - //case singleFile: singlefilemode = 1; break; - //case compileOnly: singlefilemode = 3; break; - } - ret.formattedWrite(" %s\n", singlefilemode); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.verbose ? "1" : "0"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.formattedWrite(" %s\n", arch == "x64" ? 1 : 0); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.noBoundsCheck ? "1" : "0"); - ret.put(" 0\n"); - ret.put(" 1\n"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.warningsAsErrors ? "1" : "0"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.warnings ? "1" : "0"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.property ? "1" : "0"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.alwaysStackFrame ? "1" : "0"); - ret.put(" 0\n"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.coverage ? "1" : "0"); - ret.put(" 0\n"); - ret.put(" 2\n"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.ignoreUnknownPragmas ? "1" : "0"); - ret.formattedWrite(" %s\n", settings.compiler.name == "ldc" ? 2 : settings.compiler.name == "gdc" ? 
1 : 0); - ret.formattedWrite(" 0\n"); - ret.formattedWrite(" %s\n", bin_path.toNativeString()); - ret.formattedWrite(" .dub/obj/%s/%s\n", to!string(type), intersubdir); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" 0\n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" 0\n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" 1\n"); - ret.put(" $(IntDir)\\$(TargetName).json\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" \n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" 0\n"); - ret.put(" \n"); - ret.put(" 1\n"); - ret.put(" $(VisualDInstallDir)cv2pdb\\cv2pdb.exe\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" 0\n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" \n"); - ret.put(" *.obj;*.cmd;*.build;*.dep\n"); - ret.put(" \n"); - } // foreach(architecture) - } - - void performOnDependencies(const Package main, string[string] configs, void delegate(const Package pack) op) - { - foreach (p; m_project.getTopologicalPackageList(false, main, configs)) { - if (p is main) continue; - op(p); - } - } - - string generateUUID() const { - import std.string; - return "{" ~ toUpper(randomUUID().toString()) ~ "}"; - } - - string guid(string projectName) { - if(projectName !in m_projectUuids) - m_projectUuids[projectName] = generateUUID(); - return m_projectUuids[projectName]; - } - - auto solutionFileName() const { - version(DUBBING) return getPackageFileName(m_project.rootPackage) ~ ".dubbed.sln"; - else return getPackageFileName(m_project.rootPackage.name) ~ ".sln"; - } - - Path projFileName(string pack) const { - auto basepath = Path(".");//Path(".dub/"); - version(DUBBING) return basepath ~ (getPackageFileName(pack) ~ ".dubbed.visualdproj"); - else return basepath ~ (getPackageFileName(pack) ~ ".visualdproj"); - } - } - - // TODO: nice folders - struct SourceFile { - Path structurePath; - Path filePath; - bool build; - - hash_t toHash() const nothrow @trusted { return structurePath.toHash() ^ filePath.toHash() ^ (build * 0x1f3e7b2c); } - int opCmp(ref const SourceFile rhs) const { return sortOrder(this, rhs); } - // "a < b" for folder structures (deepest folder first, else lexical) - private final static int sortOrder(ref const SourceFile a, ref const SourceFile b) { - assert(!a.structurePath.empty()); - assert(!b.structurePath.empty()); - auto as = a.structurePath; - auto bs = b.structurePath; - - // Check for different folders, compare folders only (omit last one). - for(uint idx=0; idx bs.length? -1 : 1; - } - else { - // Both paths indicate files in the same directory, use lexical - // ordering for those. 
- return as.head.opCmp(bs.head); - } - } - } - - auto sortedSources(SourceFile[] sources) { - return sort(sources); - } - - unittest { - SourceFile[] sfs = [ - { Path("b/file.d"), Path("") }, - { Path("b/b/fileA.d"), Path("") }, - { Path("a/file.d"), Path("") }, - { Path("b/b/fileB.d"), Path("") }, - { Path("b/b/b/fileA.d"), Path("") }, - { Path("b/c/fileA.d"), Path("") }, - ]; - auto sorted = sort(sfs); - SourceFile[] sortedSfs; - foreach(sr; sorted) - sortedSfs ~= sr; - assert(sortedSfs[0].structurePath == Path("a/file.d"), "1"); - assert(sortedSfs[1].structurePath == Path("b/b/b/fileA.d"), "2"); - assert(sortedSfs[2].structurePath == Path("b/b/fileA.d"), "3"); - assert(sortedSfs[3].structurePath == Path("b/b/fileB.d"), "4"); - assert(sortedSfs[4].structurePath == Path("b/c/fileA.d"), "5"); - assert(sortedSfs[5].structurePath == Path("b/file.d"), "6"); - } -} - -private Path determineStructurePath(Path file_path, in ProjectGenerator.TargetInfo target) -{ - foreach (p; target.packages) { - if (file_path.startsWith(p.path)) - return Path(getPackageFileName(p.name)) ~ file_path[p.path.length .. $]; - } - return Path("misc/") ~ file_path.head; -} - -private string getPackageFileName(string pack) -{ - return pack.replace(":", "_"); -} +/** + Generator for VisualD project files + + Copyright: © 2012-2013 Matthias Dondorff + License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. + Authors: Matthias Dondorff +*/ +module dub.generators.visuald; + +import dub.compilers.compiler; +import dub.generators.generator; +import dub.internal.utils; +import dub.internal.vibecompat.core.file; +import dub.internal.vibecompat.core.log; +import dub.package_; +import dub.packagemanager; +import dub.project; + +import std.algorithm; +import std.array; +import std.conv; +import std.exception; +import std.format; +import std.string : format; +import std.uuid; + + +// Dubbing is developing dub... 
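As an aside on the std.uuid import above: the generateUUID/guid helpers further down in this file wrap a random UUID in braces and upper-case it, which is the identifier format Visual Studio solution and project files expect. A minimal, self-contained sketch of that format (the concrete value differs on every run):

import std.string : toUpper;
import std.uuid : randomUUID;

void main()
{
    // Same shape as generateUUID() in this module: "{<upper-case UUID>}"
    auto guid = "{" ~ randomUUID().toString().toUpper() ~ "}";
    assert(guid.length == 38);                    // 36 UUID characters plus the braces
    assert(guid[0] == '{' && guid[$ - 1] == '}');
}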
+//version = DUBBING; + +// TODO: handle pre/post build commands + + +class VisualDGenerator : ProjectGenerator { + private { + PackageManager m_pkgMgr; + string[string] m_projectUuids; + } + + this(Project app, PackageManager mgr) + { + super(app); + m_pkgMgr = mgr; + } + + override void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets) + { + auto bs = targets[m_project.name].buildSettings; + logDebug("About to generate projects for %s, with %s direct dependencies.", m_project.rootPackage.name, m_project.rootPackage.dependencies.length); + generateProjectFiles(settings, targets); + generateSolutionFile(settings, targets); + logInfo("VisualD project generated."); + } + + private { + void generateSolutionFile(GeneratorSettings settings, in TargetInfo[string] targets) + { + auto ret = appender!(char[])(); + auto configs = m_project.getPackageConfigs(settings.platform, settings.config); + auto some_uuid = generateUUID(); + + // Solution header + ret.put("Microsoft Visual Studio Solution File, Format Version 11.00\n"); + ret.put("# Visual Studio 2010\n"); + + bool[string] visited; + void generateSolutionEntry(string pack) { + if (pack in visited) return; + visited[pack] = true; + + auto ti = targets[pack]; + + auto uuid = guid(pack); + ret.formattedWrite("Project(\"%s\") = \"%s\", \"%s\", \"%s\"\n", + some_uuid, pack, projFileName(pack), uuid); + + if (ti.linkDependencies.length && ti.buildSettings.targetType != TargetType.staticLibrary) { + ret.put("\tProjectSection(ProjectDependencies) = postProject\n"); + foreach (d; ti.linkDependencies) + if (!isHeaderOnlyPackage(d, targets)) { + // TODO: clarify what "uuid = uuid" should mean + ret.formattedWrite("\t\t%s = %s\n", guid(d), guid(d)); + } + ret.put("\tEndProjectSection\n"); + } + + ret.put("EndProject\n"); + + foreach (d; ti.dependencies) generateSolutionEntry(d); + } + + auto mainpack = m_project.rootPackage.name; + + generateSolutionEntry(mainpack); + + // Global section contains configurations + ret.put("Global\n"); + ret.put("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n"); + ret.formattedWrite("\t\t%s|Win32 = %s|Win32\n", settings.buildType, settings.buildType); + ret.put("\tEndGlobalSection\n"); + ret.put("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n"); + + const string[] sub = ["ActiveCfg", "Build.0"]; + const string[] conf = [settings.buildType~"|Win32"]; + auto projectUuid = guid(mainpack); + foreach (t; targets.byKey) + foreach (c; conf) + foreach (s; sub) + formattedWrite(ret, "\t\t%s.%s.%s = %s\n", guid(t), c, s, c); + + // TODO: for all dependencies + ret.put("\tEndGlobalSection\n"); + + ret.put("\tGlobalSection(SolutionProperties) = preSolution\n"); + ret.put("\t\tHideSolutionNode = FALSE\n"); + ret.put("\tEndGlobalSection\n"); + ret.put("EndGlobal\n"); + + // Writing solution file + logDebug("About to write to .sln file with %s bytes", to!string(ret.data().length)); + auto sln = openFile(solutionFileName(), FileMode.CreateTrunc); + scope(exit) sln.close(); + sln.put(ret.data()); + sln.flush(); + } + + + void generateProjectFiles(GeneratorSettings settings, in TargetInfo[string] targets) + { + bool[string] visited; + void performRec(string name) { + if (name in visited) return; + visited[name] = true; + generateProjectFile(name, settings, targets); + foreach (d; targets[name].dependencies) + performRec(d); + } + + performRec(m_project.rootPackage.name); + } + + bool isHeaderOnlyPackage(string pack, in TargetInfo[string] targets) + const { + auto buildsettings = 
targets[pack].buildSettings; + if (!buildsettings.sourceFiles.any!(f => f.endsWith(".d"))()) + return true; + return false; + } + + void generateProjectFile(string packname, GeneratorSettings settings, in TargetInfo[string] targets) + { + int i = 0; + auto ret = appender!(char[])(); + + auto project_file_dir = m_project.rootPackage.path ~ projFileName(packname).parentPath; + ret.put("\n"); + ret.formattedWrite(" %s\n", guid(packname)); + + // Several configurations (debug, release, unittest) + generateProjectConfiguration(ret, packname, settings.buildType, settings, targets); + //generateProjectConfiguration(ret, packname, "release", settings, targets); + //generateProjectConfiguration(ret, packname, "unittest", settings, targets); + + // Add all files + auto files = targets[packname].buildSettings; + SourceFile[string] sourceFiles; + void addSourceFile(Path file_path, Path structure_path, bool build) + { + auto key = file_path.toString(); + auto sf = sourceFiles.get(key, SourceFile.init); + sf.filePath = file_path; + if (!sf.build) { + sf.build = build; + sf.structurePath = structure_path; + } + sourceFiles[key] = sf; + } + + void addFile(string s, bool build) { + auto sp = Path(s); + assert(sp.absolute, format("Source path in %s expected to be absolute: %s", packname, s)); + //if( !sp.absolute ) sp = pack.path ~ sp; + addSourceFile(sp.relativeTo(project_file_dir), determineStructurePath(sp, targets[packname]), build); + } + + foreach (p; targets[packname].packages) + if (!p.packageInfoFile.empty) + addFile(p.packageInfoFile.toNativeString(), false); + + if (files.targetType == TargetType.staticLibrary) + foreach(s; files.sourceFiles.filter!(s => !isLinkerFile(s))) addFile(s, true); + else + foreach(s; files.sourceFiles.filter!(s => !s.endsWith(".lib"))) addFile(s, true); + + foreach(s; files.importFiles) addFile(s, false); + foreach(s; files.stringImportFiles) addFile(s, false); + + // Create folders and files + ret.formattedWrite(" ", getPackageFileName(packname)); + Path lastFolder; + foreach(source; sortedSources(sourceFiles.values)) { + logDebug("source looking at %s", source.structurePath); + auto cur = source.structurePath[0 .. source.structurePath.length-1]; + if(lastFolder != cur) { + size_t same = 0; + foreach(idx; 0..min(lastFolder.length, cur.length)) + if(lastFolder[idx] != cur[idx]) break; + else same = idx+1; + + const decrease = lastFolder.length - min(lastFolder.length, same); + const increase = cur.length - min(cur.length, same); + + foreach(unused; 0..decrease) + ret.put("\n "); + foreach(idx; 0..increase) + ret.formattedWrite("\n ", cur[same + idx].toString()); + lastFolder = cur; + } + ret.formattedWrite("\n ", source.build ? 
"" : "tool=\"None\" ", source.filePath.toNativeString()); + } + // Finalize all open folders + foreach(unused; 0..lastFolder.length) + ret.put("\n "); + ret.put("\n \n"); + + logDebug("About to write to '%s.visualdproj' file %s bytes", getPackageFileName(packname), ret.data().length); + auto proj = openFile(projFileName(packname), FileMode.CreateTrunc); + scope(exit) proj.close(); + proj.put(ret.data()); + proj.flush(); + } + + void generateProjectConfiguration(Appender!(char[]) ret, string pack, string type, GeneratorSettings settings, in TargetInfo[string] targets) + { + auto project_file_dir = m_project.rootPackage.path ~ projFileName(pack).parentPath; + auto buildsettings = targets[pack].buildSettings.dup; + + string[] getSettings(string setting)(){ return __traits(getMember, buildsettings, setting); } + string[] getPathSettings(string setting)() + { + auto settings = getSettings!setting(); + auto ret = new string[settings.length]; + foreach (i; 0 .. settings.length) { + // \" is interpreted as an escaped " by cmd.exe, so we need to avoid that + auto p = Path(settings[i]).relativeTo(project_file_dir); + p.endsWithSlash = false; + ret[i] = '"' ~ p.toNativeString() ~ '"'; + } + return ret; + } + + foreach(architecture; settings.platform.architecture) { + string arch; + switch(architecture) { + default: logWarn("Unsupported platform('%s'), defaulting to x86", architecture); goto case; + case "x86": arch = "Win32"; break; + case "x86_64": arch = "x64"; break; + } + ret.formattedWrite(" \n", to!string(type), arch); + + // FIXME: handle compiler options in an abstract way instead of searching for DMD specific flags + + // debug and optimize setting + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.debugInfo ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.optimize ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.inline ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.releaseMode ? "1" : "0"); + + // Lib or exe? + enum + { + Executable = 0, + StaticLib = 1, + DynamicLib = 2 + } + + int output_type = StaticLib; // library + string output_ext = "lib"; + if (buildsettings.targetType == TargetType.executable) + { + output_type = Executable; + output_ext = "exe"; + } + else if (buildsettings.targetType == TargetType.dynamicLibrary) + { + output_type = DynamicLib; + output_ext = "dll"; + } + string debugSuffix = type == "debug" ? "_d" : ""; + auto bin_path = pack == m_project.rootPackage.name ? Path(buildsettings.targetPath) : Path(".dub/lib/"); + bin_path.endsWithSlash = true; + ret.formattedWrite(" %s\n", output_type); + ret.formattedWrite(" %s%s%s.%s\n", bin_path.toNativeString(), buildsettings.targetName, debugSuffix, output_ext); + + // include paths and string imports + string imports = join(getPathSettings!"importPaths"(), " "); + string stringImports = join(getPathSettings!"stringImportPaths"(), " "); + ret.formattedWrite(" %s\n", imports); + ret.formattedWrite(" %s\n", stringImports); + + ret.formattedWrite(" %s\n", "$(DMDInstallDir)windows\\bin\\dmd.exe"); // FIXME: use the actually selected compiler! + ret.formattedWrite(" %s\n", getSettings!"dflags"().join(" ")); + + // Add version identifiers + string versions = join(getSettings!"versions"(), " "); + ret.formattedWrite(" %s\n", versions); + + // Add libraries, system libs need to be suffixed by ".lib". 
+ string linkLibs = join(map!(a => a~".lib")(getSettings!"libs"()), " "); + string addLinkFiles = join(getSettings!"sourceFiles"().filter!(s => s.endsWith(".lib"))(), " "); + if (arch == "x86") addLinkFiles ~= " phobos.lib"; + if (output_type != StaticLib) ret.formattedWrite(" %s %s\n", linkLibs, addLinkFiles); + + // Unittests + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.unittests ? "1" : "0"); + + // compute directory for intermediate files (need dummy/ because of how -op determines the resulting path) + size_t ndummy = 0; + foreach (f; buildsettings.sourceFiles) { + auto rpath = Path(f).relativeTo(project_file_dir); + size_t nd = 0; + foreach (i; 0 .. rpath.length) + if (rpath[i] == "..") + nd++; + if (nd > ndummy) ndummy = nd; + } + string intersubdir = replicate("dummy/", ndummy) ~ getPackageFileName(pack); + + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + int singlefilemode; + final switch (settings.buildMode) with (BuildMode) { + case separate: singlefilemode = 2; break; + case allAtOnce: singlefilemode = 0; break; + //case singleFile: singlefilemode = 1; break; + //case compileOnly: singlefilemode = 3; break; + } + ret.formattedWrite(" %s\n", singlefilemode); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.verbose ? "1" : "0"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.formattedWrite(" %s\n", arch == "x64" ? 1 : 0); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.noBoundsCheck ? "1" : "0"); + ret.put(" 0\n"); + ret.put(" 1\n"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.warningsAsErrors ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.warnings ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.property ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.alwaysStackFrame ? "1" : "0"); + ret.put(" 0\n"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.coverage ? "1" : "0"); + ret.put(" 0\n"); + ret.put(" 2\n"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.ignoreUnknownPragmas ? "1" : "0"); + ret.formattedWrite(" %s\n", settings.compiler.name == "ldc" ? 2 : settings.compiler.name == "gdc" ? 
1 : 0); + ret.formattedWrite(" 0\n"); + ret.formattedWrite(" %s\n", bin_path.toNativeString()); + ret.formattedWrite(" .dub/obj/%s/%s\n", to!string(type), intersubdir); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" 0\n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" 0\n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" 1\n"); + ret.put(" $(IntDir)\\$(TargetName).json\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" \n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" 0\n"); + ret.put(" \n"); + ret.put(" 1\n"); + ret.put(" $(VisualDInstallDir)cv2pdb\\cv2pdb.exe\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" 0\n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" \n"); + ret.put(" *.obj;*.cmd;*.build;*.dep\n"); + ret.put(" \n"); + } // foreach(architecture) + } + + void performOnDependencies(const Package main, string[string] configs, void delegate(const Package pack) op) + { + foreach (p; m_project.getTopologicalPackageList(false, main, configs)) { + if (p is main) continue; + op(p); + } + } + + string generateUUID() const { + import std.string; + return "{" ~ toUpper(randomUUID().toString()) ~ "}"; + } + + string guid(string projectName) { + if(projectName !in m_projectUuids) + m_projectUuids[projectName] = generateUUID(); + return m_projectUuids[projectName]; + } + + auto solutionFileName() const { + version(DUBBING) return getPackageFileName(m_project.rootPackage) ~ ".dubbed.sln"; + else return getPackageFileName(m_project.rootPackage.name) ~ ".sln"; + } + + Path projFileName(string pack) const { + auto basepath = Path(".");//Path(".dub/"); + version(DUBBING) return basepath ~ (getPackageFileName(pack) ~ ".dubbed.visualdproj"); + else return basepath ~ (getPackageFileName(pack) ~ ".visualdproj"); + } + } + + // TODO: nice folders + struct SourceFile { + Path structurePath; + Path filePath; + bool build; + + hash_t toHash() const nothrow @trusted { return structurePath.toHash() ^ filePath.toHash() ^ (build * 0x1f3e7b2c); } + int opCmp(ref const SourceFile rhs) const { return sortOrder(this, rhs); } + // "a < b" for folder structures (deepest folder first, else lexical) + private final static int sortOrder(ref const SourceFile a, ref const SourceFile b) { + assert(!a.structurePath.empty()); + assert(!b.structurePath.empty()); + auto as = a.structurePath; + auto bs = b.structurePath; + + // Check for different folders, compare folders only (omit last one). + for(uint idx=0; idx bs.length? -1 : 1; + } + else { + // Both paths indicate files in the same directory, use lexical + // ordering for those. 
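The ordering rule spelled out in the comments above (compare folder components lexically, put the deeper folder structure first when one directory is a prefix of the other, and fall back to lexical file-name order inside the same directory) can be sketched with plain strings. The paths and helper below are illustrative only, not the comparator dub actually uses, and this mirrors the unittest a little further down:

import std.algorithm : cmp, min, sort;
import std.array : split;

int folderOrder(string a, string b)
{
    auto as = a.split("/"), bs = b.split("/");
    foreach (i; 0 .. min(as.length, bs.length) - 1)
        if (as[i] != bs[i])
            return as[i] < bs[i] ? -1 : 1;          // first differing folder decides
    if (as.length != bs.length)
        return as.length > bs.length ? -1 : 1;      // deeper folder structure first
    return cmp(as[$ - 1], bs[$ - 1]);               // same directory: lexical file name
}

void main()
{
    auto files = ["b/file.d", "b/b/fileA.d", "a/file.d", "b/b/fileB.d"];
    files.sort!((x, y) => folderOrder(x, y) < 0);
    assert(files == ["a/file.d", "b/b/fileA.d", "b/b/fileB.d", "b/file.d"]);
}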
+ return as.head.opCmp(bs.head); + } + } + } + + auto sortedSources(SourceFile[] sources) { + return sort(sources); + } + + unittest { + SourceFile[] sfs = [ + { Path("b/file.d"), Path("") }, + { Path("b/b/fileA.d"), Path("") }, + { Path("a/file.d"), Path("") }, + { Path("b/b/fileB.d"), Path("") }, + { Path("b/b/b/fileA.d"), Path("") }, + { Path("b/c/fileA.d"), Path("") }, + ]; + auto sorted = sort(sfs); + SourceFile[] sortedSfs; + foreach(sr; sorted) + sortedSfs ~= sr; + assert(sortedSfs[0].structurePath == Path("a/file.d"), "1"); + assert(sortedSfs[1].structurePath == Path("b/b/b/fileA.d"), "2"); + assert(sortedSfs[2].structurePath == Path("b/b/fileA.d"), "3"); + assert(sortedSfs[3].structurePath == Path("b/b/fileB.d"), "4"); + assert(sortedSfs[4].structurePath == Path("b/c/fileA.d"), "5"); + assert(sortedSfs[5].structurePath == Path("b/file.d"), "6"); + } +} + +private Path determineStructurePath(Path file_path, in ProjectGenerator.TargetInfo target) +{ + foreach (p; target.packages) { + if (file_path.startsWith(p.path)) + return Path(getPackageFileName(p.name)) ~ file_path[p.path.length .. $]; + } + return Path("misc/") ~ file_path.head; +} + +private string getPackageFileName(string pack) +{ + return pack.replace(":", "_"); +} diff --git a/source/dub/init.d b/source/dub/init.d index 673386e..5a4edd6 100644 --- a/source/dub/init.d +++ b/source/dub/init.d @@ -25,7 +25,7 @@ if( !root_path.empty ){ if( !existsFile(root_path) ) createDirectory(root_path); - } + } //Make sure we do not overwrite anything accidentally auto files = packageInfoFilenames ~ ["source/", "views/", "public/"]; @@ -43,7 +43,7 @@ { writePackageJson(root_path, "A minimal D application.", null); createDirectory(root_path ~ "source"); - write((root_path ~ "source/app.d").toNativeString(), + write((root_path ~ "source/app.d").toNativeString(), q{import std.stdio; void main() @@ -59,7 +59,7 @@ createDirectory(root_path ~ "source"); createDirectory(root_path ~ "views"); createDirectory(root_path ~ "public"); - write((root_path ~ "source/app.d").toNativeString(), + write((root_path ~ "source/app.d").toNativeString(), q{import vibe.d; shared static this() diff --git a/source/dub/internal/vibecompat/core/file.d b/source/dub/internal/vibecompat/core/file.d index 8c51f74..5a806a3 100644 --- a/source/dub/internal/vibecompat/core/file.d +++ b/source/dub/internal/vibecompat/core/file.d @@ -31,7 +31,7 @@ void put(in char[] str) { put(cast(ubyte[])str); } void put(char ch) { put((&ch)[0 .. 1]); } void put(dchar ch) { char[4] chars; put(chars[0 .. encode(chars, ch)]); } - + ubyte[] readAll() { file.seek(0, std.stream.SeekPos.End); diff --git a/source/dub/internal/vibecompat/inet/path.d b/source/dub/internal/vibecompat/inet/path.d index 1fe30b1..fbe1557 100644 --- a/source/dub/internal/vibecompat/inet/path.d +++ b/source/dub/internal/vibecompat/inet/path.d @@ -30,7 +30,7 @@ bool m_absolute = false; bool m_endsWithSlash = false; } - + /// Constructs a Path object by parsing a path string. this(string pathstr) { @@ -38,20 +38,20 @@ m_absolute = (pathstr.startsWith("/") || m_nodes.length > 0 && (m_nodes[0].toString().countUntil(':')>0 || m_nodes[0] == "\\")); m_endsWithSlash = pathstr.endsWith("/"); } - + /// Constructs a path object from a list of PathEntry objects. this(immutable(PathEntry)[] nodes, bool absolute) { m_nodes = nodes; m_absolute = absolute; } - + /// Constructs a relative path with one path entry. this(PathEntry entry){ m_nodes = [entry]; m_absolute = false; } - + /// Determines if the path is absolute. 
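For readers skimming the Path code above: the constructor treats a path as absolute if it starts with a slash, if its first component contains a drive-letter colon, or if it begins with a lone backslash. A rough illustration of that rule, assuming dub's vibecompat Path module is available on the import path:

import dub.internal.vibecompat.inet.path;

unittest
{
    assert(Path("/usr/local/include").absolute);   // leading slash
    assert(Path("C:/projects/dub").absolute);      // drive letter in the first component
    assert(!Path("source/app.d").absolute);        // ordinary relative path
}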
@property bool absolute() const { return m_absolute; } @@ -74,14 +74,14 @@ } m_nodes = newnodes; } - + /// Converts the Path back to a string representation using slashes. string toString() const { if( m_nodes.empty ) return absolute ? "/" : ""; - + Appender!string ret; - + // for absolute paths start with / version(Windows) { @@ -91,12 +91,12 @@ } else { - if( absolute ) + if( absolute ) { ret.put('/'); } } - + foreach( i, f; m_nodes ){ if( i > 0 ) ret.put('/'); ret.put(f.toString()); @@ -104,10 +104,10 @@ if( m_nodes.length > 0 && m_endsWithSlash ) ret.put('/'); - + return ret.data; } - + /// Converts the Path object to a native path string (backslash as path separator on Windows). string toNativeString() const { @@ -119,26 +119,26 @@ } Appender!string ret; - + // for absolute unix paths start with / version(Posix) { if(absolute) ret.put('/'); } - + foreach( i, f; m_nodes ){ version(Windows) { if( i > 0 ) ret.put('\\'); } version(Posix) { if( i > 0 ) ret.put('/'); } else { enforce("Unsupported OS"); } ret.put(f.toString()); } - + if( m_nodes.length > 0 && m_endsWithSlash ){ version(Windows) { ret.put('\\'); } version(Posix) { ret.put('/'); } } - + return ret.data; } - - /// Tests if `rhs` is an anchestor or the same as this path. + + /// Tests if `rhs` is an anchestor or the same as this path. bool startsWith(const Path rhs) const { if( rhs.m_nodes.length > m_nodes.length ) return false; foreach( i; 0 .. rhs.m_nodes.length ) @@ -146,7 +146,7 @@ return false; return true; } - + /// Computes the relative path from `parentPath` to this path. Path relativeTo(const Path parentPath) const { assert(this.absolute && parentPath.absolute); @@ -170,7 +170,7 @@ ret.m_endsWithSlash = this.m_endsWithSlash; return ret; } - + /// The last entry of the path @property ref immutable(PathEntry) head() const { enforce(m_nodes.length > 0); return m_nodes[$-1]; } @@ -193,7 +193,7 @@ /// Determines if this path goes outside of its base path (i.e. begins with '..'). @property bool external() const { return !m_absolute && m_nodes.length > 0 && m_nodes[0].m_name == ".."; } - + ref immutable(PathEntry) opIndex(size_t idx) const { return m_nodes[idx]; } Path opSlice(size_t start, size_t end) const { auto ret = Path(m_nodes[start .. end], start == 0 ? absolute : false); @@ -201,15 +201,15 @@ return ret; } size_t opDollar(int dim)() const if(dim == 0) { return m_nodes.length; } - - + + Path opBinary(string OP)(const Path rhs) const if( OP == "~" ) { Path ret; ret.m_nodes = m_nodes; ret.m_absolute = m_absolute; ret.m_endsWithSlash = rhs.m_endsWithSlash; ret.normalize(); // needed to avoid "."~".." become "" instead of ".." 
- + assert(!rhs.absolute, "Trying to append absolute path."); size_t idx = m_nodes.length; foreach(folder; rhs.m_nodes){ @@ -226,13 +226,13 @@ } return ret; } - + Path opBinary(string OP)(string rhs) const if( OP == "~" ) { assert(rhs.length > 0, "Cannot append empty path string."); return opBinary!"~"(Path(rhs)); } Path opBinary(string OP)(PathEntry rhs) const if( OP == "~" ) { assert(rhs.toString().length > 0, "Cannot append empty path string."); return opBinary!"~"(Path(rhs)); } void opOpAssign(string OP)(string rhs) if( OP == "~" ) { assert(rhs.length > 0, "Cannot append empty path string."); opOpAssign!"~"(Path(rhs)); } void opOpAssign(string OP)(PathEntry rhs) if( OP == "~" ) { assert(rhs.toString().length > 0, "Cannot append empty path string."); opOpAssign!"~"(Path(rhs)); } void opOpAssign(string OP)(Path rhs) if( OP == "~" ) { auto p = this ~ rhs; m_nodes = p.m_nodes; m_endsWithSlash = rhs.m_endsWithSlash; } - + /// Tests two paths for equality using '=='. bool opEquals(ref const Path rhs) const { if( m_absolute != rhs.m_absolute ) return false; @@ -272,17 +272,17 @@ private { string m_name; } - + this(string str) { assert(str.countUntil('/') < 0 && (str.countUntil('\\') < 0 || str.length == 1)); m_name = str; } - + string toString() const { return m_name; } Path opBinary(string OP)(PathEntry rhs) const if( OP == "~" ) { return Path(cast(immutable)[this, rhs], false); } - + bool opEquals(ref const PathEntry rhs) const { return m_name == rhs.m_name; } bool opEquals(PathEntry rhs) const { return m_name == rhs.m_name; } bool opEquals(string rhs) const { return m_name == rhs; } @@ -426,7 +426,7 @@ dotpathp.normalize(); assert(dotpathp.toString() == "/test2/x/y"); } - + { auto parentpath = "/path/to/parent"; auto parentpathp = Path(parentpath); diff --git a/source/dub/internal/vibecompat/inet/url.d b/source/dub/internal/vibecompat/inet/url.d index 45b16f2..d9f021f 100644 --- a/source/dub/internal/vibecompat/inet/url.d +++ b/source/dub/internal/vibecompat/inet/url.d @@ -52,7 +52,7 @@ } /** Constructs a URL from its string representation. - + TODO: additional validation required (e.g. valid host and user names and port) */ this(string url_string) @@ -163,14 +163,14 @@ /// The path part plus query string and anchor @property string localURI() - const { + const { auto str = appender!string(); str.reserve(m_pathString.length + 2 + queryString.length + anchor.length); str.put(encode(path.toString())); if( queryString.length ) { str.put("?"); str.put(queryString); - } + } if( anchor.length ) { str.put("#"); str.put(anchor); @@ -267,7 +267,7 @@ assert(url.schema == "https", url.schema); assert(url.host == "www.example.net", url.host); assert(url.path == Path("/index.html"), url.path.toString()); - + url = URL.parse("http://jo.doe:password@sub.www.example.net:4711/sub2/index.html?query#anchor"); assert(url.schema == "http", url.schema); assert(url.username == "jo.doe", url.username); diff --git a/source/dub/package_.d b/source/dub/package_.d index c6b4559..8435149 100644 --- a/source/dub/package_.d +++ b/source/dub/package_.d @@ -1,895 +1,895 @@ -/** - Stuff with dependencies. - - Copyright: © 2012-2013 Matthias Dondorff - License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
- Authors: Matthias Dondorff -*/ -module dub.package_; - -import dub.compilers.compiler; -import dub.dependency; -import dub.internal.utils; -import dub.internal.vibecompat.core.log; -import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.data.json; -import dub.internal.vibecompat.inet.url; - -import std.algorithm; -import std.array; -import std.conv; -import std.exception; -import std.file; -import std.range; -import std.string; -import std.traits : EnumMembers; - - -// Supported package descriptions in decreasing order of preference. -enum packageInfoFilenames = ["dub.json", /*"dub.sdl",*/ "package.json"]; -string defaultPackageFilename() { - return packageInfoFilenames[0]; -} - -/** - Represents a package, including its sub packages - - Documentation of the dub.json can be found at - http://registry.vibed.org/package-format -*/ -class Package { - static struct LocalPackageDef { string name; Version version_; Path path; } - - private { - Path m_path; - Path m_infoFile; - PackageInfo m_info; - Package m_parentPackage; - Package[] m_subPackages; - Path[] m_exportedPackages; - } - - static bool isPackageAt(Path path) - { - foreach (f; packageInfoFilenames) - if (existsFile(path ~ f)) - return true; - return false; - } - - this(Path root, Package parent = null, string versionOverride = "") - { - Json info; - try { - foreach (f; packageInfoFilenames) { - auto name = root ~ f; - if (existsFile(name)) { - m_infoFile = name; - info = jsonFromFile(m_infoFile); - break; - } - } - } catch (Exception ex) throw new Exception(format("Failed to load package at %s: %s", root.toNativeString(), ex.msg)); - - enforce(info.type != Json.Type.undefined, format("Missing package description for package at %s", root.toNativeString())); - - this(info, root, parent, versionOverride); - } - - this(Json packageInfo, Path root = Path(), Package parent = null, string versionOverride = "") - { - m_parentPackage = parent; - m_path = root; - m_path.endsWithSlash = true; - - // force the package name to be lower case - packageInfo.name = packageInfo.name.get!string.toLower(); - - // check for default string import folders - foreach(defvf; ["views"]){ - auto p = m_path ~ defvf; - if( existsFile(p) ) - m_info.buildSettings.stringImportPaths[""] ~= defvf; - } - - string app_main_file; - auto pkg_name = packageInfo.name.get!string(); - - // check for default source folders - foreach(defsf; ["source/", "src/"]){ - auto p = m_path ~ defsf; - if( existsFile(p) ){ - m_info.buildSettings.sourcePaths[""] ~= defsf; - m_info.buildSettings.importPaths[""] ~= defsf; - foreach (fil; ["app.d", "main.d", pkg_name ~ "/main.d", pkg_name ~ "/" ~ "app.d"]) - if (existsFile(p ~ fil)) { - app_main_file = Path(defsf ~ fil).toNativeString(); - break; - } - } - } - - // parse the JSON description - { - scope(failure) logError("Failed to parse package description in %s", root.toNativeString()); - m_info.parseJson(packageInfo); - - if (!versionOverride.empty) - m_info.version_ = versionOverride; - - // try to run git to determine the version of the package if no explicit version was given - if (m_info.version_.length == 0 && !parent) { - try m_info.version_ = determineVersionFromSCM(root); - catch (Exception e) logDebug("Failed to determine version by SCM: %s", e.msg); - - if (m_info.version_.length == 0) { - logDiagnostic("Note: Failed to determine version of package %s at %s. Assuming ~master.", m_info.name, this.path.toNativeString()); - // TODO: Assume unknown version here? 
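determineVersionFromSCM, called above, lives in dub.internal.utils and is not part of this diff; the snippet below is only a hypothetical stand-in to illustrate the general idea of deriving a version from the repository and falling back to ~master when that fails:

import std.process : execute;
import std.string : strip;

// Hypothetical helper - NOT dub's actual implementation.
string guessVersionFromGit(string packageDir)
{
    auto git = execute(["git", "-C", packageDir, "describe", "--tags"]);
    return git.status == 0 ? git.output.strip() : "~master";
}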
- // m_info.version_ = Version.UNKNOWN.toString(); - m_info.version_ = Version.MASTER.toString(); - } else logDiagnostic("Determined package version using GIT: %s %s", m_info.name, m_info.version_); - } - } - - // generate default configurations if none are defined - if (m_info.configurations.length == 0) { - if (m_info.buildSettings.targetType == TargetType.executable) { - BuildSettingsTemplate app_settings; - app_settings.targetType = TargetType.executable; - if (m_info.buildSettings.mainSourceFile.empty) app_settings.mainSourceFile = app_main_file; - m_info.configurations ~= ConfigurationInfo("application", app_settings); - } else if (m_info.buildSettings.targetType != TargetType.none) { - BuildSettingsTemplate lib_settings; - lib_settings.targetType = m_info.buildSettings.targetType == TargetType.autodetect ? TargetType.library : m_info.buildSettings.targetType; - - if (m_info.buildSettings.targetType == TargetType.autodetect) { - if (app_main_file.length) { - lib_settings.excludedSourceFiles[""] ~= app_main_file; - - BuildSettingsTemplate app_settings; - app_settings.targetType = TargetType.executable; - app_settings.mainSourceFile = app_main_file; - m_info.configurations ~= ConfigurationInfo("application", app_settings); - } - } - - m_info.configurations ~= ConfigurationInfo("library", lib_settings); - } - } - - // load all sub packages defined in the package description - foreach (sub; packageInfo.subPackages.opt!(Json[])) { - enforce(!m_parentPackage, format("'subPackages' found in '%s'. This is only supported in the main package file for '%s'.", name, m_parentPackage.name)); - - if (sub.type == Json.Type.string) { - auto p = Path(sub.get!string); - p.normalize(); - enforce(!p.absolute, "Sub package paths must not be absolute: " ~ sub.get!string); - enforce(!p.startsWith(Path("..")), "Sub packages must be in a sub directory, not " ~ sub.get!string); - m_exportedPackages ~= p; - if (!path.empty) m_subPackages ~= new Package(path ~ p, this, this.vers); - } else { - m_subPackages ~= new Package(sub, root, this); - } - } - - simpleLint(); - } - - @property string name() - const { - if (m_parentPackage) return m_parentPackage.name ~ ":" ~ m_info.name; - else return m_info.name; - } - @property string vers() const { return m_parentPackage ? m_parentPackage.vers : m_info.version_; } - @property Version ver() const { return Version(this.vers); } - @property void ver(Version ver) { assert(m_parentPackage is null); m_info.version_ = ver.toString(); } - @property ref inout(PackageInfo) info() inout { return m_info; } - @property Path path() const { return m_path; } - @property Path packageInfoFile() const { return m_infoFile; } - @property const(Dependency[string]) dependencies() const { return m_info.dependencies; } - @property inout(Package) basePackage() inout { return m_parentPackage ? 
m_parentPackage.basePackage : this; } - @property inout(Package) parentPackage() inout { return m_parentPackage; } - @property inout(Package)[] subPackages() inout { return m_subPackages; } - @property inout(Path[]) exportedPackages() inout { return m_exportedPackages; } - - @property string[] configurations() - const { - auto ret = appender!(string[])(); - foreach( ref config; m_info.configurations ) - ret.put(config.name); - return ret.data; - } - - const(Dependency[string]) getDependencies(string config) - const { - Dependency[string] ret; - foreach (k, v; m_info.buildSettings.dependencies) - ret[k] = v; - foreach (ref conf; m_info.configurations) - if (conf.name == config) { - foreach (k, v; conf.buildSettings.dependencies) - ret[k] = v; - break; - } - return ret; - } - - /** Overwrites the packge description file using the default filename with the current information. - */ - void storeInfo() - { - enforce(!ver.isUnknown, "Trying to store a package with an 'unknown' version, this is not supported."); - auto filename = m_path ~ defaultPackageFilename(); - auto dstFile = openFile(filename.toNativeString(), FileMode.CreateTrunc); - scope(exit) dstFile.close(); - dstFile.writePrettyJsonString(m_info.toJson()); - m_infoFile = filename; - } - - inout(Package) getSubPackage(string name) inout { - foreach (p; m_subPackages) - if (p.name == this.name ~ ":" ~ name) - return p; - throw new Exception(format("Unknown sub package: %s:%s", this.name, name)); - } - - void warnOnSpecialCompilerFlags() - { - // warn about use of special flags - m_info.buildSettings.warnOnSpecialCompilerFlags(m_info.name, null); - foreach (ref config; m_info.configurations) - config.buildSettings.warnOnSpecialCompilerFlags(m_info.name, config.name); - } - - const(BuildSettingsTemplate) getBuildSettings(string config = null) - const { - if (config.length) { - foreach (ref conf; m_info.configurations) - if (conf.name == config) - return conf.buildSettings; - assert(false, "Unknown configuration: "~config); - } else { - return m_info.buildSettings; - } - } - - /// Returns all BuildSettings for the given platform and config. 
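One detail of the platform-aware getBuildSettings overload that follows: when a package defines no targetName, the package name is reused with ':' replaced by '_', so sub packages still yield valid file names. Illustrated with a made-up sub package name:

import std.array : replace;

void main()
{
    // Hypothetical sub package "core" of a package "vibe-d"
    assert("vibe-d:core".replace(":", "_") == "vibe-d_core");
}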
- BuildSettings getBuildSettings(in BuildPlatform platform, string config) - const { - BuildSettings ret; - m_info.buildSettings.getPlatformSettings(ret, platform, this.path); - bool found = false; - foreach(ref conf; m_info.configurations){ - if( conf.name != config ) continue; - conf.buildSettings.getPlatformSettings(ret, platform, this.path); - found = true; - break; - } - assert(found || config is null, "Unknown configuration for "~m_info.name~": "~config); - - // construct default target name based on package name - if( ret.targetName.empty ) ret.targetName = this.name.replace(":", "_"); - - // special support for DMD style flags - getCompiler("dmd").extractBuildOptions(ret); - - return ret; - } - - void addBuildTypeSettings(ref BuildSettings settings, in BuildPlatform platform, string build_type) - const { - if (build_type == "$DFLAGS") { - import std.process; - string dflags = environment.get("DFLAGS"); - settings.addDFlags(dflags.split()); - return; - } - - if (auto pbt = build_type in m_info.buildTypes) { - logDiagnostic("Using custom build type '%s'.", build_type); - pbt.getPlatformSettings(settings, platform, this.path); - } else { - with(BuildOptions) switch (build_type) { - default: throw new Exception(format("Unknown build type for %s: '%s'", this.name, build_type)); - case "plain": break; - case "debug": settings.addOptions(debugMode, debugInfo); break; - case "release": settings.addOptions(releaseMode, optimize, inline); break; - case "release-nobounds": settings.addOptions(releaseMode, optimize, inline, noBoundsCheck); break; - case "unittest": settings.addOptions(unittests, debugMode, debugInfo); break; - case "docs": settings.addOptions(syntaxOnly); settings.addDFlags("-c", "-Dddocs"); break; - case "ddox": settings.addOptions(syntaxOnly); settings.addDFlags("-c", "-Df__dummy.html", "-Xfdocs.json"); break; - case "profile": settings.addOptions(profile, optimize, inline, debugInfo); break; - case "cov": settings.addOptions(coverage, debugInfo); break; - case "unittest-cov": settings.addOptions(unittests, coverage, debugMode, debugInfo); break; - } - } - } - - string getSubConfiguration(string config, in Package dependency, in BuildPlatform platform) - const { - bool found = false; - foreach(ref c; m_info.configurations){ - if( c.name == config ){ - if( auto pv = dependency.name in c.buildSettings.subConfigurations ) return *pv; - found = true; - break; - } - } - assert(found || config is null, "Invalid configuration \""~config~"\" for "~this.name); - if( auto pv = dependency.name in m_info.buildSettings.subConfigurations ) return *pv; - return null; - } - - /// Returns the default configuration to build for the given platform - string getDefaultConfiguration(in BuildPlatform platform, bool allow_non_library = false) - const { - foreach (ref conf; m_info.configurations) { - if (!conf.matchesPlatform(platform)) continue; - if (!allow_non_library && conf.buildSettings.targetType == TargetType.executable) continue; - return conf.name; - } - return null; - } - - /// Returns a list of configurations suitable for the given platform - string[] getPlatformConfigurations(in BuildPlatform platform, bool is_main_package = false) - const { - auto ret = appender!(string[]); - foreach(ref conf; m_info.configurations){ - if (!conf.matchesPlatform(platform)) continue; - if (!is_main_package && conf.buildSettings.targetType == TargetType.executable) continue; - ret ~= conf.name; - } - if (ret.data.length == 0) ret.put(null); - return ret.data; - } - - /// Human readable information of this 
package and its dependencies. - string generateInfoString() const { - string s; - s ~= m_info.name ~ ", version '" ~ m_info.version_ ~ "'"; - s ~= "\n Dependencies:"; - foreach(string p, ref const Dependency v; m_info.dependencies) - s ~= "\n " ~ p ~ ", version '" ~ v.toString() ~ "'"; - return s; - } - - bool hasDependency(string depname, string config) - const { - if (depname in m_info.buildSettings.dependencies) return true; - foreach (ref c; m_info.configurations) - if ((config.empty || c.name == config) && depname in c.buildSettings.dependencies) - return true; - return false; - } - - void describe(ref Json dst, BuildPlatform platform, string config) - { - dst.path = m_path.toNativeString(); - dst.name = this.name; - dst["version"] = this.vers; - dst.description = m_info.description; - dst.homepage = m_info.homepage; - dst.authors = m_info.authors.serializeToJson(); - dst.copyright = m_info.copyright; - dst.license = m_info.license; - dst.dependencies = m_info.dependencies.keys.serializeToJson(); - - // save build settings - BuildSettings bs = getBuildSettings(platform, config); - - foreach (string k, v; bs.serializeToJson()) dst[k] = v; - dst.remove("requirements"); - dst.remove("sourceFiles"); - dst.remove("importFiles"); - dst.remove("stringImportFiles"); - dst.targetType = bs.targetType.to!string(); - if (dst.targetType != TargetType.none) - dst.targetFileName = getTargetFileName(bs, platform); - - // prettify build requirements output - Json[] breqs; - for (int i = 1; i <= BuildRequirements.max; i <<= 1) - if (bs.requirements & i) - breqs ~= Json(to!string(cast(BuildRequirements)i)); - dst.buildRequirements = breqs; - - // prettify options output - Json[] bopts; - for (int i = 1; i <= BuildOptions.max; i <<= 1) - if (bs.options & i) - bopts ~= Json(to!string(cast(BuildOptions)i)); - dst.options = bopts; - - // prettify files output - Json[] files; - foreach (f; bs.sourceFiles) { - auto jf = Json.emptyObject; - jf.path = f; - jf["type"] = "source"; - files ~= jf; - } - foreach (f; bs.importFiles) { - auto jf = Json.emptyObject; - jf.path = f; - jf["type"] = "import"; - files ~= jf; - } - foreach (f; bs.stringImportFiles) { - auto jf = Json.emptyObject; - jf.path = f; - jf["type"] = "stringImport"; - files ~= jf; - } - dst.files = Json(files); - } - - private void simpleLint() const { - if (m_parentPackage) { - if (m_parentPackage.path != path) { - if (info.license.length && info.license != m_parentPackage.info.license) - logWarn("License in subpackage %s is different than it's parent package, this is discouraged.", name); - } - } - if (name.empty()) logWarn("The package in %s has no name.", path); - } -} - -/// Specifying package information without any connection to a certain -/// retrived package, like Package class is doing. 
-struct PackageInfo { - string name; - string version_; - string description; - string homepage; - string[] authors; - string copyright; - string license; - string[] ddoxFilterArgs; - BuildSettingsTemplate buildSettings; - ConfigurationInfo[] configurations; - BuildSettingsTemplate[string] buildTypes; - Json subPackages; - - @property const(Dependency)[string] dependencies() - const { - const(Dependency)[string] ret; - foreach (n, d; this.buildSettings.dependencies) - ret[n] = d; - foreach (ref c; configurations) - foreach (n, d; c.buildSettings.dependencies) - ret[n] = d; - return ret; - } - - inout(ConfigurationInfo) getConfiguration(string name) - inout { - foreach (c; configurations) - if (c.name == name) - return c; - throw new Exception("Unknown configuration: "~name); - } - - void parseJson(Json json) - { - foreach( string field, value; json ){ - switch(field){ - default: break; - case "name": this.name = value.get!string; break; - case "version": this.version_ = value.get!string; break; - case "description": this.description = value.get!string; break; - case "homepage": this.homepage = value.get!string; break; - case "authors": this.authors = deserializeJson!(string[])(value); break; - case "copyright": this.copyright = value.get!string; break; - case "license": this.license = value.get!string; break; - case "subPackages": subPackages = value; break; - case "configurations": break; // handled below, after the global settings have been parsed - case "buildTypes": - foreach (string name, settings; value) { - BuildSettingsTemplate bs; - bs.parseJson(settings, null); - buildTypes[name] = bs; - } - break; - case "-ddoxFilterArgs": this.ddoxFilterArgs = deserializeJson!(string[])(value); break; - } - } - - enforce(this.name.length > 0, "The package \"name\" field is missing or empty."); - - // parse build settings - this.buildSettings.parseJson(json, this.name); - - if (auto pv = "configurations" in json) { - TargetType deftargettp = TargetType.library; - if (this.buildSettings.targetType != TargetType.autodetect) - deftargettp = this.buildSettings.targetType; - - foreach (settings; *pv) { - ConfigurationInfo ci; - ci.parseJson(settings, this.name, deftargettp); - this.configurations ~= ci; - } - } - } - - Json toJson() - const { - auto ret = buildSettings.toJson(); - ret.name = this.name; - if( !this.version_.empty ) ret["version"] = this.version_; - if( !this.description.empty ) ret.description = this.description; - if( !this.homepage.empty ) ret.homepage = this.homepage; - if( !this.authors.empty ) ret.authors = serializeToJson(this.authors); - if( !this.copyright.empty ) ret.copyright = this.copyright; - if( !this.license.empty ) ret.license = this.license; - if( this.subPackages.type != Json.Type.undefined ) { - auto copy = this.subPackages.toString(); - ret.subPackages = dub.internal.vibecompat.data.json.parseJson(copy); - } - if( this.configurations ){ - Json[] configs; - foreach(config; this.configurations) - configs ~= config.toJson(); - ret.configurations = configs; - } - if( this.buildTypes.length ) { - Json[string] types; - foreach(name, settings; this.buildTypes) - types[name] = settings.toJson(); - } - if( !this.ddoxFilterArgs.empty ) ret["-ddoxFilterArgs"] = this.ddoxFilterArgs.serializeToJson(); - return ret; - } -} - -/// Bundles information about a build configuration. 
-struct ConfigurationInfo { - string name; - string[] platforms; - BuildSettingsTemplate buildSettings; - - this(string name, BuildSettingsTemplate build_settings) - { - enforce(!name.empty, "Configuration name is empty."); - this.name = name; - this.buildSettings = build_settings; - } - - void parseJson(Json json, string package_name, TargetType default_target_type = TargetType.library) - { - this.buildSettings.targetType = default_target_type; - - foreach(string name, value; json){ - switch(name){ - default: break; - case "name": - this.name = value.get!string(); - enforce(!this.name.empty, "Configurations must have a non-empty name."); - break; - case "platforms": this.platforms = deserializeJson!(string[])(value); break; - } - } - - enforce(!this.name.empty, "Configuration is missing a name."); - - BuildSettingsTemplate bs; - this.buildSettings.parseJson(json, package_name); - } - - Json toJson() - const { - auto ret = buildSettings.toJson(); - ret.name = name; - if( this.platforms.length ) ret.platforms = serializeToJson(platforms); - return ret; - } - - bool matchesPlatform(in BuildPlatform platform) - const { - if( platforms.empty ) return true; - foreach(p; platforms) - if( platform.matchesSpecification("-"~p) ) - return true; - return false; - } -} - -/// This keeps general information about how to build a package. -/// It contains functions to create a specific BuildSetting, targeted at -/// a certain BuildPlatform. -struct BuildSettingsTemplate { - Dependency[string] dependencies; - TargetType targetType = TargetType.autodetect; - string targetPath; - string targetName; - string workingDirectory; - string mainSourceFile; - string[string] subConfigurations; - string[][string] dflags; - string[][string] lflags; - string[][string] libs; - string[][string] sourceFiles; - string[][string] sourcePaths; - string[][string] excludedSourceFiles; - string[][string] copyFiles; - string[][string] versions; - string[][string] debugVersions; - string[][string] importPaths; - string[][string] stringImportPaths; - string[][string] preGenerateCommands; - string[][string] postGenerateCommands; - string[][string] preBuildCommands; - string[][string] postBuildCommands; - BuildRequirements[string] buildRequirements; - BuildOptions[string] buildOptions; - - void parseJson(Json json, string package_name) - { - foreach(string name, value; json) - { - auto idx = std.string.indexOf(name, "-"); - string basename, suffix; - if( idx >= 0 ) basename = name[0 .. idx], suffix = name[idx .. $]; - else basename = name; - switch(basename){ - default: break; - case "dependencies": - foreach (string pkg, verspec; value) { - if (pkg.startsWith(":")) pkg = package_name ~ pkg; - enforce(pkg !in this.dependencies, "The dependency '"~pkg~"' is specified more than once." 
); - this.dependencies[pkg] = deserializeJson!Dependency(verspec); - } - break; - case "targetType": - enforce(suffix.empty, "targetType does not support platform customization."); - targetType = value.get!string().to!TargetType(); - break; - case "targetPath": - enforce(suffix.empty, "targetPath does not support platform customization."); - this.targetPath = value.get!string; - break; - case "targetName": - enforce(suffix.empty, "targetName does not support platform customization."); - this.targetName = value.get!string; - break; - case "workingDirectory": - enforce(suffix.empty, "workingDirectory does not support platform customization."); - this.workingDirectory = value.get!string; - break; - case "mainSourceFile": - enforce(suffix.empty, "mainSourceFile does not support platform customization."); - this.mainSourceFile = value.get!string; - break; - case "subConfigurations": - enforce(suffix.empty, "subConfigurations does not support platform customization."); - this.subConfigurations = deserializeJson!(string[string])(value); - break; - case "dflags": this.dflags[suffix] = deserializeJson!(string[])(value); break; - case "lflags": this.lflags[suffix] = deserializeJson!(string[])(value); break; - case "libs": this.libs[suffix] = deserializeJson!(string[])(value); break; - case "files": - case "sourceFiles": this.sourceFiles[suffix] = deserializeJson!(string[])(value); break; - case "sourcePaths": this.sourcePaths[suffix] = deserializeJson!(string[])(value); break; - case "sourcePath": this.sourcePaths[suffix] ~= [value.get!string()]; break; // deprecated - case "excludedSourceFiles": this.excludedSourceFiles[suffix] = deserializeJson!(string[])(value); break; - case "copyFiles": this.copyFiles[suffix] = deserializeJson!(string[])(value); break; - case "versions": this.versions[suffix] = deserializeJson!(string[])(value); break; - case "debugVersions": this.debugVersions[suffix] = deserializeJson!(string[])(value); break; - case "importPaths": this.importPaths[suffix] = deserializeJson!(string[])(value); break; - case "stringImportPaths": this.stringImportPaths[suffix] = deserializeJson!(string[])(value); break; - case "preGenerateCommands": this.preGenerateCommands[suffix] = deserializeJson!(string[])(value); break; - case "postGenerateCommands": this.postGenerateCommands[suffix] = deserializeJson!(string[])(value); break; - case "preBuildCommands": this.preBuildCommands[suffix] = deserializeJson!(string[])(value); break; - case "postBuildCommands": this.postBuildCommands[suffix] = deserializeJson!(string[])(value); break; - case "buildRequirements": - BuildRequirements reqs; - foreach (req; deserializeJson!(string[])(value)) - reqs |= to!BuildRequirements(req); - this.buildRequirements[suffix] = reqs; - break; - case "buildOptions": - BuildOptions options; - foreach (opt; deserializeJson!(string[])(value)) - options |= to!BuildOptions(opt); - this.buildOptions[suffix] = options; - break; - } - } - } - - Json toJson() - const { - auto ret = Json.emptyObject; - if( this.dependencies !is null ){ - auto deps = Json.emptyObject; - foreach( pack, d; this.dependencies ) - deps[pack] = serializeToJson(d); - ret.dependencies = deps; - } - if (targetType != TargetType.autodetect) ret["targetType"] = targetType.to!string(); - if (!targetPath.empty) ret["targetPath"] = targetPath; - if (!targetName.empty) ret["targetName"] = targetName; - if (!workingDirectory.empty) ret["workingDirectory"] = workingDirectory; - if (!mainSourceFile.empty) ret["mainSourceFile"] = mainSourceFile; - foreach (suffix, 
arr; dflags) ret["dflags"~suffix] = serializeToJson(arr); - foreach (suffix, arr; lflags) ret["lflags"~suffix] = serializeToJson(arr); - foreach (suffix, arr; libs) ret["libs"~suffix] = serializeToJson(arr); - foreach (suffix, arr; sourceFiles) ret["sourceFiles"~suffix] = serializeToJson(arr); - foreach (suffix, arr; sourcePaths) ret["sourcePaths"~suffix] = serializeToJson(arr); - foreach (suffix, arr; excludedSourceFiles) ret["excludedSourceFiles"~suffix] = serializeToJson(arr); - foreach (suffix, arr; copyFiles) ret["copyFiles"~suffix] = serializeToJson(arr); - foreach (suffix, arr; versions) ret["versions"~suffix] = serializeToJson(arr); - foreach (suffix, arr; debugVersions) ret["debugVersions"~suffix] = serializeToJson(arr); - foreach (suffix, arr; importPaths) ret["importPaths"~suffix] = serializeToJson(arr); - foreach (suffix, arr; stringImportPaths) ret["stringImportPaths"~suffix] = serializeToJson(arr); - foreach (suffix, arr; preGenerateCommands) ret["preGenerateCommands"~suffix] = serializeToJson(arr); - foreach (suffix, arr; postGenerateCommands) ret["postGenerateCommands"~suffix] = serializeToJson(arr); - foreach (suffix, arr; preBuildCommands) ret["preBuildCommands"~suffix] = serializeToJson(arr); - foreach (suffix, arr; postBuildCommands) ret["postBuildCommands"~suffix] = serializeToJson(arr); - foreach (suffix, arr; buildRequirements) { - string[] val; - foreach (i; [EnumMembers!BuildRequirements]) - if (arr & i) val ~= to!string(i); - ret["buildRequirements"~suffix] = serializeToJson(val); - } - foreach (suffix, arr; buildOptions) { - string[] val; - foreach (i; [EnumMembers!BuildOptions]) - if (arr & i) val ~= to!string(i); - ret["buildOptions"~suffix] = serializeToJson(val); - } - return ret; - } - - /// Constructs a BuildSettings object from this template. 
- void getPlatformSettings(ref BuildSettings dst, in BuildPlatform platform, Path base_path) - const { - dst.targetType = this.targetType; - if (!this.targetPath.empty) dst.targetPath = this.targetPath; - if (!this.targetName.empty) dst.targetName = this.targetName; - if (!this.workingDirectory.empty) dst.workingDirectory = this.workingDirectory; - if (!this.mainSourceFile.empty) { - dst.mainSourceFile = this.mainSourceFile; - dst.addSourceFiles(this.mainSourceFile); - } - - void collectFiles(string method)(in string[][string] paths_map, string pattern) - { - foreach (suffix, paths; paths_map) { - if (!platform.matchesSpecification(suffix)) - continue; - - foreach (spath; paths) { - enforce(!spath.empty, "Paths must not be empty strings."); - auto path = Path(spath); - if (!path.absolute) path = base_path ~ path; - if (!existsFile(path) || !isDir(path.toNativeString())) { - logWarn("Invalid source/import path: %s", path.toNativeString()); - continue; - } - - foreach (d; dirEntries(path.toNativeString(), pattern, SpanMode.depth)) { - if (isDir(d.name)) continue; - auto src = Path(d.name).relativeTo(base_path); - __traits(getMember, dst, method)(src.toNativeString()); - } - } - } - } - - // collect files from all source/import folders - collectFiles!"addSourceFiles"(sourcePaths, "*.d"); - collectFiles!"addImportFiles"(importPaths, "*.{d,di}"); - dst.removeImportFiles(dst.sourceFiles); - collectFiles!"addStringImportFiles"(stringImportPaths, "*"); - - // ensure a deterministic order of files as passed to the compiler - dst.sourceFiles.sort(); - - getPlatformSetting!("dflags", "addDFlags")(dst, platform); - getPlatformSetting!("lflags", "addLFlags")(dst, platform); - getPlatformSetting!("libs", "addLibs")(dst, platform); - getPlatformSetting!("sourceFiles", "addSourceFiles")(dst, platform); - getPlatformSetting!("excludedSourceFiles", "removeSourceFiles")(dst, platform); - getPlatformSetting!("copyFiles", "addCopyFiles")(dst, platform); - getPlatformSetting!("versions", "addVersions")(dst, platform); - getPlatformSetting!("debugVersions", "addDebugVersions")(dst, platform); - getPlatformSetting!("importPaths", "addImportPaths")(dst, platform); - getPlatformSetting!("stringImportPaths", "addStringImportPaths")(dst, platform); - getPlatformSetting!("preGenerateCommands", "addPreGenerateCommands")(dst, platform); - getPlatformSetting!("postGenerateCommands", "addPostGenerateCommands")(dst, platform); - getPlatformSetting!("preBuildCommands", "addPreBuildCommands")(dst, platform); - getPlatformSetting!("postBuildCommands", "addPostBuildCommands")(dst, platform); - getPlatformSetting!("buildRequirements", "addRequirements")(dst, platform); - getPlatformSetting!("buildOptions", "addOptions")(dst, platform); - } - - void getPlatformSetting(string name, string addname)(ref BuildSettings dst, in BuildPlatform platform) - const { - foreach(suffix, values; __traits(getMember, this, name)){ - if( platform.matchesSpecification(suffix) ) - __traits(getMember, dst, addname)(values); - } - } - - void warnOnSpecialCompilerFlags(string package_name, string config_name) - { - auto nodef = false; - auto noprop = false; - foreach (req; this.buildRequirements) { - if (req & BuildRequirements.noDefaultFlags) nodef = true; - if (req & BuildRequirements.relaxProperties) noprop = true; - } - - if (noprop) { - logWarn(`Warning: "buildRequirements": ["relaxProperties"] is deprecated and is now the default behavior. 
Note that the -property switch will probably be removed in future versions of DMD.`); - logWarn(""); - } - - if (nodef) { - logWarn("Warning: This package uses the \"noDefaultFlags\" build requirement. Please use only for development purposes and not for released packages."); - logWarn(""); - } else { - string[] all_dflags; - BuildOptions all_options; - foreach (flags; this.dflags) all_dflags ~= flags; - foreach (options; this.buildOptions) all_options |= options; - .warnOnSpecialCompilerFlags(all_dflags, all_options, package_name, config_name); - } - } -} - -/// Returns all package names, starting with the root package in [0]. -string[] getSubPackagePath(string package_name) -{ - return package_name.split(":"); -} - -/// Returns the name of the base package in the case of some sub package or the -/// package itself, if it is already a full package. -string getBasePackageName(string package_name) -{ - return package_name.getSubPackagePath()[0]; -} - -string getSubPackageName(string package_name) -{ - return getSubPackagePath(package_name)[1 .. $].join(":"); -} - -private string determineVersionFromSCM(Path path) -{ - import std.process; - import dub.semver; - - auto git_dir = path ~ ".git"; - if (!existsFile(git_dir) || !isDir(git_dir.toNativeString)) return null; - auto git_dir_param = "--git-dir=" ~ git_dir.toNativeString(); - - static string exec(scope string[] params...) { - auto ret = execute(params); - if (ret.status == 0) return ret.output.strip; - logDebug("'%s' failed with exit code %s: %s", params.join(" "), ret.status, ret.output.strip); - return null; - } - - if (auto tag = exec("git", git_dir_param, "describe", "--long", "--tags")) { - auto parts = tag.split("-"); - auto commit = parts[$-1]; - auto num = parts[$-2].to!int; - tag = parts[0 .. $-2].join("-"); - if (tag.startsWith("v") && isValidVersion(tag[1 .. $])) { - if (num == 0) return tag[1 .. $]; - else if (tag.canFind("+")) return format("%s.commit.%s.%s", tag[1 .. $], num, commit); - else return format("%s+commit.%s.%s", tag[1 .. $], num, commit); - } - } - - if (auto branch = exec("git", git_dir_param, "rev-parse", "--abbrev-ref", "HEAD")) { - if (branch != "HEAD") return "~" ~ branch; - } - - return null; +/** + Stuff with dependencies. + + Copyright: © 2012-2013 Matthias Dondorff + License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. + Authors: Matthias Dondorff +*/ +module dub.package_; + +import dub.compilers.compiler; +import dub.dependency; +import dub.internal.utils; +import dub.internal.vibecompat.core.log; +import dub.internal.vibecompat.core.file; +import dub.internal.vibecompat.data.json; +import dub.internal.vibecompat.inet.url; + +import std.algorithm; +import std.array; +import std.conv; +import std.exception; +import std.file; +import std.range; +import std.string; +import std.traits : EnumMembers; + + +// Supported package descriptions in decreasing order of preference. 
+enum packageInfoFilenames = ["dub.json", /*"dub.sdl",*/ "package.json"]; +string defaultPackageFilename() { + return packageInfoFilenames[0]; +} + +/** + Represents a package, including its sub packages + + Documentation of the dub.json can be found at + http://registry.vibed.org/package-format +*/ +class Package { + static struct LocalPackageDef { string name; Version version_; Path path; } + + private { + Path m_path; + Path m_infoFile; + PackageInfo m_info; + Package m_parentPackage; + Package[] m_subPackages; + Path[] m_exportedPackages; + } + + static bool isPackageAt(Path path) + { + foreach (f; packageInfoFilenames) + if (existsFile(path ~ f)) + return true; + return false; + } + + this(Path root, Package parent = null, string versionOverride = "") + { + Json info; + try { + foreach (f; packageInfoFilenames) { + auto name = root ~ f; + if (existsFile(name)) { + m_infoFile = name; + info = jsonFromFile(m_infoFile); + break; + } + } + } catch (Exception ex) throw new Exception(format("Failed to load package at %s: %s", root.toNativeString(), ex.msg)); + + enforce(info.type != Json.Type.undefined, format("Missing package description for package at %s", root.toNativeString())); + + this(info, root, parent, versionOverride); + } + + this(Json packageInfo, Path root = Path(), Package parent = null, string versionOverride = "") + { + m_parentPackage = parent; + m_path = root; + m_path.endsWithSlash = true; + + // force the package name to be lower case + packageInfo.name = packageInfo.name.get!string.toLower(); + + // check for default string import folders + foreach(defvf; ["views"]){ + auto p = m_path ~ defvf; + if( existsFile(p) ) + m_info.buildSettings.stringImportPaths[""] ~= defvf; + } + + string app_main_file; + auto pkg_name = packageInfo.name.get!string(); + + // check for default source folders + foreach(defsf; ["source/", "src/"]){ + auto p = m_path ~ defsf; + if( existsFile(p) ){ + m_info.buildSettings.sourcePaths[""] ~= defsf; + m_info.buildSettings.importPaths[""] ~= defsf; + foreach (fil; ["app.d", "main.d", pkg_name ~ "/main.d", pkg_name ~ "/" ~ "app.d"]) + if (existsFile(p ~ fil)) { + app_main_file = Path(defsf ~ fil).toNativeString(); + break; + } + } + } + + // parse the JSON description + { + scope(failure) logError("Failed to parse package description in %s", root.toNativeString()); + m_info.parseJson(packageInfo); + + if (!versionOverride.empty) + m_info.version_ = versionOverride; + + // try to run git to determine the version of the package if no explicit version was given + if (m_info.version_.length == 0 && !parent) { + try m_info.version_ = determineVersionFromSCM(root); + catch (Exception e) logDebug("Failed to determine version by SCM: %s", e.msg); + + if (m_info.version_.length == 0) { + logDiagnostic("Note: Failed to determine version of package %s at %s. Assuming ~master.", m_info.name, this.path.toNativeString()); + // TODO: Assume unknown version here? 
+ // m_info.version_ = Version.UNKNOWN.toString(); + m_info.version_ = Version.MASTER.toString(); + } else logDiagnostic("Determined package version using GIT: %s %s", m_info.name, m_info.version_); + } + } + + // generate default configurations if none are defined + if (m_info.configurations.length == 0) { + if (m_info.buildSettings.targetType == TargetType.executable) { + BuildSettingsTemplate app_settings; + app_settings.targetType = TargetType.executable; + if (m_info.buildSettings.mainSourceFile.empty) app_settings.mainSourceFile = app_main_file; + m_info.configurations ~= ConfigurationInfo("application", app_settings); + } else if (m_info.buildSettings.targetType != TargetType.none) { + BuildSettingsTemplate lib_settings; + lib_settings.targetType = m_info.buildSettings.targetType == TargetType.autodetect ? TargetType.library : m_info.buildSettings.targetType; + + if (m_info.buildSettings.targetType == TargetType.autodetect) { + if (app_main_file.length) { + lib_settings.excludedSourceFiles[""] ~= app_main_file; + + BuildSettingsTemplate app_settings; + app_settings.targetType = TargetType.executable; + app_settings.mainSourceFile = app_main_file; + m_info.configurations ~= ConfigurationInfo("application", app_settings); + } + } + + m_info.configurations ~= ConfigurationInfo("library", lib_settings); + } + } + + // load all sub packages defined in the package description + foreach (sub; packageInfo.subPackages.opt!(Json[])) { + enforce(!m_parentPackage, format("'subPackages' found in '%s'. This is only supported in the main package file for '%s'.", name, m_parentPackage.name)); + + if (sub.type == Json.Type.string) { + auto p = Path(sub.get!string); + p.normalize(); + enforce(!p.absolute, "Sub package paths must not be absolute: " ~ sub.get!string); + enforce(!p.startsWith(Path("..")), "Sub packages must be in a sub directory, not " ~ sub.get!string); + m_exportedPackages ~= p; + if (!path.empty) m_subPackages ~= new Package(path ~ p, this, this.vers); + } else { + m_subPackages ~= new Package(sub, root, this); + } + } + + simpleLint(); + } + + @property string name() + const { + if (m_parentPackage) return m_parentPackage.name ~ ":" ~ m_info.name; + else return m_info.name; + } + @property string vers() const { return m_parentPackage ? m_parentPackage.vers : m_info.version_; } + @property Version ver() const { return Version(this.vers); } + @property void ver(Version ver) { assert(m_parentPackage is null); m_info.version_ = ver.toString(); } + @property ref inout(PackageInfo) info() inout { return m_info; } + @property Path path() const { return m_path; } + @property Path packageInfoFile() const { return m_infoFile; } + @property const(Dependency[string]) dependencies() const { return m_info.dependencies; } + @property inout(Package) basePackage() inout { return m_parentPackage ? 
m_parentPackage.basePackage : this; }
+	@property inout(Package) parentPackage() inout { return m_parentPackage; }
+	@property inout(Package)[] subPackages() inout { return m_subPackages; }
+	@property inout(Path[]) exportedPackages() inout { return m_exportedPackages; }
+
+	@property string[] configurations()
+	const {
+		auto ret = appender!(string[])();
+		foreach( ref config; m_info.configurations )
+			ret.put(config.name);
+		return ret.data;
+	}
+
+	const(Dependency[string]) getDependencies(string config)
+	const {
+		Dependency[string] ret;
+		foreach (k, v; m_info.buildSettings.dependencies)
+			ret[k] = v;
+		foreach (ref conf; m_info.configurations)
+			if (conf.name == config) {
+				foreach (k, v; conf.buildSettings.dependencies)
+					ret[k] = v;
+				break;
+			}
+		return ret;
+	}
+
+	/** Overwrites the package description file using the default filename with the current information.
+	*/
+	void storeInfo()
+	{
+		enforce(!ver.isUnknown, "Trying to store a package with an 'unknown' version, this is not supported.");
+		auto filename = m_path ~ defaultPackageFilename();
+		auto dstFile = openFile(filename.toNativeString(), FileMode.CreateTrunc);
+		scope(exit) dstFile.close();
+		dstFile.writePrettyJsonString(m_info.toJson());
+		m_infoFile = filename;
+	}
+
+	inout(Package) getSubPackage(string name) inout {
+		foreach (p; m_subPackages)
+			if (p.name == this.name ~ ":" ~ name)
+				return p;
+		throw new Exception(format("Unknown sub package: %s:%s", this.name, name));
+	}
+
+	void warnOnSpecialCompilerFlags()
+	{
+		// warn about use of special flags
+		m_info.buildSettings.warnOnSpecialCompilerFlags(m_info.name, null);
+		foreach (ref config; m_info.configurations)
+			config.buildSettings.warnOnSpecialCompilerFlags(m_info.name, config.name);
+	}
+
+	const(BuildSettingsTemplate) getBuildSettings(string config = null)
+	const {
+		if (config.length) {
+			foreach (ref conf; m_info.configurations)
+				if (conf.name == config)
+					return conf.buildSettings;
+			assert(false, "Unknown configuration: "~config);
+		} else {
+			return m_info.buildSettings;
+		}
+	}
+
+	/// Returns all BuildSettings for the given platform and config.
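+	///
+	/// The platform-specific global build settings are collected first and the
+	/// settings of the selected configuration are merged on top of them. If no
+	/// target name is configured, the package name (with ':' replaced by '_')
+	/// is used as the default.
+	///
+	/// Minimal usage sketch (the package path and configuration name are
+	/// placeholders; `platform` is assumed to be a previously determined
+	/// BuildPlatform):
+	/// ---
+	/// auto pack = new Package(Path("/path/to/some-package"));
+	/// auto bs = pack.getBuildSettings(platform, "application");
+	/// ---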
+ BuildSettings getBuildSettings(in BuildPlatform platform, string config) + const { + BuildSettings ret; + m_info.buildSettings.getPlatformSettings(ret, platform, this.path); + bool found = false; + foreach(ref conf; m_info.configurations){ + if( conf.name != config ) continue; + conf.buildSettings.getPlatformSettings(ret, platform, this.path); + found = true; + break; + } + assert(found || config is null, "Unknown configuration for "~m_info.name~": "~config); + + // construct default target name based on package name + if( ret.targetName.empty ) ret.targetName = this.name.replace(":", "_"); + + // special support for DMD style flags + getCompiler("dmd").extractBuildOptions(ret); + + return ret; + } + + void addBuildTypeSettings(ref BuildSettings settings, in BuildPlatform platform, string build_type) + const { + if (build_type == "$DFLAGS") { + import std.process; + string dflags = environment.get("DFLAGS"); + settings.addDFlags(dflags.split()); + return; + } + + if (auto pbt = build_type in m_info.buildTypes) { + logDiagnostic("Using custom build type '%s'.", build_type); + pbt.getPlatformSettings(settings, platform, this.path); + } else { + with(BuildOptions) switch (build_type) { + default: throw new Exception(format("Unknown build type for %s: '%s'", this.name, build_type)); + case "plain": break; + case "debug": settings.addOptions(debugMode, debugInfo); break; + case "release": settings.addOptions(releaseMode, optimize, inline); break; + case "release-nobounds": settings.addOptions(releaseMode, optimize, inline, noBoundsCheck); break; + case "unittest": settings.addOptions(unittests, debugMode, debugInfo); break; + case "docs": settings.addOptions(syntaxOnly); settings.addDFlags("-c", "-Dddocs"); break; + case "ddox": settings.addOptions(syntaxOnly); settings.addDFlags("-c", "-Df__dummy.html", "-Xfdocs.json"); break; + case "profile": settings.addOptions(profile, optimize, inline, debugInfo); break; + case "cov": settings.addOptions(coverage, debugInfo); break; + case "unittest-cov": settings.addOptions(unittests, coverage, debugMode, debugInfo); break; + } + } + } + + string getSubConfiguration(string config, in Package dependency, in BuildPlatform platform) + const { + bool found = false; + foreach(ref c; m_info.configurations){ + if( c.name == config ){ + if( auto pv = dependency.name in c.buildSettings.subConfigurations ) return *pv; + found = true; + break; + } + } + assert(found || config is null, "Invalid configuration \""~config~"\" for "~this.name); + if( auto pv = dependency.name in m_info.buildSettings.subConfigurations ) return *pv; + return null; + } + + /// Returns the default configuration to build for the given platform + string getDefaultConfiguration(in BuildPlatform platform, bool allow_non_library = false) + const { + foreach (ref conf; m_info.configurations) { + if (!conf.matchesPlatform(platform)) continue; + if (!allow_non_library && conf.buildSettings.targetType == TargetType.executable) continue; + return conf.name; + } + return null; + } + + /// Returns a list of configurations suitable for the given platform + string[] getPlatformConfigurations(in BuildPlatform platform, bool is_main_package = false) + const { + auto ret = appender!(string[]); + foreach(ref conf; m_info.configurations){ + if (!conf.matchesPlatform(platform)) continue; + if (!is_main_package && conf.buildSettings.targetType == TargetType.executable) continue; + ret ~= conf.name; + } + if (ret.data.length == 0) ret.put(null); + return ret.data; + } + + /// Human readable information of this 
package and its dependencies.
+	string generateInfoString() const {
+		string s;
+		s ~= m_info.name ~ ", version '" ~ m_info.version_ ~ "'";
+		s ~= "\n Dependencies:";
+		foreach(string p, ref const Dependency v; m_info.dependencies)
+			s ~= "\n " ~ p ~ ", version '" ~ v.toString() ~ "'";
+		return s;
+	}
+
+	bool hasDependency(string depname, string config)
+	const {
+		if (depname in m_info.buildSettings.dependencies) return true;
+		foreach (ref c; m_info.configurations)
+			if ((config.empty || c.name == config) && depname in c.buildSettings.dependencies)
+				return true;
+		return false;
+	}
+
+	void describe(ref Json dst, BuildPlatform platform, string config)
+	{
+		dst.path = m_path.toNativeString();
+		dst.name = this.name;
+		dst["version"] = this.vers;
+		dst.description = m_info.description;
+		dst.homepage = m_info.homepage;
+		dst.authors = m_info.authors.serializeToJson();
+		dst.copyright = m_info.copyright;
+		dst.license = m_info.license;
+		dst.dependencies = m_info.dependencies.keys.serializeToJson();
+
+		// save build settings
+		BuildSettings bs = getBuildSettings(platform, config);
+
+		foreach (string k, v; bs.serializeToJson()) dst[k] = v;
+		dst.remove("requirements");
+		dst.remove("sourceFiles");
+		dst.remove("importFiles");
+		dst.remove("stringImportFiles");
+		dst.targetType = bs.targetType.to!string();
+		if (bs.targetType != TargetType.none)
+			dst.targetFileName = getTargetFileName(bs, platform);
+
+		// prettify build requirements output
+		Json[] breqs;
+		for (int i = 1; i <= BuildRequirements.max; i <<= 1)
+			if (bs.requirements & i)
+				breqs ~= Json(to!string(cast(BuildRequirements)i));
+		dst.buildRequirements = breqs;
+
+		// prettify options output
+		Json[] bopts;
+		for (int i = 1; i <= BuildOptions.max; i <<= 1)
+			if (bs.options & i)
+				bopts ~= Json(to!string(cast(BuildOptions)i));
+		dst.options = bopts;
+
+		// prettify files output
+		Json[] files;
+		foreach (f; bs.sourceFiles) {
+			auto jf = Json.emptyObject;
+			jf.path = f;
+			jf["type"] = "source";
+			files ~= jf;
+		}
+		foreach (f; bs.importFiles) {
+			auto jf = Json.emptyObject;
+			jf.path = f;
+			jf["type"] = "import";
+			files ~= jf;
+		}
+		foreach (f; bs.stringImportFiles) {
+			auto jf = Json.emptyObject;
+			jf.path = f;
+			jf["type"] = "stringImport";
+			files ~= jf;
+		}
+		dst.files = Json(files);
+	}
+
+	private void simpleLint() const {
+		if (m_parentPackage) {
+			if (m_parentPackage.path != path) {
+				if (info.license.length && info.license != m_parentPackage.info.license)
+					logWarn("License in subpackage %s is different from its parent package; this is discouraged.", name);
+			}
+		}
+		if (name.empty()) logWarn("The package in %s has no name.", path);
+	}
+}
+
+/// Describes package information without being bound to a specific retrieved
+/// package, unlike the Package class.
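+///
+/// This is the raw, deserialized form of the package description. The
+/// dependencies property aggregates the dependencies of the global build
+/// settings and of all configurations.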
+struct PackageInfo {
+	string name;
+	string version_;
+	string description;
+	string homepage;
+	string[] authors;
+	string copyright;
+	string license;
+	string[] ddoxFilterArgs;
+	BuildSettingsTemplate buildSettings;
+	ConfigurationInfo[] configurations;
+	BuildSettingsTemplate[string] buildTypes;
+	Json subPackages;
+
+	@property const(Dependency)[string] dependencies()
+	const {
+		const(Dependency)[string] ret;
+		foreach (n, d; this.buildSettings.dependencies)
+			ret[n] = d;
+		foreach (ref c; configurations)
+			foreach (n, d; c.buildSettings.dependencies)
+				ret[n] = d;
+		return ret;
+	}
+
+	inout(ConfigurationInfo) getConfiguration(string name)
+	inout {
+		foreach (c; configurations)
+			if (c.name == name)
+				return c;
+		throw new Exception("Unknown configuration: "~name);
+	}
+
+	void parseJson(Json json)
+	{
+		foreach( string field, value; json ){
+			switch(field){
+				default: break;
+				case "name": this.name = value.get!string; break;
+				case "version": this.version_ = value.get!string; break;
+				case "description": this.description = value.get!string; break;
+				case "homepage": this.homepage = value.get!string; break;
+				case "authors": this.authors = deserializeJson!(string[])(value); break;
+				case "copyright": this.copyright = value.get!string; break;
+				case "license": this.license = value.get!string; break;
+				case "subPackages": subPackages = value; break;
+				case "configurations": break; // handled below, after the global settings have been parsed
+				case "buildTypes":
+					foreach (string name, settings; value) {
+						BuildSettingsTemplate bs;
+						bs.parseJson(settings, null);
+						buildTypes[name] = bs;
+					}
+					break;
+				case "-ddoxFilterArgs": this.ddoxFilterArgs = deserializeJson!(string[])(value); break;
+			}
+		}
+
+		enforce(this.name.length > 0, "The package \"name\" field is missing or empty.");
+
+		// parse build settings
+		this.buildSettings.parseJson(json, this.name);
+
+		if (auto pv = "configurations" in json) {
+			TargetType deftargettp = TargetType.library;
+			if (this.buildSettings.targetType != TargetType.autodetect)
+				deftargettp = this.buildSettings.targetType;
+
+			foreach (settings; *pv) {
+				ConfigurationInfo ci;
+				ci.parseJson(settings, this.name, deftargettp);
+				this.configurations ~= ci;
+			}
+		}
+	}
+
+	Json toJson()
+	const {
+		auto ret = buildSettings.toJson();
+		ret.name = this.name;
+		if( !this.version_.empty ) ret["version"] = this.version_;
+		if( !this.description.empty ) ret.description = this.description;
+		if( !this.homepage.empty ) ret.homepage = this.homepage;
+		if( !this.authors.empty ) ret.authors = serializeToJson(this.authors);
+		if( !this.copyright.empty ) ret.copyright = this.copyright;
+		if( !this.license.empty ) ret.license = this.license;
+		if( this.subPackages.type != Json.Type.undefined ) {
+			auto copy = this.subPackages.toString();
+			ret.subPackages = dub.internal.vibecompat.data.json.parseJson(copy);
+		}
+		if( this.configurations ){
+			Json[] configs;
+			foreach(config; this.configurations)
+				configs ~= config.toJson();
+			ret.configurations = configs;
+		}
+		if( this.buildTypes.length ) {
+			Json[string] types;
+			foreach(name, settings; this.buildTypes)
+				types[name] = settings.toJson();
+			ret.buildTypes = Json(types);
+		}
+		if( !this.ddoxFilterArgs.empty ) ret["-ddoxFilterArgs"] = this.ddoxFilterArgs.serializeToJson();
+		return ret;
+	}
+}
+
+/// Bundles information about a build configuration.
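+///
+/// A configuration can be restricted to certain platforms via its "platforms"
+/// field; matchesPlatform() determines whether it applies to a given
+/// BuildPlatform.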
+struct ConfigurationInfo { + string name; + string[] platforms; + BuildSettingsTemplate buildSettings; + + this(string name, BuildSettingsTemplate build_settings) + { + enforce(!name.empty, "Configuration name is empty."); + this.name = name; + this.buildSettings = build_settings; + } + + void parseJson(Json json, string package_name, TargetType default_target_type = TargetType.library) + { + this.buildSettings.targetType = default_target_type; + + foreach(string name, value; json){ + switch(name){ + default: break; + case "name": + this.name = value.get!string(); + enforce(!this.name.empty, "Configurations must have a non-empty name."); + break; + case "platforms": this.platforms = deserializeJson!(string[])(value); break; + } + } + + enforce(!this.name.empty, "Configuration is missing a name."); + + BuildSettingsTemplate bs; + this.buildSettings.parseJson(json, package_name); + } + + Json toJson() + const { + auto ret = buildSettings.toJson(); + ret.name = name; + if( this.platforms.length ) ret.platforms = serializeToJson(platforms); + return ret; + } + + bool matchesPlatform(in BuildPlatform platform) + const { + if( platforms.empty ) return true; + foreach(p; platforms) + if( platform.matchesSpecification("-"~p) ) + return true; + return false; + } +} + +/// This keeps general information about how to build a package. +/// It contains functions to create a specific BuildSetting, targeted at +/// a certain BuildPlatform. +struct BuildSettingsTemplate { + Dependency[string] dependencies; + TargetType targetType = TargetType.autodetect; + string targetPath; + string targetName; + string workingDirectory; + string mainSourceFile; + string[string] subConfigurations; + string[][string] dflags; + string[][string] lflags; + string[][string] libs; + string[][string] sourceFiles; + string[][string] sourcePaths; + string[][string] excludedSourceFiles; + string[][string] copyFiles; + string[][string] versions; + string[][string] debugVersions; + string[][string] importPaths; + string[][string] stringImportPaths; + string[][string] preGenerateCommands; + string[][string] postGenerateCommands; + string[][string] preBuildCommands; + string[][string] postBuildCommands; + BuildRequirements[string] buildRequirements; + BuildOptions[string] buildOptions; + + void parseJson(Json json, string package_name) + { + foreach(string name, value; json) + { + auto idx = std.string.indexOf(name, "-"); + string basename, suffix; + if( idx >= 0 ) basename = name[0 .. idx], suffix = name[idx .. $]; + else basename = name; + switch(basename){ + default: break; + case "dependencies": + foreach (string pkg, verspec; value) { + if (pkg.startsWith(":")) pkg = package_name ~ pkg; + enforce(pkg !in this.dependencies, "The dependency '"~pkg~"' is specified more than once." 
); + this.dependencies[pkg] = deserializeJson!Dependency(verspec); + } + break; + case "targetType": + enforce(suffix.empty, "targetType does not support platform customization."); + targetType = value.get!string().to!TargetType(); + break; + case "targetPath": + enforce(suffix.empty, "targetPath does not support platform customization."); + this.targetPath = value.get!string; + break; + case "targetName": + enforce(suffix.empty, "targetName does not support platform customization."); + this.targetName = value.get!string; + break; + case "workingDirectory": + enforce(suffix.empty, "workingDirectory does not support platform customization."); + this.workingDirectory = value.get!string; + break; + case "mainSourceFile": + enforce(suffix.empty, "mainSourceFile does not support platform customization."); + this.mainSourceFile = value.get!string; + break; + case "subConfigurations": + enforce(suffix.empty, "subConfigurations does not support platform customization."); + this.subConfigurations = deserializeJson!(string[string])(value); + break; + case "dflags": this.dflags[suffix] = deserializeJson!(string[])(value); break; + case "lflags": this.lflags[suffix] = deserializeJson!(string[])(value); break; + case "libs": this.libs[suffix] = deserializeJson!(string[])(value); break; + case "files": + case "sourceFiles": this.sourceFiles[suffix] = deserializeJson!(string[])(value); break; + case "sourcePaths": this.sourcePaths[suffix] = deserializeJson!(string[])(value); break; + case "sourcePath": this.sourcePaths[suffix] ~= [value.get!string()]; break; // deprecated + case "excludedSourceFiles": this.excludedSourceFiles[suffix] = deserializeJson!(string[])(value); break; + case "copyFiles": this.copyFiles[suffix] = deserializeJson!(string[])(value); break; + case "versions": this.versions[suffix] = deserializeJson!(string[])(value); break; + case "debugVersions": this.debugVersions[suffix] = deserializeJson!(string[])(value); break; + case "importPaths": this.importPaths[suffix] = deserializeJson!(string[])(value); break; + case "stringImportPaths": this.stringImportPaths[suffix] = deserializeJson!(string[])(value); break; + case "preGenerateCommands": this.preGenerateCommands[suffix] = deserializeJson!(string[])(value); break; + case "postGenerateCommands": this.postGenerateCommands[suffix] = deserializeJson!(string[])(value); break; + case "preBuildCommands": this.preBuildCommands[suffix] = deserializeJson!(string[])(value); break; + case "postBuildCommands": this.postBuildCommands[suffix] = deserializeJson!(string[])(value); break; + case "buildRequirements": + BuildRequirements reqs; + foreach (req; deserializeJson!(string[])(value)) + reqs |= to!BuildRequirements(req); + this.buildRequirements[suffix] = reqs; + break; + case "buildOptions": + BuildOptions options; + foreach (opt; deserializeJson!(string[])(value)) + options |= to!BuildOptions(opt); + this.buildOptions[suffix] = options; + break; + } + } + } + + Json toJson() + const { + auto ret = Json.emptyObject; + if( this.dependencies !is null ){ + auto deps = Json.emptyObject; + foreach( pack, d; this.dependencies ) + deps[pack] = serializeToJson(d); + ret.dependencies = deps; + } + if (targetType != TargetType.autodetect) ret["targetType"] = targetType.to!string(); + if (!targetPath.empty) ret["targetPath"] = targetPath; + if (!targetName.empty) ret["targetName"] = targetName; + if (!workingDirectory.empty) ret["workingDirectory"] = workingDirectory; + if (!mainSourceFile.empty) ret["mainSourceFile"] = mainSourceFile; + foreach (suffix, 
arr; dflags) ret["dflags"~suffix] = serializeToJson(arr); + foreach (suffix, arr; lflags) ret["lflags"~suffix] = serializeToJson(arr); + foreach (suffix, arr; libs) ret["libs"~suffix] = serializeToJson(arr); + foreach (suffix, arr; sourceFiles) ret["sourceFiles"~suffix] = serializeToJson(arr); + foreach (suffix, arr; sourcePaths) ret["sourcePaths"~suffix] = serializeToJson(arr); + foreach (suffix, arr; excludedSourceFiles) ret["excludedSourceFiles"~suffix] = serializeToJson(arr); + foreach (suffix, arr; copyFiles) ret["copyFiles"~suffix] = serializeToJson(arr); + foreach (suffix, arr; versions) ret["versions"~suffix] = serializeToJson(arr); + foreach (suffix, arr; debugVersions) ret["debugVersions"~suffix] = serializeToJson(arr); + foreach (suffix, arr; importPaths) ret["importPaths"~suffix] = serializeToJson(arr); + foreach (suffix, arr; stringImportPaths) ret["stringImportPaths"~suffix] = serializeToJson(arr); + foreach (suffix, arr; preGenerateCommands) ret["preGenerateCommands"~suffix] = serializeToJson(arr); + foreach (suffix, arr; postGenerateCommands) ret["postGenerateCommands"~suffix] = serializeToJson(arr); + foreach (suffix, arr; preBuildCommands) ret["preBuildCommands"~suffix] = serializeToJson(arr); + foreach (suffix, arr; postBuildCommands) ret["postBuildCommands"~suffix] = serializeToJson(arr); + foreach (suffix, arr; buildRequirements) { + string[] val; + foreach (i; [EnumMembers!BuildRequirements]) + if (arr & i) val ~= to!string(i); + ret["buildRequirements"~suffix] = serializeToJson(val); + } + foreach (suffix, arr; buildOptions) { + string[] val; + foreach (i; [EnumMembers!BuildOptions]) + if (arr & i) val ~= to!string(i); + ret["buildOptions"~suffix] = serializeToJson(val); + } + return ret; + } + + /// Constructs a BuildSettings object from this template. 
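+	///
+	/// Only settings carrying a platform suffix that matches the given platform
+	/// (such as "-windows" or "-posix") are applied. Source, import and string
+	/// import files are collected from the configured paths relative to
+	/// base_path, and the resulting source file list is sorted to keep the
+	/// compiler invocation deterministic.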
+ void getPlatformSettings(ref BuildSettings dst, in BuildPlatform platform, Path base_path) + const { + dst.targetType = this.targetType; + if (!this.targetPath.empty) dst.targetPath = this.targetPath; + if (!this.targetName.empty) dst.targetName = this.targetName; + if (!this.workingDirectory.empty) dst.workingDirectory = this.workingDirectory; + if (!this.mainSourceFile.empty) { + dst.mainSourceFile = this.mainSourceFile; + dst.addSourceFiles(this.mainSourceFile); + } + + void collectFiles(string method)(in string[][string] paths_map, string pattern) + { + foreach (suffix, paths; paths_map) { + if (!platform.matchesSpecification(suffix)) + continue; + + foreach (spath; paths) { + enforce(!spath.empty, "Paths must not be empty strings."); + auto path = Path(spath); + if (!path.absolute) path = base_path ~ path; + if (!existsFile(path) || !isDir(path.toNativeString())) { + logWarn("Invalid source/import path: %s", path.toNativeString()); + continue; + } + + foreach (d; dirEntries(path.toNativeString(), pattern, SpanMode.depth)) { + if (isDir(d.name)) continue; + auto src = Path(d.name).relativeTo(base_path); + __traits(getMember, dst, method)(src.toNativeString()); + } + } + } + } + + // collect files from all source/import folders + collectFiles!"addSourceFiles"(sourcePaths, "*.d"); + collectFiles!"addImportFiles"(importPaths, "*.{d,di}"); + dst.removeImportFiles(dst.sourceFiles); + collectFiles!"addStringImportFiles"(stringImportPaths, "*"); + + // ensure a deterministic order of files as passed to the compiler + dst.sourceFiles.sort(); + + getPlatformSetting!("dflags", "addDFlags")(dst, platform); + getPlatformSetting!("lflags", "addLFlags")(dst, platform); + getPlatformSetting!("libs", "addLibs")(dst, platform); + getPlatformSetting!("sourceFiles", "addSourceFiles")(dst, platform); + getPlatformSetting!("excludedSourceFiles", "removeSourceFiles")(dst, platform); + getPlatformSetting!("copyFiles", "addCopyFiles")(dst, platform); + getPlatformSetting!("versions", "addVersions")(dst, platform); + getPlatformSetting!("debugVersions", "addDebugVersions")(dst, platform); + getPlatformSetting!("importPaths", "addImportPaths")(dst, platform); + getPlatformSetting!("stringImportPaths", "addStringImportPaths")(dst, platform); + getPlatformSetting!("preGenerateCommands", "addPreGenerateCommands")(dst, platform); + getPlatformSetting!("postGenerateCommands", "addPostGenerateCommands")(dst, platform); + getPlatformSetting!("preBuildCommands", "addPreBuildCommands")(dst, platform); + getPlatformSetting!("postBuildCommands", "addPostBuildCommands")(dst, platform); + getPlatformSetting!("buildRequirements", "addRequirements")(dst, platform); + getPlatformSetting!("buildOptions", "addOptions")(dst, platform); + } + + void getPlatformSetting(string name, string addname)(ref BuildSettings dst, in BuildPlatform platform) + const { + foreach(suffix, values; __traits(getMember, this, name)){ + if( platform.matchesSpecification(suffix) ) + __traits(getMember, dst, addname)(values); + } + } + + void warnOnSpecialCompilerFlags(string package_name, string config_name) + { + auto nodef = false; + auto noprop = false; + foreach (req; this.buildRequirements) { + if (req & BuildRequirements.noDefaultFlags) nodef = true; + if (req & BuildRequirements.relaxProperties) noprop = true; + } + + if (noprop) { + logWarn(`Warning: "buildRequirements": ["relaxProperties"] is deprecated and is now the default behavior. 
Note that the -property switch will probably be removed in future versions of DMD.`); + logWarn(""); + } + + if (nodef) { + logWarn("Warning: This package uses the \"noDefaultFlags\" build requirement. Please use only for development purposes and not for released packages."); + logWarn(""); + } else { + string[] all_dflags; + BuildOptions all_options; + foreach (flags; this.dflags) all_dflags ~= flags; + foreach (options; this.buildOptions) all_options |= options; + .warnOnSpecialCompilerFlags(all_dflags, all_options, package_name, config_name); + } + } +} + +/// Returns all package names, starting with the root package in [0]. +string[] getSubPackagePath(string package_name) +{ + return package_name.split(":"); +} + +/// Returns the name of the base package in the case of some sub package or the +/// package itself, if it is already a full package. +string getBasePackageName(string package_name) +{ + return package_name.getSubPackagePath()[0]; +} + +string getSubPackageName(string package_name) +{ + return getSubPackagePath(package_name)[1 .. $].join(":"); +} + +private string determineVersionFromSCM(Path path) +{ + import std.process; + import dub.semver; + + auto git_dir = path ~ ".git"; + if (!existsFile(git_dir) || !isDir(git_dir.toNativeString)) return null; + auto git_dir_param = "--git-dir=" ~ git_dir.toNativeString(); + + static string exec(scope string[] params...) { + auto ret = execute(params); + if (ret.status == 0) return ret.output.strip; + logDebug("'%s' failed with exit code %s: %s", params.join(" "), ret.status, ret.output.strip); + return null; + } + + if (auto tag = exec("git", git_dir_param, "describe", "--long", "--tags")) { + auto parts = tag.split("-"); + auto commit = parts[$-1]; + auto num = parts[$-2].to!int; + tag = parts[0 .. $-2].join("-"); + if (tag.startsWith("v") && isValidVersion(tag[1 .. $])) { + if (num == 0) return tag[1 .. $]; + else if (tag.canFind("+")) return format("%s.commit.%s.%s", tag[1 .. $], num, commit); + else return format("%s+commit.%s.%s", tag[1 .. $], num, commit); + } + } + + if (auto branch = exec("git", git_dir_param, "rev-parse", "--abbrev-ref", "HEAD")) { + if (branch != "HEAD") return "~" ~ branch; + } + + return null; } \ No newline at end of file diff --git a/source/dub/packagemanager.d b/source/dub/packagemanager.d index ae355b6..365a3f8 100644 --- a/source/dub/packagemanager.d +++ b/source/dub/packagemanager.d @@ -1,904 +1,904 @@ -/** - Management of packages on the local computer. - - Copyright: © 2012-2013 rejectedsoftware e.K. - License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. - Authors: Sönke Ludwig, Matthias Dondorff -*/ -module dub.packagemanager; - -import dub.dependency; -import dub.internal.utils; -import dub.internal.vibecompat.core.file; -import dub.internal.vibecompat.core.log; -import dub.internal.vibecompat.data.json; -import dub.internal.vibecompat.inet.path; -import dub.package_; - -import std.algorithm : countUntil, filter, sort, canFind, remove; -import std.array; -import std.conv; -import std.digest.sha; -import std.encoding : sanitize; -import std.exception; -import std.file; -import std.string; -import std.zip; - - -/// The PackageManager can retrieve present packages and get / remove -/// packages. 
-class PackageManager { - private { - Repository[LocalPackageType] m_repositories; - Path[] m_searchPath; - Package[] m_packages; - Package[] m_temporaryPackages; - bool m_disableDefaultSearchPaths = false; - } - - this(Path user_path, Path system_path, bool refresh_packages = true) - { - m_repositories[LocalPackageType.user] = Repository(user_path); - m_repositories[LocalPackageType.system] = Repository(system_path); - if (refresh_packages) refresh(true); - } - - @property void searchPath(Path[] paths) - { - if (paths == m_searchPath) return; - m_searchPath = paths.dup; - refresh(false); - } - @property const(Path)[] searchPath() const { return m_searchPath; } - - @property void disableDefaultSearchPaths(bool val) - { - if (val == m_disableDefaultSearchPaths) return; - m_disableDefaultSearchPaths = val; - refresh(true); - } - - @property const(Path)[] completeSearchPath() - const { - auto ret = appender!(Path[])(); - ret.put(m_searchPath); - if (!m_disableDefaultSearchPaths) { - ret.put(m_repositories[LocalPackageType.user].searchPath); - ret.put(m_repositories[LocalPackageType.user].packagePath); - ret.put(m_repositories[LocalPackageType.system].searchPath); - ret.put(m_repositories[LocalPackageType.system].packagePath); - } - return ret.data; - } - - - /** Looks up a specific package. - - Looks up a package matching the given version/path in the set of - registered packages. The lookup order is done according the the - usual rules (see getPackageIterator). - - Params: - name = The name of the package - ver = The exact version of the package to query - path = An exact path that the package must reside in. Note that - the package must still be registered in the package manager. - enable_overrides = Apply the local package override list before - returning a package (enabled by default) - - Returns: - The matching package or null if no match was found. - */ - Package getPackage(string name, Version ver, bool enable_overrides = true) - { - if (enable_overrides) { - foreach (tp; [LocalPackageType.user, LocalPackageType.system]) - foreach (ovr; m_repositories[tp].overrides) - if (ovr.package_ == name && ovr.version_.matches(ver)) { - Package pack; - if (!ovr.targetPath.empty) pack = getPackage(name, ovr.targetPath); - else pack = getPackage(name, ovr.targetVersion, false); - if (pack) return pack; - - logWarn("Package override %s %s -> %s %s doesn't reference an existing package.", - ovr.package_, ovr.version_, ovr.targetVersion, ovr.targetPath); - } - } - - foreach (p; getPackageIterator(name)) - if (p.ver == ver) - return p; - - return null; - } - - /// ditto - Package getPackage(string name, string ver, bool enable_overrides = true) - { - return getPackage(name, Version(ver), enable_overrides); - } - - /// ditto - Package getPackage(string name, Version ver, Path path) - { - auto ret = getPackage(name, path); - if (!ret || ret.ver != ver) return null; - return ret; - } - - /// ditto - Package getPackage(string name, string ver, Path path) - { - return getPackage(name, Version(ver), path); - } - - /// ditto - Package getPackage(string name, Path path) - { - foreach( p; getPackageIterator(name) ) - if (p.path.startsWith(path)) - return p; - return null; - } - - - /** Looks up the first package matching the given name. 
- */ - Package getFirstPackage(string name) - { - foreach (ep; getPackageIterator(name)) - return ep; - return null; - } - - Package getOrLoadPackage(Path path) - { - foreach (p; getPackageIterator()) - if (!p.parentPackage && p.path == path) - return p; - auto pack = new Package(path); - addPackages(m_temporaryPackages, pack); - return pack; - } - - - /** Searches for the latest version of a package matching the given dependency. - */ - Package getBestPackage(string name, Dependency version_spec, bool enable_overrides = true) - { - Package ret; - foreach (p; getPackageIterator(name)) - if (version_spec.matches(p.ver) && (!ret || p.ver > ret.ver)) - ret = p; - - if (enable_overrides && ret) { - if (auto ovr = getPackage(name, ret.ver)) - return ovr; - } - return ret; - } - - /// ditto - Package getBestPackage(string name, string version_spec) - { - return getBestPackage(name, Dependency(version_spec)); - } - - - /** Determines if a package is managed by DUB. - - Managed packages can be upgraded and removed. - */ - bool isManagedPackage(Package pack) - const { - auto ppath = pack.basePackage.path; - foreach (rep; m_repositories) { - auto rpath = rep.packagePath; - if (ppath.startsWith(rpath)) - return true; - } - return false; - } - - int delegate(int delegate(ref Package)) getPackageIterator() - { - int iterator(int delegate(ref Package) del) - { - int handlePackage(Package p) { - if (auto ret = del(p)) return ret; - foreach (sp; p.subPackages) - if (auto ret = del(sp)) - return ret; - return 0; - } - - foreach (tp; m_temporaryPackages) - if (auto ret = handlePackage(tp)) return ret; - - // first search local packages - foreach (tp; LocalPackageType.min .. LocalPackageType.max+1) - foreach (p; m_repositories[cast(LocalPackageType)tp].localPackages) - if (auto ret = handlePackage(p)) return ret; - - // and then all packages gathered from the search path - foreach( p; m_packages ) - if( auto ret = handlePackage(p) ) - return ret; - return 0; - } - - return &iterator; - } - - int delegate(int delegate(ref Package)) getPackageIterator(string name) - { - int iterator(int delegate(ref Package) del) - { - foreach (p; getPackageIterator()) - if (p.name == name) - if (auto ret = del(p)) return ret; - return 0; - } - - return &iterator; - } - - - /** Returns a list of all package overrides for the given scope. - */ - const(PackageOverride)[] getOverrides(LocalPackageType scope_) - const { - return m_repositories[scope_].overrides; - } - - /** Adds a new override for the given package. - */ - void addOverride(LocalPackageType scope_, string package_, Dependency version_spec, Version target) - { - m_repositories[scope_].overrides ~= PackageOverride(package_, version_spec, target); - writeLocalPackageOverridesFile(scope_); - } - /// ditto - void addOverride(LocalPackageType scope_, string package_, Dependency version_spec, Path target) - { - m_repositories[scope_].overrides ~= PackageOverride(package_, version_spec, target); - writeLocalPackageOverridesFile(scope_); - } - - /** Removes an existing package override. - */ - void removeOverride(LocalPackageType scope_, string package_, Dependency version_spec) - { - Repository* rep = &m_repositories[scope_]; - foreach (i, ovr; rep.overrides) { - if (ovr.package_ != package_ || ovr.version_ != version_spec) - continue; - rep.overrides = rep.overrides[0 .. i] ~ rep.overrides[i+1 .. 
$]; - writeLocalPackageOverridesFile(scope_); - return; - } - throw new Exception(format("No override exists for %s %s", package_, version_spec)); - } - - /// Extracts the package supplied as a path to it's zip file to the - /// destination and sets a version field in the package description. - Package storeFetchedPackage(Path zip_file_path, Json package_info, Path destination) - { - auto package_name = package_info.name.get!string(); - auto package_version = package_info["version"].get!string(); - auto clean_package_version = package_version[package_version.startsWith("~") ? 1 : 0 .. $]; - - logDiagnostic("Placing package '%s' version '%s' to location '%s' from file '%s'", - package_name, package_version, destination.toNativeString(), zip_file_path.toNativeString()); - - if( existsFile(destination) ){ - throw new Exception(format("%s (%s) needs to be removed from '%s' prior placement.", package_name, package_version, destination)); - } - - // open zip file - ZipArchive archive; - { - logDebug("Opening file %s", zip_file_path); - auto f = openFile(zip_file_path, FileMode.Read); - scope(exit) f.close(); - archive = new ZipArchive(f.readAll()); - } - - logDebug("Extracting from zip."); - - // In a github zip, the actual contents are in a subfolder - Path zip_prefix; - outer: foreach(ArchiveMember am; archive.directory) { - auto path = Path(am.name); - foreach (fil; packageInfoFilenames) - if (path.length == 2 && path.head.toString == fil) { - zip_prefix = path[0 .. $-1]; - break outer; - } - } - - logDebug("zip root folder: %s", zip_prefix); - - Path getCleanedPath(string fileName) { - auto path = Path(fileName); - if(zip_prefix != Path() && !path.startsWith(zip_prefix)) return Path(); - return path[zip_prefix.length..path.length]; - } - - // extract & place - mkdirRecurse(destination.toNativeString()); - auto journal = new Journal; - logDiagnostic("Copying all files..."); - int countFiles = 0; - foreach(ArchiveMember a; archive.directory) { - auto cleanedPath = getCleanedPath(a.name); - if(cleanedPath.empty) continue; - auto dst_path = destination~cleanedPath; - - logDebug("Creating %s", cleanedPath); - if( dst_path.endsWithSlash ){ - if( !existsDirectory(dst_path) ) - mkdirRecurse(dst_path.toNativeString()); - journal.add(Journal.Entry(Journal.Type.Directory, cleanedPath)); - } else { - if( !existsDirectory(dst_path.parentPath) ) - mkdirRecurse(dst_path.parentPath.toNativeString()); - auto dstFile = openFile(dst_path, FileMode.CreateTrunc); - scope(exit) dstFile.close(); - dstFile.put(archive.expand(a)); - journal.add(Journal.Entry(Journal.Type.RegularFile, cleanedPath)); - ++countFiles; - } - } - logDiagnostic("%s file(s) copied.", to!string(countFiles)); - - // overwrite dub.json (this one includes a version field) - auto pack = new Package(destination, null, package_info["version"].get!string); - - if (pack.packageInfoFile.head != defaultPackageFilename()) { - // Storeinfo saved a default file, this could be different to the file from the zip. - removeFile(pack.packageInfoFile); - journal.remove(Journal.Entry(Journal.Type.RegularFile, Path(pack.packageInfoFile.head))); - journal.add(Journal.Entry(Journal.Type.RegularFile, Path(defaultPackageFilename()))); - } - pack.storeInfo(); - - // Write journal - logDebug("Saving retrieval action journal..."); - journal.add(Journal.Entry(Journal.Type.RegularFile, Path(JournalJsonFilename))); - journal.save(destination ~ JournalJsonFilename); - - addPackages(m_packages, pack); - - return pack; - } - - /// Removes the given the package. 
- void remove(in Package pack, bool force_remove) - { - logDebug("Remove %s, version %s, path '%s'", pack.name, pack.vers, pack.path); - enforce(!pack.path.empty, "Cannot remove package "~pack.name~" without a path."); - - // delete package files physically - logDebug("Looking up journal"); - auto journalFile = pack.path~JournalJsonFilename; - if (!existsFile(journalFile)) - throw new Exception("Removal failed, no retrieval journal found for '"~pack.name~"'. Please remove the folder '%s' manually.", pack.path.toNativeString()); - - auto packagePath = pack.path; - auto journal = new Journal(journalFile); - - - // Determine all target paths/files - /*auto basebs = pack.getBuildSettings(); - foreach (conf; pack.configurations) { - auto bs = pack.getBuildSettings(conf); - auto tpath = conf.targetPath.length ? conf.targetPath : basebs.targetPath; - auto tname = conf.targetName.length ? conf.targetName : basebs.targetName; - auto ttype = conf.targetType != TargetType.auto_ ? conf.targetType : basebs.targetType; - if (ttype == TargetType.none || ttype == TargetType.auto_) continue; - foreach (n; generatePlatformNames(tname, ttype)) - // ... - }*/ - - // test if there are any untracked files - if (!force_remove) { - void checkFilesRec(Path p) - { - // TODO: ignore target paths/files - - foreach (fi; iterateDirectory(p)) { - auto fpath = p ~ fi.name; - if (fi.isDirectory) { - // Indicate a directory. - fpath.endsWithSlash(true); - // Ignore /.dub folder: This folder and its content - // are not tracked by the Journal. - if (fpath.relativeTo(pack.path) == Path(".dub/")) - continue; - checkFilesRec(fpath); - } - - auto type = fi.isDirectory ? Journal.Type.Directory : Journal.Type.RegularFile; - if (!journal.containsEntry(type, fpath.relativeTo(pack.path))) - throw new Exception("Untracked file found, aborting package removal, file: " - ~ fpath.toNativeString() ~ "\nPlease remove the package folder manually or use --force-remove."); - } - } - checkFilesRec(pack.path); - } - - // remove package from repositories' list - bool found = false; - bool removeFrom(Package[] packs, in Package pack) { - auto packPos = countUntil!("a.path == b.path")(packs, pack); - if(packPos != -1) { - packs = std.algorithm.remove(packs, packPos); - return true; - } - return false; - } - foreach(repo; m_repositories) { - if(removeFrom(repo.localPackages, pack)) { - found = true; - break; - } - } - if(!found) - found = removeFrom(m_packages, pack); - enforce(found, "Cannot remove, package not found: '"~ pack.name ~"', path: " ~ to!string(pack.path)); - - logDebug("About to delete root folder for package '%s'.", pack.path); - rmdirRecurse(pack.path.toNativeString()); - logInfo("Removed package: '"~pack.name~"'"); - } - - Package addLocalPackage(Path path, string verName, LocalPackageType type) - { - path.endsWithSlash = true; - auto pack = new Package(path); - enforce(pack.name.length, "The package has no name, defined in: " ~ path.toString()); - if (verName.length) - pack.ver = Version(verName); - - // don't double-add packages - Package[]* packs = &m_repositories[type].localPackages; - foreach (p; *packs) { - if (p.path == path) { - enforce(p.ver == pack.ver, "Adding the same local package twice with differing versions is not allowed."); - logInfo("Package is already registered: %s (version: %s)", p.name, p.ver); - return p; - } - } - - addPackages(*packs, pack); - - writeLocalPackageList(type); - - logInfo("Registered package: %s (version: %s)", pack.name, pack.ver); - return pack; - } - - void removeLocalPackage(Path path, 
LocalPackageType type) - { - path.endsWithSlash = true; - - Package[]* packs = &m_repositories[type].localPackages; - size_t[] to_remove; - foreach( i, entry; *packs ) - if( entry.path == path ) - to_remove ~= i; - enforce(to_remove.length > 0, "No "~type.to!string()~" package found at "~path.toNativeString()); - - string[Version] removed; - foreach_reverse( i; to_remove ) { - removed[(*packs)[i].ver] = (*packs)[i].name; - *packs = (*packs)[0 .. i] ~ (*packs)[i+1 .. $]; - } - - writeLocalPackageList(type); - - foreach(ver, name; removed) - logInfo("Unregistered package: %s (version: %s)", name, ver); - } - - Package getTemporaryPackage(Path path, Version ver) - { - foreach (p; m_temporaryPackages) - if (p.path == path) { - enforce(p.ver == ver, format("Package in %s is refrenced with two conflicting versions: %s vs %s", path.toNativeString(), p.ver, ver)); - return p; - } - - try { - auto pack = new Package(path); - enforce(pack.name.length, "The package has no name, defined in: " ~ path.toString()); - pack.ver = ver; - addPackages(m_temporaryPackages, pack); - return pack; - } catch (Exception e) { - logDiagnostic("Error loading package at %s: %s", path.toNativeString(), e.toString().sanitize); - throw new Exception(format("Failed to add temporary package at %s: %s", path.toNativeString(), e.msg)); - } - } - - Package getTemporaryPackage(Path path) - { - foreach (p; m_temporaryPackages) - if (p.path == path) - return p; - - auto pack = new Package(path); - enforce(pack.name.length, "The package has no name, defined in: " ~ path.toString()); - addPackages(m_temporaryPackages, pack); - return pack; - } - - /// For the given type add another path where packages will be looked up. - void addSearchPath(Path path, LocalPackageType type) - { - m_repositories[type].searchPath ~= path; - writeLocalPackageList(type); - } - - /// Removes a search path from the given type. - void removeSearchPath(Path path, LocalPackageType type) - { - m_repositories[type].searchPath = m_repositories[type].searchPath.filter!(p => p != path)().array(); - writeLocalPackageList(type); - } - - void refresh(bool refresh_existing_packages) - { - logDiagnostic("Refreshing local packages (refresh existing: %s)...", refresh_existing_packages); - - // load locally defined packages - void scanLocalPackages(LocalPackageType type) - { - Path list_path = m_repositories[type].packagePath; - Package[] packs; - Path[] paths; - if (!m_disableDefaultSearchPaths) try { - auto local_package_file = list_path ~ LocalPackagesFilename; - logDiagnostic("Looking for local package map at %s", local_package_file.toNativeString()); - if( !existsFile(local_package_file) ) return; - logDiagnostic("Try to load local package map at %s", local_package_file.toNativeString()); - auto packlist = jsonFromFile(list_path ~ LocalPackagesFilename); - enforce(packlist.type == Json.Type.array, LocalPackagesFilename~" must contain an array."); - foreach( pentry; packlist ){ - try { - auto name = pentry.name.get!string(); - auto path = Path(pentry.path.get!string()); - if (name == "*") { - paths ~= path; - } else { - auto ver = Version(pentry["version"].get!string()); - - Package pp; - if (!refresh_existing_packages) { - foreach (p; m_repositories[type].localPackages) - if (p.path == path) { - pp = p; - break; - } - } - - if (!pp) { - if (Package.isPackageAt(path)) pp = new Package(path); - else { - logWarn("Locally registered package %s %s was not found. 
Please run \"dub remove-local %s\".", - name, ver, path.toNativeString()); - auto info = Json.emptyObject; - info.name = name; - pp = new Package(info, path); - } - } - - if (pp.name != name) - logWarn("Local package at %s has different name than %s (%s)", path.toNativeString(), name, pp.name); - pp.ver = ver; - - addPackages(packs, pp); - } - } catch( Exception e ){ - logWarn("Error adding local package: %s", e.msg); - } - } - } catch( Exception e ){ - logDiagnostic("Loading of local package list at %s failed: %s", list_path.toNativeString(), e.msg); - } - m_repositories[type].localPackages = packs; - m_repositories[type].searchPath = paths; - } - scanLocalPackages(LocalPackageType.system); - scanLocalPackages(LocalPackageType.user); - - auto old_packages = m_packages; - - // rescan the system and user package folder - void scanPackageFolder(Path path) - { - if( path.existsDirectory() ){ - logDebug("iterating dir %s", path.toNativeString()); - try foreach( pdir; iterateDirectory(path) ){ - logDebug("iterating dir %s entry %s", path.toNativeString(), pdir.name); - if( !pdir.isDirectory ) continue; - auto pack_path = path ~ pdir.name; - if (!Package.isPackageAt(pack_path)) continue; - Package p; - try { - if (!refresh_existing_packages) - foreach (pp; old_packages) - if (pp.path == pack_path) { - p = pp; - break; - } - if (!p) p = new Package(pack_path); - addPackages(m_packages, p); - } catch( Exception e ){ - logError("Failed to load package in %s: %s", pack_path, e.msg); - logDiagnostic("Full error: %s", e.toString().sanitize()); - } - } - catch(Exception e) logDiagnostic("Failed to enumerate %s packages: %s", path.toNativeString(), e.toString()); - } - } - - m_packages = null; - foreach (p; this.completeSearchPath) - scanPackageFolder(p); - - void loadOverrides(LocalPackageType type) - { - m_repositories[type].overrides = null; - auto ovrfilepath = m_repositories[type].packagePath ~ LocalOverridesFilename; - if (existsFile(ovrfilepath)) { - foreach (entry; jsonFromFile(ovrfilepath)) { - PackageOverride ovr; - ovr.package_ = entry.name.get!string; - ovr.version_ = Dependency(entry["version"].get!string); - if (auto pv = "targetVersion" in entry) ovr.targetVersion = Version(pv.get!string); - if (auto pv = "targetPath" in entry) ovr.targetPath = Path(pv.get!string); - m_repositories[type].overrides ~= ovr; - } - } - } - loadOverrides(LocalPackageType.user); - loadOverrides(LocalPackageType.system); - } - - alias ubyte[] Hash; - /// Generates a hash value for a given package. - /// Some files or folders are ignored during the generation (like .dub and - /// .svn folders) - Hash hashPackage(Package pack) - { - string[] ignored_directories = [".git", ".dub", ".svn"]; - // something from .dub_ignore or what? 
- string[] ignored_files = []; - SHA1 sha1; - foreach(file; dirEntries(pack.path.toNativeString(), SpanMode.depth)) { - if(file.isDir && ignored_directories.canFind(Path(file.name).head.toString())) - continue; - else if(ignored_files.canFind(Path(file.name).head.toString())) - continue; - - sha1.put(cast(ubyte[])Path(file.name).head.toString()); - if(file.isDir) { - logDebug("Hashed directory name %s", Path(file.name).head); - } - else { - sha1.put(openFile(Path(file.name)).readAll()); - logDebug("Hashed file contents from %s", Path(file.name).head); - } - } - auto hash = sha1.finish(); - logDebug("Project hash: %s", hash); - return hash[0..$]; - } - - private void writeLocalPackageList(LocalPackageType type) - { - Json[] newlist; - foreach (p; m_repositories[type].searchPath) { - auto entry = Json.emptyObject; - entry.name = "*"; - entry.path = p.toNativeString(); - newlist ~= entry; - } - - foreach (p; m_repositories[type].localPackages) { - if (p.parentPackage) continue; // do not store sub packages - auto entry = Json.emptyObject; - entry["name"] = p.name; - entry["version"] = p.ver.toString(); - entry["path"] = p.path.toNativeString(); - newlist ~= entry; - } - - Path path = m_repositories[type].packagePath; - if( !existsDirectory(path) ) mkdirRecurse(path.toNativeString()); - writeJsonFile(path ~ LocalPackagesFilename, Json(newlist)); - } - - private void writeLocalPackageOverridesFile(LocalPackageType type) - { - Json[] newlist; - foreach (ovr; m_repositories[type].overrides) { - auto jovr = Json.emptyObject; - jovr.name = ovr.package_; - jovr["version"] = ovr.version_.versionString; - if (!ovr.targetPath.empty) jovr.targetPath = ovr.targetPath.toNativeString(); - else jovr.targetVersion = ovr.targetVersion.toString(); - newlist ~= jovr; - } - auto path = m_repositories[type].packagePath; - if (!existsDirectory(path)) mkdirRecurse(path.toNativeString()); - writeJsonFile(path ~ LocalOverridesFilename, Json(newlist)); - } - - /// Adds the package and scans for subpackages. - private void addPackages(ref Package[] dst_repos, Package pack) - const { - // Add the main package. - dst_repos ~= pack; - - // Additionally to the internally defined subpackages, whose metadata - // is loaded with the main dub.json, load all externally defined - // packages after the package is available with all the data. - foreach (sub_path; pack.exportedPackages) { - auto path = pack.path ~ sub_path; - if (!existsFile(path)) { - logError("Package %s declared a sub-package, definition file is missing: %s", pack.name, path.toNativeString()); - continue; - } - // Add the subpackage. 
- try { - dst_repos ~= new Package(path, pack); - } catch (Exception e) { - logError("Package '%s': Failed to load sub-package in %s, error: %s", pack.name, path.toNativeString(), e.msg); - logDiagnostic("Full error: %s", e.toString().sanitize()); - } - } - } -} - -struct PackageOverride { - string package_; - Dependency version_; - Version targetVersion; - Path targetPath; - - this(string package_, Dependency version_, Version target_version) - { - this.package_ = package_; - this.version_ = version_; - this.targetVersion = target_version; - } - - this(string package_, Dependency version_, Path target_path) - { - this.package_ = package_; - this.version_ = version_; - this.targetPath = target_path; - } -} - -enum LocalPackageType { - user, - system -} - -enum JournalJsonFilename = "journal.json"; -enum LocalPackagesFilename = "local-packages.json"; -enum LocalOverridesFilename = "local-overrides.json"; - - -private struct Repository { - Path path; - Path packagePath; - Path[] searchPath; - Package[] localPackages; - PackageOverride[] overrides; - - this(Path path) - { - this.path = path; - this.packagePath = path ~"packages/"; - } -} - - -/* - Retrieval journal for later removal, keeping track of placed files - files. - - Example Json: - --- - { - "version": 1, - "files": { - "file1": "typeoffile1", - ... - } - } - --- -*/ -private class Journal { - private enum Version = 1; - - enum Type { - RegularFile, - Directory, - Alien - } - - struct Entry { - this( Type t, Path f ) { type = t; relFilename = f; } - Type type; - Path relFilename; - } - - @property const(Entry[]) entries() const { return m_entries; } - - this() {} - - /// Initializes a Journal from a json file. - this(Path journalFile) { - auto jsonJournal = jsonFromFile(journalFile); - enforce(cast(int)jsonJournal["Version"] == Version, "Mismatched version: "~to!string(cast(int)jsonJournal["Version"]) ~ "vs. " ~to!string(Version)); - foreach(string file, type; jsonJournal["Files"]) - m_entries ~= Entry(to!Type(cast(string)type), Path(file)); - } - - void add(Entry e) { - foreach(Entry ent; entries) { - if( e.relFilename == ent.relFilename ) { - enforce(e.type == ent.type, "Duplicate('"~to!string(e.relFilename)~"'), different types: "~to!string(e.type)~" vs. "~to!string(ent.type)); - return; - } - } - m_entries ~= e; - } - - void remove(Entry e) { - foreach(i, Entry ent; entries) { - if( e.relFilename == ent.relFilename ) { - m_entries = std.algorithm.remove(m_entries, i); - return; - } - } - enforce(false, "Cannot remove entry, not available: " ~ e.relFilename.toNativeString()); - } - - /// Save the current state to the path. - void save(Path path) { - Json jsonJournal = serialize(); - auto fileJournal = openFile(path, FileMode.CreateTrunc); - scope(exit) fileJournal.close(); - fileJournal.writePrettyJsonString(jsonJournal); - } - - bool containsEntry(Type type, Path path) - const { - foreach (e; entries) - if (e.type == type && e.relFilename == path) - return true; - return false; - } - - private Json serialize() const { - Json[string] files; - foreach(Entry e; m_entries) - files[to!string(e.relFilename)] = to!string(e.type); - Json[string] json; - json["Version"] = Version; - json["Files"] = files; - return Json(json); - } - - private { - Entry[] m_entries; - } -} +/** + Management of packages on the local computer. + + Copyright: © 2012-2013 rejectedsoftware e.K. + License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
+	Authors: Sönke Ludwig, Matthias Dondorff
+*/
+module dub.packagemanager;
+
+import dub.dependency;
+import dub.internal.utils;
+import dub.internal.vibecompat.core.file;
+import dub.internal.vibecompat.core.log;
+import dub.internal.vibecompat.data.json;
+import dub.internal.vibecompat.inet.path;
+import dub.package_;
+
+import std.algorithm : countUntil, filter, sort, canFind, remove;
+import std.array;
+import std.conv;
+import std.digest.sha;
+import std.encoding : sanitize;
+import std.exception;
+import std.file;
+import std.string;
+import std.zip;
+
+
+/// The PackageManager keeps track of locally present packages and can
+/// retrieve and remove packages.
+class PackageManager {
+	private {
+		Repository[LocalPackageType] m_repositories;
+		Path[] m_searchPath;
+		Package[] m_packages;
+		Package[] m_temporaryPackages;
+		bool m_disableDefaultSearchPaths = false;
+	}
+
+	this(Path user_path, Path system_path, bool refresh_packages = true)
+	{
+		m_repositories[LocalPackageType.user] = Repository(user_path);
+		m_repositories[LocalPackageType.system] = Repository(system_path);
+		if (refresh_packages) refresh(true);
+	}
+
+	@property void searchPath(Path[] paths)
+	{
+		if (paths == m_searchPath) return;
+		m_searchPath = paths.dup;
+		refresh(false);
+	}
+	@property const(Path)[] searchPath() const { return m_searchPath; }
+
+	@property void disableDefaultSearchPaths(bool val)
+	{
+		if (val == m_disableDefaultSearchPaths) return;
+		m_disableDefaultSearchPaths = val;
+		refresh(true);
+	}
+
+	@property const(Path)[] completeSearchPath()
+	const {
+		auto ret = appender!(Path[])();
+		ret.put(m_searchPath);
+		if (!m_disableDefaultSearchPaths) {
+			ret.put(m_repositories[LocalPackageType.user].searchPath);
+			ret.put(m_repositories[LocalPackageType.user].packagePath);
+			ret.put(m_repositories[LocalPackageType.system].searchPath);
+			ret.put(m_repositories[LocalPackageType.system].packagePath);
+		}
+		return ret.data;
+	}
+
+
+	/** Looks up a specific package.
+
+		Looks up a package matching the given version/path in the set of
+		registered packages. The lookup order follows the usual rules
+		(see getPackageIterator).
+
+		Params:
+			name = The name of the package
+			ver = The exact version of the package to query
+			path = An exact path that the package must reside in. Note that
+				the package must still be registered in the package manager.
+			enable_overrides = Apply the local package override list before
+				returning a package (enabled by default)
+
+		Returns:
+			The matching package or null if no match was found.
+ */ + Package getPackage(string name, Version ver, bool enable_overrides = true) + { + if (enable_overrides) { + foreach (tp; [LocalPackageType.user, LocalPackageType.system]) + foreach (ovr; m_repositories[tp].overrides) + if (ovr.package_ == name && ovr.version_.matches(ver)) { + Package pack; + if (!ovr.targetPath.empty) pack = getPackage(name, ovr.targetPath); + else pack = getPackage(name, ovr.targetVersion, false); + if (pack) return pack; + + logWarn("Package override %s %s -> %s %s doesn't reference an existing package.", + ovr.package_, ovr.version_, ovr.targetVersion, ovr.targetPath); + } + } + + foreach (p; getPackageIterator(name)) + if (p.ver == ver) + return p; + + return null; + } + + /// ditto + Package getPackage(string name, string ver, bool enable_overrides = true) + { + return getPackage(name, Version(ver), enable_overrides); + } + + /// ditto + Package getPackage(string name, Version ver, Path path) + { + auto ret = getPackage(name, path); + if (!ret || ret.ver != ver) return null; + return ret; + } + + /// ditto + Package getPackage(string name, string ver, Path path) + { + return getPackage(name, Version(ver), path); + } + + /// ditto + Package getPackage(string name, Path path) + { + foreach( p; getPackageIterator(name) ) + if (p.path.startsWith(path)) + return p; + return null; + } + + + /** Looks up the first package matching the given name. + */ + Package getFirstPackage(string name) + { + foreach (ep; getPackageIterator(name)) + return ep; + return null; + } + + Package getOrLoadPackage(Path path) + { + foreach (p; getPackageIterator()) + if (!p.parentPackage && p.path == path) + return p; + auto pack = new Package(path); + addPackages(m_temporaryPackages, pack); + return pack; + } + + + /** Searches for the latest version of a package matching the given dependency. + */ + Package getBestPackage(string name, Dependency version_spec, bool enable_overrides = true) + { + Package ret; + foreach (p; getPackageIterator(name)) + if (version_spec.matches(p.ver) && (!ret || p.ver > ret.ver)) + ret = p; + + if (enable_overrides && ret) { + if (auto ovr = getPackage(name, ret.ver)) + return ovr; + } + return ret; + } + + /// ditto + Package getBestPackage(string name, string version_spec) + { + return getBestPackage(name, Dependency(version_spec)); + } + + + /** Determines if a package is managed by DUB. + + Managed packages can be upgraded and removed. + */ + bool isManagedPackage(Package pack) + const { + auto ppath = pack.basePackage.path; + foreach (rep; m_repositories) { + auto rpath = rep.packagePath; + if (ppath.startsWith(rpath)) + return true; + } + return false; + } + + int delegate(int delegate(ref Package)) getPackageIterator() + { + int iterator(int delegate(ref Package) del) + { + int handlePackage(Package p) { + if (auto ret = del(p)) return ret; + foreach (sp; p.subPackages) + if (auto ret = del(sp)) + return ret; + return 0; + } + + foreach (tp; m_temporaryPackages) + if (auto ret = handlePackage(tp)) return ret; + + // first search local packages + foreach (tp; LocalPackageType.min .. 
LocalPackageType.max+1)
+				foreach (p; m_repositories[cast(LocalPackageType)tp].localPackages)
+					if (auto ret = handlePackage(p)) return ret;
+
+			// and then all packages gathered from the search path
+			foreach( p; m_packages )
+				if( auto ret = handlePackage(p) )
+					return ret;
+			return 0;
+		}
+
+		return &iterator;
+	}
+
+	int delegate(int delegate(ref Package)) getPackageIterator(string name)
+	{
+		int iterator(int delegate(ref Package) del)
+		{
+			foreach (p; getPackageIterator())
+				if (p.name == name)
+					if (auto ret = del(p)) return ret;
+			return 0;
+		}
+
+		return &iterator;
+	}
+
+
+	/** Returns a list of all package overrides for the given scope.
+	*/
+	const(PackageOverride)[] getOverrides(LocalPackageType scope_)
+	const {
+		return m_repositories[scope_].overrides;
+	}
+
+	/** Adds a new override for the given package.
+	*/
+	void addOverride(LocalPackageType scope_, string package_, Dependency version_spec, Version target)
+	{
+		m_repositories[scope_].overrides ~= PackageOverride(package_, version_spec, target);
+		writeLocalPackageOverridesFile(scope_);
+	}
+	/// ditto
+	void addOverride(LocalPackageType scope_, string package_, Dependency version_spec, Path target)
+	{
+		m_repositories[scope_].overrides ~= PackageOverride(package_, version_spec, target);
+		writeLocalPackageOverridesFile(scope_);
+	}
+
+	/** Removes an existing package override.
+	*/
+	void removeOverride(LocalPackageType scope_, string package_, Dependency version_spec)
+	{
+		Repository* rep = &m_repositories[scope_];
+		foreach (i, ovr; rep.overrides) {
+			if (ovr.package_ != package_ || ovr.version_ != version_spec)
+				continue;
+			rep.overrides = rep.overrides[0 .. i] ~ rep.overrides[i+1 .. $];
+			writeLocalPackageOverridesFile(scope_);
+			return;
+		}
+		throw new Exception(format("No override exists for %s %s", package_, version_spec));
+	}
+
+	/// Extracts the package, supplied as a path to its zip file, to the
+	/// destination and sets a version field in the package description.
+	Package storeFetchedPackage(Path zip_file_path, Json package_info, Path destination)
+	{
+		auto package_name = package_info.name.get!string();
+		auto package_version = package_info["version"].get!string();
+		auto clean_package_version = package_version[package_version.startsWith("~") ? 1 : 0 .. $];
+
+		logDiagnostic("Placing package '%s' version '%s' to location '%s' from file '%s'",
+			package_name, package_version, destination.toNativeString(), zip_file_path.toNativeString());
+
+		if( existsFile(destination) ){
+			throw new Exception(format("%s (%s) needs to be removed from '%s' prior to placement.", package_name, package_version, destination));
+		}
+
+		// open zip file
+		ZipArchive archive;
+		{
+			logDebug("Opening file %s", zip_file_path);
+			auto f = openFile(zip_file_path, FileMode.Read);
+			scope(exit) f.close();
+			archive = new ZipArchive(f.readAll());
+		}
+
+		logDebug("Extracting from zip.");
+
+		// In a github zip, the actual contents are in a subfolder
+		Path zip_prefix;
+		outer: foreach(ArchiveMember am; archive.directory) {
+			auto path = Path(am.name);
+			foreach (fil; packageInfoFilenames)
+				if (path.length == 2 && path.head.toString == fil) {
+					zip_prefix = path[0 ..
$-1];
+					break outer;
+				}
+		}
+
+		logDebug("zip root folder: %s", zip_prefix);
+
+		Path getCleanedPath(string fileName) {
+			auto path = Path(fileName);
+			if(zip_prefix != Path() && !path.startsWith(zip_prefix)) return Path();
+			return path[zip_prefix.length..path.length];
+		}
+
+		// extract & place
+		mkdirRecurse(destination.toNativeString());
+		auto journal = new Journal;
+		logDiagnostic("Copying all files...");
+		int countFiles = 0;
+		foreach(ArchiveMember a; archive.directory) {
+			auto cleanedPath = getCleanedPath(a.name);
+			if(cleanedPath.empty) continue;
+			auto dst_path = destination~cleanedPath;
+
+			logDebug("Creating %s", cleanedPath);
+			if( dst_path.endsWithSlash ){
+				if( !existsDirectory(dst_path) )
+					mkdirRecurse(dst_path.toNativeString());
+				journal.add(Journal.Entry(Journal.Type.Directory, cleanedPath));
+			} else {
+				if( !existsDirectory(dst_path.parentPath) )
+					mkdirRecurse(dst_path.parentPath.toNativeString());
+				auto dstFile = openFile(dst_path, FileMode.CreateTrunc);
+				scope(exit) dstFile.close();
+				dstFile.put(archive.expand(a));
+				journal.add(Journal.Entry(Journal.Type.RegularFile, cleanedPath));
+				++countFiles;
+			}
+		}
+		logDiagnostic("%s file(s) copied.", to!string(countFiles));
+
+		// overwrite dub.json (this one includes a version field)
+		auto pack = new Package(destination, null, package_info["version"].get!string);
+
+		if (pack.packageInfoFile.head != defaultPackageFilename()) {
+			// storeInfo() will write the default file name, which may differ from the file that came with the zip.
+			removeFile(pack.packageInfoFile);
+			journal.remove(Journal.Entry(Journal.Type.RegularFile, Path(pack.packageInfoFile.head)));
+			journal.add(Journal.Entry(Journal.Type.RegularFile, Path(defaultPackageFilename())));
+		}
+		pack.storeInfo();
+
+		// Write journal
+		logDebug("Saving retrieval action journal...");
+		journal.add(Journal.Entry(Journal.Type.RegularFile, Path(JournalJsonFilename)));
+		journal.save(destination ~ JournalJsonFilename);
+
+		addPackages(m_packages, pack);
+
+		return pack;
+	}
+
+	/// Removes the given package.
+	void remove(in Package pack, bool force_remove)
+	{
+		logDebug("Remove %s, version %s, path '%s'", pack.name, pack.vers, pack.path);
+		enforce(!pack.path.empty, "Cannot remove package "~pack.name~" without a path.");
+
+		// delete package files physically
+		logDebug("Looking up journal");
+		auto journalFile = pack.path~JournalJsonFilename;
+		if (!existsFile(journalFile))
+			throw new Exception(format("Removal failed, no retrieval journal found for '%s'. Please remove the folder '%s' manually.",
+				pack.name, pack.path.toNativeString()));
+
+		auto packagePath = pack.path;
+		auto journal = new Journal(journalFile);
+
+
+		// Determine all target paths/files
+		/*auto basebs = pack.getBuildSettings();
+		foreach (conf; pack.configurations) {
+			auto bs = pack.getBuildSettings(conf);
+			auto tpath = conf.targetPath.length ? conf.targetPath : basebs.targetPath;
+			auto tname = conf.targetName.length ? conf.targetName : basebs.targetName;
+			auto ttype = conf.targetType != TargetType.auto_ ? conf.targetType : basebs.targetType;
+			if (ttype == TargetType.none || ttype == TargetType.auto_) continue;
+			foreach (n; generatePlatformNames(tname, ttype))
+				// ...
+		}*/
+
+		// test if there are any untracked files
+		if (!force_remove) {
+			void checkFilesRec(Path p)
+			{
+				// TODO: ignore target paths/files
+
+				foreach (fi; iterateDirectory(p)) {
+					auto fpath = p ~ fi.name;
+					if (fi.isDirectory) {
+						// Indicate a directory.
+						fpath.endsWithSlash(true);
+						// Ignore /.dub folder: This folder and its content
+						// are not tracked by the Journal.
+						if (fpath.relativeTo(pack.path) == Path(".dub/"))
+							continue;
+						checkFilesRec(fpath);
+					}
+
+					auto type = fi.isDirectory ? Journal.Type.Directory : Journal.Type.RegularFile;
+					if (!journal.containsEntry(type, fpath.relativeTo(pack.path)))
+						throw new Exception("Untracked file found, aborting package removal, file: "
+							~ fpath.toNativeString() ~ "\nPlease remove the package folder manually or use --force-remove.");
+				}
+			}
+			checkFilesRec(pack.path);
+		}
+
+		// remove package from repositories' list
+		bool found = false;
+		// the array has to be taken by reference, otherwise the removal only affects a local copy
+		bool removeFrom(ref Package[] packs, in Package pack) {
+			auto packPos = countUntil!("a.path == b.path")(packs, pack);
+			if(packPos != -1) {
+				packs = std.algorithm.remove(packs, packPos);
+				return true;
+			}
+			return false;
+		}
+		foreach(ref repo; m_repositories) {
+			if(removeFrom(repo.localPackages, pack)) {
+				found = true;
+				break;
+			}
+		}
+		if(!found)
+			found = removeFrom(m_packages, pack);
+		enforce(found, "Cannot remove, package not found: '"~ pack.name ~"', path: " ~ to!string(pack.path));
+
+		logDebug("About to delete root folder for package '%s'.", pack.path);
+		rmdirRecurse(pack.path.toNativeString());
+		logInfo("Removed package: '"~pack.name~"'");
+	}
+
+	Package addLocalPackage(Path path, string verName, LocalPackageType type)
+	{
+		path.endsWithSlash = true;
+		auto pack = new Package(path);
+		enforce(pack.name.length, "The package has no name, defined in: " ~ path.toString());
+		if (verName.length)
+			pack.ver = Version(verName);
+
+		// don't double-add packages
+		Package[]* packs = &m_repositories[type].localPackages;
+		foreach (p; *packs) {
+			if (p.path == path) {
+				enforce(p.ver == pack.ver, "Adding the same local package twice with differing versions is not allowed.");
+				logInfo("Package is already registered: %s (version: %s)", p.name, p.ver);
+				return p;
+			}
+		}
+
+		addPackages(*packs, pack);
+
+		writeLocalPackageList(type);
+
+		logInfo("Registered package: %s (version: %s)", pack.name, pack.ver);
+		return pack;
+	}
+
+	void removeLocalPackage(Path path, LocalPackageType type)
+	{
+		path.endsWithSlash = true;
+
+		Package[]* packs = &m_repositories[type].localPackages;
+		size_t[] to_remove;
+		foreach( i, entry; *packs )
+			if( entry.path == path )
+				to_remove ~= i;
+		enforce(to_remove.length > 0, "No "~type.to!string()~" package found at "~path.toNativeString());
+
+		string[Version] removed;
+		foreach_reverse( i; to_remove ) {
+			removed[(*packs)[i].ver] = (*packs)[i].name;
+			*packs = (*packs)[0 .. i] ~ (*packs)[i+1 ..
$];
+		}
+
+		writeLocalPackageList(type);
+
+		foreach(ver, name; removed)
+			logInfo("Unregistered package: %s (version: %s)", name, ver);
+	}
+
+	Package getTemporaryPackage(Path path, Version ver)
+	{
+		foreach (p; m_temporaryPackages)
+			if (p.path == path) {
+				enforce(p.ver == ver, format("Package in %s is referenced with two conflicting versions: %s vs %s", path.toNativeString(), p.ver, ver));
+				return p;
+			}
+
+		try {
+			auto pack = new Package(path);
+			enforce(pack.name.length, "The package has no name, defined in: " ~ path.toString());
+			pack.ver = ver;
+			addPackages(m_temporaryPackages, pack);
+			return pack;
+		} catch (Exception e) {
+			logDiagnostic("Error loading package at %s: %s", path.toNativeString(), e.toString().sanitize);
+			throw new Exception(format("Failed to add temporary package at %s: %s", path.toNativeString(), e.msg));
+		}
+	}
+
+	Package getTemporaryPackage(Path path)
+	{
+		foreach (p; m_temporaryPackages)
+			if (p.path == path)
+				return p;
+
+		auto pack = new Package(path);
+		enforce(pack.name.length, "The package has no name, defined in: " ~ path.toString());
+		addPackages(m_temporaryPackages, pack);
+		return pack;
+	}
+
+	/// For the given type, adds another path where packages will be looked up.
+	void addSearchPath(Path path, LocalPackageType type)
+	{
+		m_repositories[type].searchPath ~= path;
+		writeLocalPackageList(type);
+	}
+
+	/// Removes a search path from the given type.
+	void removeSearchPath(Path path, LocalPackageType type)
+	{
+		m_repositories[type].searchPath = m_repositories[type].searchPath.filter!(p => p != path)().array();
+		writeLocalPackageList(type);
+	}
+
+	void refresh(bool refresh_existing_packages)
+	{
+		logDiagnostic("Refreshing local packages (refresh existing: %s)...", refresh_existing_packages);
+
+		// load locally defined packages
+		void scanLocalPackages(LocalPackageType type)
+		{
+			Path list_path = m_repositories[type].packagePath;
+			Package[] packs;
+			Path[] paths;
+			if (!m_disableDefaultSearchPaths) try {
+				auto local_package_file = list_path ~ LocalPackagesFilename;
+				logDiagnostic("Looking for local package map at %s", local_package_file.toNativeString());
+				if( !existsFile(local_package_file) ) return;
+				logDiagnostic("Trying to load local package map at %s", local_package_file.toNativeString());
+				auto packlist = jsonFromFile(list_path ~ LocalPackagesFilename);
+				enforce(packlist.type == Json.Type.array, LocalPackagesFilename~" must contain an array.");
+				foreach( pentry; packlist ){
+					try {
+						auto name = pentry.name.get!string();
+						auto path = Path(pentry.path.get!string());
+						if (name == "*") {
+							paths ~= path;
+						} else {
+							auto ver = Version(pentry["version"].get!string());
+
+							Package pp;
+							if (!refresh_existing_packages) {
+								foreach (p; m_repositories[type].localPackages)
+									if (p.path == path) {
+										pp = p;
+										break;
+									}
+							}
+
+							if (!pp) {
+								if (Package.isPackageAt(path)) pp = new Package(path);
+								else {
+									logWarn("Locally registered package %s %s was not found.
Please run \"dub remove-local %s\".", + name, ver, path.toNativeString()); + auto info = Json.emptyObject; + info.name = name; + pp = new Package(info, path); + } + } + + if (pp.name != name) + logWarn("Local package at %s has different name than %s (%s)", path.toNativeString(), name, pp.name); + pp.ver = ver; + + addPackages(packs, pp); + } + } catch( Exception e ){ + logWarn("Error adding local package: %s", e.msg); + } + } + } catch( Exception e ){ + logDiagnostic("Loading of local package list at %s failed: %s", list_path.toNativeString(), e.msg); + } + m_repositories[type].localPackages = packs; + m_repositories[type].searchPath = paths; + } + scanLocalPackages(LocalPackageType.system); + scanLocalPackages(LocalPackageType.user); + + auto old_packages = m_packages; + + // rescan the system and user package folder + void scanPackageFolder(Path path) + { + if( path.existsDirectory() ){ + logDebug("iterating dir %s", path.toNativeString()); + try foreach( pdir; iterateDirectory(path) ){ + logDebug("iterating dir %s entry %s", path.toNativeString(), pdir.name); + if( !pdir.isDirectory ) continue; + auto pack_path = path ~ pdir.name; + if (!Package.isPackageAt(pack_path)) continue; + Package p; + try { + if (!refresh_existing_packages) + foreach (pp; old_packages) + if (pp.path == pack_path) { + p = pp; + break; + } + if (!p) p = new Package(pack_path); + addPackages(m_packages, p); + } catch( Exception e ){ + logError("Failed to load package in %s: %s", pack_path, e.msg); + logDiagnostic("Full error: %s", e.toString().sanitize()); + } + } + catch(Exception e) logDiagnostic("Failed to enumerate %s packages: %s", path.toNativeString(), e.toString()); + } + } + + m_packages = null; + foreach (p; this.completeSearchPath) + scanPackageFolder(p); + + void loadOverrides(LocalPackageType type) + { + m_repositories[type].overrides = null; + auto ovrfilepath = m_repositories[type].packagePath ~ LocalOverridesFilename; + if (existsFile(ovrfilepath)) { + foreach (entry; jsonFromFile(ovrfilepath)) { + PackageOverride ovr; + ovr.package_ = entry.name.get!string; + ovr.version_ = Dependency(entry["version"].get!string); + if (auto pv = "targetVersion" in entry) ovr.targetVersion = Version(pv.get!string); + if (auto pv = "targetPath" in entry) ovr.targetPath = Path(pv.get!string); + m_repositories[type].overrides ~= ovr; + } + } + } + loadOverrides(LocalPackageType.user); + loadOverrides(LocalPackageType.system); + } + + alias ubyte[] Hash; + /// Generates a hash value for a given package. + /// Some files or folders are ignored during the generation (like .dub and + /// .svn folders) + Hash hashPackage(Package pack) + { + string[] ignored_directories = [".git", ".dub", ".svn"]; + // something from .dub_ignore or what? 
+		string[] ignored_files = [];
+		SHA1 sha1;
+		foreach(file; dirEntries(pack.path.toNativeString(), SpanMode.depth)) {
+			if(file.isDir && ignored_directories.canFind(Path(file.name).head.toString()))
+				continue;
+			else if(ignored_files.canFind(Path(file.name).head.toString()))
+				continue;
+
+			sha1.put(cast(ubyte[])Path(file.name).head.toString());
+			if(file.isDir) {
+				logDebug("Hashed directory name %s", Path(file.name).head);
+			}
+			else {
+				sha1.put(openFile(Path(file.name)).readAll());
+				logDebug("Hashed file contents from %s", Path(file.name).head);
+			}
+		}
+		auto hash = sha1.finish();
+		logDebug("Project hash: %s", hash);
+		return hash[0..$];
+	}
+
+	private void writeLocalPackageList(LocalPackageType type)
+	{
+		Json[] newlist;
+		foreach (p; m_repositories[type].searchPath) {
+			auto entry = Json.emptyObject;
+			entry.name = "*";
+			entry.path = p.toNativeString();
+			newlist ~= entry;
+		}
+
+		foreach (p; m_repositories[type].localPackages) {
+			if (p.parentPackage) continue; // do not store sub packages
+			auto entry = Json.emptyObject;
+			entry["name"] = p.name;
+			entry["version"] = p.ver.toString();
+			entry["path"] = p.path.toNativeString();
+			newlist ~= entry;
+		}
+
+		Path path = m_repositories[type].packagePath;
+		if( !existsDirectory(path) ) mkdirRecurse(path.toNativeString());
+		writeJsonFile(path ~ LocalPackagesFilename, Json(newlist));
+	}
+
+	private void writeLocalPackageOverridesFile(LocalPackageType type)
+	{
+		Json[] newlist;
+		foreach (ovr; m_repositories[type].overrides) {
+			auto jovr = Json.emptyObject;
+			jovr.name = ovr.package_;
+			jovr["version"] = ovr.version_.versionString;
+			if (!ovr.targetPath.empty) jovr.targetPath = ovr.targetPath.toNativeString();
+			else jovr.targetVersion = ovr.targetVersion.toString();
+			newlist ~= jovr;
+		}
+		auto path = m_repositories[type].packagePath;
+		if (!existsDirectory(path)) mkdirRecurse(path.toNativeString());
+		writeJsonFile(path ~ LocalOverridesFilename, Json(newlist));
+	}
+
+	/// Adds the package and scans for subpackages.
+	private void addPackages(ref Package[] dst_repos, Package pack)
+	const {
+		// Add the main package.
+		dst_repos ~= pack;
+
+		// In addition to the internally defined sub-packages, whose metadata
+		// is loaded with the main dub.json, load all externally defined
+		// packages once the main package is available with all its data.
+		foreach (sub_path; pack.exportedPackages) {
+			auto path = pack.path ~ sub_path;
+			if (!existsFile(path)) {
+				logError("Package %s declared a sub-package, definition file is missing: %s", pack.name, path.toNativeString());
+				continue;
+			}
+			// Add the subpackage.
+			try {
+				dst_repos ~= new Package(path, pack);
+			} catch (Exception e) {
+				logError("Package '%s': Failed to load sub-package in %s, error: %s", pack.name, path.toNativeString(), e.msg);
+				logDiagnostic("Full error: %s", e.toString().sanitize());
+			}
+		}
+	}
+}
+
+struct PackageOverride {
+	string package_;
+	Dependency version_;
+	Version targetVersion;
+	Path targetPath;
+
+	this(string package_, Dependency version_, Version target_version)
+	{
+		this.package_ = package_;
+		this.version_ = version_;
+		this.targetVersion = target_version;
+	}
+
+	this(string package_, Dependency version_, Path target_path)
+	{
+		this.package_ = package_;
+		this.version_ = version_;
+		this.targetPath = target_path;
+	}
+}
+
+enum LocalPackageType {
+	user,
+	system
+}
+
+enum JournalJsonFilename = "journal.json";
+enum LocalPackagesFilename = "local-packages.json";
+enum LocalOverridesFilename = "local-overrides.json";
+
+
+private struct Repository {
+	Path path;
+	Path packagePath;
+	Path[] searchPath;
+	Package[] localPackages;
+	PackageOverride[] overrides;
+
+	this(Path path)
+	{
+		this.path = path;
+		this.packagePath = path ~"packages/";
+	}
+}
+
+
+/*
+	Retrieval journal for later removal, keeping track of placed files.
+
+	Example JSON:
+	---
+	{
+		"version": 1,
+		"files": {
+			"file1": "typeoffile1",
+			...
+		}
+	}
+	---
+*/
+private class Journal {
+	private enum Version = 1;
+
+	enum Type {
+		RegularFile,
+		Directory,
+		Alien
+	}
+
+	struct Entry {
+		this( Type t, Path f ) { type = t; relFilename = f; }
+		Type type;
+		Path relFilename;
+	}
+
+	@property const(Entry[]) entries() const { return m_entries; }
+
+	this() {}
+
+	/// Initializes a Journal from a json file.
+	this(Path journalFile) {
+		auto jsonJournal = jsonFromFile(journalFile);
+		enforce(cast(int)jsonJournal["Version"] == Version, "Mismatched version: "~to!string(cast(int)jsonJournal["Version"]) ~ " vs. " ~to!string(Version));
+		foreach(string file, type; jsonJournal["Files"])
+			m_entries ~= Entry(to!Type(cast(string)type), Path(file));
+	}
+
+	void add(Entry e) {
+		foreach(Entry ent; entries) {
+			if( e.relFilename == ent.relFilename ) {
+				enforce(e.type == ent.type, "Duplicate('"~to!string(e.relFilename)~"'), different types: "~to!string(e.type)~" vs. "~to!string(ent.type));
+				return;
+			}
+		}
+		m_entries ~= e;
+	}
+
+	void remove(Entry e) {
+		foreach(i, Entry ent; entries) {
+			if( e.relFilename == ent.relFilename ) {
+				m_entries = std.algorithm.remove(m_entries, i);
+				return;
+			}
+		}
+		enforce(false, "Cannot remove entry, not available: " ~ e.relFilename.toNativeString());
+	}
+
+	/// Save the current state to the path.
+ void save(Path path) { + Json jsonJournal = serialize(); + auto fileJournal = openFile(path, FileMode.CreateTrunc); + scope(exit) fileJournal.close(); + fileJournal.writePrettyJsonString(jsonJournal); + } + + bool containsEntry(Type type, Path path) + const { + foreach (e; entries) + if (e.type == type && e.relFilename == path) + return true; + return false; + } + + private Json serialize() const { + Json[string] files; + foreach(Entry e; m_entries) + files[to!string(e.relFilename)] = to!string(e.type); + Json[string] json; + json["Version"] = Version; + json["Files"] = files; + return Json(json); + } + + private { + Entry[] m_entries; + } +} diff --git a/source/dub/packagesupplier.d b/source/dub/packagesupplier.d index 27be871..fe65b32 100644 --- a/source/dub/packagesupplier.d +++ b/source/dub/packagesupplier.d @@ -35,7 +35,7 @@ /// path: absolute path to store the package (usually in a zip format) void retrievePackage(Path path, string packageId, Dependency dep, bool pre_release); - + /// returns the metadata for the package Json getPackageDescription(string packageId, Dependency dep, bool pre_release); } @@ -48,7 +48,7 @@ this(Path root) { m_path = root; } override @property string description() { return "file repository at "~m_path.toNativeString(); } - + Version[] getVersions(string package_id) { Version[] ret; @@ -72,13 +72,13 @@ enforce(existsFile(filename)); copyFile(filename, path); } - + Json getPackageDescription(string packageId, Dependency dep, bool pre_release) { auto filename = bestPackageFile(packageId, dep, pre_release); return jsonFromZip(filename, "dub.json"); } - + private Path bestPackageFile(string packageId, Dependency dep, bool pre_release) { Path toPath(Version ver) { @@ -103,7 +103,7 @@ CacheEntry[string] m_metadataCache; Duration m_maxCacheTime; } - + this(URL registry) { m_registryUrl = registry; @@ -123,7 +123,7 @@ ret.sort(); return ret; } - + void retrievePackage(Path path, string packageId, Dependency dep, bool pre_release) { import std.array : replace; @@ -133,12 +133,12 @@ logDiagnostic("Found download URL: '%s'", url); download(url, path); } - + Json getPackageDescription(string packageId, Dependency dep, bool pre_release) { return getBestPackage(packageId, dep, pre_release); } - + private Json getMetadata(string packageId) { auto now = Clock.currTime(UTC()); @@ -148,7 +148,7 @@ } auto url = m_registryUrl ~ Path(PackagesPath ~ "/" ~ packageId ~ ".json"); - + logDebug("Downloading metadata for %s", packageId); logDebug("Getting from %s", url); @@ -157,7 +157,7 @@ m_metadataCache[packageId] = CacheEntry(json, now); return json; } - + private Json getBestPackage(string packageId, Dependency dep, bool pre_release) { Json md = getMetadata(packageId); diff --git a/source/dub/version_.d b/source/dub/version_.d index 8e22adb..cac04d3 100644 --- a/source/dub/version_.d +++ b/source/dub/version_.d @@ -1 +1 @@ -module dub.version_; enum dubVersion = "v0.9.21"; +module dub.version_; enum dubVersion = "v0.9.21";
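For orientation, a minimal usage sketch of the PackageManager API introduced above. It is illustrative only and not part of the patch; the search-path roots, the package name, and the dependency/override values are hypothetical, and only constructors and methods that appear in the patch are used.

import dub.dependency;
import dub.packagemanager;
import dub.internal.vibecompat.core.log;
import dub.internal.vibecompat.inet.path;

void packageManagerExample()
{
	// Hypothetical user and system roots; Repository appends "packages/" internally.
	auto pm = new PackageManager(Path("/home/someuser/.dub/"), Path("/var/lib/dub/"));

	// Pick the newest locally present package that satisfies a dependency,
	// honouring any overrides configured via addOverride().
	auto pack = pm.getBestPackage("vibe-d", Dependency("~>0.7.19"));
	if (pack !is null)
		logInfo("Would use %s %s at %s", pack.name, pack.ver, pack.path.toNativeString());

	// Redirect all matching versions to a local working copy for the current
	// user; this is persisted to local-overrides.json.
	pm.addOverride(LocalPackageType.user, "vibe-d", Dependency("~>0.7.19"), Path("/projects/vibe.d/"));
}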