diff --git a/.codecov.yml b/.codecov.yml new file mode 100644 index 0000000..a52495b --- /dev/null +++ b/.codecov.yml @@ -0,0 +1,17 @@ +# Documentation: https://docs.codecov.io/docs/codecov-yaml + +codecov: + bot: dlang-bot + +coverage: + precision: 3 + # round: down + # range: "70...100" + + status: + # Learn more at https://docs.codecov.io/docs/commit-status + project: true + patch: true + changes: false + +comment: false diff --git a/.github/issue_template.md b/.github/issue_template.md new file mode 100644 index 0000000..b63759d --- /dev/null +++ b/.github/issue_template.md @@ -0,0 +1,17 @@ +Please search for existing solutions to your problem. +- Issue list: https://github.com/dlang/dub/issues?q=is%3Aissue +- Cookbook: https://github.com/dlang/dub/wiki/Cookbook +- Stack Overflow: https://stackoverflow.com/questions/tagged/dub + +### System information +- **dub version** (e.g. dub 1.3.0) +- **OS Platform and distribution** (e.g. Windows 10, Linux Ubuntu 16.04) +- **compiler version** (e.g. dmd-2.074.1) + +### Bug Description + +### How to reproduce? + +### Expected Behavior + +### Logs diff --git a/.gitignore b/.gitignore index a5e949d..75e23c8 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,31 @@ *.o *.obj -.dub/ +.dub +.directory dub.selections.json -bin/dub docs.json __dummy.html + +# Ignore build files. +/bin/dub +/bin/__test__library-nonet__ +/bin/__test__library__ +/bin/dub-test-library + +# Ignore files or directories created by the test suite. 
+/test/custom-source-main-bug487/custom-source-main-bug487 +/test/3-copyFiles/bin/ +/test/ignore-hidden-1/ignore-hidden-1 +/test/ignore-hidden-2/ignore-hidden-2 +/test/expected-import-path-output +/test/expected-string-import-path-output +/test/expected-describe-data-1-list-output +/test/expected-describe-data-2-dmd-output +/test/expected-issue616-output +/test/describe-project/dummy.dat +/test/describe-project/dummy-dep1.dat + +# Ignore coverage files +cov/ diff --git a/.travis.yml b/.travis.yml index c8073d8..cd04032 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,16 +1,61 @@ language: d +dist: trusty sudo: false -d: - - dmd-2.064.2 - - dmd-2.065.0 - - dmd-2.066.1 - - ldc-0.14.0 - - ldc-0.15.1 - - gdc-4.9.0 +matrix: + include: + - d: dmd-nightly + env: [FRONTEND=2.075] + - d: dmd-beta + env: [FRONTEND=2.075] + - d: dmd + env: + - [FRONTEND=2.074] + - [COVERAGE=true] + - d: dmd-2.074.1 + env: [FRONTEND=2.074] + - d: dmd-2.073.2 + env: [FRONTEND=2.073] + - d: dmd-2.072.2 + env: [FRONTEND=2.072] + - d: dmd-2.071.2 + env: [FRONTEND=2.071] + - d: dmd-2.070.2 + env: [FRONTEND=2.070] + - d: dmd-2.069.2 + env: [FRONTEND=2.069] + - d: dmd-2.068.2 + env: [FRONTEND=2.068] + - d: dmd-2.067.1 + env: [FRONTEND=2.067] + - d: dmd-2.066.1 + env: [FRONTEND=2.066] + - d: ldc-beta + env: [FRONTEND=2.073] + - d: ldc + env: [FRONTEND=2.072] + - d: ldc-1.2.0 + env: [FRONTEND=2.072] + - d: ldc-1.1.0 + env: [FRONTEND=2.071] + - d: ldc-1.0.0 + env: [FRONTEND=2.070] + - d: ldc-0.17.2 + env: [FRONTEND=2.068] + - d: gdc + env: [FRONTEND=2.068] + - d: gdc-5.2.0 + env: [FRONTEND=2.066] + - d: gdc-4.9.2 + env: [FRONTEND=2.066] + + allow_failures: + - d: gdc + +addons: + apt: + packages: + - libevent-dev script: - - dub test --compiler=${DC} -c library-nonet - - dub build --compiler=${DC} - - DC=${DMD} ./build.sh - - DUB=`pwd`/bin/dub COMPILER=${DC} test/run-unittest.sh + - ./travis-ci.sh diff --git a/CHANGELOG.md b/CHANGELOG.md index fb95c32..82f0cc9 100644 --- a/CHANGELOG.md +++ 
b/CHANGELOG.md @@ -1,16 +1,471 @@ Changelog ========= -v0.9.23 - YYYY-MM-DD +v1.5.0 - 2017- +------------------- + +- Read additional registry URLs (semicolon separated) from DUB_REGISTRY env var - [pull #1173][issue1173] + +[issue1173]: https://github.com/dlang/dub/issues/1173 + +v1.4.0 - 2017- +------------------- + +- The copyright string is generated automatically by "dub init" +- "dub init" lets the user retry to enter the package name if not valid (by NotSpooky) - [pull #1122][issue1122] +- Improved collection speed for source/import files - [pull #1125][issue1125] +- Fixed "dub init" to allow digits in package names (by chadjoan) - [pull #1165][issue1165] +- Fixed a sub package build issue on Windows, where colons were used as part of the file name (by andre2007) - [issue #1130][issue1130], [pull #1137][issue1137] + +[issue1122]: https://github.com/dlang/dub/issues/1122 +[issue1125]: https://github.com/dlang/dub/issues/1125 +[issue1130]: https://github.com/dlang/dub/issues/1130 +[issue1137]: https://github.com/dlang/dub/issues/1137 +[issue1165]: https://github.com/dlang/dub/issues/1165 + + +v1.3.0 - 2017-04-10 +------------------- + +### Features and improvements ### + +- Reduced the initialization time for "dub test" by several seconds by avoiding a complex regex - [pull #1078][issue1078] +- Reduced cubic runtime complexity for collecting string import files to almost linear in the common case - [pull #1079][issue1079] +- Compiler flag usage warnings are now only emitted for the root package, reducing build output noise - [a75023cd][commita75023cd] +- Avoid redundant recreation of hard links for build targets (by Danny Milosavljevic aka daym) - [pull #1071][issue1071] + +### Bug fixes ### + +- Fixed bogus rebuild of packages with no dependencies - [pull #1093][issue1093], [issue #1091][issue1091] +- Fixed building with vibe-core instead of vibe.d 0.7.x +- Fixed the VisualD generator to properly handle the "x86_mscoff" pseudoarchitecture - 
[4d416e73][commit4d416e73] + +[commita75023cd]: https://github.com/dlang/dub/commit/a75023cd050c055e81190bf7abc5793aba39852f +[commit4d416e73]: https://github.com/dlang/dub/commit/4d416e730df872b552ee1dcfa8340c224a4e51fc +[issue1071]: https://github.com/dlang/dub/issues/1071 +[issue1078]: https://github.com/dlang/dub/issues/1078 +[issue1079]: https://github.com/dlang/dub/issues/1079 +[issue1091]: https://github.com/dlang/dub/issues/1091 +[issue1093]: https://github.com/dlang/dub/issues/1093 + + +v1.2.2 - 2017-03-09 +------------------- + + +v1.2.1 - 2017-02-12 +------------------- + +### Bug fixes ### + +- Fixed compile error when compiling with vibe.d versions prior to 0.8.0 - [9d25e5dd][commit9d25e5dd] +- Fixed orphan format specifier error - [220b0128][commit220b0128] +- Fixed test executable name generation causing sub package test builds to fail - [e6262373][commite6262373] +- Fixed bogus warning message when compiling with `--arch x86_mscoff` (by Andrey Penechko aka MrSmith33) - [pull #1059][issue1059] +- Fixed platform specifiers to work for "x86_mscoff" - [86e85276][commit86e85276] + +[commit9d25e5dd]: https://github.com/dlang/dub/commit/9d25e5dd0337e9c054ff32c4b921f32603d29293 +[commit220b0128]: https://github.com/dlang/dub/commit/220b01280041abbead57db6eec28e9279b9d7cf6 +[commite6262373]: https://github.com/dlang/dub/commit/e6262373558591fa8754704fcc2e8ddafabf6671 +[commit86e85276]: https://github.com/dlang/dub/commit/86e85276a7ff85c5cde4e1926c40e666a2b6bf78 +[issue1059]: https://github.com/dlang/dub/issues/1059 + + +v1.2.0 - 2017-01-22 +------------------- + +### Features and improvements ### + + - Added an `--override-config` command line option to force selecting specific configurations for dependencies - [pull #1004][issue1004] + - Added an `x86_mscoff` architecture corresponding to DMD's `-m32mscoff` flag (by John Colvin) - [pull #1007][issue1007] + - Implemented selective dependency upgrades ("dub upgrade ") - [issue #1024][issue1024] + - Multiple
configurations with the same name are now detected and will cause a warning to be displayed - [issue #984][issue984] + - Updated the Sublime Text generator and the Bash completion script to include the default "release-debug" build type (by p0nce) - [pull #1028][issue1028] + - The `--force-remove` switch is scheduled for deprecation, as it didn't do anything for a while now - [pull #1023][issue1023] + +[issue984]: https://github.com/dlang/dub/issues/984 +[issue1004]: https://github.com/dlang/dub/issues/1004 +[issue1007]: https://github.com/dlang/dub/issues/1007 +[issue1023]: https://github.com/dlang/dub/issues/1023 +[issue1024]: https://github.com/dlang/dub/issues/1024 +[issue1028]: https://github.com/dlang/dub/issues/1028 + + +v1.1.2 - 2016-12-31 +------------------- + +### Bug fixes ### + + - Fixed configuration resolution in diamond dependency settings - [issue #1005][issue1005], [pull #1006][issue1006] + - Fixed path based package overrides ("dub add-override") - [issue #779][issue779] + - Contains various diagnostic and error message improvements - [issue #957][issue957], [pull #1010][issue1010], [pull #1012][issue1012], [issue #1019][issue1019] + +[issue779]: https://github.com/dlang/dub/issues/779 +[issue957]: https://github.com/dlang/dub/issues/957 +[issue1005]: https://github.com/dlang/dub/issues/1005 +[issue1006]: https://github.com/dlang/dub/issues/1006 +[issue1010]: https://github.com/dlang/dub/issues/1010 +[issue1012]: https://github.com/dlang/dub/issues/1012 +[issue1019]: https://github.com/dlang/dub/issues/1019 + + +v1.1.1 - 2016-11-30 +------------------- + +### Bug fixes ### + + - Fixed a regression where path based dependencies were not properly resolved - [issue #934][issue934], [issue #959][issue959], [pull #962][issue962], [pull #969][issue969] + - Fixed DMD separate compile/link detection code for the case where the compiler binary is not called "dmd" - [pull #966][issue966] + - Fixed using the selected compiler for generated Sublime Text
projects - [issue #931][issue931], [pull #983][issue983] + - Fixed upgrading of optional dependencies (were ignored during the upgrade previously) - [issue #672][issue672], [pull #989][issue989] + - Fixed automatic downloading of selected optional dependencies - [issue #990][issue990], [pull #991][issue991] + +[issue672]: https://github.com/dlang/dub/issues/672 +[issue931]: https://github.com/dlang/dub/issues/931 +[issue934]: https://github.com/dlang/dub/issues/934 +[issue959]: https://github.com/dlang/dub/issues/959 +[issue962]: https://github.com/dlang/dub/issues/962 +[issue966]: https://github.com/dlang/dub/issues/966 +[issue969]: https://github.com/dlang/dub/issues/969 +[issue983]: https://github.com/dlang/dub/issues/983 +[issue989]: https://github.com/dlang/dub/issues/989 +[issue990]: https://github.com/dlang/dub/issues/990 +[issue991]: https://github.com/dlang/dub/issues/991 + + +v1.1.0 - 2016-10-31 +------------------- + +### Features and improvements ### + + - Fixed compilation for DMD 2.072.0 (minimum supported frontend version is 2.065) - [pull #891][issue891] + - Fixed compilation with the latest vibe.d 0.7.30 alpha versions (avoid `Bson.opDispatch`) + - Single-file packages are now built locally unless the shebang syntax is used (still performs a build in the temporary folder in that case) - [issue #887][issue887], [pull #888][issue888] + - DUB now searches for a configuration file in "../etc/dub/settings.json" (relative to the executable location), enabling distribution-specific configuration - [issue #895][issue895], [pull #897][issue897] + - "dub remove" is now interactive in case of multiple matching package versions - [pull #879][issue879] + - Added a "--stdout" switch to "dub convert" - [issue #932][issue932], [pull #933][issue933] + +### Bug fixes ### + + - Pressing Ctrl+C during "dub init" now doesn't leave a half-initialized package behind - [issue #883][issue883], [pull #884][issue884] + - Fixed handling of empty array directives in the SDLang 
recipe parser (e.g. a single `sourcePaths` directive with no arguments now properly avoids searching for default source directories) + - Fixed a bad error message for missing dependencies that are referenced in the root package, as well as from a dependency - [issue #896][issue896] + - Fixed naming of folders in generated Sublime Text projects (by p0nce) - [pull #918][issue918] + - Fixed the workaround for "dub test" and modern vibe.d projects (proper fix is planned after a grace period) + - Fixed linking against intermediate dependencies in their build folder instead of the final build output file - [issue #921][issue921], [pull #922][issue922] + - Fixed omission of packages in a moderately complex sub package scenario - [issue #923][issue923], [pull #924][issue924] + - Fixed the default lib command line flag passed to LDC when building shared libraries (by Олег Леленков aka goodbin) - [pull #930][issue930] + - Fixed extraneous fields getting added to the package recipe by "dub convert" - [issue #820][issue820], [pull #901][issue901] + +[issue820]: https://github.com/dlang/dub/issues/820 +[issue879]: https://github.com/dlang/dub/issues/879 +[issue883]: https://github.com/dlang/dub/issues/883 +[issue884]: https://github.com/dlang/dub/issues/884 +[issue887]: https://github.com/dlang/dub/issues/887 +[issue888]: https://github.com/dlang/dub/issues/888 +[issue891]: https://github.com/dlang/dub/issues/891 +[issue895]: https://github.com/dlang/dub/issues/895 +[issue896]: https://github.com/dlang/dub/issues/896 +[issue897]: https://github.com/dlang/dub/issues/897 +[issue901]: https://github.com/dlang/dub/issues/901 +[issue918]: https://github.com/dlang/dub/issues/918 +[issue921]: https://github.com/dlang/dub/issues/921 +[issue922]: https://github.com/dlang/dub/issues/922 +[issue923]: https://github.com/dlang/dub/issues/923 +[issue924]: https://github.com/dlang/dub/issues/924 +[issue930]: https://github.com/dlang/dub/issues/930 +[issue932]: 
https://github.com/dlang/dub/issues/932 +[issue933]: https://github.com/dlang/dub/issues/933 + + +v1.0.0 - 2016-06-20 +------------------- + +### Features and improvements ### + + - Implemented support for single-file packages, including shebang script support - [issue #103][issue103], [pull #851][issue851], [pull #866][issue866], [pull #870][issue870], [pull #878][issue878] + - Builds on DMD 2.065.0 up to 2.071.1 + - Removed all deprecated functionality from the API, CLI and data formats + - The minimum supported OS X version is now 10.7 + - Switched from `std.stream` to `std.stdio` (beware that a recent version of DMD is now necessary when building DUB to support Unicode file names on Windows) - [pull #847][issue847] + - Now passes `-vcolumns` also to LDC - [issue #859][issue859], [pull #860][issue860] + +### Bug fixes ### + - Avoids superfluous registry queries when building - [issue #831][issue831], [pull #861][issue861] + - Fixed handling of "libs" on Windows/DMD when building in `allAtOnce` mode + - Fixed building with LDC on Windows for both, the VisualStudio based version and the MinGW version - [issue #618][issue618], [pull #688][issue688] + - Fixed escaping of command line arguments with spaces for LDC - [issue #834][issue834], [pull #860][issue860] + +[issue103]: https://github.com/dlang/dub/issues/103 +[issue618]: https://github.com/dlang/dub/issues/618 +[issue688]: https://github.com/dlang/dub/issues/688 +[issue831]: https://github.com/dlang/dub/issues/831 +[issue834]: https://github.com/dlang/dub/issues/834 +[issue847]: https://github.com/dlang/dub/issues/847 +[issue851]: https://github.com/dlang/dub/issues/851 +[issue859]: https://github.com/dlang/dub/issues/859 +[issue860]: https://github.com/dlang/dub/issues/860 +[issue861]: https://github.com/dlang/dub/issues/861 +[issue866]: https://github.com/dlang/dub/issues/866 +[issue870]: https://github.com/dlang/dub/issues/870 +[issue878]: https://github.com/dlang/dub/issues/878 + + +v0.9.25 - 2016-05-22 
-------------------- ### Features and improvements ### - - Added support for dub init to take a list of dependencies. (by Colin Grogan) - - Example usage (dub init myProj logger vibe-d gfm --type=vibe.d) - - Dub will then try to get the latest version number for each of these dependencies from code.dlang.org and automatically add them to the dependencies section of dub.json. - - If it cant find the dependant package name, it will ignore it, - - Current functionality is preserved whereby project type can be determined by using [vibe.d, deimos or minimal] after package name. (So example above would be dub init myProj vibe.d logger vibe-d gfm). - - Preferrable to use --type however, as this should be removed for next version. + + - Builds on DMD 2.064.2 up to 2.071.0 + - Cleaned up the API to be (almost) ready for the 1.0.0 release - [issue #349][issue349] - [pull #785][issue785] + - Implemented new semantics for optional dependencies (now controlled using dub.selections.json) - [issue #361][issue361], [pull #733][issue733] + - Made "dub init" interactive to improve/simplify the creation of new packages (can be disabled with the "-n" switch) - [pull #734][issue734] + - Switched back the default "dub init" recipe format to JSON (both, JSON and SDLang will stay supported) - [issue #724][issue724] + - Locally cached packages are now stored in a folder that matches their name, which enables more possible ways to organize the source code (mostly by Guillaume Piolat aka p0nce) - [issue #502][issue502], [pull #735][issue735] + - Improved worst-case speed of the dependency resolver for some pathological cases + - Sped up GIT based local package version detection using a cache on Windows - [pull #692][issue692] + - Implemented "dub convert" to convert between JSON and SDLang package recipes - [pull #732][issue732] + - Implemented a "dub search" command to search the package registry from the CLI - [pull #663][issue663] + - "dub test" doesn't build dependencies in unittest mode 
anymore - [issue #640][issue640], [issue #823][issue823] + - Added a "-ddoxTool"/"x:ddoxTool" field to override the package used for DDOX documentation builds - [pull #702][issue702] + - DUB init now uses the user's full name on Posix systems - [issue #715][issue715] + - Added support for the "DFLAGS" environment variable to "dub test" + - Added a "release-debug" default build type + - Path based dependencies are now also stored in dub.selections.json - [issue #772][issue772] + - Entries in dub.selections.json are now output in alphabetic order - [issue #709][issue709] + - The Sublime Text generator now outputs import paths for use with DKit (by xentec) - [pull #757][issue757] + - The VisualD generator now creates the project files in the ".dub" subdirectory (by Guillaume Piolat aka p0nce) - [pull #680][issue680] + +### Bug fixes ### + + - Fixed outputting global build settings (e.g. architecture flags) only once - [issue #346][issue346], [issue #635][issue635], [issue #686][issue686], [pull #759][issue759] + - Fixed an infinite recursive DUB invocation if dub was invoked in "preGenerateCommands" (by Nick Sabalausky) - [issue #616][issue616], [pull #633][issue633] + - Fixed the VisualD generator to set the correct debug working directory + - Fixed disabling bounds-checking on LDC to avoid the deprecated/removed `-noboundscheck` flag (by Guillaume Piolat aka p0nce) - [pull #693][issue693] + - Fixed race conditions when running multiple DUB instances concurrently - [issue #674][issue674], [pull #683][issue683] + - Fixed the error message when trying to build with DUB from a directory that doesn't contain a package - [issue #696][issue696] + - Fixed running the pre-compiled version of DUB on Mac OS versions prior to 10.11 (by Guillaume Piolat aka p0nce) - [pull #704][issue704] + - Fixed "dub dustmite" to emit a proper DUB command line if no explicit compiler/architecture is given + - Fixed "dub dustmite" when invoked on packages with path based dependencies - [issue
#240][issue240], [pull #762][issue762] + - Fixed target type inheritance from the top level scope in the SDLang recipe parser + - Fixed the error message when a dependency name is omitted in an SDLang recipe (by lablanu) - [pull #723][issue723] + - Fixed the error message when using one of the "list" modes of "dub describe" on a target type "none" package - [issue #739][issue739] + - Fixed writing the "subConfigurations" field in the JSON recipe of downloaded packages - [issue #745][issue745] + - Fixed recently updated packages sometimes to fail to download - [issue #528][issue528] + - Fixed handling of path based dependencies that have internal sub package references - [issue #754][issue754], [pull #766][issue766] + - Fixed issues with generated CMake files due to backslashes in paths on Windows (by Steven Dwy) - [pull #738][issue738] + - Fixed path based dependencies sometimes overriding version based dependencies of the same package - [issue #777][issue777] + - Fixed loading of packages that have a path based selection + - Fixed detection of compiler errors in the build output for generated Sublime Text projects (by Justinas Šneideris aka develop32) - [pull #788][issue788] + - Fixed handling of certain libraries that got included using "pkg-config" (by Jean-Baptiste Lab) - [issue #782][issue782], [pull #794][issue794] + - Quick fix for building shared libraries with LDC/Windows/OS X and DMD/OS X (by Guillaume Piolat aka p0nce) - [pull #801][issue801] + - Fixed several issues with the SDLang parser + - Fixed release-specific regressions regarding sub package dependencies that got ignored during dependency graph collection - [issue #803][issue803], [pull #807][issue807] + - Fixed target type "none" packages still generating a binary target (affected `dub describe`) + - Fixed `dub describe --data-list target-type` work for target type "none" packages + + +[issue240]: https://github.com/dlang/dub/issues/240 +[issue346]: https://github.com/dlang/dub/issues/346 
+[issue349]: https://github.com/dlang/dub/issues/349 +[issue361]: https://github.com/dlang/dub/issues/361 +[issue502]: https://github.com/dlang/dub/issues/502 +[issue528]: https://github.com/dlang/dub/issues/528 +[issue616]: https://github.com/dlang/dub/issues/616 +[issue633]: https://github.com/dlang/dub/issues/633 +[issue635]: https://github.com/dlang/dub/issues/635 +[issue640]: https://github.com/dlang/dub/issues/640 +[issue663]: https://github.com/dlang/dub/issues/663 +[issue674]: https://github.com/dlang/dub/issues/674 +[issue680]: https://github.com/dlang/dub/issues/680 +[issue683]: https://github.com/dlang/dub/issues/683 +[issue686]: https://github.com/dlang/dub/issues/686 +[issue692]: https://github.com/dlang/dub/issues/692 +[issue693]: https://github.com/dlang/dub/issues/693 +[issue696]: https://github.com/dlang/dub/issues/696 +[issue702]: https://github.com/dlang/dub/issues/702 +[issue704]: https://github.com/dlang/dub/issues/704 +[issue709]: https://github.com/dlang/dub/issues/709 +[issue715]: https://github.com/dlang/dub/issues/715 +[issue723]: https://github.com/dlang/dub/issues/723 +[issue724]: https://github.com/dlang/dub/issues/724 +[issue732]: https://github.com/dlang/dub/issues/732 +[issue733]: https://github.com/dlang/dub/issues/733 +[issue734]: https://github.com/dlang/dub/issues/734 +[issue735]: https://github.com/dlang/dub/issues/735 +[issue738]: https://github.com/dlang/dub/issues/738 +[issue739]: https://github.com/dlang/dub/issues/739 +[issue745]: https://github.com/dlang/dub/issues/745 +[issue754]: https://github.com/dlang/dub/issues/754 +[issue757]: https://github.com/dlang/dub/issues/757 +[issue759]: https://github.com/dlang/dub/issues/759 +[issue762]: https://github.com/dlang/dub/issues/762 +[issue766]: https://github.com/dlang/dub/issues/766 +[issue772]: https://github.com/dlang/dub/issues/772 +[issue777]: https://github.com/dlang/dub/issues/777 +[issue782]: https://github.com/dlang/dub/issues/782 +[issue785]: 
https://github.com/dlang/dub/issues/785 +[issue788]: https://github.com/dlang/dub/issues/788 +[issue794]: https://github.com/dlang/dub/issues/794 +[issue801]: https://github.com/dlang/dub/issues/801 +[issue803]: https://github.com/dlang/dub/issues/803 +[issue807]: https://github.com/dlang/dub/issues/807 +[issue823]: https://github.com/dlang/dub/issues/823 + + +v0.9.24 - 2015-09-20 +-------------------- + +### Features and improvements ### + + - Added support for [SDLang][sdl-package-format] based package descriptions - [issue #348][issue348], [pull #582][issue582] + - Source code updated to build with DMD 2.064.2 through 2.068.0 + - Enhanced `dub describe` support: + - The D API is now strongly typed instead of using `Json` + - Added a `"targets"` field that can be used to support external build tools + - Added a `--data=X` switch to get information in a shell script friendly format (by Nick Sabalausky) - [pull #572][issue572] + - Added an `"active"` field to each package to be used to signal if a certain dependency takes part in the build - [issue #393][issue393] + - Added a set of additional environment variables that are available to pre/post build/generate commands (by Nick Sabalausky) - [issue #593][issue593] + - Errors and warnings are not suppressed anymore, but output to stderr + - Added the possibility to get all import paths for `dub describe` (by w0rp) - [pull #552][issue552], [issue #560][issue560], [pull #561][issue561] + - Added stricter package name validation checks + - Added a `--bare` option to search for dependencies only in the current directory (useful for running tests) + - Removed the deprecated "visuald-combined" generator (use `dub generate visuald --combined` instead) + - The command line shown for verbose output now contains the same quotes as used for the actual command invocation + - Uses `-vcolumns` for DMD if supported - [issue #581][issue581] + - Properly suppressing compiler output when `--quiet` or `--vquiet` are given (by Nick
Sabalausky) - [issue #585][issue585], [pull #587][issue587] + - Added a warning when referencing sub packages by their path (instead of their parent's path) + - Building `sourceLibrary` targets with `-o-` is allowed now (enables documentation generation in particular) - [issue #553][issue553] + - The VisualD generator doesn't use a "_d" suffix for debug build targets anymore (by Guillaume Piolat aka p0nce) - [pull #617][issue617] + - Added a new "profile-gc" build type + - Cleaned up console output (parts by Guillaume Piolat aka p0nce) - [pull #621][issue621] + - Added "arm" and "arm_thumb" cross-compilation invocation support for GDC + - Added configuration support to set the default compiler binary "defaultCompiler" field in the settings.json file + - Removed the build script based selection of the default compiler (by Marc Schütz) - [pull #678][issue678] + - Added a `--skip-registry=` switch to skip searching for packages on remote registries - [issue #580][issue580] + +### Bug fixes ### + + - Fixed quoting of command line arguments for the DMD backend in the linker phase - [issue #540][issue540] + - Fixed running Dustmite with versioned dependencies that are available as a git working copy + - Fixed dependency resolution for packages that have sub packages and all of them are path based - [issue #543][issue543] + - Fixed the error message for path based dependencies that are missing a package description file - see [issue #535][issue535] + - Fixed running Dustmite with dub not available in `PATH` - [pull #547][issue547] + - Fixed passing compiler, architecture, build type and configuration options to Dustmite - [pull #547][issue547] + - Fixed return code when `dub run` is used on a library (returns non-zero now) - [pull #546][issue546] + - Fixed spurious warning when building a package by name and DUB is not run from a package directory + - Fixed handling of dependency errors that occur during automatic upgrade checks - [issue #564][issue564], [pull 
#565][issue565] + - Fixed the architecture flag for x64 passed to LDC (by p0nce) - [pull #574][issue574] + - Fixed enforcement of build requirements in dependencies - [issue #592][issue592] + - Fixed `dub remove` to only remove managed packages - [issue #596][issue596] + - Added a workaround for a data corruption issue (codegen bug) - [issue #601][issue601] + - Fixed building dynamic libraries with DMD - [issue #613][issue613] + +[sdl-package-format]: http://code.dlang.org/package-format?lang=sdl +[issue348]: https://github.com/dlang/dub/issues/348 +[issue393]: https://github.com/dlang/dub/issues/393 +[issue535]: https://github.com/dlang/dub/issues/535 +[issue540]: https://github.com/dlang/dub/issues/540 +[issue543]: https://github.com/dlang/dub/issues/543 +[issue546]: https://github.com/dlang/dub/issues/546 +[issue547]: https://github.com/dlang/dub/issues/547 +[issue552]: https://github.com/dlang/dub/issues/552 +[issue552]: https://github.com/dlang/dub/issues/552 +[issue553]: https://github.com/dlang/dub/issues/553 +[issue560]: https://github.com/dlang/dub/issues/560 +[issue561]: https://github.com/dlang/dub/issues/561 +[issue564]: https://github.com/dlang/dub/issues/564 +[issue565]: https://github.com/dlang/dub/issues/565 +[issue572]: https://github.com/dlang/dub/issues/572 +[issue574]: https://github.com/dlang/dub/issues/574 +[issue580]: https://github.com/dlang/dub/issues/580 +[issue581]: https://github.com/dlang/dub/issues/581 +[issue582]: https://github.com/dlang/dub/issues/582 +[issue585]: https://github.com/dlang/dub/issues/585 +[issue587]: https://github.com/dlang/dub/issues/587 +[issue592]: https://github.com/dlang/dub/issues/592 +[issue593]: https://github.com/dlang/dub/issues/593 +[issue596]: https://github.com/dlang/dub/issues/596 +[issue601]: https://github.com/dlang/dub/issues/601 +[issue613]: https://github.com/dlang/dub/issues/613 +[issue617]: https://github.com/dlang/dub/issues/617 +[issue621]: https://github.com/dlang/dub/issues/621 +[issue678]: 
https://github.com/dlang/dub/issues/678 + +v0.9.23 - 2015-04-06 +-------------------- + +### Features and improvements ### + + - Compiles with DMD frontend versions 2.064 up to 2.067 + - Largely reduced the execution time needed by DUB itself during builds - [pull #388][issue388] + - Added a `dub clean-caches` command to clear online registry meta data that is cached locally - [pull #433][issue433] + - Added a "deimos" template type to the `dub init` command - [pull #431][issue431] + - Added support for dub init to take a list of dependencies (by Colin Grogan) - [pull #453][issue453] + - Example: `dub init myProj logger vibe-d gfm --type=vibe.d` + - DUB will try to get the latest version number for each of these dependencies from [code.dlang.org](http://code.dlang.org/) and automatically add them to the dependencies section of dub.json + - The previous syntax where the argument to `dub init` is the project type instead of a dependency list is preserved, but deprecated - use the `--type=` switch instead + - Added a project generator for Sublime Text (by Nicholas Londey) - [pull #461][issue461] + - Added a project generator for CMake files (by Steven Dwy) - [pull #489][issue489] + - Added support for `dub test` and modules where the path doesn't match the module name (by Szabo Bogdan) - [pull #344][issue344] + - Added `dub --version` option to output the program version and build date - [pull #513][issue513] + - Improved `"copyFiles"` support + - Added support for glob matches (by Colden Cullen) - [pull #407][issue407] + - Added support for copying directories (by Vadim Lopatin) - [pull #471][issue471] + - Files are now hard linked into the target directory instead of making a real copy + - Avoids to hard link `"copyFiles"` that have not changed in the source directory on Windows - [issue #511][issue511] + - DUB now searches the PATH for installed compilers and chooses the default compiler as appropriate - [issue #480][issue480], [pull #506][issue506] + - 
`--build-mode=singleFile` can now build several files in parallel using the `--parallel` switch - [issue #498][issue498] + - Improved the JSON error diagnostic format to `file(line): Error: message` for better IDE integration - [issue #317][issue317] + +### Bug fixes ### + + - Fixed determining module names from empty modules for `dub test` (by Szabo Bogdan) - [pull #458][issue458] + - Fixed generating VisualStudio solution files on Win64 (by Nicholas Londey) - [pull #455][issue455] + - Fixed erroneously adding "executable" dependencies to the list of link dependencies (by Михаил Страшун aka Dicebot) - [pull #474][issue474] + - Fixed overriding the default source paths with `"sourcePaths"` - [issue #483][issue483] + - Fixed removing packages when build output files exist - [issue #377][issue377] + - Fixed handling of sub package references that specify an explicit path - [issue #448][issue448] + - Fixed erroneous detection of a "sourcemain.d" source file under certain circumstances - [issue #487][issue487] + - Fixed `dub build -t ddox` on OS X - [issue #354][issue354] + - Fixed using unique temporary files (by Михаил Страшун aka Dicebot) - [issue #482][issue482], [pull #497][issue497] + - Fixed compiler command line issues on Windows with `--buildMode=singleFile` (by machindertech) - [pull #505][issue505] + - Fixed a version range match error (">=A . +There is a central [package registry](https://github.com/dlang/dub-registry/) located at . + +[![GitHub tag](https://img.shields.io/github/tag/dlang/dub.svg?maxAge=86400)](#) [![Build Status](https://travis-ci.org/dlang/dub.svg?branch=master)](https://travis-ci.org/dlang/dub) [![Coverage Status](https://coveralls.io/repos/dlang/dub/badge.svg)](https://coveralls.io/r/dlang/dub) ## Introduction -DUB emerged as a more general replacement for [vibe.d's](http://vibed.org/) package manager. 
It does not imply a dependecy to vibe.d for packages and was extended to not only directly build projects, but also to generate project files (currently [VisualD](https://github.com/rainers/visuald)). -[Mono-D](http://mono-d.alexanderbothe.com/) also support the use of dub.json (dub's package description) as project file. +DUB emerged as a more general replacement for [vibe.d's](http://vibed.org/) package manager. It does not imply a dependency to vibe.d for packages and was extended to not only directly build projects, but also to generate project files (currently [VisualD](https://github.com/rainers/visuald)). +[Mono-D](http://mono-d.alexanderbothe.com/) also supports the use of dub.json (dub's package description) as the project file. The project's philosophy is to keep things as simple as possible. All that is needed to make a project a dub package is to write a short [dub.json](http://code.dlang.org/publish) file and put the source code into a `source` subfolder. It *can* then be registered on the public [package registry](http://code.dlang.org) to be made available for everyone. Any dependencies specified in `dub.json` are automatically downloaded and made available to the project during the build process. ## Key features - Simple package and build description not getting in your way - - Integrated with Git, avoiding maintainance tasks such as incrementing version numbers or uploading new project releases + - Integrated with Git, avoiding maintenance tasks such as incrementing version numbers or uploading new project releases - Generates VisualD project/solution files, integrated into MonoD - Support for DMD, GDC and LDC (common DMD flags are translated automatically) - Supports development workflows by optionally using local directories as a package source @@ -42,7 +44,9 @@ ### OS X -Chris Molozian has added DUB to [Homebrew](http://mxcl.github.io/homebrew/). 
Use `brew install dub` or `brew install dub --HEAD` to install the stable or the git HEAD version respectively. +Chris Molozian has added DUB to [Homebrew](http://mxcl.github.io/homebrew/). Use `brew install dub` to install the stable version, optionally adding `--HEAD`/`--devel` to install the latest git master or development release respectively. + +There is also a [MacPorts](https://www.macports.org/) package available. Type `sudo port install dub` to install the latest stable version. ### Windows @@ -50,4 +54,4 @@ ## Using DUB as a library -The [DUB package of DUB](http://code.dlang.org/packages/dub) can be used as a library to load or manipulate packages, or to resemble any functionality of the command line tool. The former task can be achieved by using the [Package class](https://github.com/D-Programming-Language/dub/blob/master/source/dub/package_.d#L40). For examples on how to replicate the command line functionality, see [commandline.d](https://github.com/D-Programming-Language/dub/blob/master/source/dub/commandline.d). +The [DUB package of DUB](http://code.dlang.org/packages/dub) can be used as a library to load or manipulate packages, or to resemble any functionality of the command line tool. The former task can be achieved by using the [Package class](https://github.com/dlang/dub/blob/master/source/dub/package_.d#L40). For examples on how to replicate the command line functionality, see [commandline.d](https://github.com/dlang/dub/blob/master/source/dub/commandline.d). 
diff --git a/build-files.txt b/build-files.txt index 95a467b..3d48908 100644 --- a/build-files.txt +++ b/build-files.txt @@ -2,6 +2,7 @@ source/dub/commandline.d source/dub/dependency.d source/dub/dependencyresolver.d +source/dub/description.d source/dub/dub.d source/dub/init.d source/dub/packagemanager.d @@ -15,18 +16,31 @@ source/dub/compilers/dmd.d source/dub/compilers/gdc.d source/dub/compilers/ldc.d +source/dub/compilers/utils.d source/dub/generators/build.d source/dub/generators/cmake.d source/dub/generators/generator.d source/dub/generators/sublimetext.d +source/dub/generators/targetdescription.d source/dub/generators/visuald.d +source/dub/internal/libInputVisitor.d +source/dub/internal/sdlang/ast.d +source/dub/internal/sdlang/exception.d +source/dub/internal/sdlang/lexer.d +source/dub/internal/sdlang/package.d +source/dub/internal/sdlang/parser.d +source/dub/internal/sdlang/symbol.d +source/dub/internal/sdlang/token.d +source/dub/internal/sdlang/util.d source/dub/internal/utils.d source/dub/internal/vibecompat/core/file.d source/dub/internal/vibecompat/core/log.d source/dub/internal/vibecompat/data/json.d +source/dub/internal/vibecompat/data/serialization.d source/dub/internal/vibecompat/data/utils.d source/dub/internal/vibecompat/inet/path.d source/dub/internal/vibecompat/inet/url.d +source/dub/recipe/io.d source/dub/recipe/json.d source/dub/recipe/packagerecipe.d source/dub/recipe/sdl.d diff --git a/build-gdc.sh b/build-gdc.sh new file mode 100755 index 0000000..1924b08 --- /dev/null +++ b/build-gdc.sh @@ -0,0 +1,25 @@ +#!/bin/sh +set -e + +if [ "$GDC" = "" ]; then + GDC=gdc +fi + +# link against libcurl +LIBS=`pkg-config --libs libcurl 2>/dev/null || echo "-lcurl"` + +# adjust linker flags for gdc command line +LIBS=`echo "$LIBS" | sed 's/^-L/-L-L/; s/ -L/ -L-L/g; s/^-l/-L-l/; s/ -l/ -L-l/g'` + +echo Generating version file... 
+GITVER=$(git describe) || GITVER=unknown +echo "module dub.version_;" > source/dub/version_.d +echo "enum dubVersion = \"$GITVER\";" >> source/dub/version_.d + +echo Running $GDC... +$GDC -obin/dub -lcurl -w -fversion=DubUseCurl -Isource $* $LIBS @build-files.txt +echo DUB has been built as bin/dub. +echo +echo You may want to run +echo sudo ln -s $(pwd)/bin/dub /usr/local/bin +echo now. diff --git a/build.cmd b/build.cmd index 7f34fbf..f965194 100644 --- a/build.cmd +++ b/build.cmd @@ -5,7 +5,6 @@ @for /f %%i in ('git describe') do @set GITVER=%%i @echo module dub.version_; > source\dub\version_.d @echo enum dubVersion = "%GITVER%"; >> source\dub\version_.d -@echo enum initialCompilerBinary = "%DC%"; >> source\dub\version_.d @echo Executing %DC%... @%DC% -ofbin\dub.exe -g -debug -w -version=DubUseCurl -Isource curl.lib %* @build-files.txt diff --git a/build.sh b/build.sh index 58bd440..7cd478e 100755 --- a/build.sh +++ b/build.sh @@ -1,43 +1,63 @@ -#!/bin/sh +#!/usr/bin/env bash set -e -if [ "$DC" = "" ]; then - command -v gdmd >/dev/null 2>&1 && DC=gdmd || true - command -v ldmd2 >/dev/null 2>&1 && DC=ldmd2 || true - command -v dmd >/dev/null 2>&1 && DC=dmd || true +if [ "$DMD" = "" ]; then + if [ ! "$DC" = "" ]; then # backwards compatibility with DC + DMD=$DC + else + command -v gdmd >/dev/null 2>&1 && DMD=gdmd || true + command -v ldmd2 >/dev/null 2>&1 && DMD=ldmd2 || true + command -v dmd >/dev/null 2>&1 && DMD=dmd || true + fi fi -if [ "$DC" = "" ]; then - echo >&2 "Failed to detect D compiler. Use DC=... to set a dmd compatible binary manually." +if [ "$DMD" = "" ]; then + echo >&2 "Failed to detect D compiler. Use DMD=... to set a dmd compatible binary manually." 
exit 1 fi -# link against libcurl -LIBS=`pkg-config --libs libcurl 2>/dev/null || echo "-lcurl"` +VERSION=$($DMD --version 2>/dev/null | sed -n 's|DMD.* v||p') +# workaround for link order issues with libcurl (phobos needs to come before curl) +if [[ $VERSION < 2.069.0 ]]; then + # link against libcurl + LIBS=`pkg-config --libs libcurl 2>/dev/null || echo "-lcurl"` -# fix for modern GCC versions with --as-needed by default -if [ "$DC" = "dmd" ]; then - if [ `uname` = "Linux" ]; then - LIBS="-l:libphobos2.a $LIBS" - else - LIBS="-lphobos2 $LIBS" + # fix for modern GCC versions with --as-needed by default + if [[ `$DMD --help | head -n1 | grep 'DMD\(32\|64\)'` ]]; then + if [ `uname` = "Linux" ]; then + LIBS="-l:libphobos2.a $LIBS" + else + LIBS="-lphobos2 $LIBS" + fi + elif [[ `$DMD --help | head -n1 | grep '^LDC '` ]]; then + if [ `uname` = "SunOS" ]; then + LIBS="-lnsl -lsocket -lphobos2-ldc $LIBS" + else + LIBS="-lphobos2-ldc $LIBS" + fi fi -elif [ "$DC" = "ldmd2" ]; then - LIBS="-lphobos2-ldc $LIBS" + + # adjust linker flags for dmd command line + LIBS=`echo "$LIBS" | sed 's/^-L/-L-L/; s/ -L/ -L-L/g; s/^-l/-L-l/; s/ -l/ -L-l/g'` fi -# adjust linker flags for dmd command line -LIBS=`echo "$LIBS" | sed 's/^-L/-L-L/; s/ -L/ -L-L/g; s/^-l/-L-l/; s/ -l/ -L-l/g'` +if [ "$GITVER" = "" ]; then + GITVER=$(git describe) || echo "Could not determine a version with git." +fi +if [ "$GITVER" != "" ]; then + echo Generating version file... + echo "module dub.version_;" > source/dub/version_.d + echo "enum dubVersion = \"$GITVER\";" >> source/dub/version_.d +else + echo Using existing version file. +fi -echo Generating version file... -GITVER=$(git describe) || GITVER=unknown -echo "module dub.version_;" > source/dub/version_.d -echo "enum dubVersion = \"$GITVER\";" >> source/dub/version_.d -echo "enum initialCompilerBinary = \"$DC\";" >> source/dub/version_.d +# For OSX compatibility >= 10.7 +MACOSX_DEPLOYMENT_TARGET=10.7 - -echo Running $DC... 
-$DC -ofbin/dub -w -version=DubUseCurl -Isource $* $LIBS @build-files.txt +echo Running $DMD... +$DMD -ofbin/dub -g -O -w -version=DubUseCurl -Isource $* $LIBS @build-files.txt +bin/dub --version echo DUB has been built as bin/dub. echo echo You may want to run diff --git a/dub.json b/dub.json deleted file mode 100644 index 44a38a9..0000000 --- a/dub.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "name": "dub", - "description": "Package manager for D packages", - "license": "MIT", - "copyright": "Copyright © 2012-2014 rejectedsoftware e.K., Copyright © 2012-2014 Matthias Dondorff", - "authors": [ - "Matthias Dondorff", - "Sönke Ludwig" - ], - "targetPath": "bin", - "configurations": [ - { - "name": "application", - "targetType": "executable", - "mainSourceFile": "source/app.d", - "libs": ["curl"], - "versions": ["DubUseCurl"] - }, - { - "name": "library", - "targetType": "library", - "excludedSourceFiles": ["source/app.d"], - "libs": ["curl"], - "copyFiles-windows": ["bin/libcurl.dll", "bin/libeay32.dll", "bin/ssleay32.dll"], - "versions": ["DubUseCurl"] - }, - { - "name": "library-nonet", - "targetType": "library", - "dependencies": { - "vibe-d": {"version": "~>0.7.19-rc.4", "optional": true} - }, - "excludedSourceFiles": ["source/app.d"] - } - ] -} diff --git a/dub.sdl b/dub.sdl new file mode 100644 index 0000000..3155a00 --- /dev/null +++ b/dub.sdl @@ -0,0 +1,34 @@ +name "dub" +description "Package manager for D packages" +authors "Matthias Dondorff" "Sönke Ludwig" +copyright "Copyright © 2012-2016 rejectedsoftware e.K., Copyright © 2012-2014 Matthias Dondorff" +license "MIT" + +targetPath "bin" + +configuration "application" { + targetType "executable" + mainSourceFile "source/app.d" + libs "curl" + versions "DubUseCurl" +} + +configuration "library" { + targetType "library" + libs "curl" + excludedSourceFiles "source/app.d" + copyFiles "bin/libcurl.dll" "bin/libeay32.dll" "bin/ssleay32.dll" platform="windows" + versions "DubUseCurl" +} + +configuration 
"library-nonet" { + dependency "vibe-d:http" version=">=0.7.30 <=0.9.0" optional=true + targetType "library" + excludedSourceFiles "source/app.d" +} + +configuration "dynamic-library-nonet" { + dependency "vibe-d:http" version=">=0.7.30 <=0.9.0" optional=true + targetType "dynamicLibrary" + excludedSourceFiles "source/app.d" +} diff --git a/dub.selections.json b/dub.selections.json new file mode 100644 index 0000000..2507d5a --- /dev/null +++ b/dub.selections.json @@ -0,0 +1,11 @@ +{ + "fileVersion": 1, + "versions": { + "libasync": "0.7.9", + "libev": "5.0.0+4.04", + "libevent": "2.0.1+2.0.16", + "memutils": "0.4.8", + "openssl": "1.1.5+1.0.1g", + "vibe-d": "0.7.31" + } +} diff --git a/examples/app-sdl/dub.sdl b/examples/app-sdl/dub.sdl new file mode 100644 index 0000000..2a4b2da --- /dev/null +++ b/examples/app-sdl/dub.sdl @@ -0,0 +1,2 @@ +name "app-example"; +description "A simple D application"; diff --git a/examples/app-sdl/source/app.d b/examples/app-sdl/source/app.d new file mode 100644 index 0000000..1fa5bc3 --- /dev/null +++ b/examples/app-sdl/source/app.d @@ -0,0 +1,6 @@ +import std.stdio; + +void main() +{ + writeln("Hello, World."); +} diff --git a/scripts/fish-completion/dub.fish b/scripts/fish-completion/dub.fish index 1207484..f55f345 100644 --- a/scripts/fish-completion/dub.fish +++ b/scripts/fish-completion/dub.fish @@ -49,7 +49,7 @@ complete -c dub -n "contains '$cmd' (commandline -poc)" -s a -l arch -r -d "Force architecture" complete -c dub -n "contains '$cmd' (commandline -poc)" -s d -l debug -r -d "Debug identifier" complete -c dub -n "contains '$cmd' (commandline -poc)" -l nodeps -d "No dependency check" - complete -c dub -n "contains '$cmd' (commandline -poc)" -s b -l build -u -x -d "Build type" -a "debug plain release release-nobounds unittest profile docs ddox cov unittest-cov" + complete -c dub -n "contains '$cmd' (commandline -poc)" -s b -l build -u -x -d "Build type" -a "debug plain release release-debug release-nobounds unittest 
profile profile-gc docs ddox cov unittest-cov" complete -c dub -n "contains '$cmd' (commandline -poc)" -l build-mode -x -d "How compiler & linker are invoked" -a "separate allAtOnce singleFile" complete -c dub -n "contains '$cmd' (commandline -poc)" -l compiler -x -d "Compiler binary" -a "dmd gdc ldc gdmd ldmd" end diff --git a/scripts/rpm-package/make_installer.sh b/scripts/rpm-package/make_installer.sh old mode 100644 new mode 100755 index 892a650..eb2ab86 --- a/scripts/rpm-package/make_installer.sh +++ b/scripts/rpm-package/make_installer.sh @@ -4,7 +4,7 @@ DUB_PATH=`pwd` #rm -f ~/rpmbuild/SOURCES/dub.tar.gz #tar -pczf ~/rpmbuild/SOURCES/dub.tar.gz source build-files.txt build.sh LICENSE* -cd installer/rpm/ +cd scripts/rpm-package/ for i in $(git describe | tr "-" "\n"); do if [ "$VER" == "" ]; then VER=${i:1} diff --git a/scripts/win-installer/EnvVarUpdate.nsh b/scripts/win-installer/EnvVarUpdate.nsh index b67e3ba..245499b 100644 --- a/scripts/win-installer/EnvVarUpdate.nsh +++ b/scripts/win-installer/EnvVarUpdate.nsh @@ -145,7 +145,7 @@ Goto EnvVarUpdate_Restore_Vars ${EndIf} - ;;khc - here check if length is going to be greater then max string length + ;;khc - here check if length is going to be greater than max string length ;; and abort if so - also abort if original path empty - may mean ;; it was too long as well- write message to say set it by hand diff --git a/scripts/win-installer/make_installer.cmd b/scripts/win-installer/make_installer.cmd index 943d94c..b2c25ae 100644 --- a/scripts/win-installer/make_installer.cmd +++ b/scripts/win-installer/make_installer.cmd @@ -1,3 +1,3 @@ set GITVER=unknown -for /f %%i in ('git describe') do set GITVER=%%i +for /f %%i in ('git describe --tags') do set GITVER=%%i "%ProgramFiles(x86)%\NSIS\makensis.exe" "/DVersion=%GITVER:~1%" installer.nsi diff --git a/source/app.d b/source/app.d index af2b866..370cd8f 100644 --- a/source/app.d +++ b/source/app.d @@ -11,7 +11,5 @@ int main(string[] args) { - version (unittest) 
return 0; - else return runDubCommandLine(args); + return runDubCommandLine(args); } - diff --git a/source/dub/commandline.d b/source/dub/commandline.d index 108617a..0356c00 100644 --- a/source/dub/commandline.d +++ b/source/dub/commandline.d @@ -1,7 +1,7 @@ /** Defines the behavior of the DUB command line client. - Copyright: © 2012-2013 Matthias Dondorff, Copyright © 2012-2014 Sönke Ludwig + Copyright: © 2012-2013 Matthias Dondorff, Copyright © 2012-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff, Sönke Ludwig */ @@ -13,6 +13,7 @@ import dub.generators.generator; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.core.log; +import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.url; import dub.package_; import dub.packagemanager; @@ -35,71 +36,13 @@ import std.variant; -int runDubCommandLine(string[] args) +/** Retrieves a list of all available commands. + + Commands are grouped by category. +*/ +CommandGroup[] getCommands() { - logDiagnostic("DUB version %s", getDUBVersion()); - - version(Windows){ - // rdmd uses $TEMP to compute a temporary path. since cygwin substitutes backslashes - // with slashes, this causes OPTLINK to fail (it thinks path segments are options) - // we substitute the other way around here to fix this. - environment["TEMP"] = environment["TEMP"].replace("/", "\\"); - } - - // split application arguments from DUB arguments - string[] app_args; - auto app_args_idx = args.countUntil("--"); - if (app_args_idx >= 0) { - app_args = args[app_args_idx+1 .. $]; - args = args[0 .. app_args_idx]; - } - args = args[1 .. 
$]; // strip the application name - - // handle direct dub options - if (args.length) switch (args[0]) - { - case "--version": - showVersion(); - return 0; - - default: - break; - } - - // parse general options - bool verbose, vverbose, quiet, vquiet; - bool help, annotate; - LogLevel loglevel = LogLevel.info; - string[] registry_urls; - string root_path = getcwd(); - - auto common_args = new CommandArgs(args); - try { - common_args.getopt("h|help", &help, ["Display general or command specific help"]); - common_args.getopt("root", &root_path, ["Path to operate in instead of the current working dir"]); - common_args.getopt("registry", ®istry_urls, ["Search the given DUB registry URL first when resolving dependencies. Can be specified multiple times."]); - common_args.getopt("annotate", &annotate, ["Do not perform any action, just print what would be done"]); - common_args.getopt("v|verbose", &verbose, ["Print diagnostic output"]); - common_args.getopt("vverbose", &vverbose, ["Print debug output"]); - common_args.getopt("q|quiet", &quiet, ["Only print warnings and errors"]); - common_args.getopt("vquiet", &vquiet, ["Print no messages"]); - common_args.getopt("cache", &defaultPlacementLocation, ["Puts any fetched packages in the specified location [local|system|user]."]); - - if( vverbose ) loglevel = LogLevel.debug_; - else if( verbose ) loglevel = LogLevel.diagnostic; - else if( vquiet ) loglevel = LogLevel.none; - else if( quiet ) loglevel = LogLevel.warn; - setLogLevel(loglevel); - } catch (Throwable e) { - logError("Error processing arguments: %s", e.msg); - logDiagnostic("Full exception: %s", e.toString().sanitize); - logInfo("Run 'dub help' for usage information."); - return 1; - } - - // create the list of all supported commands - - CommandGroup[] commands = [ + return [ CommandGroup("Package creation", new InitCommand ), @@ -123,13 +66,86 @@ new AddLocalCommand, new RemoveLocalCommand, new ListCommand, + new SearchCommand, new ListInstalledCommand, new 
AddOverrideCommand, new RemoveOverrideCommand, new ListOverridesCommand, new CleanCachesCommand, + new ConvertCommand, ) ]; +} + + +/** Processes the given command line and executes the appropriate actions. + + Params: + args = This command line argument array as received in `main`. The first + entry is considered to be the name of the binary invoked. + + Returns: + Returns the exit code that is supposed to be returned to the system. +*/ +int runDubCommandLine(string[] args) +{ + logDiagnostic("DUB version %s", getDUBVersion()); + + version(Windows){ + // rdmd uses $TEMP to compute a temporary path. since cygwin substitutes backslashes + // with slashes, this causes OPTLINK to fail (it thinks path segments are options) + // we substitute the other way around here to fix this. + environment["TEMP"] = environment["TEMP"].replace("/", "\\"); + } + + // special single-file package shebang syntax + if (args.length >= 2 && args[1].endsWith(".d")) { + args = args[0] ~ ["run", "-q", "--temp-build", "--single", args[1], "--"] ~ args[2 ..$]; + } + + // split application arguments from DUB arguments + string[] app_args; + auto app_args_idx = args.countUntil("--"); + if (app_args_idx >= 0) { + app_args = args[app_args_idx+1 .. $]; + args = args[0 .. app_args_idx]; + } + args = args[1 .. 
$]; // strip the application name + + // handle direct dub options + if (args.length) switch (args[0]) + { + case "--version": + showVersion(); + return 0; + + default: + break; + } + + // parse general options + CommonOptions options; + LogLevel loglevel = LogLevel.info; + options.root_path = getcwd(); + + auto common_args = new CommandArgs(args); + try { + options.prepare(common_args); + + if (options.vverbose) loglevel = LogLevel.debug_; + else if (options.verbose) loglevel = LogLevel.diagnostic; + else if (options.vquiet) loglevel = LogLevel.none; + else if (options.quiet) loglevel = LogLevel.warn; + setLogLevel(loglevel); + } catch (Throwable e) { + logError("Error processing arguments: %s", e.msg); + logDiagnostic("Full exception: %s", e.toString().sanitize); + logInfo("Run 'dub help' for usage information."); + return 1; + } + + // create the list of all supported commands + CommandGroup[] commands = getCommands(); // extract the command string cmdname; @@ -138,7 +154,7 @@ cmdname = args[0]; args = args[1 .. 
$]; } else { - if (help) { + if (options.help) { showHelp(commands, common_args); return 0; } @@ -178,7 +194,7 @@ return 1; } - if (help) { + if (options.help) { showCommandHelp(cmd, command_args, common_args); return 0; } @@ -194,22 +210,26 @@ // initialize the root package if (!cmd.skipDubInitialization) { - // initialize DUB - auto package_suppliers = registry_urls.map!(url => cast(PackageSupplier)new RegistryPackageSupplier(URL(url))).array; - dub = new Dub(package_suppliers, root_path); - dub.dryRun = annotate; + if (options.bare) { + dub = new Dub(Path(getcwd())); + dub.rootPath = Path(options.root_path); + dub.defaultPlacementLocation = options.placementLocation; + } else { + // initialize DUB + auto package_suppliers = options.registry_urls.map!(url => cast(PackageSupplier)new RegistryPackageSupplier(URL(url))).array; + dub = new Dub(options.root_path, package_suppliers, options.skipRegistry); + dub.dryRun = options.annotate; + dub.defaultPlacementLocation = options.placementLocation; - // make the CWD package available so that for example sub packages can reference their - // parent package. - try dub.packageManager.getOrLoadPackage(Path(root_path)); - catch (Exception e) { logDiagnostic("No package found in current working directory."); } + // make the CWD package available so that for example sub packages can reference their + // parent package. 
+ try dub.packageManager.getOrLoadPackage(Path(options.root_path)); + catch (Exception e) { logDiagnostic("No package found in current working directory."); } + } } // execute the command - int rc; - try { - rc = cmd.execute(dub, remaining_args, app_args); - } + try return cmd.execute(dub, remaining_args, app_args); catch (UsageException e) { logError("%s", e.msg); logDebug("Full exception: %s", e.toString().sanitize); @@ -217,17 +237,52 @@ return 1; } catch (Throwable e) { - logError("Error executing command %s:", cmd.name); logError("%s", e.msg); logDebug("Full exception: %s", e.toString().sanitize); return 2; } - - if (!cmd.skipDubInitialization) - dub.shutdown(); - return rc; } + +/** Contains and parses options common to all commands. +*/ +struct CommonOptions { + bool verbose, vverbose, quiet, vquiet; + bool help, annotate, bare; + string[] registry_urls; + string root_path; + SkipPackageSuppliers skipRegistry = SkipPackageSuppliers.none; + PlacementLocation placementLocation = PlacementLocation.user; + + /// Parses all common options and stores the result in the struct instance. + void prepare(CommandArgs args) + { + args.getopt("h|help", &help, ["Display general or command specific help"]); + args.getopt("root", &root_path, ["Path to operate in instead of the current working dir"]); + args.getopt("registry", ®istry_urls, ["Search the given DUB registry URL first when resolving dependencies. 
Can be specified multiple times."]); + args.getopt("skip-registry", &skipRegistry, [ + "Skips searching certain package registries for dependencies:", + " none: Search all configured registries (default)", + " standard: Don't search on "~defaultRegistryURL, + " all: Search none of the configured registries", + ]); + args.getopt("annotate", &annotate, ["Do not perform any action, just print what would be done"]); + args.getopt("bare", &bare, ["Read only packages contained in the current directory"]); + args.getopt("v|verbose", &verbose, ["Print diagnostic output"]); + args.getopt("vverbose", &vverbose, ["Print debug output"]); + args.getopt("q|quiet", &quiet, ["Only print warnings and errors"]); + args.getopt("vquiet", &vquiet, ["Print no messages"]); + args.getopt("cache", &placementLocation, ["Puts any fetched packages in the specified location [local|system|user]."]); + } +} + +/** Encapsulates a set of application arguments. + + This class serves two purposes. The first is to provide an API for parsing + command line arguments (`getopt`). At the same time it records all calls + to `getopt` and provides a list of all possible options using the + `recognizedArgs` property. +*/ class CommandArgs { struct Arg { Variant defaultValue; @@ -240,11 +295,20 @@ Arg[] m_recognizedArgs; } + /** Initializes the list of source arguments. + + Note that all array entries are considered application arguments (i.e. + no application name entry is present as the first entry) + */ this(string[] args) { m_args = "dummy" ~ args; } + /** Returns the list of all options recognized. + + This list is created by recording all calls to `getopt`. + */ @property const(Arg)[] recognizedArgs() { return m_recognizedArgs; } void getopt(T)(string names, T* var, string[] help_text = null) @@ -265,11 +329,15 @@ m_recognizedArgs ~= arg; } + /** Resets the list of available source arguments. 
+ */ void dropAllArgs() { m_args = null; } + /** Returns the list of unprocessed arguments and calls `dropAllArgs`. + */ string[] extractRemainingArgs() { auto ret = m_args[1 .. $]; @@ -278,6 +346,13 @@ } } + +/** Base class for all commands. + + This cass contains a high-level description of the command, including brief + and full descriptions and a human readable command line pattern. On top of + that it defines the two main entry functions for command execution. +*/ class Command { string name; string argumentsPattern; @@ -287,12 +362,58 @@ bool hidden = false; // used for deprecated commands bool skipDubInitialization = false; + /** Parses all known command line options without executing any actions. + + This function will be called prior to execute, or may be called as + the only method when collecting the list of recognized command line + options. + + Only `args.getopt` should be called within this method. + */ abstract void prepare(scope CommandArgs args); + + /** Executes the actual action. + + Note that `prepare` will be called before any call to `execute`. + */ abstract int execute(Dub dub, string[] free_args, string[] app_args); + + private bool loadCwdPackage(Dub dub, bool warn_missing_package) + { + bool found = existsFile(dub.rootPath ~ "source/app.d"); + if (!found) + foreach (f; packageInfoFiles) + if (existsFile(dub.rootPath ~ f.filename)) { + found = true; + break; + } + + if (!found) { + if (warn_missing_package) { + logInfo(""); + logInfo("Neither a package description file, nor source/app.d was found in"); + logInfo(dub.rootPath.toNativeString()); + logInfo("Please run DUB from the root directory of an existing package, or run"); + logInfo("\"dub init --help\" to get information on creating a new package."); + logInfo(""); + } + return false; + } + + dub.loadPackage(); + + return true; + } } + +/** Encapsulates a group of commands that fit into a common category. 
+*/ struct CommandGroup { + /// Caption of the command category string caption; + + /// List of commands contained in this group Command[] commands; + this(string caption, Command[] commands...) @@ -309,7 +430,9 @@ class InitCommand : Command { private{ - string m_buildType = "minimal"; + string m_templateType = "minimal"; + PackageFormat m_format = PackageFormat.json; + bool m_nonInteractive; } this() { @@ -323,13 +446,18 @@ override void prepare(scope CommandArgs args) { - args.getopt("t|type", &m_buildType, [ + args.getopt("t|type", &m_templateType, [ "Set the type of project to generate. Available types:", "", "minimal - simple \"hello world\" project (default)", "vibe.d - minimal HTTP server based on vibe.d", "deimos - skeleton for C header bindings", ]); + args.getopt("f|format", &m_format, [ + "Sets the format to use for the package description file. Possible values:", + " " ~ [__traits(allMembers, PackageFormat)].map!(f => f == m_format.init.to!string ? f ~ " (default)" : f).join(", ") + ]); + args.getopt("n|non-interactive", &m_nonInteractive, ["Don't enter interactive mode."]); + } override int execute(Dub dub, string[] free_args, string[] app_args) @@ -341,18 +469,77 @@ dir = free_args[0]; free_args = free_args[1 .. $]; } + + string input(string caption, string default_value) + { + writef("%s [%s]: ", caption, default_value); + auto inp = readln(); + return inp.length > 1 ? inp[0 .. $-1] : default_value; + } + + void depCallback(ref PackageRecipe p, ref PackageFormat fmt) { + import std.datetime: Clock; + + if (m_nonInteractive) return; + + while (true) { + string rawfmt = input("Package recipe format (sdl/json)", fmt.to!string); + if (!rawfmt.length) break; + try { + fmt = rawfmt.to!PackageFormat; + break; + } catch (Exception) { + logError("Invalid format, \""~rawfmt~"\", enter either \"sdl\" or \"json\"."); + } + } + auto author = p.authors.join(", "); + while (true) { + // Tries getting the name until a valid one is given. 
+ import std.regex; + auto nameRegex = regex(`^[a-z0-9\-_]+$`); + string triedName = input("Name", p.name); + if (triedName.matchFirst(nameRegex).empty) { + logError("Invalid name, \""~triedName~"\", names should consist only of lowercase alphanumeric characters, - and _."); + } else { + p.name = triedName; + break; + } + } + p.description = input("Description", p.description); + p.authors = input("Author name", author).split(",").map!(a => a.strip).array; + p.license = input("License", p.license); + string copyrightString = .format("Copyright © %s, %-(%s, %)", Clock.currTime().year, p.authors); + p.copyright = input("Copyright string", copyrightString); + + while (true) { + auto depname = input("Add dependency (leave empty to skip)", null); + if (!depname.length) break; + try { + auto ver = dub.getLatestVersion(depname); + auto dep = ver.isBranch ? Dependency(ver) : Dependency("~>" ~ ver.toString()); + p.buildSettings.dependencies[depname] = dep; + logInfo("Added dependency %s %s", depname, dep.versionSpec); + } catch (Exception e) { + logError("Could not find package '%s'.", depname); + logDebug("Full error: %s", e.toString().sanitize); + } + } + } + //TODO: Remove this block in next version // Checks if argument uses current method of specifying project type. if (free_args.length) { if (["vibe.d", "deimos", "minimal"].canFind(free_args[0])) { - m_buildType = free_args[0]; + m_templateType = free_args[0]; free_args = free_args[1 .. $]; logInfo("Deprecated use of init type. Use --type=[vibe.d | deimos | minimal] in future."); } } - dub.createEmptyPackage(Path(dir), free_args, m_buildType); + dub.createEmptyPackage(Path(dir), free_args, m_templateType, m_format, &depCallback); + + logInfo("Package successfully created in %s", dir.length ? 
dir : "."); return 0; } } @@ -370,17 +557,14 @@ string m_compilerName; string m_arch; string[] m_debugVersions; + string[] m_overrideConfigs; Compiler m_compiler; BuildPlatform m_buildPlatform; BuildSettings m_buildSettings; string m_defaultConfig; bool m_nodeps; bool m_forceRemove = false; - } - - this() - { - m_compilerName = defaultCompiler(); + bool m_single; } override void prepare(scope CommandArgs args) @@ -388,16 +572,19 @@ args.getopt("b|build", &m_buildType, [ "Specifies the type of build to perform. Note that setting the DFLAGS environment variable will override the build type with custom flags.", "Possible names:", - " debug (default), plain, release, release-nobounds, unittest, profile, docs, ddox, cov, unittest-cov and custom types" + " debug (default), plain, release, release-debug, release-nobounds, unittest, profile, profile-gc, docs, ddox, cov, unittest-cov and custom types" ]); args.getopt("c|config", &m_buildConfig, [ "Builds the specified configuration. Configurations can be defined in dub.json" ]); + args.getopt("override-config", &m_overrideConfigs, [ + "Uses the specified configuration for a certain dependency. Can be specified multiple times.", + "Format: --override-config=/" + ]); args.getopt("compiler", &m_compilerName, [ "Specifies the compiler binary to use (can be a path).", "Arbitrary pre- and suffixes to the identifiers below are recognized (e.g. ldc2 or dmd-2.063) and matched to the proper compiler type:", - " "~["dmd", "gdc", "ldc", "gdmd", "ldmd"].join(", "), - "Default value: "~m_compilerName, + " "~["dmd", "gdc", "ldc", "gdmd", "ldmd"].join(", ") ]); args.getopt("a|arch", &m_arch, [ "Force a different architecture (e.g. 
x86 or x86_64)" @@ -408,17 +595,21 @@ args.getopt("nodeps", &m_nodeps, [ "Do not check/update dependencies before building" ]); - args.getopt("force-remove", &m_forceRemove, [ - "Force deletion of fetched packages with untracked files when upgrading" - ]); args.getopt("build-mode", &m_buildMode, [ "Specifies the way the compiler and linker are invoked. Valid values:", " separate (default), allAtOnce, singleFile" ]); + args.getopt("single", &m_single, [ + "Treats the package name as a filename. The file must contain a package recipe comment." + ]); + args.getopt("force-remove", &m_forceRemove, [ + "Deprecated option that does nothing." + ]); } - protected void setupPackage(Dub dub, string package_name) + protected void setupPackage(Dub dub, string package_name, string default_build_type = "debug") { + if (!m_compilerName.length) m_compilerName = dub.defaultCompiler; m_compiler = getCompiler(m_compilerName); m_buildPlatform = m_compiler.determinePlatform(m_buildSettings, m_compilerName, m_arch); m_buildSettings.addDebugVersions(m_debugVersions); @@ -430,72 +621,64 @@ { string msg = "Unknown build configuration: "~m_buildConfig; enum distance = 3; - if (auto match = dub.configurations.getClosestMatch(m_buildConfig, distance)) - msg ~= ". Did you mean '" ~ match ~ "'?"; + auto match = dub.configurations.getClosestMatch(m_buildConfig, distance); + if (match !is null) msg ~= ". 
Did you mean '" ~ match ~ "'?"; enforce(0, msg); } if (m_buildType.length == 0) { - if (environment.get("DFLAGS")) m_buildType = "$DFLAGS"; - else m_buildType = "debug"; + if (environment.get("DFLAGS") !is null) m_buildType = "$DFLAGS"; + else m_buildType = default_build_type; } if (!m_nodeps) { // TODO: only upgrade(select) if necessary, only upgrade(upgrade) every now and then // retrieve missing packages - logDiagnostic("Checking for missing dependencies."); - dub.upgrade(UpgradeOptions.select); - // check for updates - logDiagnostic("Checking for upgrades."); - dub.upgrade(UpgradeOptions.upgrade|UpgradeOptions.printUpgradesOnly|UpgradeOptions.useCachedResult); + dub.project.reinit(); + if (!dub.project.hasAllDependencies) { + logDiagnostic("Checking for missing dependencies."); + if (m_single) dub.upgrade(UpgradeOptions.select | UpgradeOptions.noSaveSelections); + else dub.upgrade(UpgradeOptions.select); + } + + if (!m_single) { + logDiagnostic("Checking for upgrades."); + dub.upgrade(UpgradeOptions.upgrade|UpgradeOptions.printUpgradesOnly|UpgradeOptions.useCachedResult); + } } dub.project.validate(); + + foreach (sc; m_overrideConfigs) { + auto idx = sc.indexOf('/'); + enforceUsage(idx >= 0, "Expected \"/\" as argument to --override-config."); + dub.project.overrideConfiguration(sc[0 .. idx], sc[idx+1 .. 
$]); + } } private bool loadSpecificPackage(Dub dub, string package_name) { - Package pack; - if (!package_name.empty) { - // load package in root_path to enable searching for sub packages - if (loadCwdPackage(dub, null, false)) { - if (package_name.startsWith(":")) - package_name = dub.projectName ~ package_name; - } - pack = dub.packageManager.getFirstPackage(package_name); - enforce(pack, "Failed to find a package named '"~package_name~"'."); - logInfo("Building package %s in %s", pack.name, pack.path.toNativeString()); - dub.rootPath = pack.path; - } - if (!loadCwdPackage(dub, pack, true)) return false; - return true; - } - - private bool loadCwdPackage(Dub dub, Package pack, bool warn_missing_package) - { - if (warn_missing_package) { - bool found = existsFile(dub.rootPath ~ "source/app.d"); - if (!found) - foreach (f; packageInfoFiles) - if (existsFile(dub.rootPath ~ f.filename)) { - found = true; - break; - } - if (!found) { - logInfo(""); - logInfo("Neither a package description file, nor source/app.d was found in"); - logInfo(dub.rootPath.toNativeString()); - logInfo("Please run DUB from the root directory of an existing package, or run"); - logInfo("\"dub init --help\" to get information on creating a new package."); - logInfo(""); - return false; - } + if (m_single) { + enforce(package_name.length, "Missing file name of single-file package."); + dub.loadSingleFilePackage(package_name); + return true; } - if (pack) dub.loadPackage(pack); - else dub.loadPackageFromCwd(); + // load package in root_path to enable searching for sub packages + if (loadCwdPackage(dub, package_name.length == 0)) { + if (package_name.startsWith(":")) + package_name = dub.projectName ~ package_name; + if (!package_name.length) return true; + } + enforce(package_name.length, "No valid root package found - aborting."); + + auto pack = dub.packageManager.getFirstPackage(package_name); + enforce(pack, "Failed to find a package named '"~package_name~"'."); + logInfo("Building package 
%s in %s", pack.name, pack.path.toNativeString()); + dub.rootPath = pack.path; + dub.loadPackage(pack); return true; } } @@ -508,6 +691,7 @@ bool m_run = false; bool m_force = false; bool m_combined = false; + bool m_parallel = false; bool m_printPlatform, m_printBuilds, m_printConfigs; } @@ -545,6 +729,9 @@ args.getopt("print-platform", &m_printPlatform, [ "Prints the identifiers for the current build platform as used for the build fields in dub.json" ]); + args.getopt("parallel", &m_parallel, [ + "Runs multiple compiler instances in parallel, if possible." + ]); } override int execute(Dub dub, string[] free_args, string[] app_args) @@ -589,15 +776,11 @@ gensettings.force = m_force; gensettings.rdmd = m_rdmd; gensettings.tempBuild = m_tempBuild; + gensettings.parallelBuild = m_parallel; logDiagnostic("Generating using %s", m_generator); - if (m_generator == "visuald-combined") { - gensettings.combined = true; - m_generator = "visuald"; - logWarn(`The generator "visuald-combined" is deprecated, please use the --combined switch instead.`); - } dub.generateProject(m_generator, gensettings); - if (m_buildType == "ddox") dub.runDdox(gensettings.run); + if (m_buildType == "ddox") dub.runDdox(gensettings.run, app_args); return 0; } } @@ -664,6 +847,7 @@ private { string m_mainFile; bool m_combined = false; + bool m_parallel = false; bool m_force = false; } @@ -692,8 +876,6 @@ `run the unit tests.` ]; this.acceptsAppArgs = true; - - m_buildType = "unittest"; } override void prepare(scope CommandArgs args) @@ -704,6 +886,9 @@ args.getopt("combined", &m_combined, [ "Tries to build the whole project in a single compiler run." ]); + args.getopt("parallel", &m_parallel, [ + "Runs multiple compiler instances in parallel, if possible." 
+ ]); args.getopt("f|force", &m_force, [ "Forces a recompilation even if the target is up to date" ]); @@ -722,7 +907,7 @@ enforceUsage(free_args.length <= 1, "Expected one or zero arguments."); if (free_args.length >= 1) package_name = free_args[0]; - setupPackage(dub, package_name); + setupPackage(dub, package_name, "unittest"); GeneratorSettings settings; settings.platform = m_buildPlatform; @@ -731,7 +916,9 @@ settings.buildMode = m_buildMode; settings.buildSettings = m_buildSettings; settings.combined = m_combined; + settings.parallelBuild = m_parallel; settings.force = m_force; + settings.tempBuild = m_single; settings.run = true; settings.runArgs = app_args; @@ -741,27 +928,94 @@ } class DescribeCommand : PackageBuildCommand { + private { + bool m_importPaths = false; + bool m_stringImportPaths = false; + bool m_dataList = false; + bool m_dataNullDelim = false; + string[] m_data; + } + this() { this.name = "describe"; this.argumentsPattern = "[]"; this.description = "Prints a JSON description of the project and its dependencies"; this.helpText = [ - "Prints a JSON build description for the root package an all of their dependencies in a format similar to a JSON package description file. This is useful mostly for IDEs.", - "All usual options that are also used for build/run/generate apply." + "Prints a JSON build description for the root package an all of " ~ + "their dependencies in a format similar to a JSON package " ~ + "description file. This is useful mostly for IDEs.", + "", + "All usual options that are also used for build/run/generate apply.", + "", + "When --data=VALUE is supplied, specific build settings for a project " ~ + "will be printed instead (by default, formatted for the current compiler).", + "", + "The --data=VALUE option can be specified multiple times to retrieve " ~ + "several pieces of information at once. A comma-separated list is " ~ + "also acceptable (ex: --data=dflags,libs). 
The data will be output in " ~ + "the same order requested on the command line.", + "", + "The accepted values for --data=VALUE are:", + "", + "main-source-file, dflags, lflags, libs, linker-files, " ~ + "source-files, versions, debug-versions, import-paths, " ~ + "string-import-paths, import-files, options", + "", + "The following are also accepted by --data if --data-list is used:", + "", + "target-type, target-path, target-name, working-directory, " ~ + "copy-files, string-import-files, pre-generate-commands, " ~ + "post-generate-commands, pre-build-commands, post-build-commands, " ~ + "requirements", ]; } override void prepare(scope CommandArgs args) { super.prepare(args); + + args.getopt("import-paths", &m_importPaths, [ + "Shortcut for --data=import-paths --data-list" + ]); + + args.getopt("string-import-paths", &m_stringImportPaths, [ + "Shortcut for --data=string-import-paths --data-list" + ]); + + args.getopt("data", &m_data, [ + "Just list the values of a particular build setting, either for this "~ + "package alone or recursively including all dependencies. Accepts a "~ + "comma-separated list. See above for more details and accepted "~ + "possibilities for VALUE." + ]); + + args.getopt("data-list", &m_dataList, [ + "Output --data information in list format (line-by-line), instead "~ + "of formatting for a compiler command line.", + ]); + + args.getopt("data-0", &m_dataNullDelim, [ + "Output --data information using null-delimiters, rather than "~ + "spaces or newlines. Result is usable with, ex., xargs -0.", + ]); } override int execute(Dub dub, string[] free_args, string[] app_args) { - // disable all log output and use "writeln" to output the JSON description + enforceUsage( + !(m_importPaths && m_stringImportPaths), + "--import-paths and --string-import-paths may not be used together." + ); + + enforceUsage( + !(m_data && (m_importPaths || m_stringImportPaths)), + "--data may not be used together with --import-paths or --string-import-paths." 
+ ); + + // disable all log output to stdout and use "writeln" to output the JSON description auto ll = getLogLevel(); - setLogLevel(LogLevel.none); + setLogLevel(LogLevel.warn); scope (exit) setLogLevel(ll); string package_name; @@ -771,7 +1025,27 @@ m_defaultConfig = dub.project.getDefaultConfiguration(m_buildPlatform); - dub.describeProject(m_buildPlatform, m_buildConfig.length ? m_buildConfig : m_defaultConfig); + auto config = m_buildConfig.length ? m_buildConfig : m_defaultConfig; + + GeneratorSettings settings; + settings.platform = m_buildPlatform; + settings.config = config; + settings.buildType = m_buildType; + settings.compiler = m_compiler; + + if (m_importPaths) { m_data = ["import-paths"]; m_dataList = true; } + else if (m_stringImportPaths) { m_data = ["string-import-paths"]; m_dataList = true; } + + if (m_data.length) { + ListBuildSettingsFormat lt; + with (ListBuildSettingsFormat) + lt = m_dataList ? (m_dataNullDelim ? listNul : list) : (m_dataNullDelim ? commandLineNul : commandLine); + dub.listProjectData(settings, m_data, lt); + } else { + auto desc = dub.project.describe(settings); + writeln(desc.serializeToPrettyJson()); + } + return 0; } } @@ -834,14 +1108,14 @@ this() { this.name = "upgrade"; - this.argumentsPattern = "[]"; - this.description = "Forces an upgrade of all dependencies"; + this.argumentsPattern = "[]"; + this.description = "Forces an upgrade of the dependencies"; this.helpText = [ "Upgrades all dependencies of the package by querying the package registry(ies) for new versions.", "", - "This will also update the versions stored in the selections file ("~SelectedVersions.defaultFile~") accordingly.", + "This will update the versions stored in the selections file ("~SelectedVersions.defaultFile~") accordingly.", "", - "If a package specified, (only) that package will be upgraded. Otherwise all direct and indirect dependencies of the current package will get upgraded." 
+ "If one or more package names are specified, only those dependencies will be upgraded. Otherwise all direct and indirect dependencies of the root package will get upgraded." ]; } @@ -850,15 +1124,15 @@ args.getopt("prerelease", &m_prerelease, [ "Uses the latest pre-release version, even if release versions are available" ]); - args.getopt("force-remove", &m_forceRemove, [ - "Force deletion of fetched packages with untracked files" - ]); args.getopt("verify", &m_verify, [ - "Updates the project and performs a build. If successfull, rewrites the selected versions file ." + "Updates the project and performs a build. If successful, rewrites the selected versions file ." ]); args.getopt("missing-only", &m_missingOnly, [ "Performs an upgrade only for dependencies that don't yet have a version selected. This is also done automatically before each build." ]); + args.getopt("force-remove", &m_forceRemove, [ + "Deprecated option that does nothing." + ]); } override int execute(Dub dub, string[] free_args, string[] app_args) @@ -866,14 +1140,12 @@ enforceUsage(free_args.length <= 1, "Unexpected arguments."); enforceUsage(app_args.length == 0, "Unexpected application arguments."); enforceUsage(!m_verify, "--verify is not yet implemented."); - dub.loadPackageFromCwd(); + dub.loadPackage(); logInfo("Upgrading project in %s", dub.projectPath.toNativeString()); auto options = UpgradeOptions.upgrade|UpgradeOptions.select; if (m_missingOnly) options &= ~UpgradeOptions.upgrade; if (m_prerelease) options |= UpgradeOptions.preRelease; - if (m_forceRemove) options |= UpgradeOptions.forceRemove; - enforceUsage(app_args.length == 0, "Upgrading a specific package is not yet implemented."); - dub.upgrade(options); + dub.upgrade(options, free_args); return 0; } } @@ -882,8 +1154,6 @@ protected { string m_version; bool m_forceRemove = false; - bool m_system = false; - bool m_local = false; } override void prepare(scope CommandArgs args) @@ -893,10 +1163,8 @@ "The remove command also accepts 
\"*\" here as a wildcard to remove all versions of the package from the specified location" ]); - args.getopt("system", &m_system, ["Deprecated: Puts the package into the system wide package cache instead of the user local one."]); - args.getopt("local", &m_local, ["Deprecated: Puts the package into a sub folder of the current working directory. Cannot be mixed with --system."]); args.getopt("force-remove", &m_forceRemove, [ - "Force deletion of fetched packages with untracked files" + "Deprecated option that does nothing" ]); } @@ -934,27 +1202,15 @@ override int execute(Dub dub, string[] free_args, string[] app_args) { - enforceUsage(!m_local || !m_system, "--local and --system are exclusive to each other."); enforceUsage(free_args.length == 1, "Expecting exactly one argument."); enforceUsage(app_args.length == 0, "Unexpected application arguments."); - auto location = defaultPlacementLocation; - if (m_local) - { - logWarn("--local is deprecated. Use --cache=local instead."); - location = PlacementLocation.local; - } - else if (m_system) - { - logWarn("--system is deprecated. Use --cache=system instead."); - location = PlacementLocation.system; - } + auto location = dub.defaultPlacementLocation; auto name = free_args[0]; FetchOptions fetchOpts; fetchOpts |= FetchOptions.forceBranchUpgrade; - fetchOpts |= m_forceRemove ? 
FetchOptions.forceRemove : FetchOptions.none; if (m_version.length) dub.fetch(name, Dependency(m_version), location, fetchOpts); else { try { @@ -985,6 +1241,10 @@ } class RemoveCommand : FetchRemoveCommand { + private { + bool m_nonInteractive; + } + this() { this.name = "remove"; @@ -998,6 +1258,7 @@ override void prepare(scope CommandArgs args) { super.prepare(args); + args.getopt("n|non-interactive", &m_nonInteractive, ["Don't enter interactive mode."]); } override int execute(Dub dub, string[] free_args, string[] app_args) @@ -1006,19 +1267,39 @@ enforceUsage(app_args.length == 0, "Unexpected application arguments."); auto package_id = free_args[0]; - auto location = defaultPlacementLocation; - if (m_local) - { - logWarn("--local is deprecated. Use --cache=local instead."); - location = PlacementLocation.local; - } - else if (m_system) - { - logWarn("--system is deprecated. Use --cache=system instead."); - location = PlacementLocation.system; + auto location = dub.defaultPlacementLocation; + + size_t resolveVersion(in Package[] packages) { + // just remove only package version + if (packages.length == 1) + return 0; + + writeln("Select version of '", package_id, "' to remove from location '", location, "':"); + foreach (i, pack; packages) + writefln("%s) %s", i + 1, pack.version_); + writeln(packages.length + 1, ") ", "all versions"); + while (true) { + writef("> "); + auto inp = readln(); + if (!inp.length) // Ctrl+D + return size_t.max; + inp = inp.stripRight; + if (!inp.length) // newline or space + continue; + try { + immutable selection = inp.to!size_t - 1; + if (selection <= packages.length) + return selection; + } catch (ConvException e) { + } + logError("Please enter a number between 1 and %s.", packages.length + 1); + } } - dub.remove(package_id, m_version, location, m_forceRemove); + if (m_nonInteractive || !m_version.empty) + dub.remove(package_id, m_version, location); + else + dub.remove(package_id, location, &resolveVersion); return 0; } } @@ 
-1059,7 +1340,16 @@ this.name = "add-path"; this.argumentsPattern = ""; this.description = "Adds a default package search path"; - this.helpText = ["Adds a default package search path"]; + this.helpText = [ + "Adds a default package search path. All direct sub folders of this path will be searched for package descriptions and will be made available as packages. Using this command has the equivalent effect as calling 'dub add-local' on each of the sub folders manually.", + "", + "Any packages registered using add-path will be preferred over packages downloaded from the package registry when searching for dependencies during a build operation.", + "", + "The version of the packages will be determined by one of the following:", + " - For GIT working copies, the last tag (git describe) is used to determine the version", + " - If the package contains a \"version\" field in the package description, this is used", + " - If neither of those apply, \"~master\" is assumed" + ]; } override int execute(Dub dub, string[] free_args, string[] app_args) @@ -1076,7 +1366,7 @@ this.name = "remove-path"; this.argumentsPattern = ""; this.description = "Removes a package search path"; - this.helpText = ["Removes a package search path"]; + this.helpText = ["Removes a package search path previously added with add-path."]; } override int execute(Dub dub, string[] free_args, string[] app_args) @@ -1093,7 +1383,13 @@ this.name = "add-local"; this.argumentsPattern = " []"; this.description = "Adds a local package directory (e.g. a git repository)"; - this.helpText = ["Adds a local package directory (e.g. a git repository)"]; + this.helpText = [ + "Adds a local package directory to be used during dependency resolution. 
This command is useful for registering local packages, such as GIT working copies, that are either not available in the package registry, or are supposed to be overwritten.", + "", + "The version of the package is either determined automatically (see the \"add-path\" command, or can be explicitly overwritten by passing a version on the command line.", + "", + "See 'dub add-path -h' for a way to register multiple local packages at once." + ]; } override int execute(Dub dub, string[] free_args, string[] app_args) @@ -1116,7 +1412,8 @@ override int execute(Dub dub, string[] free_args, string[] app_args) { - enforceUsage(free_args.length == 1, "Missing path to package."); + enforceUsage(free_args.length >= 1, "Missing package path argument."); + enforceUsage(free_args.length <= 1, "Expected the package path to be the only argument."); dub.removeLocalPackage(free_args[0], m_system); return 0; } @@ -1135,9 +1432,11 @@ override void prepare(scope CommandArgs args) {} override int execute(Dub dub, string[] free_args, string[] app_args) { + enforceUsage(free_args.length == 0, "Expecting no extra arguments."); + enforceUsage(app_args.length == 0, "The list command supports no application arguments."); logInfo("Packages present in the system and known to dub:"); foreach (p; dub.packageManager.getPackageIterator()) - logInfo(" %s %s: %s", p.name, p.ver, p.path.toNativeString()); + logInfo(" %s %s: %s", p.name, p.version_, p.path.toNativeString()); logInfo(""); return 0; } @@ -1153,6 +1452,42 @@ } } +class SearchCommand : Command { + this() + { + this.name = "search"; + this.argumentsPattern = ""; + this.description = "Search for available packages."; + this.helpText = [ + "Search all specified DUB registries for packages matching query." 
+ ]; + } + override void prepare(scope CommandArgs args) {} + override int execute(Dub dub, string[] free_args, string[] app_args) + { + enforce(free_args.length == 1, "Expected one argument."); + auto res = dub.searchPackages(free_args[0]); + if (res.empty) + { + logError("No matches found."); + return 1; + } + auto justify = res + .map!((descNmatches) => descNmatches[1]) + .joiner + .map!(m => m.name.length + m.version_.length) + .reduce!max + " ()".length; + justify += (~justify & 3) + 1; // round to next multiple of 4 + foreach (desc, matches; res) + { + logInfo("==== %s ====", desc); + foreach (m; matches) + logInfo("%s%s", leftJustify(m.name ~ " (" ~ m.version_ ~ ")", justify), m.description); + } + return 0; + } +} + /******************************************************************************/ /* OVERRIDES */ @@ -1188,6 +1523,7 @@ auto ver = Dependency(free_args[1]); if (existsFile(Path(free_args[2]))) { auto target = Path(free_args[2]); + if (!target.absolute) target = Path(getcwd()) ~ target; dub.packageManager.addOverride(scope_, pack, ver, target); logInfo("Added override %s %s => %s", pack, ver, target); } else { @@ -1237,7 +1573,7 @@ this.argumentsPattern = ""; this.description = "Prints a list of all local package overrides"; this.helpText = [ - "Prints a list of all overriden packages added via \"dub add-override\"." + "Prints a list of all overridden packages added via \"dub add-override\"." 
]; } override void prepare(scope CommandArgs args) {} @@ -1277,7 +1613,6 @@ override int execute(Dub dub, string[] free_args, string[] app_args) { - dub.cleanCaches(); return 0; } } @@ -1317,7 +1652,7 @@ { args.getopt("compiler-status", &m_compilerStatusCode, ["The expected status code of the compiler run"]); args.getopt("compiler-regex", &m_compilerRegex, ["A regular expression used to match against the compiler output"]); - args.getopt("linker-status", &m_linkerStatusCode, ["The expected status code of the liner run"]); + args.getopt("linker-status", &m_linkerStatusCode, ["The expected status code of the linker run"]); args.getopt("linker-regex", &m_linkerRegex, ["A regular expression used to match against the linker output"]); args.getopt("program-status", &m_programStatusCode, ["The expected status code of the built executable"]); args.getopt("program-regex", &m_programRegex, ["A regular expression used to match against the program output"]); @@ -1326,11 +1661,16 @@ super.prepare(args); // speed up loading when in test mode - if (m_testPackage.length) skipDubInitialization = true; + if (m_testPackage.length) { + skipDubInitialization = true; + m_nodeps = true; + } } override int execute(Dub dub, string[] free_args, string[] app_args) { + import std.format : formattedWrite; + if (m_testPackage.length) { dub = new Dub(Path(getcwd())); @@ -1390,25 +1730,64 @@ } } + static void fixPathDependency(string pack, ref Dependency dep) { + if (dep.path.length > 0) { + auto mainpack = getBasePackageName(pack); + dep.path = Path("../") ~ mainpack; + } + } + + void fixPathDependencies(ref PackageRecipe recipe, Path base_path) + { + foreach (name, ref dep; recipe.buildSettings.dependencies) + fixPathDependency(name, dep); + + foreach (ref cfg; recipe.configurations) + foreach (name, ref dep; cfg.buildSettings.dependencies) + fixPathDependency(name, dep); + + foreach (ref subp; recipe.subPackages) + if (subp.path.length) { + auto sub_path = base_path ~ Path(subp.path); + auto 
pack = prj.packageManager.getOrLoadPackage(sub_path); + fixPathDependencies(pack.recipe, sub_path); + pack.storeInfo(sub_path); + } else fixPathDependencies(subp.recipe, base_path); + } + bool[string] visited; foreach (pack_; prj.getTopologicalPackageList()) { auto pack = pack_.basePackage; if (pack.name in visited) continue; visited[pack.name] = true; + auto dst_path = path ~ pack.name; logInfo("Copy package '%s' to destination folder...", pack.name); - copyFolderRec(pack.path, path ~ pack.name); + copyFolderRec(pack.path, dst_path); + + // adjust all path based dependencies + fixPathDependencies(pack.recipe, dst_path); + + // overwrite package description file with additional version information + pack.storeInfo(dst_path); } + logInfo("Executing dustmite..."); - auto testcmd = format("dub dustmite --vquiet --test-package=%s", prj.name); - if (m_compilerStatusCode != int.min) testcmd ~= format(" --compiler-status=%s", m_compilerStatusCode); - if (m_compilerRegex.length) testcmd ~= format(" \"--compiler-regex=%s\"", m_compilerRegex); - if (m_linkerStatusCode != int.min) testcmd ~= format(" --linker-status=%s", m_linkerStatusCode); - if (m_linkerRegex.length) testcmd ~= format(" \"--linker-regex=%s\"", m_linkerRegex); - if (m_programStatusCode != int.min) testcmd ~= format(" --program-status=%s", m_programStatusCode); - if (m_programRegex.length) testcmd ~= format(" \"--program-regex=%s\"", m_programRegex); + auto testcmd = appender!string(); + testcmd.formattedWrite("%s dustmite --vquiet --test-package=%s --build=%s --config=%s", + thisExePath, prj.name, m_buildType, m_buildConfig); + + if (m_compilerName.length) testcmd.formattedWrite(" \"--compiler=%s\"", m_compilerName); + if (m_arch.length) testcmd.formattedWrite(" --arch=%s", m_arch); + if (m_compilerStatusCode != int.min) testcmd.formattedWrite(" --compiler-status=%s", m_compilerStatusCode); + if (m_compilerRegex.length) testcmd.formattedWrite(" \"--compiler-regex=%s\"", m_compilerRegex); + if 
(m_linkerStatusCode != int.min) testcmd.formattedWrite(" --linker-status=%s", m_linkerStatusCode); + if (m_linkerRegex.length) testcmd.formattedWrite(" \"--linker-regex=%s\"", m_linkerRegex); + if (m_programStatusCode != int.min) testcmd.formattedWrite(" --program-status=%s", m_programStatusCode); + if (m_programRegex.length) testcmd.formattedWrite(" \"--program-regex=%s\"", m_programRegex); if (m_combined) testcmd ~= " --combined"; - // TODO: pass all original parameters - auto dmpid = spawnProcess(["dustmite", path.toNativeString(), testcmd]); + // TODO: pass *all* original parameters + logDiagnostic("Running dustmite: %s", testcmd); + auto dmpid = spawnProcess(["dustmite", path.toNativeString(), testcmd.data]); return dmpid.wait(); } return 0; @@ -1420,6 +1799,8 @@ import std.encoding; import std.regex; + logInfo("%s", output); + if (code_match != int.min && code != code_match) { logInfo("Exit code %s doesn't match expected value %s", code, code_match); throw new DustmiteMismatchException; @@ -1455,6 +1836,46 @@ /******************************************************************************/ +/* CONVERT command */ +/******************************************************************************/ + +class ConvertCommand : Command { + private { + string m_format; + bool m_stdout; + } + + this() + { + this.name = "convert"; + this.argumentsPattern = ""; + this.description = "Converts the file format of the package recipe."; + this.helpText = [ + "This command will convert between JSON and SDLang formatted package recipe files.", + "", + "Warning: Beware that any formatting and comments within the package recipe will get lost in the conversion process." + ]; + } + + override void prepare(scope CommandArgs args) + { + args.getopt("f|format", &m_format, ["Specifies the target package recipe format. 
Possible values:", " json, sdl"]); + args.getopt("s|stdout", &m_stdout, ["Outputs the converted package recipe to stdout instead of writing to disk."]); + } + + override int execute(Dub dub, string[] free_args, string[] app_args) + { + enforceUsage(app_args.length == 0, "Unexpected application arguments."); + enforceUsage(free_args.length == 0, "Unexpected arguments: "~free_args.join(" ")); + enforceUsage(m_format.length > 0, "Missing target format file extension (--format=...)."); + if (!loadCwdPackage(dub, true)) return 1; + dub.convertRecipe(m_format, m_stdout); + return 0; + } +} + + +/******************************************************************************/ /* HELP */ /******************************************************************************/ @@ -1552,13 +1973,13 @@ assert(names.length == 1 || names.length == 2); string sarg = names[0].length == 1 ? names[0] : null; string larg = names[0].length > 1 ? names[0] : names.length > 1 ? names[1] : null; - if (sarg) { + if (sarg !is null) { writeWS(shortArgColumn); writef("-%s", sarg); writeWS(longArgColumn - shortArgColumn - 2); } else writeWS(longArgColumn); size_t col = longArgColumn; - if (larg) { + if (larg !is null) { if (arg.defaultValue.peek!bool) { writef("--%s", larg); col += larg.length + 2; @@ -1582,7 +2003,16 @@ private void writeWrapped(string string, size_t indent = 0, size_t first_line_pos = 0) { - auto wrapped = string.wrap(lineWidth, getRepString!' '(first_line_pos), getRepString!' '(indent)); + // handle pre-indented strings and bullet lists + size_t first_line_indent = 0; + while (string.startsWith(" ")) { + string = string[1 .. $]; + indent++; + first_line_indent++; + } + if (string.startsWith("- ")) indent += 2; + + auto wrapped = string.wrap(lineWidth, getRepString!' '(first_line_pos+first_line_indent), getRepString!' '(indent)); wrapped = wrapped[first_line_pos .. 
$]; foreach (ln; wrapped.splitLines()) writeln(ln); diff --git a/source/dub/compilers/buildsettings.d b/source/dub/compilers/buildsettings.d index 52089b5..16de290 100644 --- a/source/dub/compilers/buildsettings.d +++ b/source/dub/compilers/buildsettings.d @@ -12,11 +12,15 @@ import std.array : array; import std.algorithm : filter; import std.path : globMatch; +static if (__VERSION__ >= 2067) + import std.typecons : BitFlags; /// BuildPlatform specific settings, like needed libraries or additional /// include paths. struct BuildSettings { + import dub.internal.vibecompat.data.serialization : byName; + TargetType targetType; string targetPath; string targetName; @@ -25,6 +29,7 @@ string[] dflags; string[] lflags; string[] libs; + string[] linkerFiles; string[] sourceFiles; string[] copyFiles; string[] versions; @@ -37,8 +42,8 @@ string[] postGenerateCommands; string[] preBuildCommands; string[] postBuildCommands; - BuildRequirements requirements; - BuildOptions options; + @byName BuildRequirements requirements; + @byName BuildOptions options; BuildSettings dup() const { @@ -60,6 +65,7 @@ addDFlags(bs.dflags); addLFlags(bs.lflags); addLibs(bs.libs); + addLinkerFiles(bs.linkerFiles); addSourceFiles(bs.sourceFiles); addCopyFiles(bs.copyFiles); addVersions(bs.versions); @@ -75,9 +81,11 @@ } void addDFlags(in string[] value...) { dflags ~= value; } + void prependDFlags(in string[] value...) { prepend(dflags, value); } void removeDFlags(in string[] value...) { remove(dflags, value); } void addLFlags(in string[] value...) { lflags ~= value; } void addLibs(in string[] value...) { add(libs, value); } + void addLinkerFiles(in string[] value...) { add(linkerFiles, value); } void addSourceFiles(in string[] value...) { add(sourceFiles, value); } void prependSourceFiles(in string[] value...) { prepend(sourceFiles, value); } void removeSourceFiles(in string[] value...) { removePaths(sourceFiles, value); } @@ -88,15 +96,17 @@ void addStringImportPaths(in string[] value...) 
{ add(stringImportPaths, value); } void prependStringImportPaths(in string[] value...) { prepend(stringImportPaths, value); } void addImportFiles(in string[] value...) { add(importFiles, value); } - void removeImportFiles(in string[] value...) { removePaths(importFiles, value); } - void addStringImportFiles(in string[] value...) { add(stringImportFiles, value); } + void addStringImportFiles(in string[] value...) { addSI(stringImportFiles, value); } void addPreGenerateCommands(in string[] value...) { add(preGenerateCommands, value, false); } void addPostGenerateCommands(in string[] value...) { add(postGenerateCommands, value, false); } void addPreBuildCommands(in string[] value...) { add(preBuildCommands, value, false); } void addPostBuildCommands(in string[] value...) { add(postBuildCommands, value, false); } - void addRequirements(in BuildRequirements[] value...) { foreach (v; value) this.requirements |= v; } - void addOptions(in BuildOptions[] value...) { foreach (v; value) this.options |= v; } - void removeOptions(in BuildOptions[] value...) { foreach (v; value) this.options &= ~v; } + void addRequirements(in BuildRequirement[] value...) { foreach (v; value) this.requirements |= v; } + void addRequirements(in BuildRequirements value) { this.requirements |= value; } + void addOptions(in BuildOption[] value...) { foreach (v; value) this.options |= v; } + void addOptions(in BuildOptions value) { this.options |= value; } + void removeOptions(in BuildOption[] value...) { foreach (v; value) this.options &= ~v; } + void removeOptions(in BuildOptions value) { this.options &= ~value; } // Adds vals to arr without adding duplicates. 
private void add(ref string[] arr, in string[] vals, bool no_duplicates = true) @@ -135,6 +145,20 @@ } } + // add string import files (avoids file name duplicates in addition to path duplicates) + private void addSI(ref string[] arr, in string[] vals) + { + bool[string] existing; + foreach (v; arr) existing[Path(v).head.toString()] = true; + foreach (v; vals) { + auto s = Path(v).head.toString(); + if (s !in existing) { + existing[s] = true; + arr ~= v; + } + } + } + private void removePaths(ref string[] arr, in string[] vals) { bool matches(string s) @@ -189,7 +213,7 @@ object } -enum BuildRequirements { +enum BuildRequirement { none = 0, /// No special requirements allowWarnings = 1<<0, /// Warnings do not abort compilation silenceWarnings = 1<<1, /// Don't show warnings @@ -203,7 +227,33 @@ noDefaultFlags = 1<<9, /// Do not issue any of the default build flags (e.g. -debug, -w, -property etc.) - use only for development purposes } -enum BuildOptions { + struct BuildRequirements { + import dub.internal.vibecompat.data.serialization : ignore; + + static if (__VERSION__ >= 2067) { + @ignore BitFlags!BuildRequirement values; + this(BuildRequirement req) { values = req; } + } else { + @ignore BuildRequirement values; + this(BuildRequirement req) { values = req; } + BuildRequirement[] toRepresentation() + const { + BuildRequirement[] ret; + for (int f = 1; f <= BuildRequirement.max; f *= 2) + if (values & f) ret ~= cast(BuildRequirement)f; + return ret; + } + static BuildRequirements fromRepresentation(BuildRequirement[] v) + { + BuildRequirements ret; + foreach (f; v) ret.values |= f; + return ret; + } + } + alias values this; + } + +enum BuildOption { none = 0, /// Use compiler defaults debugMode = 1<<0, /// Compile in debug mode (enables contracts, -debug) releaseMode = 1<<1, /// Compile in release mode (disables assertions and bounds checks, -release) @@ -226,4 +276,56 @@ deprecationWarnings = 1<<18, /// Warn about using deprecated features (-dw) deprecationErrors 
= 1<<19, /// Stop compilation upon usage of deprecated features (-de) property = 1<<20, /// DEPRECATED: Enforce property syntax (-property) + profileGC = 1<<21, /// Profile runtime allocations + pic = 1<<22, /// Generate position independent code + // for internal usage + _docs = 1<<23, // Write ddoc to docs + _ddox = 1<<24 // Compile docs.json } + + struct BuildOptions { + import dub.internal.vibecompat.data.serialization : ignore; + + static if (__VERSION__ >= 2067) { + @ignore BitFlags!BuildOption values; + this(BuildOption opt) { values = opt; } + this(BitFlags!BuildOption v) { values = v; } + } else { + @ignore BuildOption values; + this(BuildOption opt) { values = opt; } + BuildOption[] toRepresentation() + const { + BuildOption[] ret; + for (int f = 1; f <= BuildOption.max; f *= 2) + if (values & f) ret ~= cast(BuildOption)f; + return ret; + } + static BuildOptions fromRepresentation(BuildOption[] v) + { + BuildOptions ret; + foreach (f; v) ret.values |= f; + return ret; + } + } + + alias values this; + } + +/** + All build options that will be inherited upwards in the dependency graph + + Build options in this category fulfill one of the following properties: + $(UL + $(LI The option affects the semantics of the generated code) + $(LI The option affects if a certain piece of code is valid or not) + $(LI The option enables meta information in dependent projects that are useful for the dependee (e.g. 
debug information)) + ) +*/ +enum BuildOptions inheritedBuildOptions = BuildOption.debugMode | BuildOption.releaseMode + | BuildOption.coverage | BuildOption.debugInfo | BuildOption.debugInfoC + | BuildOption.alwaysStackFrame | BuildOption.stackStomping | BuildOption.inline + | BuildOption.noBoundsCheck | BuildOption.profile | BuildOption.ignoreUnknownPragmas + | BuildOption.syntaxOnly | BuildOption.warnings | BuildOption.warningsAsErrors + | BuildOption.ignoreDeprecations | BuildOption.deprecationWarnings + | BuildOption.deprecationErrors | BuildOption.property | BuildOption.profileGC + | BuildOption.pic; diff --git a/source/dub/compilers/compiler.d b/source/dub/compilers/compiler.d index 2acc298..2670bcf 100644 --- a/source/dub/compilers/compiler.d +++ b/source/dub/compilers/compiler.d @@ -1,17 +1,15 @@ /** Compiler settings and abstraction. - Copyright: © 2013-2014 rejectedsoftware e.K. + Copyright: © 2013-2016 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.compilers.compiler; public import dub.compilers.buildsettings; +public import dub.platform : BuildPlatform, matchesSpecification; -import dub.compilers.dmd; -import dub.compilers.gdc; -import dub.compilers.ldc; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.data.json; @@ -24,13 +22,13 @@ import std.process; -static this() -{ - registerCompiler(new DmdCompiler); - registerCompiler(new GdcCompiler); - registerCompiler(new LdcCompiler); -} +/** Returns a compiler handler for a given binary name. + The name will be compared against the canonical name of each registered + compiler handler. If no match is found, the sub strings "dmd", "gdc" and + "ldc", in this order, will be searched within the name. If this doesn't + yield a match either, an exception will be thrown. 
+*/ Compiler getCompiler(string name) { foreach (c; s_compilers) @@ -45,192 +43,28 @@ throw new Exception("Unknown compiler: "~name); } -string defaultCompiler() -{ - static string name; - if (!name.length) name = findCompiler(); - return name; -} +/** Registers a new compiler handler. -private string findCompiler() -{ - import std.process : env=environment; - import dub.version_ : initialCompilerBinary; - version (Windows) enum sep = ";", exe = ".exe"; - version (Posix) enum sep = ":", exe = ""; - - auto def = Path(initialCompilerBinary); - if (def.absolute && existsFile(def)) - return initialCompilerBinary; - - auto compilers = ["dmd", "gdc", "gdmd", "ldc2", "ldmd2"]; - if (!def.absolute) - compilers = initialCompilerBinary ~ compilers; - - auto paths = env.get("PATH", "").splitter(sep).map!Path; - auto res = compilers.find!(bin => paths.canFind!(p => existsFile(p ~ (bin~exe)))); - return res.empty ? initialCompilerBinary : res.front; -} - + Note that by default `DMDCompiler`, `GDCCompiler` and `LDCCompiler` are + already registered at startup. 
+*/ void registerCompiler(Compiler c) { s_compilers ~= c; } -void warnOnSpecialCompilerFlags(string[] compiler_flags, BuildOptions options, string package_name, string config_name) -{ - struct SpecialFlag { - string[] flags; - string alternative; - } - static immutable SpecialFlag[] s_specialFlags = [ - {["-c", "-o-"], "Automatically issued by DUB, do not specify in dub.json"}, - {["-w", "-Wall", "-Werr"], `Use "buildRequirements" to control warning behavior`}, - {["-property", "-fproperty"], "Using this flag may break building of dependencies and it will probably be removed from DMD in the future"}, - {["-wi"], `Use the "buildRequirements" field to control warning behavior`}, - {["-d", "-de", "-dw"], `Use the "buildRequirements" field to control deprecation behavior`}, - {["-of"], `Use "targetPath" and "targetName" to customize the output file`}, - {["-debug", "-fdebug", "-g"], "Call dub with --build=debug"}, - {["-release", "-frelease", "-O", "-inline"], "Call dub with --build=release"}, - {["-unittest", "-funittest"], "Call dub with --build=unittest"}, - {["-lib"], `Use {"targetType": "staticLibrary"} or let dub manage this`}, - {["-D"], "Call dub with --build=docs or --build=ddox"}, - {["-X"], "Call dub with --build=ddox"}, - {["-cov"], "Call dub with --build=cov or --build=unittest-cox"}, - {["-profile"], "Call dub with --build=profile"}, - {["-version="], `Use "versions" to specify version constants in a compiler independent way`}, - {["-debug="], `Use "debugVersions" to specify version constants in a compiler independent way`}, - {["-I"], `Use "importPaths" to specify import paths in a compiler independent way`}, - {["-J"], `Use "stringImportPaths" to specify import paths in a compiler independent way`}, - {["-m32", "-m64"], `Use --arch=x86/--arch=x86_64 to specify the target architecture`} - ]; - - struct SpecialOption { - BuildOptions[] flags; - string alternative; - } - static immutable SpecialOption[] s_specialOptions = [ - {[BuildOptions.debugMode], 
"Call DUB with --build=debug"}, - {[BuildOptions.releaseMode], "Call DUB with --build=release"}, - {[BuildOptions.coverage], "Call DUB with --build=cov or --build=unittest-cov"}, - {[BuildOptions.debugInfo], "Call DUB with --build=debug"}, - {[BuildOptions.inline], "Call DUB with --build=release"}, - {[BuildOptions.noBoundsCheck], "Call DUB with --build=release-nobounds"}, - {[BuildOptions.optimize], "Call DUB with --build=release"}, - {[BuildOptions.profile], "Call DUB with --build=profile"}, - {[BuildOptions.unittests], "Call DUB with --build=unittest"}, - {[BuildOptions.warnings, BuildOptions.warningsAsErrors], "Use \"buildRequirements\" to control the warning level"}, - {[BuildOptions.ignoreDeprecations, BuildOptions.deprecationWarnings, BuildOptions.deprecationErrors], "Use \"buildRequirements\" to control the deprecation warning level"}, - {[BuildOptions.property], "This flag is deprecated and has no effect"} - ]; - - bool got_preamble = false; - void outputPreamble() - { - if (got_preamble) return; - got_preamble = true; - logWarn(""); - if (config_name.empty) logWarn("## Warning for package %s ##", package_name); - else logWarn("## Warning for package %s, configuration %s ##", package_name, config_name); - logWarn(""); - logWarn("The following compiler flags have been specified in the package description"); - logWarn("file. 
They are handled by DUB and direct use in packages is discouraged."); - logWarn("Alternatively, you can set the DFLAGS environment variable to pass custom flags"); - logWarn("to the compiler, or use one of the suggestions below:"); - logWarn(""); - } - - foreach (f; compiler_flags) { - foreach (sf; s_specialFlags) { - if (sf.flags.any!(sff => f == sff || (sff.endsWith("=") && f.startsWith(sff)))) { - outputPreamble(); - logWarn("%s: %s", f, sf.alternative); - break; - } - } - } - - foreach (sf; s_specialOptions) { - foreach (f; sf.flags) { - if (options & f) { - outputPreamble(); - logWarn("%s: %s", f, sf.alternative); - break; - } - } - } - - if (got_preamble) logWarn(""); -} - - -/** - Alters the build options to comply with the specified build requirements. -*/ -void enforceBuildRequirements(ref BuildSettings settings) -{ - settings.addOptions(BuildOptions.warningsAsErrors); - if (settings.requirements & BuildRequirements.allowWarnings) { settings.options &= ~BuildOptions.warningsAsErrors; settings.options |= BuildOptions.warnings; } - if (settings.requirements & BuildRequirements.silenceWarnings) settings.options &= ~(BuildOptions.warningsAsErrors|BuildOptions.warnings); - if (settings.requirements & BuildRequirements.disallowDeprecations) { settings.options &= ~(BuildOptions.ignoreDeprecations|BuildOptions.deprecationWarnings); settings.options |= BuildOptions.deprecationErrors; } - if (settings.requirements & BuildRequirements.silenceDeprecations) { settings.options &= ~(BuildOptions.deprecationErrors|BuildOptions.deprecationWarnings); settings.options |= BuildOptions.ignoreDeprecations; } - if (settings.requirements & BuildRequirements.disallowInlining) settings.options &= ~BuildOptions.inline; - if (settings.requirements & BuildRequirements.disallowOptimization) settings.options &= ~BuildOptions.optimize; - if (settings.requirements & BuildRequirements.requireBoundsCheck) settings.options &= ~BuildOptions.noBoundsCheck; - if (settings.requirements & 
BuildRequirements.requireContracts) settings.options &= ~BuildOptions.releaseMode; - if (settings.requirements & BuildRequirements.relaxProperties) settings.options &= ~BuildOptions.property; -} - - -/** - Replaces each referenced import library by the appropriate linker flags. - - This function tries to invoke "pkg-config" if possible and falls back to - direct flag translation if that fails. -*/ -void resolveLibs(ref BuildSettings settings) -{ - import std.string : format; - - if (settings.libs.length == 0) return; - - if (settings.targetType == TargetType.library || settings.targetType == TargetType.staticLibrary) { - logDiagnostic("Ignoring all import libraries for static library build."); - settings.libs = null; - version(Windows) settings.sourceFiles = settings.sourceFiles.filter!(f => !f.endsWith(".lib")).array; - } - - version (Posix) { - try { - enum pkgconfig_bin = "pkg-config"; - string[] pkgconfig_libs; - foreach (lib; settings.libs) - if (execute([pkgconfig_bin, "--exists", "lib"~lib]).status == 0) - pkgconfig_libs ~= lib; - - logDiagnostic("Using pkg-config to resolve library flags for %s.", pkgconfig_libs.map!(l => "lib"~l).array.join(", ")); - - if (pkgconfig_libs.length) { - auto libflags = execute([pkgconfig_bin, "--libs"] ~ pkgconfig_libs.map!(l => "lib"~l)().array()); - enforce(libflags.status == 0, format("pkg-config exited with error code %s: %s", libflags.status, libflags.output)); - foreach (f; libflags.output.split()) { - if (f.startsWith("-Wl,")) settings.addLFlags(f[4 .. $].split(",")); - else settings.addLFlags(f); - } - settings.libs = settings.libs.filter!(l => !pkgconfig_libs.canFind(l)).array; - } - if (settings.libs.length) logDiagnostic("Using direct -l... flags for %s.", settings.libs.array.join(", ")); - } catch (Exception e) { - logDiagnostic("pkg-config failed: %s", e.msg); - logDiagnostic("Falling back to direct -l... flags."); - } - } -} - interface Compiler { + /// Returns the canonical name of the compiler (e.g. "dmd"). 
@property string name() const; + /** Determines the build platform properties given a set of build settings. + + This will invoke the compiler to build a platform probe file, which + determines the target build platform's properties during compile-time. + + See_Also: `dub.compilers.utils.generatePlatformProbeFile` + */ BuildPlatform determinePlatform(ref BuildSettings settings, string compiler_binary, string arch_override = null); /// Replaces high level fields with low level fields and converts @@ -240,6 +74,9 @@ /// Removes any dflags that match one of the BuildOptions values and populates the BuildSettings.options field. void extractBuildOptions(ref BuildSettings settings) const; + /// Computes the full file name of the generated binary. + string getTargetFileName(in BuildSettings settings, in BuildPlatform platform) const; + /// Adds the appropriate flag to set a target path void setTarget(ref BuildSettings settings, in BuildPlatform platform, string targetPath = null) const; @@ -249,6 +86,14 @@ /// Invokes the underlying linker directly void invokeLinker(in BuildSettings settings, in BuildPlatform platform, string[] objects, void delegate(int, string) output_callback); + /// Convert linker flags to compiler format + string[] lflagsToDFlags(in string[] lflags) const; + + /** Runs a tool and provides common boilerplate code. + + This method should be used by `Compiler` implementations to invoke the + compiler or linker binary. + */ protected final void invokeTool(string[] args, void delegate(int, string) output_callback) { import std.string; @@ -264,266 +109,45 @@ } version (Posix) if (status == -9) { - throw new Exception(format("%s failed with exit code %s. This may indicate that the process has run out of memory."), - args[0], status); + throw new Exception(format("%s failed with exit code %s. 
This may indicate that the process has run out of memory.", + args[0], status)); } enforce(status == 0, format("%s failed with exit code %s.", args[0], status)); } -} + /// Compiles platform probe file with the specified compiler and parses its output. + protected final BuildPlatform probePlatform(string compiler_binary, string[] args, string arch_override) + { + import std.string : format; + import dub.compilers.utils : generatePlatformProbeFile, readPlatformProbe; -/// Represents a platform a package can be build upon. -struct BuildPlatform { - /// e.g. ["posix", "windows"] - string[] platform; - /// e.g. ["x86", "x86_64"] - string[] architecture; - /// Canonical compiler name e.g. "dmd" - string compiler; - /// Compiler binary name e.g. "ldmd2" - string compilerBinary; - /// Compiled frontend version (e.g. 2065) - int frontendVersion; + auto fil = generatePlatformProbeFile(); - enum any = BuildPlatform(null, null, null, null, -1); + auto result = executeShell(escapeShellCommand(compiler_binary ~ args ~ fil.toNativeString())); + enforce(result.status == 0, format("Failed to invoke the compiler %s to determine the build platform: %s", + compiler_binary, result.output)); - /// Build platforms can be specified via a string specification. - /// - /// Specifications are build upon the following scheme, where each component - /// is optional (indicated by []), but the order is obligatory. - /// "[-platform][-architecture][-compiler]" - /// - /// So the following strings are valid specifications: - /// "-windows-x86-dmd" - /// "-dmd" - /// "-arm" - /// "-arm-dmd" - /// "-windows-dmd" - /// - /// Params: - /// specification = The specification being matched. It must be the empty string or start with a dash. - /// - /// Returns: - /// true if the given specification matches this BuildPlatform, false otherwise. 
(The empty string matches) - /// - bool matchesSpecification(const(char)[] specification) - const { - if (specification.empty) return true; - if (this == any) return true; + auto build_platform = readPlatformProbe(result.output); + build_platform.compilerBinary = compiler_binary; - auto splitted=specification.splitter('-'); - assert(!splitted.empty, "No valid platform specification! The leading hyphen is required!"); - splitted.popFront(); // Drop leading empty match. - enforce(!splitted.empty, "Platform specification if present, must not be empty!"); - if (platform.canFind(splitted.front)) { - splitted.popFront(); - if(splitted.empty) - return true; + if (build_platform.compiler != this.name) { + logWarn(`The determined compiler type "%s" doesn't match the expected type "%s". This will probably result in build errors.`, + build_platform.compiler, this.name); } - if (architecture.canFind(splitted.front)) { - splitted.popFront(); - if(splitted.empty) - return true; + + // Hack: see #1059 + // When compiling with --arch=x86_mscoff build_platform.architecture is equal to ["x86"] and canFind below is false. + // This hack prevents unnecessary warning 'Failed to apply the selected architecture x86_mscoff. Got ["x86"]'. + // And also makes "x86_mscoff" available as a platform specifier in the package recipe + if (arch_override == "x86_mscoff") + build_platform.architecture ~= arch_override; + if (arch_override.length && !build_platform.architecture.canFind(arch_override)) { + logWarn(`Failed to apply the selected architecture %s. Got %s.`, + arch_override, build_platform.architecture); - if (compiler == splitted.front) { - splitted.popFront(); - enforce(splitted.empty, "No valid specification! 
The compiler has to be the last element!"); - return true; - } - return false; + + return build_platform; } - unittest { - auto platform=BuildPlatform(["posix", "linux"], ["x86_64"], "dmd"); - assert(platform.matchesSpecification("-posix")); - assert(platform.matchesSpecification("-linux")); - assert(platform.matchesSpecification("-linux-dmd")); - assert(platform.matchesSpecification("-linux-x86_64-dmd")); - assert(platform.matchesSpecification("-x86_64")); - assert(!platform.matchesSpecification("-windows")); - assert(!platform.matchesSpecification("-ldc")); - assert(!platform.matchesSpecification("-windows-dmd")); - } -} - - -string getTargetFileName(in BuildSettings settings, in BuildPlatform platform) -{ - assert(settings.targetName.length > 0, "No target name set."); - final switch (settings.targetType) { - case TargetType.autodetect: assert(false, "Configurations must have a concrete target type."); - case TargetType.none: return null; - case TargetType.sourceLibrary: return null; - case TargetType.executable: - if( platform.platform.canFind("windows") ) - return settings.targetName ~ ".exe"; - else return settings.targetName; - case TargetType.library: - case TargetType.staticLibrary: - if (platform.platform.canFind("windows") && platform.compiler == "dmd") - return settings.targetName ~ ".lib"; - else return "lib" ~ settings.targetName ~ ".a"; - case TargetType.dynamicLibrary: - if( platform.platform.canFind("windows") ) - return settings.targetName ~ ".dll"; - else return "lib" ~ settings.targetName ~ ".so"; - case TargetType.object: - if (platform.platform.canFind("windows")) - return settings.targetName ~ ".obj"; - else return settings.targetName ~ ".o"; - } -} - - -bool isLinkerFile(string f) -{ - import std.path; - switch (extension(f)) { - default: - return false; - version (Windows) { - case ".lib", ".obj", ".res", ".def": - return true; - } else { - case ".a", ".o", ".so", ".dylib": - return true; - } - } -} - -/// Generate a file that will give, at 
compile time, informations about the compiler (architecture, frontend version...) -Path generatePlatformProbeFile() -{ - import dub.internal.vibecompat.core.file; - import dub.internal.vibecompat.data.json; - import dub.internal.utils; - - auto path = getTempFile("dub_platform_probe", ".d"); - - auto fil = openFile(path, FileMode.CreateTrunc); - scope (failure) { - fil.close(); - } - - fil.write(q{ - module dub_platform_probe; - - template toString(int v) { enum toString = v.stringof; } - - pragma(msg, `{`); - pragma(msg,` "compiler": "`~ determineCompiler() ~ `",`); - pragma(msg, ` "frontendVersion": ` ~ toString!__VERSION__ ~ `,`); - pragma(msg, ` "compilerVendor": "` ~ __VENDOR__ ~ `",`); - pragma(msg, ` "platform": [`); - pragma(msg, ` ` ~ determinePlatform()); - pragma(msg, ` ],`); - pragma(msg, ` "architecture": [`); - pragma(msg, ` ` ~ determineArchitecture()); - pragma(msg, ` ],`); - pragma(msg, `}`); - - string determinePlatform() - { - string ret; - version(Windows) ret ~= `"windows", `; - version(linux) ret ~= `"linux", `; - version(Posix) ret ~= `"posix", `; - version(OSX) ret ~= `"osx", `; - version(FreeBSD) ret ~= `"freebsd", `; - version(OpenBSD) ret ~= `"openbsd", `; - version(NetBSD) ret ~= `"netbsd", `; - version(DragonFlyBSD) ret ~= `"dragonflybsd", `; - version(BSD) ret ~= `"bsd", `; - version(Solaris) ret ~= `"solaris", `; - version(AIX) ret ~= `"aix", `; - version(Haiku) ret ~= `"haiku", `; - version(SkyOS) ret ~= `"skyos", `; - version(SysV3) ret ~= `"sysv3", `; - version(SysV4) ret ~= `"sysv4", `; - version(Hurd) ret ~= `"hurd", `; - version(Android) ret ~= `"android", `; - version(Cygwin) ret ~= `"cygwin", `; - version(MinGW) ret ~= `"mingw", `; - return ret; - } - - string determineArchitecture() - { - string ret; - version(X86) ret ~= `"x86", `; - version(X86_64) ret ~= `"x86_64", `; - version(ARM) ret ~= `"arm", `; - version(ARM_Thumb) ret ~= `"arm_thumb", `; - version(ARM_SoftFloat) ret ~= `"arm_softfloat", `; - version(ARM_HardFloat) 
ret ~= `"arm_hardfloat", `; - version(ARM64) ret ~= `"arm64", `; - version(PPC) ret ~= `"ppc", `; - version(PPC_SoftFP) ret ~= `"ppc_softfp", `; - version(PPC_HardFP) ret ~= `"ppc_hardfp", `; - version(PPC64) ret ~= `"ppc64", `; - version(IA64) ret ~= `"ia64", `; - version(MIPS) ret ~= `"mips", `; - version(MIPS32) ret ~= `"mips32", `; - version(MIPS64) ret ~= `"mips64", `; - version(MIPS_O32) ret ~= `"mips_o32", `; - version(MIPS_N32) ret ~= `"mips_n32", `; - version(MIPS_O64) ret ~= `"mips_o64", `; - version(MIPS_N64) ret ~= `"mips_n64", `; - version(MIPS_EABI) ret ~= `"mips_eabi", `; - version(MIPS_NoFloat) ret ~= `"mips_nofloat", `; - version(MIPS_SoftFloat) ret ~= `"mips_softfloat", `; - version(MIPS_HardFloat) ret ~= `"mips_hardfloat", `; - version(SPARC) ret ~= `"sparc", `; - version(SPARC_V8Plus) ret ~= `"sparc_v8plus", `; - version(SPARC_SoftFP) ret ~= `"sparc_softfp", `; - version(SPARC_HardFP) ret ~= `"sparc_hardfp", `; - version(SPARC64) ret ~= `"sparc64", `; - version(S390) ret ~= `"s390", `; - version(S390X) ret ~= `"s390x", `; - version(HPPA) ret ~= `"hppa", `; - version(HPPA64) ret ~= `"hppa64", `; - version(SH) ret ~= `"sh", `; - version(SH64) ret ~= `"sh64", `; - version(Alpha) ret ~= `"alpha", `; - version(Alpha_SoftFP) ret ~= `"alpha_softfp", `; - version(Alpha_HardFP) ret ~= `"alpha_hardfp", `; - return ret; - } - - string determineCompiler() - { - version(DigitalMars) return "dmd"; - else version(GNU) return "gdc"; - else version(LDC) return "ldc"; - else version(SDC) return "sdc"; - else return null; - } - }); - - fil.close(); - - return path; -} - -BuildPlatform readPlatformProbe(string output) -{ - import std.string; - - // work around possible additional output of the compiler - auto idx1 = output.indexOf("{"); - auto idx2 = output.lastIndexOf("}"); - enforce(idx1 >= 0 && idx1 < idx2, - "Unexpected platform information output - does not contain a JSON object."); - output = output[idx1 .. 
idx2+1]; - - import dub.internal.vibecompat.data.json; - auto json = parseJsonString(output); - - BuildPlatform build_platform; - build_platform.platform = json.platform.get!(Json[]).map!(e => e.get!string()).array(); - build_platform.architecture = json.architecture.get!(Json[]).map!(e => e.get!string()).array(); - build_platform.compiler = json.compiler.get!string; - build_platform.frontendVersion = json.frontendVersion.get!int; - return build_platform; } private { diff --git a/source/dub/compilers/dmd.d b/source/dub/compilers/dmd.d index 599f809..b13736e 100644 --- a/source/dub/compilers/dmd.d +++ b/source/dub/compilers/dmd.d @@ -8,6 +8,7 @@ module dub.compilers.dmd; import dub.compilers.compiler; +import dub.compilers.utils; import dub.internal.utils; import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.inet.path; @@ -19,73 +20,53 @@ import std.exception; import std.file; import std.process; -import std.random; import std.typecons; -class DmdCompiler : Compiler { +class DMDCompiler : Compiler { private static immutable s_options = [ - tuple(BuildOptions.debugMode, ["-debug"]), - tuple(BuildOptions.releaseMode, ["-release"]), - tuple(BuildOptions.coverage, ["-cov"]), - tuple(BuildOptions.debugInfo, ["-g"]), - tuple(BuildOptions.debugInfoC, ["-gc"]), - tuple(BuildOptions.alwaysStackFrame, ["-gs"]), - tuple(BuildOptions.stackStomping, ["-gx"]), - tuple(BuildOptions.inline, ["-inline"]), - tuple(BuildOptions.noBoundsCheck, ["-noboundscheck"]), - tuple(BuildOptions.optimize, ["-O"]), - tuple(BuildOptions.profile, ["-profile"]), - tuple(BuildOptions.unittests, ["-unittest"]), - tuple(BuildOptions.verbose, ["-v"]), - tuple(BuildOptions.ignoreUnknownPragmas, ["-ignore"]), - tuple(BuildOptions.syntaxOnly, ["-o-"]), - tuple(BuildOptions.warnings, ["-wi"]), - tuple(BuildOptions.warningsAsErrors, ["-w"]), - tuple(BuildOptions.ignoreDeprecations, ["-d"]), - tuple(BuildOptions.deprecationWarnings, ["-dw"]), - tuple(BuildOptions.deprecationErrors, ["-de"]), 
- tuple(BuildOptions.property, ["-property"]), + tuple(BuildOption.debugMode, ["-debug"]), + tuple(BuildOption.releaseMode, ["-release"]), + tuple(BuildOption.coverage, ["-cov"]), + tuple(BuildOption.debugInfo, ["-g"]), + tuple(BuildOption.debugInfoC, ["-gc"]), + tuple(BuildOption.alwaysStackFrame, ["-gs"]), + tuple(BuildOption.stackStomping, ["-gx"]), + tuple(BuildOption.inline, ["-inline"]), + tuple(BuildOption.noBoundsCheck, ["-noboundscheck"]), + tuple(BuildOption.optimize, ["-O"]), + tuple(BuildOption.profile, ["-profile"]), + tuple(BuildOption.unittests, ["-unittest"]), + tuple(BuildOption.verbose, ["-v"]), + tuple(BuildOption.ignoreUnknownPragmas, ["-ignore"]), + tuple(BuildOption.syntaxOnly, ["-o-"]), + tuple(BuildOption.warnings, ["-wi"]), + tuple(BuildOption.warningsAsErrors, ["-w"]), + tuple(BuildOption.ignoreDeprecations, ["-d"]), + tuple(BuildOption.deprecationWarnings, ["-dw"]), + tuple(BuildOption.deprecationErrors, ["-de"]), + tuple(BuildOption.property, ["-property"]), + tuple(BuildOption.profileGC, ["-profile=gc"]), + + tuple(BuildOption._docs, ["-Dddocs"]), + tuple(BuildOption._ddox, ["-Xfdocs.json", "-Df__dummy.html"]), ]; @property string name() const { return "dmd"; } BuildPlatform determinePlatform(ref BuildSettings settings, string compiler_binary, string arch_override) { - import std.process; - import std.string; - - auto fil = generatePlatformProbeFile(); - string[] arch_flags; - switch (arch_override) { default: throw new Exception("Unsupported architecture: "~arch_override); case "": break; case "x86": arch_flags = ["-m32"]; break; case "x86_64": arch_flags = ["-m64"]; break; + case "x86_mscoff": arch_flags = ["-m32mscoff"]; break; } settings.addDFlags(arch_flags); - auto result = executeShell(escapeShellCommand(compiler_binary ~ arch_flags ~ - ["-quiet", "-c", "-o-", fil.toNativeString()])); - enforce(result.status == 0, format("Failed to invoke the compiler %s to determine the build platform: %s", - compiler_binary, result.output)); - 
- auto build_platform = readPlatformProbe(result.output); - build_platform.compilerBinary = compiler_binary; - - if (build_platform.compiler != this.name) { - logWarn(`The determined compiler type "%s" doesn't match the expected type "%s". This will probably result in build errors.`, - build_platform.compiler, this.name); - } - - if (arch_override.length && !build_platform.architecture.canFind(arch_override)) { - logWarn(`Failed to apply the selected architecture %s. Got %s.`, - arch_override, build_platform.architecture); - } - - return build_platform; + return probePlatform(compiler_binary, arch_flags ~ ["-quiet", "-c", "-o-"], arch_override); } void prepareBuildSettings(ref BuildSettings settings, BuildSetting fields = BuildSetting.all) const @@ -118,22 +99,27 @@ settings.stringImportPaths = null; } - if (!(fields & BuildSetting.sourceFiles)) { - settings.addDFlags(settings.sourceFiles); - settings.sourceFiles = null; - } - if (!(fields & BuildSetting.libs)) { resolveLibs(settings); version(Windows) settings.addSourceFiles(settings.libs.map!(l => l~".lib")().array()); else settings.addLFlags(settings.libs.map!(l => "-l"~l)().array()); } + if (!(fields & BuildSetting.sourceFiles)) { + settings.addDFlags(settings.sourceFiles); + settings.sourceFiles = null; + } + if (!(fields & BuildSetting.lflags)) { - settings.addDFlags(settings.lflags.map!(f => "-L"~f)().array()); + settings.addDFlags(lflagsToDFlags(settings.lflags)); settings.lflags = null; } + version (Posix) { + if (settings.options & BuildOption.pic) + settings.addDFlags("-fPIC"); + } + assert(fields & BuildSetting.dflags); assert(fields & BuildSetting.copyFiles); } @@ -154,6 +140,37 @@ settings.dflags = newflags.data; } + string getTargetFileName(in BuildSettings settings, in BuildPlatform platform) + const { + import std.conv: text; + assert(settings.targetName.length > 0, "No target name set."); + final switch (settings.targetType) { + case TargetType.autodetect: + assert(false, + text("Configurations 
must have a concrete target type, ", settings.targetName, + " has ", settings.targetType)); + case TargetType.none: return null; + case TargetType.sourceLibrary: return null; + case TargetType.executable: + if (platform.platform.canFind("windows")) + return settings.targetName ~ ".exe"; + else return settings.targetName; + case TargetType.library: + case TargetType.staticLibrary: + if (platform.platform.canFind("windows")) + return settings.targetName ~ ".lib"; + else return "lib" ~ settings.targetName ~ ".a"; + case TargetType.dynamicLibrary: + if (platform.platform.canFind("windows")) + return settings.targetName ~ ".dll"; + else return "lib" ~ settings.targetName ~ ".so"; + case TargetType.object: + if (platform.platform.canFind("windows")) + return settings.targetName ~ ".obj"; + else return settings.targetName ~ ".o"; + } + } + void setTarget(ref BuildSettings settings, in BuildPlatform platform, string tpath = null) const { final switch (settings.targetType) { @@ -167,7 +184,8 @@ break; case TargetType.dynamicLibrary: version (Windows) settings.addDFlags("-shared"); - else settings.addDFlags("-shared", "-fPIC"); + else version (OSX) settings.addDFlags("-shared"); + else settings.prependDFlags("-shared", "-defaultlib=libphobos2.so"); break; case TargetType.object: settings.addDFlags("-c"); @@ -182,9 +200,11 @@ void invoke(in BuildSettings settings, in BuildPlatform platform, void delegate(int, string) output_callback) { auto res_file = getTempFile("dub-build", ".rsp"); - std.file.write(res_file.toNativeString(), join(settings.dflags.map!(s => s.canFind(' ') ? 
"\""~s~"\"" : s), "\n")); + const(string)[] args = settings.dflags; + if (platform.frontendVersion >= 2066) args ~= "-vcolumns"; + std.file.write(res_file.toNativeString(), escapeArgs(args).join("\n")); - logDiagnostic("%s %s", platform.compilerBinary, join(cast(string[])settings.dflags, " ")); + logDiagnostic("%s %s", platform.compilerBinary, escapeArgs(args).join(" ")); invokeTool([platform.compilerBinary, "@"~res_file.toNativeString()], output_callback); } @@ -195,24 +215,34 @@ auto args = ["-of"~tpath.toNativeString()]; args ~= objects; args ~= settings.sourceFiles; - version(linux) args ~= "-L--no-as-needed"; // avoids linker errors due to libraries being speficied in the wrong order by DMD - args ~= settings.lflags.map!(l => "-L"~l)().array; + version(linux) args ~= "-L--no-as-needed"; // avoids linker errors due to libraries being specified in the wrong order by DMD + args ~= lflagsToDFlags(settings.lflags); args ~= settings.dflags.filter!(f => isLinkerDFlag(f)).array; auto res_file = getTempFile("dub-build", ".lnk"); - std.file.write(res_file.toNativeString(), join(args, "\n")); + std.file.write(res_file.toNativeString(), escapeArgs(args).join("\n")); - logDiagnostic("%s %s", platform.compilerBinary, args.join(" ")); + logDiagnostic("%s %s", platform.compilerBinary, escapeArgs(args).join(" ")); invokeTool([platform.compilerBinary, "@"~res_file.toNativeString()], output_callback); } + string[] lflagsToDFlags(in string[] lflags) const + { + return lflags.map!(f => "-L"~f)().array(); + } + + private auto escapeArgs(in string[] args) + { + return args.map!(s => s.canFind(' ') ? 
"\""~s~"\"" : s); + } + private static bool isLinkerDFlag(string arg) { switch (arg) { default: if (arg.startsWith("-defaultlib=")) return true; return false; - case "-g", "-gc", "-m32", "-m64", "-shared", "-lib": + case "-g", "-gc", "-m32", "-m64", "-shared", "-lib", "-m32mscoff": return true; } } diff --git a/source/dub/compilers/gdc.d b/source/dub/compilers/gdc.d index a82339c..0d5d3cf 100644 --- a/source/dub/compilers/gdc.d +++ b/source/dub/compilers/gdc.d @@ -8,6 +8,7 @@ module dub.compilers.gdc; import dub.compilers.compiler; +import dub.compilers.utils; import dub.internal.utils; import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.inet.path; @@ -19,77 +20,57 @@ import std.exception; import std.file; import std.process; -import std.random; import std.typecons; -class GdcCompiler : Compiler { +class GDCCompiler : Compiler { private static immutable s_options = [ - tuple(BuildOptions.debugMode, ["-fdebug"]), - tuple(BuildOptions.releaseMode, ["-frelease"]), - tuple(BuildOptions.coverage, ["-fprofile-arcs", "-ftest-coverage"]), - tuple(BuildOptions.debugInfo, ["-g"]), - tuple(BuildOptions.debugInfoC, ["-g", "-fdebug-c"]), - //tuple(BuildOptions.alwaysStackFrame, ["-X"]), - //tuple(BuildOptions.stackStomping, ["-X"]), - tuple(BuildOptions.inline, ["-finline-functions"]), - tuple(BuildOptions.noBoundsCheck, ["-fno-bounds-check"]), - tuple(BuildOptions.optimize, ["-O3"]), - tuple(BuildOptions.profile, ["-pg"]), - tuple(BuildOptions.unittests, ["-funittest"]), - tuple(BuildOptions.verbose, ["-fd-verbose"]), - tuple(BuildOptions.ignoreUnknownPragmas, ["-fignore-unknown-pragmas"]), - tuple(BuildOptions.syntaxOnly, ["-fsyntax-only"]), - tuple(BuildOptions.warnings, ["-Wall"]), - tuple(BuildOptions.warningsAsErrors, ["-Werror", "-Wall"]), - tuple(BuildOptions.ignoreDeprecations, ["-Wno-deprecated"]), - tuple(BuildOptions.deprecationWarnings, ["-Wdeprecated"]), - tuple(BuildOptions.deprecationErrors, ["-Werror", "-Wdeprecated"]), - 
tuple(BuildOptions.property, ["-fproperty"]), + tuple(BuildOption.debugMode, ["-fdebug"]), + tuple(BuildOption.releaseMode, ["-frelease"]), + tuple(BuildOption.coverage, ["-fprofile-arcs", "-ftest-coverage"]), + tuple(BuildOption.debugInfo, ["-g"]), + tuple(BuildOption.debugInfoC, ["-g", "-fdebug-c"]), + //tuple(BuildOption.alwaysStackFrame, ["-X"]), + //tuple(BuildOption.stackStomping, ["-X"]), + tuple(BuildOption.inline, ["-finline-functions"]), + tuple(BuildOption.noBoundsCheck, ["-fno-bounds-check"]), + tuple(BuildOption.optimize, ["-O3"]), + tuple(BuildOption.profile, ["-pg"]), + tuple(BuildOption.unittests, ["-funittest"]), + tuple(BuildOption.verbose, ["-fd-verbose"]), + tuple(BuildOption.ignoreUnknownPragmas, ["-fignore-unknown-pragmas"]), + tuple(BuildOption.syntaxOnly, ["-fsyntax-only"]), + tuple(BuildOption.warnings, ["-Wall"]), + tuple(BuildOption.warningsAsErrors, ["-Werror", "-Wall"]), + tuple(BuildOption.ignoreDeprecations, ["-Wno-deprecated"]), + tuple(BuildOption.deprecationWarnings, ["-Wdeprecated"]), + tuple(BuildOption.deprecationErrors, ["-Werror", "-Wdeprecated"]), + tuple(BuildOption.property, ["-fproperty"]), + //tuple(BuildOption.profileGC, ["-?"]), + + tuple(BuildOption._docs, ["-fdoc-dir=docs"]), + tuple(BuildOption._ddox, ["-fXf=docs.json", "-fdoc-file=__dummy.html"]), ]; @property string name() const { return "gdc"; } BuildPlatform determinePlatform(ref BuildSettings settings, string compiler_binary, string arch_override) { - import std.process; - import std.string; - - auto fil = generatePlatformProbeFile(); - string[] arch_flags; - switch (arch_override) { default: throw new Exception("Unsupported architecture: "~arch_override); case "": break; + case "arm": arch_flags = ["-marm"]; break; + case "arm_thumb": arch_flags = ["-mthumb"]; break; case "x86": arch_flags = ["-m32"]; break; case "x86_64": arch_flags = ["-m64"]; break; } settings.addDFlags(arch_flags); auto binary_file = getTempFile("dub_platform_probe"); - auto result = 
executeShell(escapeShellCommand( - compiler_binary ~ - arch_flags ~ - ["-c", "-o", binary_file.toNativeString(), fil.toNativeString()] - )); - enforce(result.status == 0, format("Failed to invoke the compiler %s to determine the build platform: %s", - compiler_binary, result.output)); - - auto build_platform = readPlatformProbe(result.output); - build_platform.compilerBinary = compiler_binary; - - if (build_platform.compiler != this.name) { - logWarn(`The determined compiler type "%s" doesn't match the expected type "%s". This will probably result in build errors.`, - build_platform.compiler, this.name); - } - - if (arch_override.length && !build_platform.architecture.canFind(arch_override)) { - logWarn(`Failed to apply the selected architecture %s. Got %s.`, - arch_override, build_platform.architecture); - } - - return build_platform; + return probePlatform(compiler_binary, + arch_flags ~ ["-c", "-o", binary_file.toNativeString()], + arch_override); } void prepareBuildSettings(ref BuildSettings settings, BuildSetting fields = BuildSetting.all) const @@ -133,11 +114,13 @@ } if (!(fields & BuildSetting.lflags)) { - foreach( f; settings.lflags ) - settings.addDFlags(["-Xlinker", f]); + settings.addDFlags(lflagsToDFlags(settings.lflags)); settings.lflags = null; } + if (settings.options & BuildOption.pic) + settings.addDFlags("-fPIC"); + assert(fields & BuildSetting.dflags); assert(fields & BuildSetting.copyFiles); } @@ -158,6 +141,31 @@ settings.dflags = newflags.data; } + string getTargetFileName(in BuildSettings settings, in BuildPlatform platform) + const { + assert(settings.targetName.length > 0, "No target name set."); + final switch (settings.targetType) { + case TargetType.autodetect: assert(false, "Configurations must have a concrete target type."); + case TargetType.none: return null; + case TargetType.sourceLibrary: return null; + case TargetType.executable: + if (platform.platform.canFind("windows")) + return settings.targetName ~ ".exe"; + else return 
settings.targetName; + case TargetType.library: + case TargetType.staticLibrary: + return "lib" ~ settings.targetName ~ ".a"; + case TargetType.dynamicLibrary: + if (platform.platform.canFind("windows")) + return settings.targetName ~ ".dll"; + else return "lib" ~ settings.targetName ~ ".so"; + case TargetType.object: + if (platform.platform.canFind("windows")) + return settings.targetName ~ ".obj"; + else return settings.targetName ~ ".o"; + } + } + void setTarget(ref BuildSettings settings, in BuildPlatform platform, string tpath = null) const { final switch (settings.targetType) { @@ -200,11 +208,23 @@ args = [ "ar", "rcs", tpath ] ~ objects; } else { args = platform.compilerBinary ~ objects ~ settings.sourceFiles ~ settings.lflags ~ settings.dflags.filter!(f => isLinkageFlag(f)).array; - version(linux) args ~= "-L--no-as-needed"; // avoids linker errors due to libraries being speficied in the wrong order by DMD + version(linux) args ~= "-L--no-as-needed"; // avoids linker errors due to libraries being specified in the wrong order by DMD } logDiagnostic("%s", args.join(" ")); invokeTool(args, output_callback); } + + string[] lflagsToDFlags(in string[] lflags) const + { + string[] dflags; + foreach( f; lflags ) + { + dflags ~= "-Xlinker"; + dflags ~= f; + } + + return dflags; + } } private string extractTarget(const string[] args) { auto i = args.countUntil("-o"); return i >= 0 ? 
args[i+1] : null; } diff --git a/source/dub/compilers/ldc.d b/source/dub/compilers/ldc.d index 32a4520..df8c2de 100644 --- a/source/dub/compilers/ldc.d +++ b/source/dub/compilers/ldc.d @@ -8,6 +8,7 @@ module dub.compilers.ldc; import dub.compilers.compiler; +import dub.compilers.utils; import dub.internal.utils; import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.inet.path; @@ -19,60 +20,52 @@ import std.exception; import std.file; import std.process; -import std.random; import std.typecons; -class LdcCompiler : Compiler { +class LDCCompiler : Compiler { private static immutable s_options = [ - tuple(BuildOptions.debugMode, ["-d-debug"]), - tuple(BuildOptions.releaseMode, ["-release"]), - //tuple(BuildOptions.coverage, ["-?"]), - tuple(BuildOptions.debugInfo, ["-g"]), - tuple(BuildOptions.debugInfoC, ["-gc"]), - //tuple(BuildOptions.alwaysStackFrame, ["-?"]), - //tuple(BuildOptions.stackStomping, ["-?"]), - tuple(BuildOptions.inline, ["-enable-inlining"]), - tuple(BuildOptions.noBoundsCheck, ["-disable-boundscheck"]), - tuple(BuildOptions.optimize, ["-O"]), - //tuple(BuildOptions.profile, ["-?"]), - tuple(BuildOptions.unittests, ["-unittest"]), - tuple(BuildOptions.verbose, ["-v"]), - tuple(BuildOptions.ignoreUnknownPragmas, ["-ignore"]), - tuple(BuildOptions.syntaxOnly, ["-o-"]), - tuple(BuildOptions.warnings, ["-wi"]), - tuple(BuildOptions.warningsAsErrors, ["-w"]), - tuple(BuildOptions.ignoreDeprecations, ["-d"]), - tuple(BuildOptions.deprecationWarnings, ["-dw"]), - tuple(BuildOptions.deprecationErrors, ["-de"]), - tuple(BuildOptions.property, ["-property"]), + tuple(BuildOption.debugMode, ["-d-debug"]), + tuple(BuildOption.releaseMode, ["-release"]), + //tuple(BuildOption.coverage, ["-?"]), + tuple(BuildOption.debugInfo, ["-g"]), + tuple(BuildOption.debugInfoC, ["-gc"]), + //tuple(BuildOption.alwaysStackFrame, ["-?"]), + //tuple(BuildOption.stackStomping, ["-?"]), + tuple(BuildOption.inline, ["-enable-inlining", "-Hkeep-all-bodies"]), + 
tuple(BuildOption.noBoundsCheck, ["-boundscheck=off"]), + tuple(BuildOption.optimize, ["-O3"]), + //tuple(BuildOption.profile, ["-?"]), + tuple(BuildOption.unittests, ["-unittest"]), + tuple(BuildOption.verbose, ["-v"]), + tuple(BuildOption.ignoreUnknownPragmas, ["-ignore"]), + tuple(BuildOption.syntaxOnly, ["-o-"]), + tuple(BuildOption.warnings, ["-wi"]), + tuple(BuildOption.warningsAsErrors, ["-w"]), + tuple(BuildOption.ignoreDeprecations, ["-d"]), + tuple(BuildOption.deprecationWarnings, ["-dw"]), + tuple(BuildOption.deprecationErrors, ["-de"]), + tuple(BuildOption.property, ["-property"]), + //tuple(BuildOption.profileGC, ["-?"]), + + tuple(BuildOption._docs, ["-Dd=docs"]), + tuple(BuildOption._ddox, ["-Xf=docs.json", "-Dd=__dummy_docs"]), ]; @property string name() const { return "ldc"; } BuildPlatform determinePlatform(ref BuildSettings settings, string compiler_binary, string arch_override) { - // TODO: determine platform by invoking the compiler instead - BuildPlatform build_platform; - build_platform.platform = .determinePlatform(); - build_platform.architecture = .determineArchitecture(); - build_platform.compiler = this.name; - build_platform.compilerBinary = compiler_binary; - + string[] arch_flags; switch (arch_override) { default: throw new Exception("Unsupported architecture: "~arch_override); case "": break; - case "x86": - build_platform.architecture = ["x86"]; - settings.addDFlags("-march=x86"); - break; - case "x86_64": - build_platform.architecture = ["x86_64"]; - settings.addDFlags("-march=x86_64"); - break; + case "x86": arch_flags = ["-march=x86"]; break; + case "x86_64": arch_flags = ["-march=x86-64"]; break; } + settings.addDFlags(arch_flags); - return build_platform; + return probePlatform(compiler_binary, arch_flags ~ ["-c", "-o-"], arch_override); } void prepareBuildSettings(ref BuildSettings settings, BuildSetting fields = BuildSetting.all) const @@ -119,10 +112,13 @@ } if (!(fields & BuildSetting.lflags)) { - 
settings.addDFlags(settings.lflags.map!(s => "-L="~s)().array()); + settings.addDFlags(lflagsToDFlags(settings.lflags)); settings.lflags = null; } + if (settings.options & BuildOption.pic) + settings.addDFlags("-relocation-model=pic"); + assert(fields & BuildSetting.dflags); assert(fields & BuildSetting.copyFiles); } @@ -143,6 +139,43 @@ settings.dflags = newflags.data; } + string getTargetFileName(in BuildSettings settings, in BuildPlatform platform) + const { + import std.string : splitLines, strip; + import std.uni : toLower; + + assert(settings.targetName.length > 0, "No target name set."); + + auto result = executeShell(escapeShellCommand([platform.compilerBinary, "-version"])); + enforce (result.status == 0, "Failed to determine linker used by LDC. \"" + ~platform.compilerBinary~" -version\" failed with exit code " + ~result.status.to!string()~"."); + + bool generates_coff = result.output.splitLines.find!(l => l.strip.toLower.startsWith("default target:")).front.canFind("-windows-msvc"); + + final switch (settings.targetType) { + case TargetType.autodetect: assert(false, "Configurations must have a concrete target type."); + case TargetType.none: return null; + case TargetType.sourceLibrary: return null; + case TargetType.executable: + if (platform.platform.canFind("windows")) + return settings.targetName ~ ".exe"; + else return settings.targetName; + case TargetType.library: + case TargetType.staticLibrary: + if (generates_coff) return settings.targetName ~ ".lib"; + else return "lib" ~ settings.targetName ~ ".a"; + case TargetType.dynamicLibrary: + if (platform.platform.canFind("windows")) + return settings.targetName ~ ".dll"; + else return "lib" ~ settings.targetName ~ ".so"; + case TargetType.object: + if (platform.platform.canFind("windows")) + return settings.targetName ~ ".obj"; + else return settings.targetName ~ ".o"; + } + } + void setTarget(ref BuildSettings settings, in BuildPlatform platform, string tpath = null) const { final switch 
(settings.targetType) { @@ -170,9 +203,11 @@ void invoke(in BuildSettings settings, in BuildPlatform platform, void delegate(int, string) output_callback) { auto res_file = getTempFile("dub-build", ".rsp"); - std.file.write(res_file.toNativeString(), join(cast(string[])settings.dflags, "\n")); + const(string)[] args = settings.dflags; + if (platform.frontendVersion >= 2066) args ~= "-vcolumns"; + std.file.write(res_file.toNativeString(), escapeArgs(args).join("\n")); - logDiagnostic("%s %s", platform.compilerBinary, join(cast(string[])settings.dflags, " ")); + logDiagnostic("%s %s", platform.compilerBinary, escapeArgs(args).join(" ")); invokeTool([platform.compilerBinary, "@"~res_file.toNativeString()], output_callback); } @@ -180,4 +215,14 @@ { assert(false, "Separate linking not implemented for LDC"); } + + string[] lflagsToDFlags(in string[] lflags) const + { + return lflags.map!(s => "-L="~s)().array(); + } + + private auto escapeArgs(in string[] args) + { + return args.map!(s => s.canFind(' ') ? "\""~s~"\"" : s); + } } diff --git a/source/dub/compilers/utils.d b/source/dub/compilers/utils.d new file mode 100644 index 0000000..218fd87 --- /dev/null +++ b/source/dub/compilers/utils.d @@ -0,0 +1,387 @@ +/** + Utility functionality for compiler class implementations. + + Copyright: © 2013-2016 rejectedsoftware e.K. + License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. + Authors: Sönke Ludwig +*/ +module dub.compilers.utils; + +import dub.compilers.buildsettings; +import dub.platform; +import dub.internal.vibecompat.core.log; +import dub.internal.vibecompat.inet.path; +import std.algorithm : canFind, endsWith, filter; + + +/** + Alters the build options to comply with the specified build requirements. + + And enabled options that do not comply will get disabled. 
+*/ +void enforceBuildRequirements(ref BuildSettings settings) +{ + settings.addOptions(BuildOption.warningsAsErrors); + if (settings.requirements & BuildRequirement.allowWarnings) { settings.options &= ~BuildOption.warningsAsErrors; settings.options |= BuildOption.warnings; } + if (settings.requirements & BuildRequirement.silenceWarnings) settings.options &= ~(BuildOption.warningsAsErrors|BuildOption.warnings); + if (settings.requirements & BuildRequirement.disallowDeprecations) { settings.options &= ~(BuildOption.ignoreDeprecations|BuildOption.deprecationWarnings); settings.options |= BuildOption.deprecationErrors; } + if (settings.requirements & BuildRequirement.silenceDeprecations) { settings.options &= ~(BuildOption.deprecationErrors|BuildOption.deprecationWarnings); settings.options |= BuildOption.ignoreDeprecations; } + if (settings.requirements & BuildRequirement.disallowInlining) settings.options &= ~BuildOption.inline; + if (settings.requirements & BuildRequirement.disallowOptimization) settings.options &= ~BuildOption.optimize; + if (settings.requirements & BuildRequirement.requireBoundsCheck) settings.options &= ~BuildOption.noBoundsCheck; + if (settings.requirements & BuildRequirement.requireContracts) settings.options &= ~BuildOption.releaseMode; + if (settings.requirements & BuildRequirement.relaxProperties) settings.options &= ~BuildOption.property; +} + + +/** + Determines if a specific file name has the extension of a linker file. + + Linker files include static/dynamic libraries, resource files, object files + and DLL definition files. 
+*/ +bool isLinkerFile(string f) +{ + import std.path; + switch (extension(f)) { + default: + return false; + version (Windows) { + case ".lib", ".obj", ".res", ".def": + return true; + } else { + case ".a", ".o", ".so", ".dylib": + return true; + } + } +} + +unittest { + version (Windows) { + assert(isLinkerFile("test.obj")); + assert(isLinkerFile("test.lib")); + assert(isLinkerFile("test.res")); + assert(!isLinkerFile("test.o")); + assert(!isLinkerFile("test.d")); + } else { + assert(isLinkerFile("test.o")); + assert(isLinkerFile("test.a")); + assert(isLinkerFile("test.so")); + assert(isLinkerFile("test.dylib")); + assert(!isLinkerFile("test.obj")); + assert(!isLinkerFile("test.d")); + } +} + + +/** + Replaces each referenced import library by the appropriate linker flags. + + This function tries to invoke "pkg-config" if possible and falls back to + direct flag translation if that fails. +*/ +void resolveLibs(ref BuildSettings settings) +{ + import std.string : format; + import std.array : array; + + if (settings.libs.length == 0) return; + + if (settings.targetType == TargetType.library || settings.targetType == TargetType.staticLibrary) { + logDiagnostic("Ignoring all import libraries for static library build."); + settings.libs = null; + version(Windows) settings.sourceFiles = settings.sourceFiles.filter!(f => !f.endsWith(".lib")).array; + } + + version (Posix) { + import std.algorithm : any, map, partition, startsWith; + import std.array : array, join, split; + import std.exception : enforce; + import std.process : execute; + + try { + enum pkgconfig_bin = "pkg-config"; + + bool exists(string lib) { + return execute([pkgconfig_bin, "--exists", lib]).status == 0; + } + + auto pkgconfig_libs = settings.libs.partition!(l => !exists(l)); + pkgconfig_libs ~= settings.libs[0 .. $ - pkgconfig_libs.length] + .partition!(l => !exists("lib"~l)).map!(l => "lib"~l).array; + settings.libs = settings.libs[0 .. 
$ - pkgconfig_libs.length]; + + if (pkgconfig_libs.length) { + logDiagnostic("Using pkg-config to resolve library flags for %s.", pkgconfig_libs.join(", ")); + auto libflags = execute([pkgconfig_bin, "--libs"] ~ pkgconfig_libs); + enforce(libflags.status == 0, format("pkg-config exited with error code %s: %s", libflags.status, libflags.output)); + foreach (f; libflags.output.split()) { + if (f.startsWith("-L-L")) { + settings.addLFlags(f[2 .. $]); + } else if (f.startsWith("-defaultlib")) { + settings.addDFlags(f); + } else if (f.startsWith("-L-defaultlib")) { + settings.addDFlags(f[2 .. $]); + } else if (f.startsWith("-pthread")) { + settings.addLFlags("-lpthread"); + } else if (f.startsWith("-L-l")) { + settings.addLFlags(f[2 .. $].split(",")); + } else if (f.startsWith("-Wl,")) settings.addLFlags(f[4 .. $].split(",")); + else settings.addLFlags(f); + } + } + if (settings.libs.length) logDiagnostic("Using direct -l... flags for %s.", settings.libs.array.join(", ")); + } catch (Exception e) { + logDiagnostic("pkg-config failed: %s", e.msg); + logDiagnostic("Falling back to direct -l... flags."); + } + } +} + + +/** Searches the given list of compiler flags for ones that have a generic + equivalent. + + Certain compiler flags should, instead of using compiler-specific syntax, + be specified as build options (`BuildOptions`) or built requirements + (`BuildRequirements`). This function will output warning messages to + assist the user in making the best choice. 
+*/ +void warnOnSpecialCompilerFlags(string[] compiler_flags, BuildOptions options, string package_name, string config_name) +{ + import std.algorithm : any, endsWith, startsWith; + import std.range : empty; + + struct SpecialFlag { + string[] flags; + string alternative; + } + static immutable SpecialFlag[] s_specialFlags = [ + {["-c", "-o-"], "Automatically issued by DUB, do not specify in dub.json"}, + {["-w", "-Wall", "-Werr"], `Use "buildRequirements" to control warning behavior`}, + {["-property", "-fproperty"], "Using this flag may break building of dependencies and it will probably be removed from DMD in the future"}, + {["-wi"], `Use the "buildRequirements" field to control warning behavior`}, + {["-d", "-de", "-dw"], `Use the "buildRequirements" field to control deprecation behavior`}, + {["-of"], `Use "targetPath" and "targetName" to customize the output file`}, + {["-debug", "-fdebug", "-g"], "Call dub with --build=debug"}, + {["-release", "-frelease", "-O", "-inline"], "Call dub with --build=release"}, + {["-unittest", "-funittest"], "Call dub with --build=unittest"}, + {["-lib"], `Use {"targetType": "staticLibrary"} or let dub manage this`}, + {["-D"], "Call dub with --build=docs or --build=ddox"}, + {["-X"], "Call dub with --build=ddox"}, + {["-cov"], "Call dub with --build=cov or --build=unittest-cov"}, + {["-profile"], "Call dub with --build=profile"}, + {["-version="], `Use "versions" to specify version constants in a compiler independent way`}, + {["-debug="], `Use "debugVersions" to specify version constants in a compiler independent way`}, + {["-I"], `Use "importPaths" to specify import paths in a compiler independent way`}, + {["-J"], `Use "stringImportPaths" to specify import paths in a compiler independent way`}, + {["-m32", "-m64", "-m32mscoff"], `Use --arch=x86/--arch=x86_64/--arch=x86_mscoff to specify the target architecture`} + ]; + + struct SpecialOption { + BuildOption[] flags; + string alternative; + } + static immutable 
SpecialOption[] s_specialOptions = [ + {[BuildOption.debugMode], "Call DUB with --build=debug"}, + {[BuildOption.releaseMode], "Call DUB with --build=release"}, + {[BuildOption.coverage], "Call DUB with --build=cov or --build=unittest-cov"}, + {[BuildOption.debugInfo], "Call DUB with --build=debug"}, + {[BuildOption.inline], "Call DUB with --build=release"}, + {[BuildOption.noBoundsCheck], "Call DUB with --build=release-nobounds"}, + {[BuildOption.optimize], "Call DUB with --build=release"}, + {[BuildOption.profile], "Call DUB with --build=profile"}, + {[BuildOption.unittests], "Call DUB with --build=unittest"}, + {[BuildOption.warnings, BuildOption.warningsAsErrors], "Use \"buildRequirements\" to control the warning level"}, + {[BuildOption.ignoreDeprecations, BuildOption.deprecationWarnings, BuildOption.deprecationErrors], "Use \"buildRequirements\" to control the deprecation warning level"}, + {[BuildOption.property], "This flag is deprecated and has no effect"} + ]; + + bool got_preamble = false; + void outputPreamble() + { + if (got_preamble) return; + got_preamble = true; + logWarn(""); + if (config_name.empty) logWarn("## Warning for package %s ##", package_name); + else logWarn("## Warning for package %s, configuration %s ##", package_name, config_name); + logWarn(""); + logWarn("The following compiler flags have been specified in the package description"); + logWarn("file. 
They are handled by DUB and direct use in packages is discouraged."); + logWarn("Alternatively, you can set the DFLAGS environment variable to pass custom flags"); + logWarn("to the compiler, or use one of the suggestions below:"); + logWarn(""); + } + + foreach (f; compiler_flags) { + foreach (sf; s_specialFlags) { + if (sf.flags.any!(sff => f == sff || (sff.endsWith("=") && f.startsWith(sff)))) { + outputPreamble(); + logWarn("%s: %s", f, sf.alternative); + break; + } + } + } + + foreach (sf; s_specialOptions) { + foreach (f; sf.flags) { + if (options & f) { + outputPreamble(); + logWarn("%s: %s", f, sf.alternative); + break; + } + } + } + + if (got_preamble) logWarn(""); +} + + +/** + Generate a file that will give, at compile time, information about the compiler (architecture, frontend version...) + + See_Also: `readPlatformProbe` +*/ +Path generatePlatformProbeFile() +{ + import dub.internal.vibecompat.core.file; + import dub.internal.vibecompat.data.json; + import dub.internal.utils; + + auto path = getTempFile("dub_platform_probe", ".d"); + + auto fil = openFile(path, FileMode.createTrunc); + scope (failure) { + fil.close(); + } + + // NOTE: This must be kept in sync with the dub.platform module + fil.write(q{ + module dub_platform_probe; + + template toString(int v) { enum toString = v.stringof; } + + pragma(msg, `{`); + pragma(msg,` "compiler": "`~ determineCompiler() ~ `",`); + pragma(msg, ` "frontendVersion": ` ~ toString!__VERSION__ ~ `,`); + pragma(msg, ` "compilerVendor": "` ~ __VENDOR__ ~ `",`); + pragma(msg, ` "platform": [`); + pragma(msg, ` ` ~ determinePlatform()); + pragma(msg, ` ],`); + pragma(msg, ` "architecture": [`); + pragma(msg, ` ` ~ determineArchitecture()); + pragma(msg, ` ],`); + pragma(msg, `}`); + + string determinePlatform() + { + string ret; + version(Windows) ret ~= `"windows", `; + version(linux) ret ~= `"linux", `; + version(Posix) ret ~= `"posix", `; + version(OSX) ret ~= `"osx", `; + version(FreeBSD) ret ~= `"freebsd", `; + 
version(OpenBSD) ret ~= `"openbsd", `; + version(NetBSD) ret ~= `"netbsd", `; + version(DragonFlyBSD) ret ~= `"dragonflybsd", `; + version(BSD) ret ~= `"bsd", `; + version(Solaris) ret ~= `"solaris", `; + version(AIX) ret ~= `"aix", `; + version(Haiku) ret ~= `"haiku", `; + version(SkyOS) ret ~= `"skyos", `; + version(SysV3) ret ~= `"sysv3", `; + version(SysV4) ret ~= `"sysv4", `; + version(Hurd) ret ~= `"hurd", `; + version(Android) ret ~= `"android", `; + version(Cygwin) ret ~= `"cygwin", `; + version(MinGW) ret ~= `"mingw", `; + return ret; + } + + string determineArchitecture() + { + string ret; + version(X86) ret ~= `"x86", `; + version(X86_64) ret ~= `"x86_64", `; + version(ARM) ret ~= `"arm", `; + version(ARM_Thumb) ret ~= `"arm_thumb", `; + version(ARM_SoftFloat) ret ~= `"arm_softfloat", `; + version(ARM_HardFloat) ret ~= `"arm_hardfloat", `; + version(ARM64) ret ~= `"arm64", `; + version(PPC) ret ~= `"ppc", `; + version(PPC_SoftFP) ret ~= `"ppc_softfp", `; + version(PPC_HardFP) ret ~= `"ppc_hardfp", `; + version(PPC64) ret ~= `"ppc64", `; + version(IA64) ret ~= `"ia64", `; + version(MIPS) ret ~= `"mips", `; + version(MIPS32) ret ~= `"mips32", `; + version(MIPS64) ret ~= `"mips64", `; + version(MIPS_O32) ret ~= `"mips_o32", `; + version(MIPS_N32) ret ~= `"mips_n32", `; + version(MIPS_O64) ret ~= `"mips_o64", `; + version(MIPS_N64) ret ~= `"mips_n64", `; + version(MIPS_EABI) ret ~= `"mips_eabi", `; + version(MIPS_NoFloat) ret ~= `"mips_nofloat", `; + version(MIPS_SoftFloat) ret ~= `"mips_softfloat", `; + version(MIPS_HardFloat) ret ~= `"mips_hardfloat", `; + version(SPARC) ret ~= `"sparc", `; + version(SPARC_V8Plus) ret ~= `"sparc_v8plus", `; + version(SPARC_SoftFP) ret ~= `"sparc_softfp", `; + version(SPARC_HardFP) ret ~= `"sparc_hardfp", `; + version(SPARC64) ret ~= `"sparc64", `; + version(S390) ret ~= `"s390", `; + version(S390X) ret ~= `"s390x", `; + version(HPPA) ret ~= `"hppa", `; + version(HPPA64) ret ~= `"hppa64", `; + version(SH) ret ~= `"sh", `; + 
version(SH64) ret ~= `"sh64", `; + version(Alpha) ret ~= `"alpha", `; + version(Alpha_SoftFP) ret ~= `"alpha_softfp", `; + version(Alpha_HardFP) ret ~= `"alpha_hardfp", `; + return ret; + } + + string determineCompiler() + { + version(DigitalMars) return "dmd"; + else version(GNU) return "gdc"; + else version(LDC) return "ldc"; + else version(SDC) return "sdc"; + else return null; + } + }); + + fil.close(); + + return path; +} + +/** + Processes the output generated by compiling the platform probe file. + + See_Also: `generatePlatformProbeFile`. +*/ +BuildPlatform readPlatformProbe(string output) +{ + import std.algorithm : map; + import std.array : array; + import std.exception : enforce; + import std.string; + + // work around possible additional output of the compiler + auto idx1 = output.indexOf("{"); + auto idx2 = output.lastIndexOf("}"); + enforce(idx1 >= 0 && idx1 < idx2, + "Unexpected platform information output - does not contain a JSON object."); + output = output[idx1 .. idx2+1]; + + import dub.internal.vibecompat.data.json; + auto json = parseJsonString(output); + + BuildPlatform build_platform; + build_platform.platform = json["platform"].get!(Json[]).map!(e => e.get!string()).array(); + build_platform.architecture = json["architecture"].get!(Json[]).map!(e => e.get!string()).array(); + build_platform.compiler = json["compiler"].get!string; + build_platform.frontendVersion = json["frontendVersion"].get!int; + return build_platform; +} diff --git a/source/dub/dependency.d b/source/dub/dependency.d index a00a6da..073ac91 100644 --- a/source/dub/dependency.d +++ b/source/dub/dependency.d @@ -1,7 +1,7 @@ /** - Stuff with dependencies. + Dependency specification functionality. - Copyright: © 2012-2013 Matthias Dondorff + Copyright: © 2012-2013 Matthias Dondorff, © 2012-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Matthias Dondorff, Sönke Ludwig */ @@ -24,31 +24,119 @@ static import std.compiler; +/** Encapsulates the name of a package along with its dependency specification. +*/ +struct PackageDependency { + /// Name of the referenced package. + string name; + + /// Dependency specification used to select a particular version of the package. + Dependency spec; +} + + /** - Representing a dependency, which is basically a version string and a - compare methode, e.g. '>=1.0.0 <2.0.0' (i.e. a space separates the two - version numbers) + Represents a dependency specification. + + A dependency specification either represents a specific version or version + range, or a path to a package. In addition to that it has `optional` and + `default_` flags to control how non-mandatory dependencies are handled. The + package name is notably not part of the dependency specification. */ struct Dependency { +@trusted: // Too many issues on DMD 2.065.0 to annotate with @safe + private { // Shortcut to create >=0.0.0 enum ANY_IDENT = "*"; - string m_cmpA; + bool m_inclusiveA = true; // A comparison > (true) or >= (false) Version m_versA; - string m_cmpB; + bool m_inclusiveB = true; // B comparison < (true) or <= (false) Version m_versB; Path m_path; bool m_optional = false; + bool m_default = false; } - // A Dependency, which matches every valid version. - static @property any() { return Dependency(ANY_IDENT); } - static @property invalid() { Dependency ret; ret.m_versA = Version.HEAD; ret.m_versB = Version.RELEASE; return ret; } + /// A Dependency, which matches every valid version. + static @property Dependency any() { return Dependency(ANY_IDENT); } - alias ANY = any; - alias INVALID = invalid; + /// An invalid dependency (with no possible version matches). 
+ static @property Dependency invalid() { Dependency ret; ret.m_versA = Version.maxRelease; ret.m_versB = Version.minRelease; return ret; } - this(string ves) + /** Constructs a new dependency specification from a string + + See the `versionSpec` property for a description of the accepted + contents of that string. + */ + this(string spec) + { + this.versionSpec = spec; + } + + /** Constructs a new dependency specification that matches a specific + version. + */ + this(in Version ver) + { + m_inclusiveA = m_inclusiveB = true; + m_versA = ver; + m_versB = ver; + } + + /** Constructs a new dependency specification that matches a specific + path. + */ + this(Path path) + { + this(ANY_IDENT); + m_path = path; + } + + /// If set, overrides any version based dependency selection. + @property void path(Path value) { m_path = value; } + /// ditto + @property Path path() const { return m_path; } + + /// Determines if the dependency is required or optional. + @property bool optional() const { return m_optional; } + /// ditto + @property void optional(bool optional) { m_optional = optional; } + + /// Determines if an optional dependency should be chosen by default. + @property bool default_() const { return m_default; } + /// ditto + @property void default_(bool value) { m_default = value; } + + /// Returns true $(I iff) the version range only matches a specific version. + @property bool isExactVersion() const { return m_versA == m_versB; } + + /// Returns the exact version matched by the version range. + @property Version version_() const { + enforce(m_versA == m_versB, "Dependency "~this.versionSpec~" is no exact version."); + return m_versA; + } + + /** Sets/gets the matching version range as a specification string. 
+ + The acceptable forms for this string are as follows: + + $(UL + $(LI `"1.0.0"` - a single version in SemVer format) + $(LI `"==1.0.0"` - alternative single version notation) + $(LI `">1.0.0"` - version range with a single bound) + $(LI `">1.0.0 <2.0.0"` - version range with two bounds) + $(LI `"~>1.0.0"` - a fuzzy version range) + $(LI `"~>1.0"` - a fuzzy version range with partial version) + $(LI `"~master"` - a branch name) + $(LI `"*" - match any version (see also `any`)) + ) + + Apart from "$(LT)" and "$(GT)", "$(GT)=" and "$(LT)=" are also valid + comparators. + + */ + @property void versionSpec(string ves) { enforce(ves.length > 0); string orig = ves; @@ -61,100 +149,96 @@ if (ves.startsWith("~>")) { // Shortcut: "~>x.y.z" variant. Last non-zero number will indicate // the base for this so something like this: ">=x.y.z <=", ves[0]) == -1) { - m_cmpA = ">="; - m_cmpB = "<="; + m_inclusiveA = true; + m_inclusiveB = true; m_versA = m_versB = Version(ves); } else { - m_cmpA = skipComp(ves); + auto cmpa = skipComp(ves); size_t idx2 = std.string.indexOf(ves, " "); if (idx2 == -1) { - if (m_cmpA == "<=" || m_cmpA == "<") { - m_versA = Version.RELEASE; - m_cmpB = m_cmpA; - m_cmpA = ">="; + if (cmpa == "<=" || cmpa == "<") { + m_versA = Version.minRelease; + m_inclusiveA = true; m_versB = Version(ves); - } else if (m_cmpA == ">=" || m_cmpA == ">") { + m_inclusiveB = cmpa == "<="; + } else if (cmpa == ">=" || cmpa == ">") { m_versA = Version(ves); - m_versB = Version.HEAD; - m_cmpB = "<="; + m_inclusiveA = cmpa == ">="; + m_versB = Version.maxRelease; + m_inclusiveB = true; } else { // Converts "==" to ">=a&&<=a", which makes merging easier m_versA = m_versB = Version(ves); - m_cmpA = ">="; - m_cmpB = "<="; + m_inclusiveA = m_inclusiveB = true; } } else { + enforce(cmpa == ">" || cmpa == ">=", "First comparison operator expected to be either > or >=, not "~cmpa); assert(ves[idx2] == ' '); m_versA = Version(ves[0..idx2]); + m_inclusiveA = cmpa == ">="; string v2 = 
ves[idx2+1..$]; - m_cmpB = skipComp(v2); + auto cmpb = skipComp(v2); + enforce(cmpb == "<" || cmpb == "<=", "Second comparison operator expected to be either < or <=, not "~cmpb); m_versB = Version(v2); + m_inclusiveB = cmpb == "<="; - enforce(!m_versA.isBranch, format("Partly a branch (A): %s", ves)); - enforce(!m_versB.isBranch, format("Partly a branch (B): %s", ves)); - - if (m_versB < m_versA) { - swap(m_versA, m_versB); - swap(m_cmpA, m_cmpB); - } - enforce( m_cmpA != "==" && m_cmpB != "==", "For equality, please specify a single version."); + enforce(!m_versA.isBranch && !m_versB.isBranch, format("Cannot compare branches: %s", ves)); + enforce(m_versA <= m_versB, "First version must not be greater than the second one."); } } } - - this(in Version ver) - { - m_cmpA = ">="; - m_cmpB = "<="; - m_versA = ver; - m_versB = ver; - } - - this(Path path) - { - this(ANY_IDENT); - m_path = path; - } - - @property void path(Path value) { m_path = value; } - @property Path path() const { return m_path; } - @property bool optional() const { return m_optional; } - @property void optional(bool optional) { m_optional = optional; } - @property bool isExactVersion() const { return m_versA == m_versB; } - - @property Version version_() const { - enforce(m_versA == m_versB, "Dependency "~versionString~" is no exact version."); - return m_versA; - } - - @property string versionString() + /// ditto + @property string versionSpec() const { string r; if (this == invalid) return "invalid"; - if( m_versA == m_versB && m_cmpA == ">=" && m_cmpB == "<=" ){ + if (m_versA == m_versB && m_inclusiveA && m_inclusiveB) { // Special "==" case - if (m_versA == Version.MASTER ) r = "~master"; - else r = m_versA.toString(); - } else { - if( m_versA != Version.RELEASE ) r = m_cmpA ~ m_versA.toString(); - if( m_versB != Version.HEAD ) r ~= (r.length==0?"" : " ") ~ m_cmpB ~ m_versB.toString(); - if( m_versA == Version.RELEASE && m_versB == Version.HEAD ) r = ">=0.0.0"; + if (m_versA == 
Version.masterBranch) return "~master"; + else return m_versA.toString(); } + + // "~>" case + if (m_inclusiveA && !m_inclusiveB && !m_versA.isBranch) { + auto vs = m_versA.toString(); + auto i1 = std.string.indexOf(vs, '-'), i2 = std.string.indexOf(vs, '+'); + auto i12 = i1 >= 0 ? i2 >= 0 ? i1 < i2 ? i1 : i2 : i1 : i2; + auto va = i12 >= 0 ? vs[0 .. i12] : vs; + auto parts = va.splitter('.').array; + assert(parts.length == 3, "Version string with a digit group count != 3: "~va); + + foreach (i; 0 .. 3) { + auto vp = parts[0 .. i+1].join("."); + auto ve = Version(expandVersion(vp)); + auto veb = Version(expandVersion(bumpVersion(vp))); + if (ve == m_versA && veb == m_versB) return "~>" ~ vp; + } + } + + if (m_versA != Version.minRelease) r = (m_inclusiveA ? ">=" : ">") ~ m_versA.toString(); + if (m_versB != Version.maxRelease) r ~= (r.length==0 ? "" : " ") ~ (m_inclusiveB ? "<=" : "<") ~ m_versB.toString(); + if (m_versA == Version.minRelease && m_versB == Version.maxRelease) r = ">=0.0.0"; return r; } + /** Returns a modified dependency that gets mapped to a given path. + + This function will return an unmodified `Dependency` if it is not path + based. Otherwise, the given `path` will be prefixed to the existing + path. + */ Dependency mapToPath(Path path) const { if (m_path.empty || m_path.absolute) return this; @@ -165,23 +249,37 @@ } } + /** Returns a human-readable string representation of the dependency + specification. + */ string toString()() const { - auto ret = versionString; - if (optional) ret ~= " (optional)"; + auto ret = versionSpec; + if (optional) { + if (default_) ret ~= " (optional, default)"; + else ret ~= " (optional)"; + } if (!path.empty) ret ~= " @"~path.toNativeString(); return ret; } + /** Returns a JSON representation of the dependency specification. 
+ + Simple specifications will be represented as a single specification + string (`versionSpec`), while more complex specifications will be + represented as a JSON object with optional "version", "path", "optional" + and "default" fields. + */ Json toJson() const { Json json; if( path.empty && !optional ){ - json = Json(this.versionString); + json = Json(this.versionSpec); } else { json = Json.emptyObject; - json["version"] = this.versionString; + json["version"] = this.versionSpec; if (!path.empty) json["path"] = path.toString(); if (optional) json["optional"] = true; + if (default_) json["default"] = true; } return json; } @@ -194,6 +292,10 @@ assert(d.toJson() == Json("1.0.0"), "Failed: " ~ d.toJson().toPrettyString()); } + /** Constructs a new `Dependency` from its JSON representation. + + See `toJson` for a description of the JSON format. + */ static Dependency fromJson(Json verspec) { Dependency dep; if( verspec.type == Json.Type.object ){ @@ -201,17 +303,17 @@ if (auto pv = "version" in verspec) logDiagnostic("Ignoring version specification (%s) for path based dependency %s", pv.get!string, pp.get!string); - dep = Dependency.ANY; - dep.path = Path(verspec.path.get!string); + dep = Dependency.any; + dep.path = Path(verspec["path"].get!string); } else { enforce("version" in verspec, "No version field specified!"); auto ver = verspec["version"].get!string; - // Using the string to be able to specifiy a range of versions. + // Using the string to be able to specify a range of versions. 
dep = Dependency(ver); } - if( auto po = "optional" in verspec ) { - dep.optional = verspec.optional.get!bool; - } + + if (auto po = "optional" in verspec) dep.optional = po.get!bool; + if (auto po = "default" in verspec) dep.default_ = po.get!bool; } else { // canonical "package-id": "version" dep = Dependency(verspec.get!string); @@ -225,36 +327,46 @@ { "version": "2.0.0", "optional": true, + "default": true, "path": "path/to/package" } `)); - Dependency d = Dependency.ANY; // supposed to ignore the version spec + Dependency d = Dependency.any; // supposed to ignore the version spec d.optional = true; + d.default_ = true; d.path = Path("path/to/package"); assert(d == parsed); // optional and path not checked by opEquals. assert(d.optional == parsed.optional); + assert(d.default_ == parsed.default_); assert(d.path == parsed.path); } + /** Compares dependency specifications. + + These methods are suitable for equality comparisons, as well as for + using `Dependency` as a key in hash or tree maps. + */ bool opEquals(in Dependency o) const { // TODO(mdondorff): Check if not comparing the path is correct for all clients. - return o.m_cmpA == m_cmpA && o.m_cmpB == m_cmpB + return o.m_inclusiveA == m_inclusiveA && o.m_inclusiveB == m_inclusiveB && o.m_versA == m_versA && o.m_versB == m_versB - && o.m_optional == m_optional; + && o.m_optional == m_optional && o.m_default == m_default; } + /// ditto int opCmp(in Dependency o) const { - if (m_cmpA != o.m_cmpA) return m_cmpA < o.m_cmpA ? -1 : 1; - if (m_cmpB != o.m_cmpB) return m_cmpB < o.m_cmpB ? -1 : 1; + if (m_inclusiveA != o.m_inclusiveA) return m_inclusiveA < o.m_inclusiveA ? -1 : 1; + if (m_inclusiveB != o.m_inclusiveB) return m_inclusiveB < o.m_inclusiveB ? -1 : 1; if (m_versA != o.m_versA) return m_versA < o.m_versA ? -1 : 1; if (m_versB != o.m_versB) return m_versB < o.m_versB ? -1 : 1; if (m_optional != o.m_optional) return m_optional ? 
-1 : 1; return 0; } + /// ditto hash_t toHash() const nothrow @trusted { try { auto strhash = &typeid(string).getHash; @@ -263,15 +375,33 @@ } catch (Exception) assert(false); } + /** Determines if this dependency specification is valid. + + A specification is valid if it can match at least one version. + */ bool valid() const { - return m_versA == m_versB // compare not important - || (m_versA < m_versB && doCmp(m_cmpA, m_versB, m_versA) && doCmp(m_cmpB, m_versA, m_versB)); + return m_versA <= m_versB && doCmp(m_inclusiveA && m_inclusiveB, m_versA, m_versB); } + /** Determines if this dependency specification matches arbitrary versions. + + This is true in particular for the `any` constant. + */ + bool matchesAny() const { + auto cmp = Dependency("*"); + cmp.optional = m_optional; + cmp.default_ = m_default; + return cmp == this; + } + + /** Tests if the specification matches a specific version. + */ bool matches(string vers) const { return matches(Version(vers)); } + /// ditto bool matches(const(Version) v) const { return matches(v); } + /// ditto bool matches(ref const(Version) v) const { - if (this == ANY) return true; + if (this.matchesAny) return true; //logDebug(" try match: %s with: %s", v, this); // Master only matches master if(m_versA.isBranch) { @@ -280,34 +410,39 @@ } if(v.isBranch || m_versA.isBranch) return m_versA == v; - if( !doCmp(m_cmpA, v, m_versA) ) + if( !doCmp(m_inclusiveA, m_versA, v) ) return false; - if( !doCmp(m_cmpB, v, m_versB) ) + if( !doCmp(m_inclusiveB, v, m_versB) ) return false; return true; } - /// Merges to versions + /** Merges two dependency specifications. + + The result is a specification that matches the intersection of the set + of versions matched by the individual specifications. Note that this + result can be invalid (i.e. not match any version). 
+ */ Dependency merge(ref const(Dependency) o) const { - if (this == ANY) return o; - if (o == ANY) return this; - if (!this.valid || !o.valid) return INVALID; - if (m_versA.isBranch != o.m_versA.isBranch) return INVALID; - if (m_versB.isBranch != o.m_versB.isBranch) return INVALID; - if (m_versA.isBranch) return m_versA == o.m_versA ? this : INVALID; - if (this.path != o.path) return INVALID; + if (this.matchesAny) return o; + if (o.matchesAny) return this; + if (!this.valid || !o.valid) return invalid; + if (m_versA.isBranch != o.m_versA.isBranch) return invalid; + if (m_versB.isBranch != o.m_versB.isBranch) return invalid; + if (m_versA.isBranch) return m_versA == o.m_versA ? this : invalid; + if (this.path != o.path) return invalid; Version a = m_versA > o.m_versA ? m_versA : o.m_versA; Version b = m_versB < o.m_versB ? m_versB : o.m_versB; Dependency d = this; - d.m_cmpA = !doCmp(m_cmpA, a,a)? m_cmpA : o.m_cmpA; + d.m_inclusiveA = !m_inclusiveA && m_versA >= o.m_versA ? false : o.m_inclusiveA; d.m_versA = a; - d.m_cmpB = !doCmp(m_cmpB, b,b)? m_cmpB : o.m_cmpB; + d.m_inclusiveB = !m_inclusiveB && m_versB <= o.m_versB ? false : o.m_inclusiveB; d.m_versB = b; d.m_optional = m_optional && o.m_optional; - if (!d.valid) return INVALID; + if (!d.valid) return invalid; return d; } @@ -315,46 +450,38 @@ private static bool isDigit(char ch) { return ch >= '0' && ch <= '9'; } private static string skipComp(ref string c) { size_t idx = 0; - while (idx < c.length && !isDigit(c[idx]) && c[idx] != Version.BRANCH_IDENT) idx++; + while (idx < c.length && !isDigit(c[idx]) && c[idx] != Version.branchPrefix) idx++; enforce(idx < c.length, "Expected version number in version spec: "~c); string cmp = idx==c.length-1||idx==0? 
">=" : c[0..idx]; c = c[idx..$]; switch(cmp) { - default: enforce(false, "No/Unknown comparision specified: '"~cmp~"'"); return ">="; + default: enforce(false, "No/Unknown comparison specified: '"~cmp~"'"); return ">="; case ">=": goto case; case ">": goto case; case "<=": goto case; case "<": goto case; case "==": return cmp; } } - private static bool doCmp(string mthd, ref const Version a, ref const Version b) { - //logDebug("Calling %s%s%s", a, mthd, b); - switch(mthd) { - default: throw new Exception("Unknown comparison operator: "~mthd); - case ">": return a>b; - case ">=": return a>=b; - case "==": return a==b; - case "<=": return a<=b; - case "<": return a=1.1.0"), b = Dependency(">=1.3.0"); - assert (a.merge(b).valid() && a.merge(b).versionString == ">=1.3.0", a.merge(b).toString()); + assert (a.merge(b).valid() && a.merge(b).versionSpec == ">=1.3.0", a.merge(b).toString()); - a = Dependency("<=1.0.0 >=2.0.0"); - assert (!a.valid(), a.toString()); + assertThrown(Dependency("<=2.0.0 >=1.0.0")); + assertThrown(Dependency(">=2.0.0 <=1.0.0")); a = Dependency(">=1.0.0 <=5.0.0"); b = Dependency(">=2.0.0"); - assert (a.merge(b).valid() && a.merge(b).versionString == ">=2.0.0 <=5.0.0", a.merge(b).toString()); + assert (a.merge(b).valid() && a.merge(b).versionSpec == ">=2.0.0 <=5.0.0", a.merge(b).toString()); assertThrown(a = Dependency(">1.0.0 ==5.0.0"), "Construction is invalid"); a = Dependency(">1.0.0"); b = Dependency("<2.0.0"); assert (a.merge(b).valid(), a.merge(b).toString()); - assert (a.merge(b).versionString == ">1.0.0 <2.0.0", a.merge(b).toString()); + assert (a.merge(b).versionSpec == ">1.0.0 <2.0.0", a.merge(b).toString()); a = Dependency(">2.0.0"); b = Dependency("<1.0.0"); assert (!(a.merge(b)).valid(), a.merge(b).toString()); @@ -377,18 +504,18 @@ // branches / head revisions - a = Dependency(Version.MASTER_STRING); + a = Dependency(Version.masterBranch); assert(a.valid()); - assert(a.matches(Version.MASTER)); - b = 
Dependency(Version.MASTER_STRING); + assert(a.matches(Version.masterBranch)); + b = Dependency(Version.masterBranch); m = a.merge(b); - assert(m.matches(Version.MASTER)); + assert(m.matches(Version.masterBranch)); //assertThrown(a = Dependency(Version.MASTER_STRING ~ " <=1.0.0"), "Construction invalid"); - assertThrown(a = Dependency(">=1.0.0 " ~ Version.MASTER_STRING), "Construction invalid"); + assertThrown(a = Dependency(">=1.0.0 " ~ Version.masterBranch.toString()), "Construction invalid"); - immutable string branch1 = Version.BRANCH_IDENT ~ "Branch1"; - immutable string branch2 = Version.BRANCH_IDENT ~ "Branch2"; + immutable string branch1 = Version.branchPrefix ~ "Branch1"; + immutable string branch2 = Version.branchPrefix ~ "Branch2"; //assertThrown(a = Dependency(branch1 ~ " " ~ branch2), "Error: '" ~ branch1 ~ " " ~ branch2 ~ "' succeeded"); //assertThrown(a = Dependency(Version.MASTER_STRING ~ " " ~ branch1), "Error: '" ~ Version.MASTER_STRING ~ " " ~ branch1 ~ "' succeeded"); @@ -403,7 +530,7 @@ a = Dependency(branch1); assert(a.matches(branch1), "Dependency(branch1) does not match 'branch1'"); assert(a.matches(Version(branch1)), "Dependency(branch1) does not match Version('branch1')"); - assert(!a.matches(Version.MASTER), "Dependency(branch1) matches Version.MASTER"); + assert(!a.matches(Version.masterBranch), "Dependency(branch1) matches Version.masterBranch"); assert(!a.matches(branch2), "Dependency(branch1) matches 'branch2'"); assert(!a.matches(Version("1.0.0")), "Dependency(branch1) matches '1.0.0'"); a = Dependency(">=1.0.0"); @@ -439,6 +566,14 @@ a = Dependency("~>3.5.0"); assert(a == Dependency(">=3.5.0 <3.6.0"), "Testing failed: " ~ a.toString()); + a = Dependency("~>0.1.1"); + b = Dependency("==0.1.0"); + assert(!a.merge(b).valid); + b = Dependency("==0.1.9999"); + assert(a.merge(b).valid); + b = Dependency("==0.2.0"); + assert(!a.merge(b).valid); + a = Dependency("~>1.0.1-beta"); b = Dependency(">=1.0.1-beta <1.1.0"); assert(a == b, "Testing 
failed: " ~ a.toString()); @@ -455,70 +590,112 @@ assert(a.valid); assert(a.version_ == Version("~d2test")); - a = Dependency.ANY; + a = Dependency.any; assert(!a.optional); assert(a.valid); assertThrown(a.version_); + assert(a.matches(Version.masterBranch)); + assert(a.matches(Version("1.0.0"))); + assert(a.matches(Version("0.0.1-pre"))); b = Dependency(">=1.0.1"); assert(b == a.merge(b)); assert(b == b.merge(a)); + b = Dependency(Version.masterBranch); + assert(a.merge(b) == b); + assert(b.merge(a) == b); - logDebug("Dependency Unittest sucess."); + a.optional = true; + assert(a.matches(Version.masterBranch)); + assert(a.matches(Version("1.0.0"))); + assert(a.matches(Version("0.0.1-pre"))); + b = Dependency(">=1.0.1"); + assert(b == a.merge(b)); + assert(b == b.merge(a)); + b = Dependency(Version.masterBranch); + assert(a.merge(b) == b); + assert(b.merge(a) == b); + + logDebug("Dependency unittest success."); +} + +unittest { + assert(Dependency("~>1.0.4").versionSpec == "~>1.0.4"); + assert(Dependency("~>1.4").versionSpec == "~>1.4"); + assert(Dependency("~>2").versionSpec == "~>2"); + assert(Dependency("~>1.0.4+1.2.3").versionSpec == "~>1.0.4"); } /** - A version in the format "major.update.bugfix-prerelease+buildmetadata" - according to Semantic Versioning Specification v2.0.0. + Represents a version in semantic version format, or a branch identifier. - (deprecated): - This also supports a format like "~master", to identify trunk, or - "~branch_name" to identify a branch. Both Version types starting with "~" - refer to the head revision of the corresponding branch. - This is subject to be removed soon. + This can either have the form "~master", where "master" is a branch name, + or the form "major.update.bugfix-prerelease+buildmetadata" (see the + Semantic Versioning Specification v2.0.0 at http://semver.org/). 
*/ struct Version { +@safe: private { enum MAX_VERS = "99999.0.0"; enum UNKNOWN_VERS = "unknown"; + enum branchPrefix = '~'; + enum masterString = "~master"; string m_version; } - static @property RELEASE() { return Version("0.0.0"); } - static @property HEAD() { return Version(MAX_VERS); } - static @property MASTER() { return Version(MASTER_STRING); } - static @property UNKNOWN() { return Version(UNKNOWN_VERS); } - static @property MASTER_STRING() { return "~master"; } - static @property BRANCH_IDENT() { return '~'; } + static @property Version minRelease() { return Version("0.0.0"); } + static @property Version maxRelease() { return Version(MAX_VERS); } + static @property Version masterBranch() { return Version(masterString); } + static @property Version unknown() { return Version(UNKNOWN_VERS); } + /** Constructs a new `Version` from its string representation. + */ this(string vers) { enforce(vers.length > 1, "Version strings must not be empty."); - if (vers[0] != BRANCH_IDENT && vers != UNKNOWN_VERS) + if (vers[0] != branchPrefix && vers != UNKNOWN_VERS) enforce(vers.isValidVersion(), "Invalid SemVer format: " ~ vers); m_version = vers; } + /** Constructs a new `Version` from its string representation. + + This method is equivalent to calling the constructor and is used as an + endpoint for the serialization framework. + */ + static Version fromString(string vers) { return Version(vers); } + bool opEquals(const Version oth) const { if (isUnknown || oth.isUnknown) { throw new Exception("Can't compare unknown versions! (this: %s, other: %s)".format(this, oth)); } - return m_version == oth.m_version; + return opCmp(oth) == 0; } - /// Returns true, if this version indicates a branch, which is not the trunk. - @property bool isBranch() const { return !m_version.empty && m_version[0] == BRANCH_IDENT; } - @property bool isMaster() const { return m_version == MASTER_STRING; } + /// Tests if this represents a branch instead of a version. 
+ @property bool isBranch() const { return !m_version.empty && m_version[0] == branchPrefix; } + + /// Tests if this represents the master branch "~master". + @property bool isMaster() const { return m_version == masterString; } + + /** Tests if this represents a pre-release version. + + Note that branches are always considered pre-release versions. + */ @property bool isPreRelease() const { if (isBranch) return true; return isPreReleaseVersion(m_version); } + + /// Tests if this represents the special unknown version constant. @property bool isUnknown() const { return m_version == UNKNOWN_VERS; } - /** - Comparing Versions is generally possible, but comparing Versions - identifying branches other than master will fail. Only equality - can be tested for these. + /** Compares two versions/branches for precedence. + + Versions generally have precedence over branches and the master branch + has precedence over other branches. Apart from that, versions are + compared using SemVer semantics, while branches are compared + lexicographically. */ int opCmp(ref const Version other) const { @@ -534,10 +711,12 @@ return this.m_version < other.m_version ? -1 : 1; } - return compareVersions(isMaster ? MAX_VERS : m_version, other.isMaster ? MAX_VERS : other.m_version); + return compareVersions(m_version, other.m_version); } + /// ditto int opCmp(in Version other) const { return opCmp(other); } + /// Returns the string representation of the version/branch. 
string toString() const { return m_version; } } @@ -548,10 +727,10 @@ assert(!a.isBranch, "Error: '1.0.0' treated as branch"); assert(a == a, "a == a failed"); - assertNotThrown(a = Version(Version.MASTER_STRING), "Constructing Version("~Version.MASTER_STRING~"') failed"); - assert(a.isBranch, "Error: '"~Version.MASTER_STRING~"' treated as branch"); + assertNotThrown(a = Version(Version.masterString), "Constructing Version("~Version.masterString~"') failed"); + assert(a.isBranch, "Error: '"~Version.masterString~"' treated as branch"); assert(a.isMaster); - assert(a == Version.MASTER, "Constructed master version != default master version."); + assert(a == Version.masterBranch, "Constructed master version != default master version."); assertNotThrown(a = Version("~BRANCH"), "Construction of branch Version failed."); assert(a.isBranch, "Error: '~BRANCH' not treated as branch'"); @@ -564,7 +743,7 @@ assert(a == b, "a == b with a:'1.0.0', b:'1.0.0' failed"); b = Version("2.0.0"); assert(a != b, "a != b with a:'1.0.0', b:'2.0.0' failed"); - a = Version(Version.MASTER_STRING); + a = Version.masterBranch; b = Version("~BRANCH"); assert(a != b, "a != b with a:MASTER, b:'~branch' failed"); assert(a > b); @@ -594,11 +773,13 @@ for(int j=i-1; j>=0; --j) assert(versions[j] < versions[i], "Failed: " ~ versions[j].toString() ~ "<" ~ versions[i].toString()); - a = Version.UNKNOWN; - b = Version.RELEASE; + a = Version.unknown; + b = Version.minRelease; assertThrown(a == b, "Failed: compared " ~ a.toString() ~ " with " ~ b.toString() ~ ""); - a = Version.UNKNOWN; - b = Version.UNKNOWN; + a = Version.unknown; + b = Version.unknown; assertThrown(a == b, "Failed: UNKNOWN == UNKNOWN"); + + assert(Version("1.0.0+a") == Version("1.0.0+b")); } diff --git a/source/dub/dependencyresolver.d b/source/dub/dependencyresolver.d index 36804a0..a2102bd 100644 --- a/source/dub/dependencyresolver.d +++ b/source/dub/dependencyresolver.d @@ -10,7 +10,7 @@ import dub.dependency; import 
dub.internal.vibecompat.core.log; -import std.algorithm : all, canFind, filter, sort; +import std.algorithm : all, canFind, filter, map, sort; import std.array : appender, array; import std.conv : to; import std.exception : enforce; @@ -21,6 +21,7 @@ static struct TreeNodes { string pack; CONFIGS configs; + DependencyType depType = DependencyType.required; hash_t toHash() const nothrow @trusted { size_t ret = typeid(string).getHash(&pack); @@ -54,26 +55,27 @@ CONFIG[string] resolve(TreeNode root, bool throw_on_failure = true) { - static string rootPackage(string p) { - auto idx = indexOf(p, ":"); - if (idx < 0) return p; - return p[0 .. idx]; - } - - auto root_base_pack = rootPackage(root.pack); + auto root_base_pack = basePackage(root.pack); // find all possible configurations of each possible dependency size_t[string] package_indices; + string[size_t] package_names; CONFIG[][] all_configs; + bool[] any_config; + bool[string] maybe_optional_deps; bool[TreeNode] visited; + void findConfigsRec(TreeNode parent, bool parent_unique) { if (parent in visited) return; visited[parent] = true; foreach (ch; getChildren(parent)) { - auto basepack = rootPackage(ch.pack); + auto basepack = basePackage(ch.pack); auto pidx = all_configs.length; + + if (ch.depType != DependencyType.required) maybe_optional_deps[ch.pack] = true; + CONFIG[] configs; if (auto pi = basepack in package_indices) { pidx = *pi; @@ -83,9 +85,16 @@ else configs = getAllConfigs(basepack); all_configs ~= configs; package_indices[basepack] = pidx; + package_names[pidx] = basepack; } - configs = getSpecificConfigs(ch) ~ configs; + foreach (c; getSpecificConfigs(basepack, ch)) + if (!configs.canFind(c)) + configs = c ~ configs; + + if (any_config.length <= pidx) any_config.length = pidx+1; + if (configs.length > 0) + any_config[pidx] = true; // eliminate configurations from which we know that they can't satisfy // the uniquely defined root dependencies (==version or ~branch style dependencies) @@ -99,20 +108,20 
@@ } findConfigsRec(root, true); - // prepend an invalid configuration to denote an unchosen dependency + // append an invalid configuration to denote an unchosen dependency // this is used to properly support optional dependencies (when // getChildren() returns no configurations for an optional dependency, // but getAllConfigs() has already provided an existing list of configs) - foreach (ref cfgs; all_configs) cfgs = CONFIG.invalid ~ cfgs; + foreach (i, ref cfgs; all_configs) + if (cfgs.length == 0 || package_names[i] in maybe_optional_deps) + cfgs = cfgs ~ CONFIG.invalid; logDebug("Configurations used for dependency resolution:"); - foreach (n, i; package_indices) logDebug(" %s (%s): %s", n, i, all_configs[i]); + foreach (n, i; package_indices) logDebug(" %s (%s%s): %s", n, i, n in maybe_optional_deps ? ", maybe optional" : ", required", all_configs[i]); auto config_indices = new size_t[all_configs.length]; config_indices[] = 0; - string last_error; - visited = null; sizediff_t validateConfigs(TreeNode parent, ref string error) { @@ -122,21 +131,36 @@ visited[parent] = true; sizediff_t maxcpi = -1; - sizediff_t parentidx = package_indices.get(rootPackage(parent.pack), -1); - auto parentbase = rootPackage(parent.pack); + sizediff_t parentidx = package_indices.get(basePackage(parent.pack), -1); + auto parentbase = basePackage(parent.pack); // loop over all dependencies foreach (ch; getChildren(parent)) { - auto basepack = rootPackage(ch.pack); + auto basepack = basePackage(ch.pack); assert(basepack in package_indices, format("%s not in packages %s", basepack, package_indices)); // get the current config/version of the current dependency sizediff_t childidx = package_indices[basepack]; if (all_configs[childidx] == [CONFIG.invalid]) { - enforce(parentbase != root_base_pack, format("Root package %s contains reference to invalid package %s", parent.pack, ch.pack)); + // ignore invalid optional dependencies + if (ch.depType != DependencyType.required) + continue; + + 
if (parentbase == root_base_pack) { + import std.uni : toLower; + auto lp = ch.pack.toLower(); + if (lp != ch.pack) { + logError("Dependency \"%s\" of %s contains upper case letters, but must be lower case.", ch.pack, parent.pack); + if (getAllConfigs(lp).length) logError("Did you mean \"%s\"?", lp); + } + if (any_config[childidx]) + throw new Exception(format("Root package %s reference %s %s cannot be satisfied.", parent.pack, ch.pack, ch.configs)); + else + throw new Exception(format("Root package %s references unknown package %s", parent.pack, ch.pack)); + } // choose another parent config to avoid the invalid child if (parentidx > maxcpi) { - error = format("Package %s contains invalid dependency %s", parent.pack, ch.pack); + error = format("Package %s contains invalid dependency %s (no version candidates)", parent.pack, ch.pack); logDiagnostic("%s (ci=%s)", error, parentidx); maxcpi = parentidx; } @@ -145,6 +169,10 @@ auto chnode = TreeNode(ch.pack, config); if (config == CONFIG.invalid || !matches(ch.configs, config)) { + // ignore missing optional dependencies + if (config == CONFIG.invalid && ch.depType != DependencyType.required) + continue; + // if we are at the root level, we can safely skip the maxcpi computation and instead choose another childidx config if (parentbase == root_base_pack) { error = format("No match for dependency %s %s of %s", ch.pack, ch.configs, parent.pack); @@ -175,7 +203,7 @@ visited = null; string error; auto conflict_index = validateConfigs(root, error); - if (!first_error) first_error = error; + if (first_error is null) first_error = error; // print out current iteration state logDebug("Interation (ci=%s) %s", conflict_index, { @@ -196,6 +224,9 @@ auto cfg = all_configs[i][config_indices[i]]; if (cfg != CONFIG.invalid) ret[p] = cfg; } + logDebug("Resolved dependencies before optional-purge: %s", ret.byKey.map!(k => k~" "~ret[k].to!string)); + purgeOptionalDependencies(root, ret); + logDebug("Resolved dependencies after 
optional-purge: %s", ret.byKey.map!(k => k~" "~ret[k].to!string)); return ret; } @@ -213,9 +244,46 @@ } protected abstract CONFIG[] getAllConfigs(string pack); - protected abstract CONFIG[] getSpecificConfigs(TreeNodes nodes); + protected abstract CONFIG[] getSpecificConfigs(string pack, TreeNodes nodes); protected abstract TreeNodes[] getChildren(TreeNode node); protected abstract bool matches(CONFIGS configs, CONFIG config); + + private void purgeOptionalDependencies(TreeNode root, ref CONFIG[string] configs) + { + bool[string] required; + bool[string] visited; + + void markRecursively(TreeNode node) + { + if (node.pack in visited) return; + visited[node.pack] = true; + required[basePackage(node.pack)] = true; + foreach (dep; getChildren(node).filter!(dep => dep.depType != DependencyType.optional)) + if (auto dp = basePackage(dep.pack) in configs) + markRecursively(TreeNode(dep.pack, *dp)); + } + + // recursively mark all required dependencies of the concrete dependency tree + markRecursively(root); + + // remove all un-marked configurations + foreach (p; configs.keys.dup) + if (p !in required) + configs.remove(p); + } +} + +enum DependencyType { + required, + optionalDefault, + optional +} + +private string basePackage(string p) +{ + auto idx = indexOf(p, ":"); + if (idx < 0) return p; + return p[0 .. 
idx]; } @@ -226,8 +294,13 @@ enum invalid = IntConfig(-1); } static IntConfig ic(int v) { return IntConfig(v); } + static struct IntConfigs { + IntConfig[] configs; + alias configs this; + } + static IntConfigs ics(IntConfig[] cfgs) { return IntConfigs(cfgs); } - static class TestResolver : DependencyResolver!(IntConfig[], IntConfig) { + static class TestResolver : DependencyResolver!(IntConfigs, IntConfig) { private TreeNodes[][string] m_children; this(TreeNodes[][string] children) { m_children = children; } protected override IntConfig[] getAllConfigs(string pack) { @@ -241,17 +314,17 @@ ret.data.sort!"a>b"(); return ret.data; } - protected override IntConfig[] getSpecificConfigs(TreeNodes nodes) { return null; } + protected override IntConfig[] getSpecificConfigs(string pack, TreeNodes nodes) { return null; } protected override TreeNodes[] getChildren(TreeNode node) { return m_children.get(node.pack ~ ":" ~ node.config.to!string(), null); } - protected override bool matches(IntConfig[] configs, IntConfig config) { return configs.canFind(config); } + protected override bool matches(IntConfigs configs, IntConfig config) { return configs.canFind(config); } } // properly back up if conflicts are detected along the way (d:2 vs d:1) with (TestResolver) { auto res = new TestResolver([ - "a:0": [TreeNodes("b", [ic(2), ic(1)]), TreeNodes("d", [ic(1)]), TreeNodes("e", [ic(2), ic(1)])], - "b:1": [TreeNodes("c", [ic(2), ic(1)]), TreeNodes("d", [ic(1)])], - "b:2": [TreeNodes("c", [ic(3), ic(2)]), TreeNodes("d", [ic(2), ic(1)])], + "a:0": [TreeNodes("b", ics([ic(2), ic(1)])), TreeNodes("d", ics([ic(1)])), TreeNodes("e", ics([ic(2), ic(1)]))], + "b:1": [TreeNodes("c", ics([ic(2), ic(1)])), TreeNodes("d", ics([ic(1)]))], + "b:2": [TreeNodes("c", ics([ic(3), ic(2)])), TreeNodes("d", ics([ic(2), ic(1)]))], "c:1": [], "c:2": [], "c:3": [], "d:1": [], "d:2": [], "e:1": [], "e:2": [], @@ -262,9 +335,69 @@ // handle cyclic dependencies gracefully with (TestResolver) { auto res = new 
TestResolver([ - "a:0": [TreeNodes("b", [ic(1)])], - "b:1": [TreeNodes("b", [ic(1)])] + "a:0": [TreeNodes("b", ics([ic(1)]))], + "b:1": [TreeNodes("b", ics([ic(1)]))] ]); assert(res.resolve(TreeNode("a", ic(0))) == ["b":ic(1)]); } + + // don't choose optional dependencies by default + with (TestResolver) { + auto res = new TestResolver([ + "a:0": [TreeNodes("b", ics([ic(1)]), DependencyType.optional)], + "b:1": [] + ]); + assert(res.resolve(TreeNode("a", ic(0))).length == 0, to!string(res.resolve(TreeNode("a", ic(0))))); + } + + // choose default optional dependencies by default + with (TestResolver) { + auto res = new TestResolver([ + "a:0": [TreeNodes("b", ics([ic(1)]), DependencyType.optionalDefault)], + "b:1": [] + ]); + assert(res.resolve(TreeNode("a", ic(0))) == ["b":ic(1)], to!string(res.resolve(TreeNode("a", ic(0))))); + } + + // choose optional dependency if non-optional within the dependency tree + with (TestResolver) { + auto res = new TestResolver([ + "a:0": [TreeNodes("b", ics([ic(1)]), DependencyType.optional), TreeNodes("c", ics([ic(1)]))], + "b:1": [], + "c:1": [TreeNodes("b", ics([ic(1)]))] + ]); + assert(res.resolve(TreeNode("a", ic(0))) == ["b":ic(1), "c":ic(1)], to!string(res.resolve(TreeNode("a", ic(0))))); + } + + // don't choose optional dependency if non-optional outside of final dependency tree + with (TestResolver) { + auto res = new TestResolver([ + "a:0": [TreeNodes("b", ics([ic(1)]), DependencyType.optional)], + "b:1": [], + "preset:0": [TreeNodes("b", ics([ic(1)]))] + ]); + assert(res.resolve(TreeNode("a", ic(0))).length == 0, to!string(res.resolve(TreeNode("a", ic(0))))); + } + + // don't choose optional dependency if non-optional in a non-selected version + with (TestResolver) { + auto res = new TestResolver([ + "a:0": [TreeNodes("b", ics([ic(1), ic(2)]))], + "b:1": [TreeNodes("c", ics([ic(1)]))], + "b:2": [TreeNodes("c", ics([ic(1)]), DependencyType.optional)], + "c:1": [] + ]); + assert(res.resolve(TreeNode("a", ic(0))) == 
["b":ic(2)], to!string(res.resolve(TreeNode("a", ic(0))))); + } + + // make sure non-satisfyable dependencies are not a problem, even if non-optional in some dependencies + with (TestResolver) { + auto res = new TestResolver([ + "a:0": [TreeNodes("b", ics([ic(1), ic(2)]))], + "b:1": [TreeNodes("c", ics([ic(2)]))], + "b:2": [TreeNodes("c", ics([ic(2)]), DependencyType.optional)], + "c:1": [] + ]); + assert(res.resolve(TreeNode("a", ic(0))) == ["b":ic(2)], to!string(res.resolve(TreeNode("a", ic(0))))); + } } diff --git a/source/dub/description.d b/source/dub/description.d new file mode 100644 index 0000000..9f519cb --- /dev/null +++ b/source/dub/description.d @@ -0,0 +1,139 @@ +/** + Types for project descriptions (dub describe). + + Copyright: © 2015-2016 rejectedsoftware e.K. + License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. + Authors: Sönke Ludwig +*/ +module dub.description; + +import dub.compilers.buildsettings; +import dub.dependency; +import dub.internal.vibecompat.data.serialization; + + +/** + Describes a complete project for use in IDEs or build tools. + + The build settings will be specific to the compiler, platform + and configuration that has been selected. +*/ +struct ProjectDescription { + string rootPackage; /// Name of the root package being built + string configuration; /// Name of the selected build configuration + string buildType; /// Name of the selected build type + string compiler; /// Canonical name of the compiler used (e.g. "dmd", "gdc" or "ldc") + string[] architecture; /// Architecture constants for the selected platform (e.g. `["x86_64"]`) + string[] platform; /// Platform constants for the selected platform (e.g. 
`["posix", "osx"]`) + PackageDescription[] packages; /// All packages in the dependency tree + TargetDescription[] targets; /// Build targets + @ignore size_t[string] targetLookup; /// Target index by package name name + + /// Targets by name + ref inout(TargetDescription) lookupTarget(string name) inout + { + import std.exception : enforce; + auto pti = name in targetLookup; + enforce(pti !is null, "Target '"~name~"' doesn't exist. Is the target type set to \"none\" in the package recipe?"); + return targets[*pti]; + } + + /// Projects by name + ref inout(PackageDescription) lookupPackage(string name) inout + { + foreach (ref p; packages) + if (p.name == name) + { + static if (__VERSION__ > 2065) + return p; + else + return *cast(inout(PackageDescription)*)&p; + } + throw new Exception("Package '"~name~"' not found in dependency tree."); + } + + /// Root package + ref inout(PackageDescription) lookupRootPackage() inout { return lookupPackage(rootPackage); } +} + + +/** + Describes the build settings and meta data of a single package. + + This structure contains the effective build settings and dependencies for + the selected build platform. This structure is most useful for displaying + information about a package in an IDE. Use `TargetDescription` instead when + writing a build-tool. +*/ +struct PackageDescription { + string path; /// Path to the package + string name; /// Qualified name of the package + Version version_; /// Version of the package + string description; + string homepage; + string[] authors; + string copyright; + string license; + string[] dependencies; + + bool active; /// Does this package take part in the build? 
+ string configuration; /// The configuration that is built + @byName TargetType targetType; + string targetPath; + string targetName; + string targetFileName; + string workingDirectory; + string mainSourceFile; + string[] dflags; /// Flags passed to the D compiler + string[] lflags; /// Flags passed to the linker + string[] libs; /// Librariy names to link against (typically using "-l") + string[] copyFiles; /// Files to copy to the target directory + string[] versions; /// D version identifiers to set + string[] debugVersions; /// D debug version identifiers to set + string[] importPaths; + string[] stringImportPaths; + string[] preGenerateCommands; /// commands executed before creating the description + string[] postGenerateCommands; /// commands executed after creating the description + string[] preBuildCommands; /// Commands to execute prior to every build + string[] postBuildCommands; /// Commands to execute after every build + @byName BuildRequirement[] buildRequirements; + @byName BuildOption[] options; + SourceFileDescription[] files; /// A list of all source/import files possibly used by the package +} + + +/** + Describes the settings necessary to build a certain binary target. +*/ +struct TargetDescription { + string rootPackage; /// Main package associated with this target, this is also the name of the target. + string[] packages; /// All packages contained in this target (e.g. for target type "sourceLibrary") + string rootConfiguration; /// Build configuration of the target's root package used for building + BuildSettings buildSettings; /// Final build settings to use when building the target + string[] dependencies; /// List of all dependencies of this target (package names) + string[] linkDependencies; /// List of all link-dependencies of this target (target names) +} + +/** + Description for a single source file known to the package. 
+*/ +struct SourceFileDescription { + @byName SourceFileRole role; /// Main role this file plays in the build process + string path; /// Full path to the file +} + +/** + Determines the role that a file plays in the build process. + + If a file has multiple roles, higher enum values will have precedence, i.e. + if a file is used both, as a source file and as an import file, it will + be classified as a source file. +*/ +enum SourceFileRole { + unusedStringImport, /// Used as a string import for another configuration/platform + unusedImport, /// Used as an import for another configuration/platform + unusedSource, /// Used as a source file for another configuration/platform + stringImport, /// Used as a string import file + import_, /// Used as an import file + source /// Used as a source file +} diff --git a/source/dub/dub.d b/source/dub/dub.d index ad31b55..d47632f 100644 --- a/source/dub/dub.d +++ b/source/dub/dub.d @@ -1,7 +1,7 @@ /** A package manager. - Copyright: © 2012-2013 Matthias Dondorff + Copyright: © 2012-2013 Matthias Dondorff, 2012-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff, Sönke Ludwig */ @@ -39,106 +39,164 @@ // Workaround for libcurl liker errors when building with LDC version (LDC) pragma(lib, "curl"); +// Set output path and options for coverage reports +version (DigitalMars) version (D_Coverage) static if (__VERSION__ >= 2068) +{ + shared static this() + { + import core.runtime, std.file, std.path, std.stdio; + dmd_coverSetMerge(true); + auto path = buildPath(dirName(thisExePath()), "../cov"); + if (!path.exists) + mkdir(path); + dmd_coverDestPath(path); + } +} -/// The default supplier for packages, which is the registry -/// hosted by code.dlang.org. 
+static this() +{ + import dub.compilers.dmd : DMDCompiler; + import dub.compilers.gdc : GDCCompiler; + import dub.compilers.ldc : LDCCompiler; + registerCompiler(new DMDCompiler); + registerCompiler(new GDCCompiler); + registerCompiler(new LDCCompiler); +} + +/// The URL to the official package registry. +enum defaultRegistryURL = "http://code.dlang.org/"; + +/** Returns a default list of package suppliers. + + This will contain a single package supplier that points to the official + package registry. + + See_Also: `defaultRegistryURL` +*/ PackageSupplier[] defaultPackageSuppliers() { - URL url = URL.parse("http://code.dlang.org/"); - logDiagnostic("Using dub registry url '%s'", url); - return [new RegistryPackageSupplier(url)]; + logDiagnostic("Using dub registry url '%s'", defaultRegistryURL); + return [new RegistryPackageSupplier(URL(defaultRegistryURL))]; } -/// Option flags for fetch -enum FetchOptions -{ - none = 0, - forceBranchUpgrade = 1<<0, - usePrerelease = 1<<1, - forceRemove = 1<<2, - printOnly = 1<<3, -} -/// The Dub class helps in getting the applications -/// dependencies up and running. An instance manages one application. +/** Provides a high-level entry point for DUB's functionality. + + This class provides means to load a certain project (a root package with + all of its dependencies) and to perform high-level operations as found in + the command line interface. +*/ class Dub { private { bool m_dryRun = false; PackageManager m_packageManager; PackageSupplier[] m_packageSuppliers; Path m_rootPath; - Path m_tempPath; - Path m_userDubPath, m_systemDubPath; - Json m_systemConfig, m_userConfig; + SpecialDirs m_dirs; + DubConfig m_config; Path m_projectPath; Project m_project; Path m_overrideSearchPath; + string m_defaultCompiler; } - /// Initiales the package manager for the vibe application - /// under root. 
- this(PackageSupplier[] additional_package_suppliers = null, string root_path = ".") + /** The default placement location of fetched packages. + + This property can be altered, so that packages which are downloaded as part + of the normal upgrade process are stored in a certain location. This is + how the "--local" and "--system" command line switches operate. + */ + PlacementLocation defaultPlacementLocation = PlacementLocation.user; + + + /** Initializes the instance for use with a specific root package. + + Note that a package still has to be loaded using one of the + `loadPackage` overloads. + + Params: + root_path = Path to the root package + additional_package_suppliers = A list of package suppliers to try + before the suppliers found in the configurations files and the + `defaultPackageSuppliers`. + skip_registry = Can be used to skip using the configured package + suppliers, as well as the default suppliers. + */ + this(string root_path = ".", PackageSupplier[] additional_package_suppliers = null, + SkipPackageSuppliers skip_registry = SkipPackageSuppliers.none) { m_rootPath = Path(root_path); if (!m_rootPath.absolute) m_rootPath = Path(getcwd()) ~ m_rootPath; - version(Windows){ - m_systemDubPath = Path(environment.get("ProgramData")) ~ "dub/"; - m_userDubPath = Path(environment.get("APPDATA")) ~ "dub/"; - m_tempPath = Path(environment.get("TEMP")); - } else version(Posix){ - m_systemDubPath = Path("/var/lib/dub/"); - m_userDubPath = Path(environment.get("HOME")) ~ ".dub/"; - if(!m_userDubPath.absolute) - m_userDubPath = Path(getcwd()) ~ m_userDubPath; - m_tempPath = Path("/tmp"); - } - - m_userConfig = jsonFromFile(m_userDubPath ~ "settings.json", true); - m_systemConfig = jsonFromFile(m_systemDubPath ~ "settings.json", true); + init(); PackageSupplier[] ps = additional_package_suppliers; - if (auto pp = "registryUrls" in m_userConfig) - ps ~= deserializeJson!(string[])(*pp) - .map!(url => cast(PackageSupplier)new RegistryPackageSupplier(URL(url))) - 
.array; - if (auto pp = "registryUrls" in m_systemConfig) - ps ~= deserializeJson!(string[])(*pp) - .map!(url => cast(PackageSupplier)new RegistryPackageSupplier(URL(url))) - .array; - ps ~= defaultPackageSuppliers(); - auto cacheDir = m_userDubPath ~ "cache/"; - foreach (p; ps) - p.cacheOp(cacheDir, CacheOp.load); + if (skip_registry < SkipPackageSuppliers.all) + { + ps ~= (environment.get("DUB_REGISTRY", null).split(";") ~ m_config.registryURLs) + .map!(url => cast(PackageSupplier)new RegistryPackageSupplier(URL(url))) + .array; + } + + if (skip_registry < SkipPackageSuppliers.standard) + ps ~= defaultPackageSuppliers(); m_packageSuppliers = ps; - m_packageManager = new PackageManager(m_userDubPath, m_systemDubPath); + m_packageManager = new PackageManager(m_dirs.userSettings, m_dirs.systemSettings); updatePackageSearchPath(); } - /// Initializes DUB with only a single search path + unittest + { + scope (exit) environment.remove("DUB_REGISTRY"); + auto dub = new Dub(".", null, SkipPackageSuppliers.standard); + assert(dub.m_packageSuppliers.length == 0); + environment["DUB_REGISTRY"] = "http://example.com/"; + dub = new Dub(".", null, SkipPackageSuppliers.standard); + logInfo("%s", dub.m_packageSuppliers); + assert(dub.m_packageSuppliers.length == 1); + environment["DUB_REGISTRY"] = "http://example.com/;http://foo.com/"; + dub = new Dub(".", null, SkipPackageSuppliers.standard); + assert(dub.m_packageSuppliers.length == 2); + dub = new Dub(".", [new RegistryPackageSupplier(URL("http://bar.com/"))], SkipPackageSuppliers.standard); + assert(dub.m_packageSuppliers.length == 3); + } + + /** Initializes the instance with a single package search path, without + loading a package. + + This constructor corresponds to the "--bare" option of the command line + interface. 
Use + */ this(Path override_path) { + init(); m_overrideSearchPath = override_path; m_packageManager = new PackageManager(Path(), Path(), false); updatePackageSearchPath(); } - /// Perform cleanup and persist caches to disk - void shutdown() + private void init() { - auto cacheDir = m_userDubPath ~ "cache/"; - foreach (p; m_packageSuppliers) - p.cacheOp(cacheDir, CacheOp.store); - } + import std.file : tempDir; + version(Windows) { + m_dirs.systemSettings = Path(environment.get("ProgramData")) ~ "dub/"; + m_dirs.userSettings = Path(environment.get("APPDATA")) ~ "dub/"; + } else version(Posix){ + m_dirs.systemSettings = Path("/var/lib/dub/"); + m_dirs.userSettings = Path(environment.get("HOME")) ~ ".dub/"; + if (!m_dirs.userSettings.absolute) + m_dirs.userSettings = Path(getcwd()) ~ m_dirs.userSettings; + } - /// cleans all metadata caches - void cleanCaches() - { - auto cacheDir = m_userDubPath ~ "cache/"; - foreach (p; m_packageSuppliers) - p.cacheOp(cacheDir, CacheOp.clean); + m_dirs.temp = Path(tempDir); + + m_config = new DubConfig(jsonFromFile(m_dirs.systemSettings ~ "settings.json", true), m_config); + m_config = new DubConfig(jsonFromFile(Path(thisExePath).parentPath ~ "../etc/dub/settings.json", true), m_config); + m_config = new DubConfig(jsonFromFile(m_dirs.userSettings ~ "settings.json", true), m_config); + + determineDefaultCompiler(); } @property void dryRun(bool v) { m_dryRun = v; } @@ -165,9 +223,20 @@ @property inout(Project) project() inout { return m_project; } - /// Loads the package from the current working directory as the main - /// project package. - void loadPackageFromCwd() + /** Returns the default compiler binary to use for building D code. + + If set, the "defaultCompiler" field of the DUB user or system + configuration file will be used. 
Otherwise the PATH environment variable + will be searched for files named "dmd", "gdc", "gdmd", "ldc2", "ldmd2" + (in that order, taking into account operating system specific file + extensions) and the first match is returned. If no match is found, "dmd" + will be used. + */ + @property string defaultCompiler() const { return m_defaultCompiler; } + + /** Loads the package that resides within the configured `rootPath`. + */ + void loadPackage() { loadPackage(m_rootPath); } @@ -188,6 +257,95 @@ m_project = new Project(m_packageManager, pack); } + /** Loads a single file package. + + Single-file packages are D files that contain a package receipe comment + at their top. A recipe comment must be a nested `/+ ... +/` style + comment, containing the virtual recipe file name and a colon, followed by the + recipe contents (what would normally be in dub.sdl/dub.json). + + Example: + --- + /+ dub.sdl: + name "test" + dependency "vibe-d" version="~>0.7.29" + +/ + import vibe.http.server; + + void main() + { + auto settings = new HTTPServerSettings; + settings.port = 8080; + listenHTTP(settings, &hello); + } + + void hello(HTTPServerRequest req, HTTPServerResponse res) + { + res.writeBody("Hello, World!"); + } + --- + + The script above can be invoked with "dub --single test.d". + */ + void loadSingleFilePackage(Path path) + { + import dub.recipe.io : parsePackageRecipe; + import std.file : mkdirRecurse, readText; + import std.path : baseName, stripExtension; + + path = makeAbsolute(path); + + string file_content = readText(path.toNativeString()); + + if (file_content.startsWith("#!")) { + auto idx = file_content.indexOf('\n'); + enforce(idx > 0, "The source fine doesn't contain anything but a shebang line."); + file_content = file_content[idx+1 .. $]; + } + + file_content = file_content.strip(); + + string recipe_content; + + if (file_content.startsWith("/+")) { + file_content = file_content[2 .. 
$]; + auto idx = file_content.indexOf("+/"); + enforce(idx >= 0, "Missing \"+/\" to close comment."); + recipe_content = file_content[0 .. idx].strip(); + } else throw new Exception("The source file must start with a recipe comment."); + + auto nidx = recipe_content.indexOf('\n'); + + auto idx = recipe_content.indexOf(':'); + enforce(idx > 0 && (nidx < 0 || nidx > idx), + "The first line of the recipe comment must list the recipe file name followed by a colon (e.g. \"/+ dub.sdl:\")."); + auto recipe_filename = recipe_content[0 .. idx]; + recipe_content = recipe_content[idx+1 .. $]; + auto recipe_default_package_name = path.toString.baseName.stripExtension.strip; + + auto recipe = parsePackageRecipe(recipe_content, recipe_filename, null, recipe_default_package_name); + enforce(recipe.buildSettings.sourceFiles.length == 0, "Single-file packages are not allowed to specify source files."); + enforce(recipe.buildSettings.sourcePaths.length == 0, "Single-file packages are not allowed to specify source paths."); + enforce(recipe.buildSettings.importPaths.length == 0, "Single-file packages are not allowed to specify import paths."); + recipe.buildSettings.sourceFiles[""] = [path.toNativeString()]; + recipe.buildSettings.sourcePaths[""] = []; + recipe.buildSettings.importPaths[""] = []; + recipe.buildSettings.mainSourceFile = path.toNativeString(); + if (recipe.buildSettings.targetType == TargetType.autodetect) + recipe.buildSettings.targetType = TargetType.executable; + + auto pack = new Package(recipe, path.parentPath, null, "~master"); + loadPackage(pack); + } + /// ditto + void loadSingleFilePackage(string path) + { + loadSingleFilePackage(Path(path)); + } + + /** Disables the default search paths and only searches a specific directory + for packages. + */ void overrideSearchPath(Path path) { if (!path.absolute) path = Path(getcwd()) ~ path; @@ -195,9 +353,23 @@ updatePackageSearchPath(); } + /** Gets the default configuration for a particular build platform. 
+ + This forwards to `Project.getDefaultConfiguration` and requires a + project to be loaded. + */ string getDefaultConfiguration(BuildPlatform platform, bool allow_non_library_configs = true) const { return m_project.getDefaultConfiguration(platform, allow_non_library_configs); } - void upgrade(UpgradeOptions options) + /** Attempts to upgrade the dependency selection of the loaded project. + + Params: + options = Flags that control how the upgrade is carried out + packages_to_upgrade = Optional list of packages. If this list + contains one or more packages, only those packages will + be upgraded. Otherwise, all packages will be upgraded at + once. + */ + void upgrade(UpgradeOptions options, string[] packages_to_upgrade = null) { // clear non-existent version selections if (!(options & UpgradeOptions.upgrade)) { @@ -205,7 +377,10 @@ foreach (p; m_project.selections.selectedPackages) { auto dep = m_project.selections.getSelectedVersion(p); if (!dep.path.empty) { - if (m_packageManager.getOrLoadPackage(dep.path)) continue; + auto path = dep.path; + if (!path.absolute) path = this.rootPath ~ path; + try if (m_packageManager.getOrLoadPackage(path)) continue; + catch (Exception e) { logDebug("Failed to load path based selection: %s", e.toString().sanitize); } } else { if (m_packageManager.getPackage(p, dep.version_)) continue; foreach (ps; m_packageSuppliers) { @@ -214,7 +389,7 @@ if (versions.canFind!(v => dep.matches(v))) continue next_pack; } catch (Exception e) { - logDiagnostic("Error querying versions for %s, %s: %s", p, ps.description, e.msg); + logWarn("Error querying versions for %s, %s: %s", p, ps.description, e.msg); logDebug("Full error: %s", e.toString().sanitize()); } } @@ -226,11 +401,13 @@ } Dependency[string] versions; - if ((options & UpgradeOptions.useCachedResult) && m_project.isUpgradeCacheUpToDate()) { + if ((options & UpgradeOptions.useCachedResult) && m_project.isUpgradeCacheUpToDate() && !packages_to_upgrade.length) { logDiagnostic("Using 
cached upgrade results..."); versions = m_project.getUpgradeCache(); } else { auto resolver = new DependencyVersionResolver(this, options); + foreach (p; packages_to_upgrade) + resolver.addPackageToUpgrade(p); versions = resolver.resolve(m_project.rootPackage, m_project.selections); if (options & UpgradeOptions.useCachedResult) { logDiagnostic("Caching upgrade results..."); @@ -249,7 +426,7 @@ if (basename == rootbasename) continue; if (!m_project.selections.hasSelectedVersion(basename)) { - logInfo("Package %s can be installed with version %s.", + logInfo("Non-selected package %s is available with version %s.", basename, ver); any = true; continue; @@ -265,46 +442,62 @@ return; } - foreach (p, ver; versions) { + foreach (p; versions.byKey) { + auto ver = versions[p]; // Workaround for DMD 2.070.0 AA issue (crashes in aaApply2 if iterating by key+value) assert(!p.canFind(":"), "Resolved packages contain a sub package!?: "~p); Package pack; - if (!ver.path.empty) pack = m_packageManager.getOrLoadPackage(ver.path); - else { + if (!ver.path.empty) { + try pack = m_packageManager.getOrLoadPackage(ver.path); + catch (Exception e) { + logDebug("Failed to load path based selection: %s", e.toString().sanitize); + continue; + } + } else { pack = m_packageManager.getBestPackage(p, ver); if (pack && m_packageManager.isManagedPackage(pack) && ver.version_.isBranch && (options & UpgradeOptions.upgrade) != 0) { // TODO: only re-install if there is actually a new commit available logInfo("Re-installing branch based dependency %s %s", p, ver.toString()); - m_packageManager.remove(pack, (options & UpgradeOptions.forceRemove) != 0); + m_packageManager.remove(pack); pack = null; } } FetchOptions fetchOpts; fetchOpts |= (options & UpgradeOptions.preRelease) != 0 ? FetchOptions.usePrerelease : FetchOptions.none; - fetchOpts |= (options & UpgradeOptions.forceRemove) != 0 ? 
FetchOptions.forceRemove : FetchOptions.none; if (!pack) fetch(p, ver, defaultPlacementLocation, fetchOpts, "getting selected version"); - if ((options & UpgradeOptions.select) && ver.path.empty && p != m_project.rootPackage.name) - m_project.selections.selectVersion(p, ver.version_); + if ((options & UpgradeOptions.select) && p != m_project.rootPackage.name) { + if (ver.path.empty) m_project.selections.selectVersion(p, ver.version_); + else { + Path relpath = ver.path; + if (relpath.absolute) relpath = relpath.relativeTo(m_project.rootPackage.path); + m_project.selections.selectVersion(p, relpath); + } + } } m_project.reinit(); - if (options & UpgradeOptions.select) + if ((options & UpgradeOptions.select) && !(options & UpgradeOptions.noSaveSelections)) m_project.saveSelections(); } - /// Generate project files for a specified IDE. - /// Any existing project files will be overridden. - void generateProject(string ide, GeneratorSettings settings) { + /** Generate project files for a specified generator. + + Any existing project files will be overridden. + */ + void generateProject(string ide, GeneratorSettings settings) + { auto generator = createProjectGenerator(ide, m_project); if (m_dryRun) return; // TODO: pass m_dryRun to the generator generator.generate(settings); } - /// Executes tests on the current project. Throws an exception, if - /// unittests failed. + /** Executes tests on the current project. + + Throws an exception, if unittests failed. 
+ */ void testProject(GeneratorSettings settings, string config, Path custom_main_file) { if (custom_main_file.length && !custom_main_file.absolute) custom_main_file = getWorkingDirectory() ~ custom_main_file; @@ -322,7 +515,7 @@ auto generator = createProjectGenerator("build", m_project); - auto test_config = format("__test__%s__", config); + auto test_config = format("%s-test-%s", m_project.rootPackage.name.replace(".", "-").replace(":", "-"), config); BuildSettings lbuildsettings = settings.buildSettings; m_project.addBuildSettings(lbuildsettings, settings.platform, config, null, true); @@ -331,9 +524,8 @@ return; } - if (lbuildsettings.targetType == TargetType.executable) { - if (config == "unittest") logInfo("Running custom 'unittest' configuration.", config); - else logInfo(`Configuration '%s' does not output a library. Falling back to "dub -b unittest -c %s".`, config, config); + if (lbuildsettings.targetType == TargetType.executable && config == "unittest") { + logInfo("Running custom 'unittest' configuration.", config); if (!custom_main_file.empty) logWarn("Ignoring custom main file."); settings.config = config; } else if (lbuildsettings.sourceFiles.empty) { @@ -341,12 +533,21 @@ if (!custom_main_file.empty) logWarn("Ignoring custom main file."); settings.config = m_project.getDefaultConfiguration(settings.platform); } else { + import std.algorithm : remove; + logInfo(`Generating test runner configuration '%s' for '%s' (%s).`, test_config, config, lbuildsettings.targetType); - BuildSettingsTemplate tcinfo = m_project.rootPackage.info.getConfiguration(config).buildSettings; + BuildSettingsTemplate tcinfo = m_project.rootPackage.recipe.getConfiguration(config).buildSettings; tcinfo.targetType = TargetType.executable; tcinfo.targetName = test_config; - tcinfo.versions[""] ~= "VibeCustomMain"; // HACK for vibe.d's legacy main() behavior + // HACK for vibe.d's legacy main() behavior: + tcinfo.versions[""] ~= "VibeCustomMain"; + 
m_project.rootPackage.recipe.buildSettings.versions[""] = m_project.rootPackage.recipe.buildSettings.versions.get("", null).remove!(v => v == "VibeDefaultMain"); + // TODO: remove this ^ once vibe.d has removed the default main implementation + + auto mainfil = tcinfo.mainSourceFile; + if (!mainfil.length) mainfil = m_project.rootPackage.recipe.buildSettings.mainSourceFile; + string custommodname; if (custom_main_file.length) { import std.path; @@ -355,10 +556,22 @@ custommodname = custom_main_file.head.toString().baseName(".d"); } + // prepare the list of tested modules string[] import_modules; foreach (file; lbuildsettings.sourceFiles) { - if (file.endsWith(".d") && Path(file).head.toString() != "package.d") - import_modules ~= lbuildsettings.determineModuleName(Path(file), m_project.rootPackage.path); + if (file.endsWith(".d")) { + auto fname = Path(file).head.toString(); + if (Path(file).relativeTo(m_project.rootPackage.path) == Path(mainfil)) { + logWarn("Excluding main source file %s from test.", mainfil); + tcinfo.excludedSourceFiles[""] ~= mainfil; + continue; + } + if (fname == "package.d") { + logWarn("Excluding package.d file from test due to https://issues.dlang.org/show_bug.cgi?id=11847"); + continue; + } + import_modules ~= dub.internal.utils.determineModuleName(lbuildsettings, Path(file), m_project.rootPackage.path); + } } // generate main file @@ -366,7 +579,7 @@ tcinfo.sourceFiles[""] ~= mainfile.toNativeString(); tcinfo.mainSourceFile = mainfile.toNativeString(); if (!m_dryRun) { - auto fil = openFile(mainfile, FileMode.CreateTrunc); + auto fil = openFile(mainfile, FileMode.createTrunc); scope(exit) fil.close(); fil.write("module dub_test_root;\n"); fil.write("import std.typetuple;\n"); @@ -398,7 +611,7 @@ }); } } - m_project.rootPackage.info.configurations ~= ConfigurationInfo(test_config, tcinfo); + m_project.rootPackage.recipe.configurations ~= ConfigurationInfo(test_config, tcinfo); m_project = new Project(m_packageManager, 
m_project.rootPackage); settings.config = test_config; @@ -407,19 +620,32 @@ generator.generate(settings); } - /// Outputs a JSON description of the project, including its dependencies. - void describeProject(BuildPlatform platform, string config) + /** Prints the specified build settings necessary for building the root package. + */ + void listProjectData(GeneratorSettings settings, string[] requestedData, ListBuildSettingsFormat list_type) { - auto dst = Json.emptyObject; - dst.configuration = config; - dst.compiler = platform.compiler; - dst.architecture = platform.architecture.serializeToJson(); - dst.platform = platform.platform.serializeToJson(); - - m_project.describe(dst, platform, config); - import std.stdio; - write(dst.toPrettyString()); + import std.ascii : newline; + + // Split comma-separated lists + string[] requestedDataSplit = + requestedData + .map!(a => a.splitter(",").map!strip) + .joiner() + .array(); + + auto data = m_project.listBuildSettings(settings, requestedDataSplit, list_type); + + string delimiter; + final switch (list_type) with (ListBuildSettingsFormat) { + case list: delimiter = newline ~ newline; break; + case listNul: delimiter = "\0\0"; break; + case commandLine: delimiter = " "; break; + case commandLineNul: delimiter = "\0\0"; break; + } + + write(data.joiner(delimiter)); + if (delimiter != "\0\0") writeln(); } /// Cleans intermediate/cache files of the given package @@ -434,10 +660,6 @@ if (existsFile(path ~ ".dub/obj")) rmdirRecurse((path ~ ".dub/obj").toNativeString()); } - - /// Returns all cached packages as a "packageId" = "version" associative array - string[string] cachedPackages() const { return m_project.cachedPackagesIDs; } - /// Fetches the package matching the dependency and places it in the specified location. 
Package fetch(string packageId, const Dependency dep, PlacementLocation location, FetchOptions options, string reason = "") { @@ -445,11 +667,13 @@ PackageSupplier supplier; foreach(ps; m_packageSuppliers){ try { - pinfo = ps.getPackageDescription(packageId, dep, (options & FetchOptions.usePrerelease) != 0); + pinfo = ps.fetchPackageRecipe(packageId, dep, (options & FetchOptions.usePrerelease) != 0); + if (pinfo.type == Json.Type.null_) + continue; supplier = ps; break; } catch(Exception e) { - logDiagnostic("Package %s not found for %s: %s", packageId, ps.description, e.msg); + logWarn("Package %s not found for %s: %s", packageId, ps.description, e.msg); logDebug("Full error: %s", e.toString().sanitize()); } } @@ -459,8 +683,8 @@ Path placement; final switch (location) { case PlacementLocation.local: placement = m_rootPath; break; - case PlacementLocation.user: placement = m_userDubPath ~ "packages/"; break; - case PlacementLocation.system: placement = m_systemDubPath ~ "packages/"; break; + case PlacementLocation.user: placement = m_dirs.userSettings ~ "packages/"; break; + case PlacementLocation.system: placement = m_dirs.systemSettings ~ "packages/"; break; } // always upgrade branch based versions - TODO: actually check if there is a new commit available @@ -472,9 +696,9 @@ } if (options & FetchOptions.printOnly) { - if (existing && existing.vers != ver) + if (existing && existing.version_ != Version(ver)) logInfo("A new version for %s is available (%s -> %s). 
Run \"dub upgrade %s\" to switch.", - packageId, existing.vers, ver, packageId); + packageId, existing.version_, ver, packageId); return null; } @@ -486,7 +710,7 @@ return existing; } else { logInfo("Removing %s %s to prepare replacement with a new version.", packageId, ver); - if (!m_dryRun) m_packageManager.remove(existing, (options & FetchOptions.forceRemove) != 0); + if (!m_dryRun) m_packageManager.remove(existing); } } @@ -494,198 +718,425 @@ else logInfo("Fetching %s %s...", packageId, ver); if (m_dryRun) return null; - logDiagnostic("Acquiring package zip file"); - auto dload = m_projectPath ~ ".dub/temp/downloads"; - auto tempfname = packageId ~ "-" ~ (ver.startsWith('~') ? ver[1 .. $] : ver) ~ ".zip"; - auto tempFile = m_tempPath ~ tempfname; - string sTempFile = tempFile.toNativeString(); - if (exists(sTempFile)) std.file.remove(sTempFile); - supplier.retrievePackage(tempFile, packageId, dep, (options & FetchOptions.usePrerelease) != 0); // Q: continue on fail? - scope(exit) std.file.remove(sTempFile); + logDebug("Acquiring package zip file"); - logInfo("Placing %s %s to %s...", packageId, ver, placement.toNativeString()); auto clean_package_version = ver[ver.startsWith("~") ? 1 : 0 .. $]; clean_package_version = clean_package_version.replace("+", "_"); // + has special meaning for Optlink + if (!placement.existsFile()) + mkdirRecurse(placement.toNativeString()); Path dstpath = placement ~ (packageId ~ "-" ~ clean_package_version); + if (!dstpath.existsFile()) + mkdirRecurse(dstpath.toNativeString()); - return m_packageManager.storeFetchedPackage(tempFile, pinfo, dstpath); + // Support libraries typically used with git submodules like ae. + // Such libraries need to have ".." as import path but this can create + // import path leakage. 
+ dstpath = dstpath ~ packageId; + + auto lock = lockFile(dstpath.toNativeString() ~ ".lock", 30.seconds); // possibly wait for other dub instance + if (dstpath.existsFile()) + { + m_packageManager.refresh(false); + return m_packageManager.getPackage(packageId, ver, dstpath); + } + + auto path = getTempFile(packageId, ".zip"); + supplier.fetchPackage(path, packageId, dep, (options & FetchOptions.usePrerelease) != 0); // Q: continue on fail? + scope(exit) std.file.remove(path.toNativeString()); + + logDiagnostic("Placing to %s...", placement.toNativeString()); + return m_packageManager.storeFetchedPackage(path, pinfo, dstpath); } - /// Removes a given package from the list of present/cached modules. - /// @removeFromApplication: if true, this will also remove an entry in the - /// list of dependencies in the application's dub.json - void remove(in Package pack, bool force_remove) + /** Removes a specific locally cached package. + + This will delete the package files from disk and removes the + corresponding entry from the list of known packages. + + Params: + pack = Package instance to remove + */ + void remove(in Package pack) { logInfo("Removing %s in %s", pack.name, pack.path.toNativeString()); - if (!m_dryRun) m_packageManager.remove(pack, force_remove); + if (!m_dryRun) m_packageManager.remove(pack); + } + + /// Compatibility overload. Use the version without a `force_remove` argument instead. + void remove(in Package pack, bool force_remove) + { + remove(pack); } /// @see remove(string, string, RemoveLocation) enum RemoveVersionWildcard = "*"; - /// This will remove a given package with a specified version from the - /// location. - /// It will remove at most one package, unless @param version_ is - /// specified as wildcard "*". - /// @param package_id Package to be removed - /// @param version_ Identifying a version or a wild card. An empty string - /// may be passed into. 
In this case the package will be removed from the - /// location, if there is only one version retrieved. This will throw an - /// exception, if there are multiple versions retrieved. - /// Note: as wildcard string only RemoveVersionWildcard ("*") is supported. - /// @param location_ - void remove(string package_id, string version_, PlacementLocation location_, bool force_remove) + /** Removes one or more versions of a locally cached package. + + This will remove a given package with a specified version from the + given location. It will remove at most one package, unless `version_` + is set to `RemoveVersionWildcard`. + + Params: + package_id = Name of the package to be removed + location_ = Specifies the location to look for the given package + name/version. + resolve_version = Callback to select package version. + */ + void remove(string package_id, PlacementLocation location, + scope size_t delegate(in Package[] packages) resolve_version) { enforce(!package_id.empty); - if (location_ == PlacementLocation.local) { + if (location == PlacementLocation.local) { logInfo("To remove a locally placed package, make sure you don't have any data" ~ "\nleft in it's directory and then simply remove the whole directory."); throw new Exception("dub cannot remove locally installed packages."); } Package[] packages; - const bool wildcardOrEmpty = version_ == RemoveVersionWildcard || version_.empty; // Retrieve packages to be removed. foreach(pack; m_packageManager.getPackageIterator(package_id)) - if( wildcardOrEmpty || pack.vers == version_ ) + if (m_packageManager.isManagedPackage(pack)) packages ~= pack; // Check validity of packages to be removed. if(packages.empty) { throw new Exception("Cannot find package to remove. 
(" - ~ "id: '" ~ package_id ~ "', version: '" ~ version_ ~ "', location: '" ~ to!string(location_) ~ "'" + ~ "id: '" ~ package_id ~ "', location: '" ~ to!string(location) ~ "'" ~ ")"); } - if(version_.empty && packages.length > 1) { - logError("Cannot remove package '" ~ package_id ~ "', there are multiple possibilities at location\n" - ~ "'" ~ to!string(location_) ~ "'."); - logError("Available versions:"); - foreach(pack; packages) - logError(" %s", pack.vers); - throw new Exception("Please specify a individual version using --version=... or use the" - ~ " wildcard --version=" ~ RemoveVersionWildcard ~ " to remove all versions."); - } + + immutable idx = resolve_version(packages); + if (idx == size_t.max) + return; + else if (idx != packages.length) + packages = packages[idx .. idx + 1]; logDebug("Removing %s packages.", packages.length); foreach(pack; packages) { try { - remove(pack, force_remove); - logInfo("Removed %s, version %s.", package_id, pack.vers); + remove(pack); + logInfo("Removed %s, version %s.", package_id, pack.version_); } catch (Exception e) { - logError("Failed to remove %s %s: %s", package_id, pack.vers, e.msg); + logError("Failed to remove %s %s: %s", package_id, pack.version_, e.msg); logInfo("Continuing with other packages (if any)."); } } } + /// Compatibility overload. Use the version without a `force_remove` argument instead. + void remove(string package_id, PlacementLocation location, bool force_remove, + scope size_t delegate(in Package[] packages) resolve_version) + { + remove(package_id, location, resolve_version); + } + + /** Removes a specific version of a package. + + Params: + package_id = Name of the package to be removed + version_ = Identifying a version or a wild card. If an empty string + is passed, the package will be removed from the location, if + there is only one version retrieved. This will throw an + exception, if there are multiple versions retrieved. 
+ location_ = Specifies the location to look for the given package + name/version. + */ + void remove(string package_id, string version_, PlacementLocation location) + { + remove(package_id, location, (in packages) { + if (version_ == RemoveVersionWildcard) + return packages.length; + if (version_.empty && packages.length > 1) { + logError("Cannot remove package '" ~ package_id ~ "', there are multiple possibilities at location\n" + ~ "'" ~ to!string(location) ~ "'."); + logError("Available versions:"); + foreach(pack; packages) + logError(" %s", pack.version_); + throw new Exception("Please specify a individual version using --version=... or use the" + ~ " wildcard --version=" ~ RemoveVersionWildcard ~ " to remove all versions."); + } + foreach (i, p; packages) { + if (p.version_ == Version(version_)) + return i; + } + throw new Exception("Cannot find package to remove. (" + ~ "id: '" ~ package_id ~ "', version: '" ~ version_ ~ "', location: '" ~ to!string(location) ~ "'" + ~ ")"); + }); + } + + /// Compatibility overload. Use the version without a `force_remove` argument instead. + void remove(string package_id, string version_, PlacementLocation location, bool force_remove) + { + remove(package_id, version_, location); + } + + /** Adds a directory to the list of locally known packages. + + Forwards to `PackageManager.addLocalPackage`. + + Params: + path = Path to the package + ver = Optional version to associate with the package (can be left + empty) + system = Make the package known system wide instead of user wide + (requires administrator privileges). + + See_Also: `removeLocalPackage` + */ void addLocalPackage(string path, string ver, bool system) { if (m_dryRun) return; m_packageManager.addLocalPackage(makeAbsolute(path), ver, system ? LocalPackageType.system : LocalPackageType.user); } + /** Removes a directory from the list of locally known packages. + + Forwards to `PackageManager.removeLocalPackage`. 
+ + Params: + path = Path to the package + system = Make the package known system wide instead of user wide + (requires administrator privileges). + + See_Also: `addLocalPackage` + */ void removeLocalPackage(string path, bool system) { if (m_dryRun) return; m_packageManager.removeLocalPackage(makeAbsolute(path), system ? LocalPackageType.system : LocalPackageType.user); } + /** Registers a local directory to search for packages to use for satisfying + dependencies. + + Params: + path = Path to a directory containing package directories + system = Make the package known system wide instead of user wide + (requires administrator privileges). + + See_Also: `removeSearchPath` + */ void addSearchPath(string path, bool system) { if (m_dryRun) return; m_packageManager.addSearchPath(makeAbsolute(path), system ? LocalPackageType.system : LocalPackageType.user); } + /** Unregisters a local directory search path. + + Params: + path = Path to a directory containing package directories + system = Make the package known system wide instead of user wide + (requires administrator privileges). + + See_Also: `addSearchPath` + */ void removeSearchPath(string path, bool system) { if (m_dryRun) return; m_packageManager.removeSearchPath(makeAbsolute(path), system ? LocalPackageType.system : LocalPackageType.user); } - void createEmptyPackage(Path path, string[] deps, string type) + /** Queries all package suppliers with the given query string. + + Returns a list of tuples, where the first entry is the human readable + name of the package supplier and the second entry is the list of + matched packages. 
+ + See_Also: `PackageSupplier.searchPackages` + */ + auto searchPackages(string query) + { + Tuple!(string, PackageSupplier.SearchResult[])[] results; + foreach (ps; this.m_packageSuppliers) { + try + results ~= tuple(ps.description, ps.searchPackages(query)); + catch (Exception e) { + logWarn("Searching %s for '%s' failed: %s", ps.description, query, e.msg); + } + } + return results.filter!(tup => tup[1].length); + } + + /** Returns a list of all available versions (including branches) for a + particular package. + + The list returned is based on the registered package suppliers. Local + packages are not queried in the search for versions. + + See_also: `getLatestVersion` + */ + Version[] listPackageVersions(string name) + { + Version[] versions; + foreach (ps; this.m_packageSuppliers) { + try versions ~= ps.getVersions(name); + catch (Exception e) { + logWarn("Failed to get versions for package %s on provider %s: %s", name, ps.description, e.msg); + } + } + return versions.sort().uniq.array; + } + + /** Returns the latest available version for a particular package. + + This function returns the latest numbered version of a package. If no + numbered versions are available, it will return an available branch, + preferring "~master". + + Params: + package_name: The name of the package in question. + prefer_stable: If set to `true` (the default), returns the latest + stable version, even if there are newer pre-release versions. 
+ + See_also: `listPackageVersions` + */ + Version getLatestVersion(string package_name, bool prefer_stable = true) + { + auto vers = listPackageVersions(package_name); + enforce(!vers.empty, "Failed to find any valid versions for a package name of '"~package_name~"'."); + auto final_versions = vers.filter!(v => !v.isBranch && !v.isPreRelease).array; + if (prefer_stable && final_versions.length) return final_versions[$-1]; + else if (vers[$-1].isBranch) return vers[$-1]; + else return vers[$-1]; + } + + /** Initializes a directory with a package skeleton. + + Params: + path = Path of the directory to create the new package in. The + directory will be created if it doesn't exist. + deps = List of dependencies to add to the package recipe. + type = Specifies the type of the application skeleton to use. + format = Determines the package recipe format to use. + recipe_callback = Optional callback that can be used to + customize the recipe before it gets written. + */ + void createEmptyPackage(Path path, string[] deps, string type, + PackageFormat format = PackageFormat.sdl, + scope void delegate(ref PackageRecipe, ref PackageFormat) recipe_callback = null) { if (!path.absolute) path = m_rootPath ~ path; path.normalize(); - if (m_dryRun) return; string[string] depVers; string[] notFound; // keep track of any failed packages in here - foreach(ps; this.m_packageSuppliers){ - foreach(dep; deps){ - try{ - auto versionStrings = ps.getVersions(dep); - depVers[dep] = versionStrings[$-1].toString; - } catch(Exception e){ - notFound ~= dep; - } + foreach (dep; deps) { + Version ver; + try { + ver = getLatestVersion(dep); + depVers[dep] = ver.isBranch ? 
ver.toString() : "~>" ~ ver.toString(); + } catch (Exception e) { + notFound ~= dep; } } + if(notFound.length > 1){ - throw new Exception(format("Couldn't find packages: %-(%s, %).", notFound)); + throw new Exception(.format("Couldn't find packages: %-(%s, %).", notFound)); } else if(notFound.length == 1){ - throw new Exception(format("Couldn't find package: %-(%s, %).", notFound)); + throw new Exception(.format("Couldn't find package: %-(%s, %).", notFound)); } - initPackage(path, depVers, type); + if (m_dryRun) return; + + initPackage(path, depVers, type, format, recipe_callback); //Act smug to the user. logInfo("Successfully created an empty project in '%s'.", path.toNativeString()); } - void runDdox(bool run) + /** Converts the package recipe of the loaded root package to the given format. + + Params: + destination_file_ext = The file extension matching the desired + format. Possible values are "json" or "sdl". + print_only = Print the converted recipe instead of writing to disk + */ + void convertRecipe(string destination_file_ext, bool print_only = false) + { + import std.path : extension; + import std.stdio : stdout; + import dub.recipe.io : serializePackageRecipe, writePackageRecipe; + + if (print_only) { + auto dst = stdout.lockingTextWriter; + serializePackageRecipe(dst, m_project.rootPackage.rawRecipe, "dub."~destination_file_ext); + return; + } + + auto srcfile = m_project.rootPackage.recipePath; + auto srcext = srcfile[$-1].toString().extension; + if (srcext == "."~destination_file_ext) { + logInfo("Package format is already %s.", destination_file_ext); + return; + } + + writePackageRecipe(srcfile[0 .. $-1] ~ ("dub."~destination_file_ext), m_project.rootPackage.rawRecipe); + removeFile(srcfile); + } + + /** Runs DDOX to generate or serve documentation. + + Params: + run = If set to true, serves documentation on a local web server. + Otherwise generates actual HTML files. 
+ generate_args = Additional command line arguments to pass to + "ddox generate-html" or "ddox serve-html". + */ + void runDdox(bool run, string[] generate_args = null) { if (m_dryRun) return; - auto ddox_pack = m_packageManager.getBestPackage("ddox", ">=0.0.0"); - if (!ddox_pack) ddox_pack = m_packageManager.getBestPackage("ddox", "~master"); - if (!ddox_pack) { - logInfo("DDOX is not present, getting it and storing user wide"); - ddox_pack = fetch("ddox", Dependency(">=0.0.0"), defaultPlacementLocation, FetchOptions.none); + // allow to choose a custom ddox tool + auto tool = m_project.rootPackage.recipe.ddoxTool; + if (tool.empty) tool = "ddox"; + + auto tool_pack = m_packageManager.getBestPackage(tool, ">=0.0.0"); + if (!tool_pack) tool_pack = m_packageManager.getBestPackage(tool, "~master"); + if (!tool_pack) { + logInfo("%s is not present, getting and storing it user wide", tool); + tool_pack = fetch(tool, Dependency(">=0.0.0"), defaultPlacementLocation, FetchOptions.none); } - version(Windows) auto ddox_exe = "ddox.exe"; - else auto ddox_exe = "ddox"; + auto ddox_dub = new Dub(null, m_packageSuppliers); + ddox_dub.loadPackage(tool_pack.path); + ddox_dub.upgrade(UpgradeOptions.select); - if( !existsFile(ddox_pack.path~ddox_exe) ){ - logInfo("DDOX in %s is not built, performing build now.", ddox_pack.path.toNativeString()); + auto compiler_binary = this.defaultCompiler; - auto ddox_dub = new Dub(m_packageSuppliers); - ddox_dub.loadPackage(ddox_pack.path); - ddox_dub.upgrade(UpgradeOptions.select); + GeneratorSettings settings; + settings.config = "application"; + settings.compiler = getCompiler(compiler_binary); // TODO: not using --compiler ??? 
+ settings.platform = settings.compiler.determinePlatform(settings.buildSettings, compiler_binary); + settings.buildType = "debug"; + settings.run = true; - auto compiler_binary = "dmd"; - - GeneratorSettings settings; - settings.config = "application"; - settings.compiler = getCompiler(compiler_binary); - settings.platform = settings.compiler.determinePlatform(settings.buildSettings, compiler_binary); - settings.buildType = "debug"; - ddox_dub.generateProject("build", settings); - - //runCommands(["cd "~ddox_pack.path.toNativeString()~" && dub build -v"]); - } - - auto p = ddox_pack.path; - p.endsWithSlash = true; - auto dub_path = p.toNativeString(); - - string[] commands; - string[] filterargs = m_project.rootPackage.info.ddoxFilterArgs.dup; + auto filterargs = m_project.rootPackage.recipe.ddoxFilterArgs.dup; if (filterargs.empty) filterargs = ["--min-protection=Protected", "--only-documented"]; - commands ~= dub_path~"ddox filter "~filterargs.join(" ")~" docs.json"; - if (!run) { - commands ~= dub_path~"ddox generate-html --navigation-type=ModuleTree docs.json docs"; - version(Windows) commands ~= "xcopy /S /D "~dub_path~"public\\* docs\\"; - else commands ~= "rsync -ru '"~dub_path~"public/' docs/"; - } - runCommands(commands); + + settings.runArgs = "filter" ~ filterargs ~ "docs.json"; + ddox_dub.generateProject("build", settings); + + auto p = tool_pack.path; + p.endsWithSlash = true; + auto tool_path = p.toNativeString(); if (run) { - auto proc = spawnProcess([dub_path~"ddox", "serve-html", "--navigation-type=ModuleTree", "docs.json", "--web-file-dir="~dub_path~"public"]); + settings.runArgs = ["serve-html", "--navigation-type=ModuleTree", "docs.json", "--web-file-dir="~tool_path~"public"] ~ generate_args; browse("http://127.0.0.1:8080/"); - wait(proc); + } else { + settings.runArgs = ["generate-html", "--navigation-type=ModuleTree", "docs.json", "docs"] ~ generate_args; + } + ddox_dub.generateProject("build", settings); + + if (!run) { + // TODO: ddox 
should copy those files itself + version(Windows) runCommand("xcopy /S /D "~tool_path~"public\\* docs\\"); + else runCommand("rsync -ru '"~tool_path~"public/' docs/"); } } @@ -706,122 +1157,59 @@ } } + private void determineDefaultCompiler() + { + import std.process : environment; + + m_defaultCompiler = m_config.defaultCompiler; + if (m_defaultCompiler.length) return; + + version (Windows) enum sep = ";", exe = ".exe"; + version (Posix) enum sep = ":", exe = ""; + + auto compilers = ["dmd", "gdc", "gdmd", "ldc2", "ldmd2"]; + + auto paths = environment.get("PATH", "").splitter(sep).map!Path; + auto res = compilers.find!(bin => paths.canFind!(p => existsFile(p ~ (bin~exe)))); + m_defaultCompiler = res.empty ? compilers[0] : res.front; + } + private Path makeAbsolute(Path p) const { return p.absolute ? p : m_rootPath ~ p; } private Path makeAbsolute(string p) const { return makeAbsolute(Path(p)); } } -string determineModuleName(BuildSettings settings, Path file, Path base_path) + +/// Option flags for `Dub.fetch` +enum FetchOptions { - assert(base_path.absolute); - if (!file.absolute) file = base_path ~ file; - - size_t path_skip = 0; - foreach (ipath; settings.importPaths.map!(p => Path(p))) { - if (!ipath.absolute) ipath = base_path ~ ipath; - assert(!ipath.empty); - if (file.startsWith(ipath) && ipath.length > path_skip) - path_skip = ipath.length; - } - - enforce(path_skip > 0, - format("Source file '%s' not found in any import path.", file.toNativeString())); - - auto mpath = file[path_skip .. file.length]; - auto ret = appender!string; - - //search for module keyword in file - string moduleName = getModuleNameFromFile(file.to!string); - - if(moduleName.length) return moduleName; - - //create module name from path - foreach (i; 0 .. 
mpath.length) { - import std.path; - auto p = mpath[i].toString(); - if (p == "package.d") break; - if (i > 0) ret ~= "."; - if (i+1 < mpath.length) ret ~= p; - else ret ~= p.baseName(".d"); - } - - return ret.data; + none = 0, + forceBranchUpgrade = 1<<0, + usePrerelease = 1<<1, + forceRemove = 1<<2, /// Deprecated, does nothing. + printOnly = 1<<3, } -/** - * Search for module keyword in D Code - */ -string getModuleNameFromContent(string content) { - import std.regex; - import std.string; - - content = content.strip; - if (!content.length) return null; - - static bool regex_initialized = false; - static Regex!char comments_pattern, module_pattern; - - if (!regex_initialized) { - comments_pattern = regex(`(/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/)|(//.*)`, "g"); - module_pattern = regex(`module\s+([\w\.]+)\s*;`, "g"); - regex_initialized = true; - } - - content = replaceAll(content, comments_pattern, ""); - auto result = matchFirst(content, module_pattern); - - string moduleName; - if(!result.empty) moduleName = result.front; - - if (moduleName.length >= 7) moduleName = moduleName[7..$-1]; - - return moduleName; -} - -unittest { - //test empty string - string name = getModuleNameFromContent(""); - assert(name == "", "can't get module name from empty string"); - - //test simple name - name = getModuleNameFromContent("module myPackage.myModule;"); - assert(name == "myPackage.myModule", "can't parse module name"); - - //test if it can ignore module inside comments - name = getModuleNameFromContent("/** - module fakePackage.fakeModule; - */ - module myPackage.myModule;"); - - assert(name == "myPackage.myModule", "can't parse module name"); - - name = getModuleNameFromContent("//module fakePackage.fakeModule; - module myPackage.myModule;"); - - assert(name == "myPackage.myModule", "can't parse module name"); -} - -/** - * Search for module keyword in file - */ -string getModuleNameFromFile(string filePath) { - string fileContent = filePath.readText; - - 
logDiagnostic("Get module name from path: " ~ filePath); - return getModuleNameFromContent(fileContent); -} - +/// Option flags for `Dub.upgrade` enum UpgradeOptions { none = 0, upgrade = 1<<1, /// Upgrade existing packages preRelease = 1<<2, /// inclde pre-release versions in upgrade - forceRemove = 1<<3, /// Force removing package folders, which contain unknown files + forceRemove = 1<<3, /// Deprecated, does nothing. select = 1<<4, /// Update the dub.selections.json file with the upgraded versions printUpgradesOnly = 1<<5, /// Instead of downloading new packages, just print a message to notify the user of their existence useCachedResult = 1<<6, /// Use cached information stored with the package to determine upgrades + noSaveSelections = 1<<7, /// Don't store updated selections on disk } -class DependencyVersionResolver : DependencyResolver!(Dependency, Dependency) { +/// Determines which of the default package suppliers are queried for packages. +enum SkipPackageSuppliers { + none, /// Uses all configured package suppliers. + standard, /// Does not use the default package suppliers (`defaultPackageSuppliers`). + all /// Uses only manually specified package suppliers. 
+} + +private class DependencyVersionResolver : DependencyResolver!(Dependency, Dependency) { protected { Dub m_dub; UpgradeOptions m_options; @@ -829,6 +1217,7 @@ Package[string] m_remotePackages; SelectedVersions m_selectedVersions; Package m_rootPackage; + bool[string] m_packagesToUpgrade; } @@ -838,11 +1227,21 @@ m_options = options; } + void addPackageToUpgrade(string name) + { + m_packagesToUpgrade[name] = true; + } + Dependency[string] resolve(Package root, SelectedVersions selected_versions) { m_rootPackage = root; m_selectedVersions = selected_versions; - return super.resolve(TreeNode(root.name, Dependency(root.ver)), (m_options & UpgradeOptions.printUpgradesOnly) == 0); + return super.resolve(TreeNode(root.name, Dependency(root.version_)), (m_options & UpgradeOptions.printUpgradesOnly) == 0); + } + + protected bool isFixedPackage(string pack) + { + return m_packagesToUpgrade !is null && pack !in m_packagesToUpgrade; } protected override Dependency[] getAllConfigs(string pack) @@ -850,7 +1249,7 @@ if (auto pvers = pack in m_packageVersions) return *pvers; - if (!(m_options & UpgradeOptions.upgrade) && m_selectedVersions.hasSelectedVersion(pack)) { + if ((!(m_options & UpgradeOptions.upgrade) || isFixedPackage(pack)) && m_selectedVersions.hasSelectedVersion(pack)) { auto ret = [m_selectedVersions.getSelectedVersion(pack)]; logDiagnostic("Using fixed selection %s %s", pack, ret[0]); m_packageVersions[pack] = ret; @@ -860,7 +1259,7 @@ logDiagnostic("Search for versions of %s (%s package suppliers)", pack, m_dub.m_packageSuppliers.length); Version[] versions; foreach (p; m_dub.packageManager.getPackageIterator(pack)) - versions ~= p.ver; + versions ~= p.version_; foreach (ps; m_dub.m_packageSuppliers) { try { @@ -874,7 +1273,7 @@ versions ~= vers; break; } catch (Exception e) { - logDebug("Package %s not found in %s: %s", pack, ps.description, e.msg); + logWarn("Package %s not found in %s: %s", pack, ps.description, e.msg); logDebug("Full error: %s", 
e.toString().sanitize); } } @@ -886,16 +1285,24 @@ if (!(m_options & UpgradeOptions.preRelease)) versions = versions.filter!(v => !v.isPreRelease).array ~ versions.filter!(v => v.isPreRelease).array; + // filter out invalid/unreachable dependency specs + versions = versions.filter!((v) { + bool valid = getPackage(pack, Dependency(v)) !is null; + if (!valid) logDiagnostic("Excluding invalid dependency specification %s %s from dependency resolution process.", pack, v); + return valid; + }).array; + if (!versions.length) logDiagnostic("Nothing found for %s", pack); + else logDiagnostic("Return for %s: %s", pack, versions); auto ret = versions.map!(v => Dependency(v)).array; m_packageVersions[pack] = ret; return ret; } - protected override Dependency[] getSpecificConfigs(TreeNodes nodes) + protected override Dependency[] getSpecificConfigs(string pack, TreeNodes nodes) { - if (!nodes.configs.path.empty) return [nodes.configs]; + if (!nodes.configs.path.empty && getPackage(pack, nodes.configs)) return [nodes.configs]; else return null; } @@ -911,28 +1318,46 @@ } auto basepack = pack.basePackage; - foreach (dname, dspec; pack.dependencies) { - auto dbasename = getBasePackageName(dname); + foreach (d; pack.getAllDependencies()) { + auto dbasename = getBasePackageName(d.name); // detect dependencies to the root package (or sub packages thereof) if (dbasename == basepack.name) { - auto absdeppath = dspec.mapToPath(pack.path).path; - auto subpack = m_dub.m_packageManager.getSubPackage(basepack, getSubPackageName(dname), true); + auto absdeppath = d.spec.mapToPath(pack.path).path; + absdeppath.endsWithSlash = true; + auto subpack = m_dub.m_packageManager.getSubPackage(basepack, getSubPackageName(d.name), true); if (subpack) { - auto desireddeppath = dname == dbasename ? basepack.path : subpack.path; - enforce(dspec.path.empty || absdeppath == desireddeppath, + auto desireddeppath = d.name == dbasename ? 
basepack.path : subpack.path; + desireddeppath.endsWithSlash = true; + enforce(d.spec.path.empty || absdeppath == desireddeppath, format("Dependency from %s to root package references wrong path: %s vs. %s", node.pack, absdeppath.toNativeString(), desireddeppath.toNativeString())); } - ret ~= TreeNodes(dname, node.config); + ret ~= TreeNodes(d.name, node.config); continue; } - if (dspec.optional && !m_dub.packageManager.getFirstPackage(dname)) - continue; - if (m_options & UpgradeOptions.upgrade || !m_selectedVersions || !m_selectedVersions.hasSelectedVersion(dbasename)) - ret ~= TreeNodes(dname, dspec.mapToPath(pack.path)); - else ret ~= TreeNodes(dname, m_selectedVersions.getSelectedVersion(dbasename)); + DependencyType dt; + if (d.spec.optional) { + if (d.spec.default_) dt = DependencyType.optionalDefault; + else dt = DependencyType.optional; + } else dt = DependencyType.required; + + Dependency dspec = d.spec.mapToPath(pack.path); + + // if not upgrading, use the selected version + if (!(m_options & UpgradeOptions.upgrade) && m_selectedVersions && m_selectedVersions.hasSelectedVersion(dbasename)) + dspec = m_selectedVersions.getSelectedVersion(dbasename); + + // keep selected optional dependencies and avoid non-selected optional-default dependencies by default + if (m_selectedVersions && !m_selectedVersions.bare) { + if (dt == DependencyType.optionalDefault && !m_selectedVersions.hasSelectedVersion(dbasename)) + dt = DependencyType.optional; + else if (dt == DependencyType.optional && m_selectedVersions.hasSelectedVersion(dbasename)) + dt = DependencyType.optionalDefault; + } + + ret ~= TreeNodes(d.name, dspec, dt); } return ret.data; } @@ -956,8 +1381,15 @@ return sp; } else if (!basepack.subPackages.canFind!(p => p.path.length)) { // note: external sub packages are handled further below - logDiagnostic("Sub package %s doesn't exist in %s %s.", name, basename, dep.version_); - return null; + auto spr = basepack.getInternalSubPackage(subname); + if 
(!spr.isNull) { + auto sp = new Package(spr, basepack.path, basepack); + m_remotePackages[sp.name] = sp; + return sp; + } else { + logDiagnostic("Sub package %s doesn't exist in %s %s.", name, basename, dep.version_); + return null; + } } else if (auto ret = m_dub.m_packageManager.getBestPackage(name, dep)) { return ret; } else { @@ -966,9 +1398,19 @@ } } + // shortcut if the referenced package is the root package + if (basename == m_rootPackage.basePackage.name) + return m_rootPackage.basePackage; + if (!dep.path.empty) { - auto ret = m_dub.packageManager.getOrLoadPackage(dep.path); - if (dep.matches(ret.ver)) return ret; + try { + auto ret = m_dub.packageManager.getOrLoadPackage(dep.path); + if (dep.matches(ret.version_)) return ret; + } catch (Exception e) { + logDiagnostic("Failed to load path based dependency %s: %s", name, e.msg); + logDebug("Full error: %s", e.toString().sanitize); + return null; + } } if (auto ret = m_dub.m_packageManager.getBestPackage(name, dep)) @@ -985,7 +1427,9 @@ foreach (ps; m_dub.m_packageSuppliers) { if (rootpack == name) { try { - auto desc = ps.getPackageDescription(name, dep, prerelease); + auto desc = ps.fetchPackageRecipe(name, dep, prerelease); + if (desc.type == Json.Type.null_) + continue; auto ret = new Package(desc); m_remotePackages[key] = ret; return ret; @@ -998,8 +1442,7 @@ try { FetchOptions fetchOpts; fetchOpts |= prerelease ? FetchOptions.usePrerelease : FetchOptions.none; - fetchOpts |= (m_options & UpgradeOptions.forceRemove) != 0 ? 
FetchOptions.forceRemove : FetchOptions.none; - m_dub.fetch(rootpack, dep, defaultPlacementLocation, fetchOpts, "need sub package description"); + m_dub.fetch(rootpack, dep, m_dub.defaultPlacementLocation, fetchOpts, "need sub package description"); auto ret = m_dub.m_packageManager.getBestPackage(name, dep); if (!ret) { logWarn("Package %s %s doesn't have a sub package %s", rootpack, dep.version_, name); @@ -1020,3 +1463,39 @@ return null; } } + +private struct SpecialDirs { + Path temp; + Path userSettings; + Path systemSettings; +} + +private class DubConfig { + private { + DubConfig m_parentConfig; + Json m_data; + } + + this(Json data, DubConfig parent_config) + { + m_data = data; + m_parentConfig = parent_config; + } + + @property string[] registryURLs() + { + string[] ret; + if (auto pv = "registryUrls" in m_data) + ret = (*pv).deserializeJson!(string[]); + if (m_parentConfig) ret ~= m_parentConfig.registryURLs; + return ret; + } + + @property string defaultCompiler() + const { + if (auto pv = "defaultCompiler" in m_data) + return pv.get!string; + if (m_parentConfig) return m_parentConfig.defaultCompiler; + return null; + } +} diff --git a/source/dub/generators/build.d b/source/dub/generators/build.d index 685b34c..86ca3e4 100644 --- a/source/dub/generators/build.d +++ b/source/dub/generators/build.d @@ -8,6 +8,7 @@ module dub.generators.build; import dub.compilers.compiler; +import dub.compilers.utils; import dub.generators.generator; import dub.internal.utils; import dub.internal.vibecompat.core.file; @@ -33,6 +34,7 @@ private { PackageManager m_packageMan; Path[] m_temporaryFiles; + Path m_targetExecutablePath; } this(Project project) @@ -45,6 +47,13 @@ { scope (exit) cleanupTemporaries(); + logInfo("Performing \"%s\" build using %s for %-(%s, %).", + settings.buildType, settings.platform.compilerBinary, settings.platform.architecture); + + bool any_cached = false; + + Path[string] target_paths; + bool[string] visited; void buildTargetRec(string target) { 
@@ -60,13 +69,16 @@ auto bs = ti.buildSettings.dup; foreach (ldep; ti.linkDependencies) { auto dbs = targets[ldep].buildSettings; - if (bs.targetType != TargetType.staticLibrary) { - bs.addSourceFiles((Path(dbs.targetPath) ~ getTargetFileName(dbs, settings.platform)).toNativeString()); + if (bs.targetType != TargetType.staticLibrary && !(bs.options & BuildOption.syntaxOnly)) { + bs.addSourceFiles(target_paths[ldep].toNativeString()); } else { - additional_dep_files ~= Path(dbs.targetPath) ~ getTargetFileName(dbs, settings.platform); + additional_dep_files ~= target_paths[ldep]; } } - buildTarget(settings, bs, ti.pack, ti.config, ti.packages, additional_dep_files); + Path tpath; + if (buildTarget(settings, bs, ti.pack, ti.config, ti.packages, additional_dep_files, tpath)) + any_cached = true; + target_paths[target] = tpath; } // build all targets @@ -74,89 +86,127 @@ if (settings.rdmd || root_ti.buildSettings.targetType == TargetType.staticLibrary) { // RDMD always builds everything at once and static libraries don't need their // dependencies to be built - buildTarget(settings, root_ti.buildSettings.dup, m_project.rootPackage, root_ti.config, root_ti.packages, null); - } else buildTargetRec(m_project.rootPackage.name); + Path tpath; + buildTarget(settings, root_ti.buildSettings.dup, m_project.rootPackage, root_ti.config, root_ti.packages, null, tpath); + } else { + buildTargetRec(m_project.rootPackage.name); + + if (any_cached) { + logInfo("To force a rebuild of up-to-date targets, run again with --force."); + } + } } override void performPostGenerateActions(GeneratorSettings settings, in TargetInfo[string] targets) { // run the generated executable - auto buildsettings = targets[m_project.rootPackage.name].buildSettings; - if (settings.run && !(buildsettings.options & BuildOptions.syntaxOnly)) { - auto exe_file_path = Path(buildsettings.targetPath) ~ getTargetFileName(buildsettings, settings.platform); + auto buildsettings = 
targets[m_project.rootPackage.name].buildSettings.dup; + if (settings.run && !(buildsettings.options & BuildOption.syntaxOnly)) { + Path exe_file_path; + if (!m_targetExecutablePath.length) + exe_file_path = getTargetPath(buildsettings, settings); + else + exe_file_path = m_targetExecutablePath ~ settings.compiler.getTargetFileName(buildsettings, settings.platform); runTarget(exe_file_path, buildsettings, settings.runArgs, settings); } } - private void buildTarget(GeneratorSettings settings, BuildSettings buildsettings, in Package pack, string config, in Package[] packages, in Path[] additional_dep_files) + private bool buildTarget(GeneratorSettings settings, BuildSettings buildsettings, in Package pack, string config, in Package[] packages, in Path[] additional_dep_files, out Path target_path) { auto cwd = Path(getcwd()); - bool generate_binary = !(buildsettings.options & BuildOptions.syntaxOnly); + bool generate_binary = !(buildsettings.options & BuildOption.syntaxOnly); auto build_id = computeBuildID(config, buildsettings, settings); // make all paths relative to shrink the command line - string makeRelative(string path) { auto p = Path(path); if (p.absolute) p = p.relativeTo(cwd); return p.toNativeString(); } + string makeRelative(string path) { + auto p = Path(path); + // storing in a separate temprary to work around #601 + auto prel = p.absolute ? 
p.relativeTo(cwd) : p; + return prel.toNativeString(); + } foreach (ref f; buildsettings.sourceFiles) f = makeRelative(f); foreach (ref p; buildsettings.importPaths) p = makeRelative(p); foreach (ref p; buildsettings.stringImportPaths) p = makeRelative(p); // perform the actual build bool cached = false; - if (settings.rdmd) performRDMDBuild(settings, buildsettings, pack, config); - else if (settings.direct || !generate_binary) performDirectBuild(settings, buildsettings, pack, config); - else cached = performCachedBuild(settings, buildsettings, pack, config, build_id, packages, additional_dep_files); + if (settings.rdmd) performRDMDBuild(settings, buildsettings, pack, config, target_path); + else if (settings.direct || !generate_binary) performDirectBuild(settings, buildsettings, pack, config, target_path); + else cached = performCachedBuild(settings, buildsettings, pack, config, build_id, packages, additional_dep_files, target_path); + + // HACK: cleanup dummy doc files, we shouldn't specialize on buildType + // here and the compiler shouldn't need dummy doc output. 
+ if (settings.buildType == "ddox") { + if ("__dummy.html".exists) + removeFile("__dummy.html"); + if ("__dummy_docs".exists) + rmdirRecurse("__dummy_docs"); + } // run post-build commands if (!cached && buildsettings.postBuildCommands.length) { logInfo("Running post-build commands..."); - runBuildCommands(buildsettings.postBuildCommands, buildsettings); + runBuildCommands(buildsettings.postBuildCommands, pack, m_project, settings, buildsettings); } + + return cached; } - bool performCachedBuild(GeneratorSettings settings, BuildSettings buildsettings, in Package pack, string config, string build_id, in Package[] packages, in Path[] additional_dep_files) + private bool performCachedBuild(GeneratorSettings settings, BuildSettings buildsettings, in Package pack, string config, + string build_id, in Package[] packages, in Path[] additional_dep_files, out Path target_binary_path) { auto cwd = Path(getcwd()); - auto target_path = pack.path ~ format(".dub/build/%s/", build_id); - if (!settings.force && isUpToDate(target_path, buildsettings, settings.platform, pack, packages, additional_dep_files)) { - logInfo("Target %s %s is up to date. Use --force to rebuild.", pack.name, pack.vers); + Path target_path; + if (settings.tempBuild) { + string packageName = pack.basePackage is null ? 
pack.name : pack.basePackage.name; + m_targetExecutablePath = target_path = getTempDir() ~ format(".dub/build/%s-%s/%s/", packageName, pack.version_, build_id); + } + else target_path = pack.path ~ format(".dub/build/%s/", build_id); + + if (!settings.force && isUpToDate(target_path, buildsettings, settings, pack, packages, additional_dep_files)) { + logInfo("%s %s: target for configuration \"%s\" is up to date.", pack.name, pack.version_, config); logDiagnostic("Using existing build in %s.", target_path.toNativeString()); - copyTargetFile(target_path, buildsettings, settings.platform); + target_binary_path = target_path ~ settings.compiler.getTargetFileName(buildsettings, settings.platform); + if (!settings.tempBuild) + copyTargetFile(target_path, buildsettings, settings); return true; } - if (settings.tempBuild || !isWritableDir(target_path, true)) { + if (!isWritableDir(target_path, true)) { if (!settings.tempBuild) logInfo("Build directory %s is not writable. Falling back to direct build in the system's temp folder.", target_path.relativeTo(cwd).toNativeString()); - performDirectBuild(settings, buildsettings, pack, config); + performDirectBuild(settings, buildsettings, pack, config, target_path); return false; } // determine basic build properties - auto generate_binary = !(buildsettings.options & BuildOptions.syntaxOnly); + auto generate_binary = !(buildsettings.options & BuildOption.syntaxOnly); - logInfo("Building %s %s configuration \"%s\", build type %s.", pack.name, pack.vers, config, settings.buildType); + logInfo("%s %s: building configuration \"%s\"...", pack.name, pack.version_, config); if( buildsettings.preBuildCommands.length ){ logInfo("Running pre-build commands..."); - runBuildCommands(buildsettings.preBuildCommands, buildsettings); + runBuildCommands(buildsettings.preBuildCommands, pack, m_project, settings, buildsettings); } // override target path auto cbuildsettings = buildsettings; cbuildsettings.targetPath = 
target_path.relativeTo(cwd).toNativeString(); buildWithCompiler(settings, cbuildsettings); + target_binary_path = getTargetPath(cbuildsettings, settings); - copyTargetFile(target_path, buildsettings, settings.platform); + if (!settings.tempBuild) + copyTargetFile(target_path, buildsettings, settings); return false; } - void performRDMDBuild(GeneratorSettings settings, ref BuildSettings buildsettings, in Package pack, string config) + private void performRDMDBuild(GeneratorSettings settings, ref BuildSettings buildsettings, in Package pack, string config, out Path target_path) { auto cwd = Path(getcwd()); - //Added check for existance of [AppNameInPackagejson].d + //Added check for existence of [AppNameInPackagejson].d //If exists, use that as the starting file. Path mainsrc; if (buildsettings.mainSourceFile.length) { @@ -176,7 +226,6 @@ // Create start script, which will be used by the calling bash/cmd script. // build "rdmd --force %DFLAGS% -I%~dp0..\source -Jviews -Isource @deps.txt %LIBS% source\app.d" ~ application arguments // or with "/" instead of "\" - Path exe_file_path; bool tmp_target = false; if (generate_binary) { if (settings.tempBuild || (settings.run && !isWritableDir(Path(buildsettings.targetPath), true))) { @@ -188,11 +237,11 @@ m_temporaryFiles ~= tmpdir; tmp_target = true; } - exe_file_path = Path(buildsettings.targetPath) ~ getTargetFileName(buildsettings, settings.platform); + target_path = getTargetPath(buildsettings, settings); settings.compiler.setTarget(buildsettings, settings.platform); } - logDiagnostic("Application output name is '%s'", getTargetFileName(buildsettings, settings.platform)); + logDiagnostic("Application output name is '%s'", settings.compiler.getTargetFileName(buildsettings, settings.platform)); string[] flags = ["--build-only", "--compiler="~settings.platform.compilerBinary]; if (settings.force) flags ~= "--force"; @@ -204,7 +253,7 @@ runCommands(buildsettings.preBuildCommands); } - logInfo("Building configuration 
"~config~", build type "~settings.buildType); + logInfo("%s %s: building configuration \"%s\"...", pack.name, pack.version_, config); logInfo("Running rdmd..."); logDiagnostic("rdmd %s", join(flags, " ")); @@ -213,17 +262,17 @@ enforce(result == 0, "Build command failed with exit code "~to!string(result)); if (tmp_target) { - m_temporaryFiles ~= exe_file_path; + m_temporaryFiles ~= target_path; foreach (f; buildsettings.copyFiles) m_temporaryFiles ~= Path(buildsettings.targetPath).parentPath ~ Path(f).head; } } - void performDirectBuild(GeneratorSettings settings, ref BuildSettings buildsettings, in Package pack, string config) + private void performDirectBuild(GeneratorSettings settings, ref BuildSettings buildsettings, in Package pack, string config, out Path target_path) { auto cwd = Path(getcwd()); - auto generate_binary = !(buildsettings.options & BuildOptions.syntaxOnly); + auto generate_binary = !(buildsettings.options & BuildOption.syntaxOnly); auto is_static_library = buildsettings.targetType == TargetType.staticLibrary || buildsettings.targetType == TargetType.library; // make file paths relative to shrink the command line @@ -233,15 +282,19 @@ f = fp.toNativeString(); } - logInfo("Building configuration \""~config~"\", build type "~settings.buildType); + logInfo("%s %s: building configuration \"%s\"...", pack.name, pack.version_, config); // make all target/import paths relative - string makeRelative(string path) { auto p = Path(path); if (p.absolute) p = p.relativeTo(cwd); return p.toNativeString(); } + string makeRelative(string path) { + auto p = Path(path); + // storing in a separate temprary to work around #601 + auto prel = p.absolute ? 
p.relativeTo(cwd) : p; + return prel.toNativeString(); + } buildsettings.targetPath = makeRelative(buildsettings.targetPath); foreach (ref p; buildsettings.importPaths) p = makeRelative(p); foreach (ref p; buildsettings.stringImportPaths) p = makeRelative(p); - Path exe_file_path; bool is_temp_target = false; if (generate_binary) { if (settings.tempBuild || (settings.run && !isWritableDir(Path(buildsettings.targetPath), true))) { @@ -252,18 +305,18 @@ m_temporaryFiles ~= tmppath; is_temp_target = true; } - exe_file_path = Path(buildsettings.targetPath) ~ getTargetFileName(buildsettings, settings.platform); + target_path = getTargetPath(buildsettings, settings); } if( buildsettings.preBuildCommands.length ){ logInfo("Running pre-build commands..."); - runBuildCommands(buildsettings.preBuildCommands, buildsettings); + runBuildCommands(buildsettings.preBuildCommands, pack, m_project, settings, buildsettings); } buildWithCompiler(settings, buildsettings); if (is_temp_target) { - m_temporaryFiles ~= exe_file_path; + m_temporaryFiles ~= target_path; foreach (f; buildsettings.copyFiles) m_temporaryFiles ~= Path(buildsettings.targetPath).parentPath ~ Path(f).head; } @@ -299,9 +352,9 @@ settings.platform.compiler, settings.platform.frontendVersion, hashstr); } - private void copyTargetFile(Path build_path, BuildSettings buildsettings, BuildPlatform platform) + private void copyTargetFile(Path build_path, BuildSettings buildsettings, GeneratorSettings settings) { - auto filename = getTargetFileName(buildsettings, platform); + auto filename = settings.compiler.getTargetFileName(buildsettings, settings.platform); auto src = build_path ~ filename; logDiagnostic("Copying target from %s to %s", src.toNativeString(), buildsettings.targetPath); if (!existsFile(Path(buildsettings.targetPath))) @@ -309,11 +362,11 @@ hardLinkFile(src, Path(buildsettings.targetPath) ~ filename, true); } - private bool isUpToDate(Path target_path, BuildSettings buildsettings, BuildPlatform platform, in 
Package main_pack, in Package[] packages, in Path[] additional_dep_files) + private bool isUpToDate(Path target_path, BuildSettings buildsettings, GeneratorSettings settings, in Package main_pack, in Package[] packages, in Path[] additional_dep_files) { import std.datetime; - auto targetfile = target_path ~ getTargetFileName(buildsettings, platform); + auto targetfile = target_path ~ settings.compiler.getTargetFileName(buildsettings, settings.platform); if (!existsFile(targetfile)) { logDiagnostic("Target '%s' doesn't exist, need rebuild.", targetfile.toNativeString()); return false; @@ -326,14 +379,14 @@ allfiles ~= buildsettings.stringImportFiles; // TODO: add library files foreach (p; packages) - allfiles ~= (p.packageInfoFilename != Path.init ? p : p.basePackage).packageInfoFilename.toNativeString(); + allfiles ~= (p.recipePath != Path.init ? p : p.basePackage).recipePath.toNativeString(); foreach (f; additional_dep_files) allfiles ~= f.toNativeString(); - if (main_pack is m_project.rootPackage) + if (main_pack is m_project.rootPackage && m_project.rootPackage.getAllDependencies().length > 0) allfiles ~= (main_pack.path ~ SelectedVersions.defaultFile).toNativeString(); foreach (file; allfiles.data) { if (!existsFile(file)) { - logDiagnostic("File %s doesn't exists, triggering rebuild.", file); + logDiagnostic("File %s doesn't exist, triggering rebuild.", file); return false; } auto ftime = getFileInfo(file).timeModified; @@ -353,7 +406,8 @@ static string pathToObjName(string path) { - return std.path.stripDrive(std.path.buildNormalizedPath(getcwd(), path~objSuffix))[1..$].replace(std.path.dirSeparator, "."); + import std.path : buildNormalizedPath, dirSeparator, stripDrive; + return stripDrive(buildNormalizedPath(getcwd(), path~objSuffix))[1..$].replace(dirSeparator, "."); } /// Compile a single source file (srcFile), and write the object to objName. 
@@ -370,15 +424,15 @@ return objPath; } - void buildWithCompiler(GeneratorSettings settings, BuildSettings buildsettings) + private void buildWithCompiler(GeneratorSettings settings, BuildSettings buildsettings) { - auto generate_binary = !(buildsettings.options & BuildOptions.syntaxOnly); + auto generate_binary = !(buildsettings.options & BuildOption.syntaxOnly); auto is_static_library = buildsettings.targetType == TargetType.staticLibrary || buildsettings.targetType == TargetType.library; Path target_file; scope (failure) { - logInfo("FAIL %s %s %s" , buildsettings.targetPath, buildsettings.targetName, buildsettings.targetType); - auto tpath = Path(buildsettings.targetPath) ~ getTargetFileName(buildsettings, settings.platform); + logDiagnostic("FAIL %s %s %s" , buildsettings.targetPath, buildsettings.targetName, buildsettings.targetType); + auto tpath = getTargetPath(buildsettings, settings); if (generate_binary && existsFile(tpath)) removeFile(tpath); } @@ -388,12 +442,18 @@ auto lbuildsettings = buildsettings; auto srcs = buildsettings.sourceFiles.filter!(f => !isLinkerFile(f)); auto objs = new string[](srcs.walkLength); - logInfo("Compiling using %s...", settings.platform.compilerBinary); - foreach (i, src; srcs.parallel(1)) { + + void compileSource(size_t i, string src) { logInfo("Compiling %s...", src); objs[i] = compileUnit(src, pathToObjName(src), buildsettings, settings); } + if (settings.parallelBuild) { + foreach (i, src; srcs.parallel(1)) compileSource(i, src); + } else { + foreach (i, src; srcs.array) compileSource(i, src); + } + logInfo("Linking..."); lbuildsettings.sourceFiles = is_static_library ? [] : lbuildsettings.sourceFiles.filter!(f=> f.isLinkerFile()).array; settings.compiler.setTarget(lbuildsettings, settings.platform); @@ -405,16 +465,15 @@ on the other compilers. 
Later this should be integrated somehow in the build process (either in the dub.json, or using a command line flag) */ - } else if (settings.buildMode == BuildMode.allAtOnce || settings.platform.compilerBinary != "dmd" || !generate_binary || is_static_library) { + } else if (generate_binary && (settings.buildMode == BuildMode.allAtOnce || settings.compiler.name != "dmd" || is_static_library)) { // setup for command line - if (generate_binary) settings.compiler.setTarget(buildsettings, settings.platform); + settings.compiler.setTarget(buildsettings, settings.platform); settings.compiler.prepareBuildSettings(buildsettings, BuildSetting.commandLine); // don't include symbols of dependencies (will be included by the top level target) if (is_static_library) buildsettings.sourceFiles = buildsettings.sourceFiles.filter!(f => !f.isLinkerFile()).array; // invoke the compiler - logInfo("Running %s...", settings.platform.compilerBinary); settings.compiler.invoke(buildsettings, settings.platform, settings.compileCallback); } else { // determine path for the temporary object file @@ -424,25 +483,27 @@ // setup linker command line auto lbuildsettings = buildsettings; lbuildsettings.sourceFiles = lbuildsettings.sourceFiles.filter!(f => isLinkerFile(f)).array; - settings.compiler.setTarget(lbuildsettings, settings.platform); + if (generate_binary) settings.compiler.setTarget(lbuildsettings, settings.platform); settings.compiler.prepareBuildSettings(lbuildsettings, BuildSetting.commandLineSeparate|BuildSetting.sourceFiles); // setup compiler command line buildsettings.libs = null; buildsettings.lflags = null; - buildsettings.addDFlags("-c", "-of"~tempobj.toNativeString()); + if (generate_binary) buildsettings.addDFlags("-c", "-of"~tempobj.toNativeString()); buildsettings.sourceFiles = buildsettings.sourceFiles.filter!(f => !isLinkerFile(f)).array; + settings.compiler.prepareBuildSettings(buildsettings, BuildSetting.commandLine); - logInfo("Compiling using %s...", 
settings.platform.compilerBinary); settings.compiler.invoke(buildsettings, settings.platform, settings.compileCallback); - logInfo("Linking..."); - settings.compiler.invokeLinker(lbuildsettings, settings.platform, [tempobj.toNativeString()], settings.linkCallback); + if (generate_binary) { + logInfo("Linking..."); + settings.compiler.invokeLinker(lbuildsettings, settings.platform, [tempobj.toNativeString()], settings.linkCallback); + } } } - void runTarget(Path exe_file_path, in BuildSettings buildsettings, string[] run_args, GeneratorSettings settings) + private void runTarget(Path exe_file_path, in BuildSettings buildsettings, string[] run_args, GeneratorSettings settings) { if (buildsettings.targetType == TargetType.executable) { auto cwd = Path(getcwd()); @@ -473,10 +534,11 @@ auto result = prg_pid.wait(); enforce(result == 0, "Program exited with code "~to!string(result)); } - } else logInfo("Target is a library. Skipping execution."); + } else + enforce(false, "Target is a library. 
Skipping execution."); } - void cleanupTemporaries() + private void cleanupTemporaries() { foreach_reverse (f; m_temporaryFiles) { try { @@ -499,19 +561,7 @@ return prj.path ~ "source/app.d"; } -unittest { - version (Windows) { - assert(isLinkerFile("test.obj")); - assert(isLinkerFile("test.lib")); - assert(isLinkerFile("test.res")); - assert(!isLinkerFile("test.o")); - assert(!isLinkerFile("test.d")); - } else { - assert(isLinkerFile("test.o")); - assert(isLinkerFile("test.a")); - assert(isLinkerFile("test.so")); - assert(isLinkerFile("test.dylib")); - assert(!isLinkerFile("test.obj")); - assert(!isLinkerFile("test.d")); - } +private Path getTargetPath(in ref BuildSettings bs, in ref GeneratorSettings settings) +{ + return Path(bs.targetPath) ~ settings.compiler.getTargetFileName(bs, settings.platform); } diff --git a/source/dub/generators/cmake.d b/source/dub/generators/cmake.d index eb51be7..269c136 100644 --- a/source/dub/generators/cmake.d +++ b/source/dub/generators/cmake.d @@ -26,7 +26,7 @@ { super(project); } - + override void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets) { auto script = appender!(char[]); @@ -34,45 +34,45 @@ bool[string] visited; Path projectRoot = m_project.rootPackage.path; Path cmakeListsPath = projectRoot ~ "CMakeLists.txt"; - + foreach(name, info; targets) { if(visited.get(name, false)) continue; - + visited[name] = true; name = name.sanitize; string targetType; string libType; bool addTarget = true; - + switch(info.buildSettings.targetType) with(TargetType) { case autodetect: throw new Exception("Don't know what to do about autodetect target type"); case executable: targetType = "executable"; - + break; case dynamicLibrary: libType = "SHARED"; - + goto case; case library: case staticLibrary: targetType = "library"; - + break; case sourceLibrary: addTarget = false; - + break; case none: continue; default: assert(false); } - + script.put("include(UseD)\n"); script.put( "add_d_conditions(VERSION %s DEBUG 
%s)\n".format( @@ -80,42 +80,42 @@ info.buildSettings.debugVersions.dup.join(" "), ) ); - + foreach(directory; info.buildSettings.importPaths) - script.put("include_directories(%s)\n".format(directory)); - + script.put("include_directories(%s)\n".format(directory.sanitizeSlashes)); + if(addTarget) { script.put("add_%s(%s %s\n".format(targetType, name, libType)); - + foreach(file; info.buildSettings.sourceFiles) - script.put(" %s\n".format(file)); - + script.put(" %s\n".format(file.sanitizeSlashes)); + script.put(")\n"); script.put( "target_link_libraries(%s %s %s)\n".format( name, - (info.dependencies ~ info.linkDependencies).dup.stdsort.uniq.map!sanitize.join(" "), + (info.dependencies ~ info.linkDependencies).dup.stdsort.uniq.map!(s => sanitize(s)).join(" "), info.buildSettings.libs.dup.join(" ") ) ); script.put( `set_target_properties(%s PROPERTIES TEXT_INCLUDE_DIRECTORIES "%s")`.format( name, - info.buildSettings.stringImportPaths.dup.join(";") + info.buildSettings.stringImportPaths.map!(s => sanitizeSlashes(s)).join(";") ) ~ "\n" ); } - + string filename = (projectRoot ~ "%s.cmake".format(name)).toNativeString; File file = File(filename, "w"); - + file.write(script.data); file.close; script.shrinkTo(0); scripts.put(filename); } - + if(!cmakeListsPath.existsFile) { logWarn("You must use a fork of CMake which has D support for these scripts to function properly."); @@ -123,12 +123,12 @@ logInfo("Generating default CMakeLists.txt"); script.put("cmake_minimum_required(VERSION 3.0)\n"); script.put("project(%s D)\n".format(m_project.rootPackage.name)); - + foreach(path; scripts.data) script.put("include(%s)\n".format(path)); - + File file = File(cmakeListsPath.toNativeString, "w"); - + file.write(script.data); file.close; } @@ -136,7 +136,15 @@ } ///Transform a package name into a valid CMake target name. 
-string sanitize(string name) +private string sanitize(string name) { return name.replace(":", "_"); } + +private string sanitizeSlashes(string path) +{ + version(Windows) + return path.replace("\\", "/"); + else + return path; +} diff --git a/source/dub/generators/generator.d b/source/dub/generators/generator.d index d552e80..7865d9e 100644 --- a/source/dub/generators/generator.d +++ b/source/dub/generators/generator.d @@ -1,7 +1,7 @@ /** Generator for project files - Copyright: © 2012-2013 Matthias Dondorff + Copyright: © 2012-2013 Matthias Dondorff, © 2013-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff */ @@ -85,6 +85,8 @@ */ final void generate(GeneratorSettings settings) { + import dub.compilers.utils : enforceBuildRequirements; + if (!settings.config.length) settings.config = m_project.getDefaultConfiguration(settings.platform); TargetInfo[string] targets; @@ -93,12 +95,14 @@ foreach (pack; m_project.getTopologicalPackageList(true, null, configs)) { BuildSettings buildsettings; buildsettings.processVars(m_project, pack, pack.getBuildSettings(settings.platform, configs[pack.name]), true); - prepareGeneration(pack.name, buildsettings); + prepareGeneration(pack, m_project, settings, buildsettings); } string[] mainfiles; collect(settings, m_project.rootPackage, targets, configs, mainfiles, null); downwardsInheritSettings(m_project.rootPackage.name, targets, targets[m_project.rootPackage.name].buildSettings); + addBuildTypeSettings(targets, settings); + foreach (ref t; targets.byValue) enforceBuildRequirements(t.buildSettings); auto bs = &targets[m_project.rootPackage.name].buildSettings; if (bs.targetType == TargetType.executable) bs.addSourceFiles(mainfiles); @@ -107,8 +111,8 @@ foreach (pack; m_project.getTopologicalPackageList(true, null, configs)) { BuildSettings buildsettings; buildsettings.processVars(m_project, pack, pack.getBuildSettings(settings.platform, 
configs[pack.name]), true); - bool generate_binary = !(buildsettings.options & BuildOptions.syntaxOnly); - finalizeGeneration(pack.name, buildsettings, pack.path, Path(bs.targetPath), generate_binary); + bool generate_binary = !(buildsettings.options & BuildOption.syntaxOnly); + finalizeGeneration(pack, m_project, settings, buildsettings, Path(bs.targetPath), generate_binary); } performPostGenerateActions(settings, targets); @@ -141,6 +145,9 @@ private BuildSettings collect(GeneratorSettings settings, Package pack, ref TargetInfo[string] targets, in string[string] configs, ref string[] main_files, string bin_pack) { + import std.algorithm : sort; + import dub.compilers.utils : isLinkerFile; + if (auto pt = pack.name in targets) return pt.buildSettings; // determine the actual target type @@ -163,18 +170,16 @@ shallowbs.targetType = tt; bool generates_binary = tt != TargetType.sourceLibrary && tt != TargetType.none; - - enforce (generates_binary || pack !is m_project.rootPackage, - format("Main package must have a binary target type, not %s. Cannot build.", tt)); + bool is_target = generates_binary || pack is m_project.rootPackage; if (tt == TargetType.none) { // ignore any build settings for targetType none (only dependencies will be processed) shallowbs = BuildSettings.init; + shallowbs.targetType = TargetType.none; } // start to build up the build settings BuildSettings buildsettings; - if (generates_binary) buildsettings = settings.buildSettings.dup; processVars(buildsettings, m_project, pack, shallowbs, true); // remove any mainSourceFile from library builds @@ -183,16 +188,21 @@ main_files ~= buildsettings.mainSourceFile; } + // set pic for dynamic library builds. 
+ if (buildsettings.targetType == TargetType.dynamicLibrary) + buildsettings.addOptions(BuildOption.pic); + logDiagnostic("Generate target %s (%s %s %s)", pack.name, buildsettings.targetType, buildsettings.targetPath, buildsettings.targetName); - if (generates_binary) + if (is_target) targets[pack.name] = TargetInfo(pack, [pack], configs[pack.name], buildsettings, null); - foreach (depname, depspec; pack.dependencies) { - if (!pack.hasDependency(depname, configs[pack.name])) continue; + auto deps = pack.getDependencies(configs[pack.name]); + foreach (depname; deps.keys.sort()) { + auto depspec = deps[depname]; auto dep = m_project.getDependency(depname, depspec.optional); if (!dep) continue; - auto depbs = collect(settings, dep, targets, configs, main_files, generates_binary ? pack.name : bin_pack); + auto depbs = collect(settings, dep, targets, configs, main_files, is_target ? pack.name : bin_pack); if (depbs.targetType != TargetType.sourceLibrary && depbs.targetType != TargetType.none) { // add a reference to the target binary and remove all source files in the dependency build settings @@ -205,7 +215,7 @@ if (depbs.targetType == TargetType.executable) continue; - auto pt = (generates_binary ? pack.name : bin_pack) in targets; + auto pt = (is_target ? 
pack.name : bin_pack) in targets; assert(pt !is null); if (auto pdt = depname in targets) { pt.dependencies ~= depname; @@ -215,29 +225,34 @@ } else pt.packages ~= dep; } - if (generates_binary) { - // add build type settings and convert plain DFLAGS to build options - m_project.addBuildTypeSettings(buildsettings, settings.platform, settings.buildType); - settings.compiler.extractBuildOptions(buildsettings); - enforceBuildRequirements(buildsettings); - targets[pack.name].buildSettings = buildsettings.dup; - } + if (is_target) targets[pack.name].buildSettings = buildsettings.dup; return buildsettings; } private string[] downwardsInheritSettings(string target, TargetInfo[string] targets, in BuildSettings root_settings) { + import dub.internal.utils : stripDlangSpecialChars; + auto ti = &targets[target]; ti.buildSettings.addVersions(root_settings.versions); ti.buildSettings.addDebugVersions(root_settings.debugVersions); - ti.buildSettings.addOptions(root_settings.options); + ti.buildSettings.addOptions(BuildOptions(cast(BuildOptions)root_settings.options & inheritedBuildOptions)); // special support for overriding string imports in parent packages // this is a candidate for deprecation, once an alternative approach // has been found - if (ti.buildSettings.stringImportPaths.length) + if (ti.buildSettings.stringImportPaths.length) { + // override string import files (used for up to date checking) + foreach (ref f; ti.buildSettings.stringImportFiles) + foreach (fi; root_settings.stringImportFiles) + if (f != fi && Path(f).head == Path(fi).head) { + f = fi; + } + + // add the string import paths (used by the compiler to find the overridden files) ti.buildSettings.prependStringImportPaths(root_settings.stringImportPaths); + } string[] packs = ti.packages.map!(p => p.name).array; foreach (d; ti.dependencies) @@ -253,6 +268,22 @@ return packs; } + + private void addBuildTypeSettings(TargetInfo[string] targets, GeneratorSettings settings) + { + foreach (ref t; targets) { + 
t.buildSettings.add(settings.buildSettings); + + // add build type settings and convert plain DFLAGS to build options + m_project.addBuildTypeSettings(t.buildSettings, settings.platform, settings.buildType, t.pack is m_project.rootPackage); + settings.compiler.extractBuildOptions(t.buildSettings); + + auto tt = t.buildSettings.targetType; + bool generates_binary = tt != TargetType.sourceLibrary && tt != TargetType.none; + enforce (generates_binary || t.pack !is m_project.rootPackage || (t.buildSettings.options & BuildOption.syntaxOnly), + format("Main package must have a binary target type, not %s. Cannot build.", tt)); + } + } } @@ -267,7 +298,7 @@ bool combined; // compile all in one go instead of each dependency separately // only used for generator "build" - bool run, force, direct, clean, rdmd, tempBuild; + bool run, force, direct, rdmd, tempBuild, parallelBuild; string[] runArgs; void delegate(int status, string output) compileCallback; void delegate(int status, string output) linkCallback; @@ -320,22 +351,26 @@ /** Runs pre-build commands and performs other required setup before project files are generated. */ -private void prepareGeneration(string pack, in BuildSettings buildsettings) +private void prepareGeneration(in Package pack, in Project proj, in GeneratorSettings settings, + in BuildSettings buildsettings) { - if( buildsettings.preGenerateCommands.length ){ - logInfo("Running pre-generate commands for %s...", pack); - runBuildCommands(buildsettings.preGenerateCommands, buildsettings); + if (buildsettings.preGenerateCommands.length && !isRecursiveInvocation(pack.name)) { + logInfo("Running pre-generate commands for %s...", pack.name); + runBuildCommands(buildsettings.preGenerateCommands, pack, proj, settings, buildsettings); } } /** Runs post-build commands and copies required files to the binary directory. 
*/ -private void finalizeGeneration(string pack, in BuildSettings buildsettings, Path pack_path, Path target_path, bool generate_binary) +private void finalizeGeneration(in Package pack, in Project proj, in GeneratorSettings settings, + in BuildSettings buildsettings, Path target_path, bool generate_binary) { - if (buildsettings.postGenerateCommands.length) { - logInfo("Running post-generate commands for %s...", pack); - runBuildCommands(buildsettings.postGenerateCommands, buildsettings); + import std.path : globMatch; + + if (buildsettings.postGenerateCommands.length && !isRecursiveInvocation(pack.name)) { + logInfo("Running post-generate commands for %s...", pack.name); + runBuildCommands(buildsettings.postGenerateCommands, pack, proj, settings, buildsettings); } if (generate_binary) { @@ -361,7 +396,7 @@ void tryCopyDir(string file) { auto src = Path(file); - if (!src.absolute) src = pack_path ~ src; + if (!src.absolute) src = pack.path ~ src; auto dst = target_path ~ Path(file).head; if (src == dst) { logDiagnostic("Skipping copy of %s (same source and destination)", file); @@ -376,7 +411,7 @@ void tryCopyFile(string file) { auto src = Path(file); - if (!src.absolute) src = pack_path ~ src; + if (!src.absolute) src = pack.path ~ src; auto dst = target_path ~ Path(file).head; if (src == dst) { logDiagnostic("Skipping copy of %s (same source and destination)", file); @@ -387,7 +422,7 @@ hardLinkFile(src, dst, true); } catch(Exception e) logWarn("Failed to copy %s to %s: %s", src.toNativeString(), dst.toNativeString(), e.msg); } - logInfo("Copying files for %s...", pack); + logInfo("Copying files for %s...", pack.name); string[] globs; foreach (f; buildsettings.copyFiles) { @@ -407,11 +442,11 @@ } if (globs.length) // Search all files for glob matches { - foreach (f; dirEntries(pack_path.toNativeString(), SpanMode.breadth)) + foreach (f; dirEntries(pack.path.toNativeString(), SpanMode.breadth)) { foreach (glob; globs) { - if (f.globMatch(glob)) + if 
(f.name().globMatch(glob)) { if (f.isDir) tryCopyDir(f); @@ -427,19 +462,87 @@ } } -void runBuildCommands(in string[] commands, in BuildSettings build_settings) + +/** Runs a list of build commands for a particular package. + + This function sets all DUB speficic environment variables and makes sure + that recursive dub invocations are detected and don't result in infinite + command execution loops. The latter could otherwise happen when a command + runs "dub describe" or similar functionality. +*/ +void runBuildCommands(in string[] commands, in Package pack, in Project proj, + in GeneratorSettings settings, in BuildSettings build_settings) { + import std.conv; import std.process; import dub.internal.utils; string[string] env = environment.toAA(); // TODO: do more elaborate things here // TODO: escape/quote individual items appropriately - env["DFLAGS"] = join(cast(string[])build_settings.dflags, " "); - env["LFLAGS"] = join(cast(string[])build_settings.lflags," "); - env["VERSIONS"] = join(cast(string[])build_settings.versions," "); - env["LIBS"] = join(cast(string[])build_settings.libs," "); - env["IMPORT_PATHS"] = join(cast(string[])build_settings.importPaths," "); - env["STRING_IMPORT_PATHS"] = join(cast(string[])build_settings.stringImportPaths," "); + env["DFLAGS"] = join(cast(string[])build_settings.dflags, " "); + env["LFLAGS"] = join(cast(string[])build_settings.lflags," "); + env["VERSIONS"] = join(cast(string[])build_settings.versions," "); + env["LIBS"] = join(cast(string[])build_settings.libs," "); + env["IMPORT_PATHS"] = join(cast(string[])build_settings.importPaths," "); + env["STRING_IMPORT_PATHS"] = join(cast(string[])build_settings.stringImportPaths," "); + + env["DC"] = settings.platform.compilerBinary; + env["DC_BASE"] = settings.platform.compiler; + env["D_FRONTEND_VER"] = to!string(settings.platform.frontendVersion); + + env["DUB_PLATFORM"] = join(cast(string[])settings.platform.platform," "); + env["DUB_ARCH"] = 
join(cast(string[])settings.platform.architecture," "); + + env["DUB_TARGET_TYPE"] = to!string(build_settings.targetType); + env["DUB_TARGET_PATH"] = build_settings.targetPath; + env["DUB_TARGET_NAME"] = build_settings.targetName; + env["DUB_WORKING_DIRECTORY"] = build_settings.workingDirectory; + env["DUB_MAIN_SOURCE_FILE"] = build_settings.mainSourceFile; + + env["DUB_CONFIG"] = settings.config; + env["DUB_BUILD_TYPE"] = settings.buildType; + env["DUB_BUILD_MODE"] = to!string(settings.buildMode); + env["DUB_PACKAGE"] = pack.name; + env["DUB_PACKAGE_DIR"] = pack.path.toNativeString(); + env["DUB_ROOT_PACKAGE"] = proj.rootPackage.name; + env["DUB_ROOT_PACKAGE_DIR"] = proj.rootPackage.path.toNativeString(); + + env["DUB_COMBINED"] = settings.combined? "TRUE" : ""; + env["DUB_RUN"] = settings.run? "TRUE" : ""; + env["DUB_FORCE"] = settings.force? "TRUE" : ""; + env["DUB_DIRECT"] = settings.direct? "TRUE" : ""; + env["DUB_RDMD"] = settings.rdmd? "TRUE" : ""; + env["DUB_TEMP_BUILD"] = settings.tempBuild? "TRUE" : ""; + env["DUB_PARALLEL_BUILD"] = settings.parallelBuild? 
"TRUE" : ""; + + env["DUB_RUN_ARGS"] = (cast(string[])settings.runArgs).map!(escapeShellFileName).join(" "); + + auto depNames = proj.dependencies.map!((a) => a.name).array(); + storeRecursiveInvokations(env, proj.rootPackage.name ~ depNames); runCommands(commands, env); } + +private bool isRecursiveInvocation(string pack) +{ + import std.algorithm : canFind, splitter; + import std.process : environment; + + return environment + .get("DUB_PACKAGES_USED", "") + .splitter(",") + .canFind(pack); +} + +private void storeRecursiveInvokations(string[string] env, string[] packs) +{ + import std.algorithm : canFind, splitter; + import std.range : chain; + import std.process : environment; + + env["DUB_PACKAGES_USED"] = environment + .get("DUB_PACKAGES_USED", "") + .splitter(",") + .chain(packs) + .join(","); +} diff --git a/source/dub/generators/sublimetext.d b/source/dub/generators/sublimetext.d index 1a7db6a..c22fcb1 100644 --- a/source/dub/generators/sublimetext.d +++ b/source/dub/generators/sublimetext.d @@ -34,60 +34,73 @@ { auto buildSettings = targets[m_project.name].buildSettings; logDebug("About to generate sublime project for %s.", m_project.rootPackage.name); - + auto root = Json([ - "folders": targets.byValue.map!targetFolderJson.array.Json, + "folders": targets.byValue.map!(f => targetFolderJson(f)).array.Json, "build_systems": buildSystems(settings.platform), + "settings": [ "include_paths": buildSettings.importPaths.map!Json.array.Json ].Json, ]); auto jsonString = appender!string(); writePrettyJsonString(jsonString, root); - write(m_project.name ~ ".sublime-project", jsonString.data); + string projectPath = m_project.name ~ ".sublime-project"; - logInfo("SublimeText project generated."); + write(projectPath, jsonString.data); + + logInfo("Project '%s' generated.", projectPath); } } -Json targetFolderJson(in ProjectGenerator.TargetInfo target) +private Json targetFolderJson(in ProjectGenerator.TargetInfo target) { return [ - "name": target.pack.name.Json, - 
"path": target.pack.path.toNativeString.Json, + "name": target.pack.basePackage.name.Json, + "path": target.pack.basePackage.path.toNativeString.Json, "follow_symlinks": true.Json, "folder_exclude_patterns": [".dub"].map!Json.array.Json, ].Json; } -Json buildSystems(BuildPlatform buildPlatform, string workingDiretory = getcwd()) +private Json buildSystems(BuildPlatform buildPlatform, string workingDiretory = getcwd()) { enum BUILD_TYPES = [ //"plain", "debug", "release", + "release-debug", + "release-nobounds", //"unittest", "docs", "ddox", "profile", + "profile-gc", "cov", "unittest-cov", ]; + string fileRegex; + + if (buildPlatform.frontendVersion >= 2066 && buildPlatform.compiler == "dmd") + fileRegex = r"^(.+)\(([0-9]+)\,([0-9]+)\)\:() (.*)$"; + else + fileRegex = r"^(.+)\(([0-9]+)\)\:() (.*)$"; + auto arch = buildPlatform.architecture[0]; Json makeBuildSystem(string buildType) { return Json([ "name": "DUB build " ~ buildType.Json, - "cmd": ["dub", "build", "--build=" ~ buildType, "--arch=" ~ arch].map!Json.array.Json, - "file_regex": r"^(.+)\(([0-9]+)\)\:() (.*)$".Json, + "cmd": ["dub", "build", "--build=" ~ buildType, "--arch=" ~ arch, "--compiler="~buildPlatform.compilerBinary].map!Json.array.Json, + "file_regex": fileRegex.Json, "working_dir": workingDiretory.Json, "variants": [ [ "name": "Run".Json, - "cmd": ["dub", "run", "--build=" ~ buildType, "--arch=" ~ arch].map!Json.array.Json, + "cmd": ["dub", "run", "--build=" ~ buildType, "--arch=" ~ arch, "--compiler="~buildPlatform.compilerBinary].map!Json.array.Json, ].Json ].array.Json, ]); @@ -97,7 +110,7 @@ buildSystems ~= [ "name": "DUB test".Json, - "cmd": ["dub", "test", "--arch=" ~ arch].map!Json.array.Json, + "cmd": ["dub", "test", "--arch=" ~ arch, "--compiler="~buildPlatform.compilerBinary].map!Json.array.Json, "file_regex": r"^(.+)\(([0-9]+)\)\:() (.*)$".Json, "working_dir": workingDiretory.Json, ].Json; diff --git a/source/dub/generators/targetdescription.d 
b/source/dub/generators/targetdescription.d new file mode 100644 index 0000000..d2aec6e --- /dev/null +++ b/source/dub/generators/targetdescription.d @@ -0,0 +1,67 @@ +/** + Pseudo generator to output build descriptions. + + Copyright: © 2015 rejectedsoftware e.K. + License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. + Authors: Sönke Ludwig +*/ +module dub.generators.targetdescription; + +import dub.compilers.buildsettings; +import dub.compilers.compiler; +import dub.description; +import dub.generators.generator; +import dub.internal.vibecompat.inet.path; +import dub.project; + +class TargetDescriptionGenerator : ProjectGenerator { + TargetDescription[] targetDescriptions; + size_t[string] targetDescriptionLookup; + + this(Project project) + { + super(project); + } + + protected override void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets) + { + import std.algorithm : map; + import std.array : array; + + auto configs = m_project.getPackageConfigs(settings.platform, settings.config); + targetDescriptions.length = targets.length; + size_t i = 0; + + bool[string] visited; + void visitTargetRec(string target) + { + if (target in visited) return; + visited[target] = true; + + auto ti = targets[target]; + + TargetDescription d; + d.rootPackage = ti.pack.name; + d.packages = ti.packages.map!(p => p.name).array; + d.rootConfiguration = ti.config; + d.buildSettings = ti.buildSettings.dup; + d.dependencies = ti.dependencies.dup; + d.linkDependencies = ti.linkDependencies.dup; + + // Add static library dependencies + foreach (ld; ti.linkDependencies) { + auto ltarget = targets[ld]; + auto ltbs = ltarget.buildSettings; + auto targetfil = (Path(ltbs.targetPath) ~ settings.compiler.getTargetFileName(ltbs, settings.platform)).toNativeString(); + d.buildSettings.addLinkerFiles(targetfil); + } + + targetDescriptionLookup[d.rootPackage] = i; + targetDescriptions[i++] = d; + + foreach (dep; ti.dependencies) + 
visitTargetRec(dep); + } + visitTargetRec(m_project.rootPackage.name); + } +} diff --git a/source/dub/generators/visuald.d b/source/dub/generators/visuald.d index 2608f15..e47da77 100644 --- a/source/dub/generators/visuald.d +++ b/source/dub/generators/visuald.d @@ -45,11 +45,9 @@ override void generateTargets(GeneratorSettings settings, in TargetInfo[string] targets) { - auto bs = targets[m_project.name].buildSettings; - logDebug("About to generate projects for %s, with %s direct dependencies.", m_project.rootPackage.name, m_project.rootPackage.dependencies.length); + logDebug("About to generate projects for %s, with %s direct dependencies.", m_project.rootPackage.name, m_project.rootPackage.getAllDependencies().length); generateProjectFiles(settings, targets); generateSolutionFile(settings, targets); - logInfo("VisualD project generated."); } private { @@ -122,10 +120,12 @@ // Writing solution file logDebug("About to write to .sln file with %s bytes", to!string(ret.data.length)); - auto sln = openFile(solutionFileName(), FileMode.CreateTrunc); + auto sln = openFile(solutionFileName(), FileMode.createTrunc); scope(exit) sln.close(); sln.put(ret.data); sln.flush(); + + logInfo("Solution '%s' generated.", solutionFileName()); } @@ -153,6 +153,8 @@ void generateProjectFile(string packname, GeneratorSettings settings, in TargetInfo[string] targets) { + import dub.compilers.utils : isLinkerFile; + int i = 0; auto ret = appender!(char[])(); @@ -188,8 +190,8 @@ } foreach (p; targets[packname].packages) - if (!p.packageInfoFilename.empty) - addFile(p.packageInfoFilename.toNativeString(), false); + if (!p.recipePath.empty) + addFile(p.recipePath.toNativeString(), false); if (files.targetType == TargetType.staticLibrary) foreach(s; files.sourceFiles.filter!(s => !isLinkerFile(s))) addFile(s, true); @@ -228,7 +230,7 @@ ret.put("\n \n"); logDebug("About to write to '%s.visualdproj' file %s bytes", getPackageFileName(packname), ret.data.length); - auto proj = 
openFile(projFileName(packname), FileMode.CreateTrunc); + auto proj = openFile(projFileName(packname), FileMode.createTrunc); scope(exit) proj.close(); proj.put(ret.data); proj.flush(); @@ -257,13 +259,11 @@ auto arch = architecture.vsArchitecture; ret.formattedWrite(" \n", to!string(type), arch); - // FIXME: handle compiler options in an abstract way instead of searching for DMD specific flags - // debug and optimize setting - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.debugInfo ? "1" : "0"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.optimize ? "1" : "0"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.inline ? "1" : "0"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.releaseMode ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.debugInfo ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.optimize ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.inline ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.releaseMode ? "1" : "0"); // Lib or exe? enum @@ -285,11 +285,10 @@ output_type = DynamicLib; output_ext = "dll"; } - string debugSuffix = type == "debug" ? "_d" : ""; - auto bin_path = pack == m_project.rootPackage.name ? Path(buildsettings.targetPath) : Path(".dub/lib/"); + auto bin_path = pack == m_project.rootPackage.name ? 
Path(buildsettings.targetPath) : Path("lib/"); bin_path.endsWithSlash = true; ret.formattedWrite(" %s\n", output_type); - ret.formattedWrite(" %s%s%s.%s\n", bin_path.toNativeString(), buildsettings.targetName, debugSuffix, output_ext); + ret.formattedWrite(" %s%s.%s\n", bin_path.toNativeString(), buildsettings.targetName, output_ext); // include paths and string imports string imports = join(getPathSettings!"importPaths"(), " "); @@ -311,7 +310,7 @@ if (output_type != StaticLib) ret.formattedWrite(" %s %s\n", linkLibs, addLinkFiles); // Unittests - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.unittests ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.unittests ? "1" : "0"); // compute directory for intermediate files (need dummy/ because of how -op determines the resulting path) size_t ndummy = 0; @@ -337,10 +336,11 @@ //case compileOnly: singlefilemode = 3; break; } ret.formattedWrite(" %s\n", singlefilemode); + ret.formattedWrite(" %s", buildsettings.dflags.canFind("-m32mscoff") ? "1" : "0"); ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.verbose ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.verbose ? "1" : "0"); ret.put(" 0\n"); ret.put(" 0\n"); ret.formattedWrite(" %s\n", arch == "x64" ? 1 : 0); @@ -356,22 +356,22 @@ ret.put(" 0\n"); ret.put(" 0\n"); ret.put(" 0\n"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.noBoundsCheck ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.noBoundsCheck ? "1" : "0"); ret.put(" 0\n"); ret.put(" 1\n"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.warningsAsErrors ? "1" : "0"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.warnings ? "1" : "0"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.property ? 
"1" : "0"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.alwaysStackFrame ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.warningsAsErrors ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.warnings ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.property ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.alwaysStackFrame ? "1" : "0"); ret.put(" 0\n"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.coverage ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.coverage ? "1" : "0"); ret.put(" 0\n"); ret.put(" 2\n"); - ret.formattedWrite(" %s\n", buildsettings.options & BuildOptions.ignoreUnknownPragmas ? "1" : "0"); + ret.formattedWrite(" %s\n", buildsettings.options & BuildOption.ignoreUnknownPragmas ? "1" : "0"); ret.formattedWrite(" %s\n", settings.compiler.name == "ldc" ? 2 : settings.compiler.name == "gdc" ? 
1 : 0); ret.formattedWrite(" 0\n"); ret.formattedWrite(" %s\n", bin_path.toNativeString()); - ret.formattedWrite(" .dub/obj/%s/%s\n", to!string(type), intersubdir); + ret.formattedWrite(" obj/%s/%s\n", to!string(type), intersubdir); ret.put(" \n"); ret.put(" \n"); ret.put(" 0\n"); @@ -406,6 +406,10 @@ ret.put(" \n"); ret.put(" \n"); ret.put(" \n"); + auto wdir = Path(buildsettings.workingDirectory); + if (!wdir.absolute) wdir = m_project.rootPackage.path ~ wdir; + ret.formattedWrite(" %s\n", + wdir.relativeTo(project_file_dir).toNativeString()); ret.put(" \n"); ret.put(" \n"); ret.put(" *.obj;*.cmd;*.build;*.dep\n"); @@ -438,14 +442,14 @@ } Path projFileName(string pack) const { - auto basepath = Path(".");//Path(".dub/"); + auto basepath = Path(".dub/"); version(DUBBING) return basepath ~ (getPackageFileName(pack) ~ ".dubbed.visualdproj"); else return basepath ~ (getPackageFileName(pack) ~ ".visualdproj"); } } // TODO: nice folders - struct SourceFile { + private struct SourceFile { Path structurePath; Path filePath; bool build; @@ -477,7 +481,7 @@ } } - auto sortedSources(SourceFile[] sources) { + private auto sortedSources(SourceFile[] sources) { return sort(sources); } @@ -521,7 +525,7 @@ { switch(architecture) { default: logWarn("Unsupported platform('%s'), defaulting to x86", architecture); goto case; - case "x86": return "Win32"; + case "x86", "x86_mscoff": return "Win32"; case "x86_64": return "x64"; } } diff --git a/source/dub/init.d b/source/dub/init.d index 40a5cd7..36d68bd 100644 --- a/source/dub/init.d +++ b/source/dub/init.d @@ -1,7 +1,7 @@ /** - Empty package initialization code. + Package skeleton initialization code. - Copyright: © 2013 rejectedsoftware e.K. + Copyright: © 2013-2016 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
Authors: Sönke Ludwig */ @@ -9,9 +9,10 @@ import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.core.log; -import dub.package_ : packageInfoFiles, defaultPackageFilename; +import dub.package_ : PackageFormat, packageInfoFiles, defaultPackageFilename; +import dub.recipe.packagerecipe; +import dub.dependency; -import std.datetime; import std.exception; import std.file; import std.format; @@ -19,15 +20,48 @@ import std.string; -void initPackage(Path root_path, string[string] deps, string type) +/** Initializes a new package in the given directory. + + The given `root_path` will be checked for any of the files that will be + created by this function. If any exist, an exception will be thrown before + altering the directory. + + Params: + root_path = Directory in which to create the new package. If the + directory doesn't exist, a new one will be created. + deps = A set of extra dependencies to add to the package recipe. The + associative array is expected to map from package name to package + version. + type = The type of package skeleton to create. Can currently be + "minimal", "vibe.d" or "deimos" + recipe_callback = Optional callback that can be used to customize the + package recipe and the file format used to store it prior to + writing it to disk. +*/ +void initPackage(Path root_path, string[string] deps, string type, + PackageFormat format, scope RecipeCallback recipe_callback = null) { + import std.conv : to; + import dub.recipe.io : writePackageRecipe; + void enforceDoesNotExist(string filename) { enforce(!existsFile(root_path ~ filename), "The target directory already contains a '"~filename~"' file. 
Aborting."); } + string username = getUserName(); + + PackageRecipe p; + p.name = root_path.head.toString().toLower(); + p.authors ~= username; + p.license = "proprietary"; + foreach (pack, v; deps) { + import std.ascii : isDigit; + p.buildSettings.dependencies[pack] = Dependency(v); + } + //Check to see if a target directory needs to be created - if( !root_path.empty ){ - if( !existsFile(root_path) ) + if (!root_path.empty) { + if (!existsFile(root_path)) createDirectory(root_path); } @@ -39,18 +73,30 @@ foreach (fil; files) enforceDoesNotExist(fil); + void processRecipe() + { + if (recipe_callback) + recipe_callback(p, format); + } + switch (type) { default: throw new Exception("Unknown package init type: "~type); - case "minimal": initMinimalPackage(root_path, deps); break; - case "vibe.d": initVibeDPackage(root_path, deps); break; - case "deimos": initDeimosPackage(root_path, deps); break; + case "minimal": initMinimalPackage(root_path, p, &processRecipe); break; + case "vibe.d": initVibeDPackage(root_path, p, &processRecipe); break; + case "deimos": initDeimosPackage(root_path, p, &processRecipe); break; } + + writePackageRecipe(root_path ~ ("dub."~format.to!string), p); writeGitignore(root_path); } -void initMinimalPackage(Path root_path, string[string] deps) +alias RecipeCallback = void delegate(ref PackageRecipe, ref PackageFormat); + +private void initMinimalPackage(Path root_path, ref PackageRecipe p, scope void delegate() pre_write_callback) { - writePackageJson(root_path, "A minimal D application.", deps); + p.description = "A minimal D application."; + pre_write_callback(); + createDirectory(root_path ~ "source"); write((root_path ~ "source/app.d").toNativeString(), q{import std.stdio; @@ -62,20 +108,20 @@ }); } -void initVibeDPackage(Path root_path, string[string] deps) +private void initVibeDPackage(Path root_path, ref PackageRecipe p, scope void delegate() pre_write_callback) { - if("vibe-d" !in deps) - deps["vibe-d"] = "~>0.7.19"; + if ("vibe-d" 
!in p.buildSettings.dependencies) + p.buildSettings.dependencies["vibe-d"] = Dependency("~>0.7.30"); + p.description = "A simple vibe.d server application."; + pre_write_callback(); - writePackageJson(root_path, "A simple vibe.d server application.", - deps, ["versions": `["VibeDefaultMain"]`]); createDirectory(root_path ~ "source"); createDirectory(root_path ~ "views"); createDirectory(root_path ~ "public"); write((root_path ~ "source/app.d").toNativeString(), -q{import vibe.d; +q{import vibe.vibe; -shared static this() +void main() { auto settings = new HTTPServerSettings; settings.port = 8080; @@ -83,6 +129,7 @@ listenHTTP(settings, &hello); logInfo("Please open http://127.0.0.1:8080/ in your browser."); + runApplication(); } void hello(HTTPServerRequest req, HTTPServerResponse res) @@ -92,41 +139,43 @@ }); } -void initDeimosPackage(Path root_path, string[string] deps) +private void initDeimosPackage(Path root_path, ref PackageRecipe p, scope void delegate() pre_write_callback) { + import dub.compilers.buildsettings : TargetType; + auto name = root_path.head.toString().toLower(); - writePackageJson(root_path, "Deimos Bindings for "~name~".", - deps, ["targetType": `"sourceLibrary"`, "importPaths": `["."]`]); + p.description = format("Deimos Bindings for "~p.name~"."); + p.buildSettings.importPaths[""] ~= "."; + p.buildSettings.targetType = TargetType.sourceLibrary; + pre_write_callback(); + createDirectory(root_path ~ "C"); createDirectory(root_path ~ "deimos"); } -void writePackageJson(Path root_path, string description, string[string] dependencies = null, string[string] addFields = null) -{ - import std.algorithm : map; - - assert(!root_path.empty); - - string username; - version (Windows) username = environment.get("USERNAME", "Peter Parker"); - else username = environment.get("USER", "Peter Parker"); - - auto fil = openFile(root_path ~ defaultPackageFilename, FileMode.Append); - scope(exit) fil.close(); - - fil.formattedWrite("{\n\t\"name\": \"%s\",\n", 
root_path.head.toString().toLower()); - fil.formattedWrite("\t\"description\": \"%s\",\n", description); - fil.formattedWrite("\t\"copyright\": \"Copyright © %s, %s\",\n", Clock.currTime().year, username); - fil.formattedWrite("\t\"authors\": [\"%s\"],\n", username); - fil.formattedWrite("\t\"dependencies\": {"); - fil.formattedWrite("%(\n\t\t%s: %s,%)", dependencies); - fil.formattedWrite("\n\t}"); - fil.formattedWrite("%-(,\n\t\"%s\": %s%)", addFields); - fil.write("\n}\n"); -} - -void writeGitignore(Path root_path) +private void writeGitignore(Path root_path) { write((root_path ~ ".gitignore").toNativeString(), - ".dub\ndocs.json\n__dummy.html\n*.o\n*.obj\n"); + ".dub\ndocs.json\n__dummy.html\n*.o\n*.obj\n__test__*__\n"); +} + +private string getUserName() +{ + version (Windows) + return environment.get("USERNAME", "Peter Parker"); + else version (Posix) + { + import core.sys.posix.pwd, core.sys.posix.unistd, core.stdc.string : strlen; + import std.algorithm : splitter; + + if (auto pw = getpwuid(getuid)) + { + auto uinfo = pw.pw_gecos[0 .. strlen(pw.pw_gecos)].splitter(','); + if (!uinfo.empty && uinfo.front.length) + return uinfo.front.idup; + } + return environment.get("USER", "Peter Parker"); + } + else + static assert(0); } diff --git a/source/dub/internal/libInputVisitor.d b/source/dub/internal/libInputVisitor.d new file mode 100644 index 0000000..d388b78 --- /dev/null +++ b/source/dub/internal/libInputVisitor.d @@ -0,0 +1,96 @@ +module dub.internal.libInputVisitor; + +version (Have_libInputVisitor) public import libInputVisitor; +else: + +/++ +Copyright (C) 2012 Nick Sabalausky + +This program is free software. It comes without any warranty, to +the extent permitted by applicable law. You can redistribute it +and/or modify it under the terms of the Do What The Fuck You Want +To Public License, Version 2, as published by Sam Hocevar. See +http://www.wtfpl.net/ for more details. 
+ + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + Version 2, December 2004 + +Copyright (C) 2004 Sam Hocevar + +Everyone is permitted to copy and distribute verbatim or modified +copies of this license document, and changing it is allowed as long +as the name is changed. + + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE +TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + +0. You just DO WHAT THE FUCK YOU WANT TO. ++/ + +/++ +Should work with DMD 2.059 and up + +For more info on this, see: +http://semitwist.com/articles/article/view/combine-coroutines-and-input-ranges-for-dead-simple-d-iteration ++/ + +import core.thread; + +class InputVisitor(Obj, Elem) : Fiber +{ + bool started = false; + Obj obj; + this(Obj obj) + { + this.obj = obj; + super(&run); + } + + private void run() + { + obj.visit(this); + } + + private void ensureStarted() + { + if(!started) + { + call(); + started = true; + } + } + + // Member 'front' must be a function due to DMD Issue #5403 + private Elem _front; + @property Elem front() + { + ensureStarted(); + return _front; + } + + void popFront() + { + ensureStarted(); + call(); + } + + @property bool empty() + { + ensureStarted(); + return state == Fiber.State.TERM; + } + + void yield(Elem elem) + { + _front = elem; + Fiber.yield(); + } +} + +template inputVisitor(Elem) +{ + @property InputVisitor!(Obj, Elem) inputVisitor(Obj)(Obj obj) + { + return new InputVisitor!(Obj, Elem)(obj); + } +} diff --git a/source/dub/internal/sdlang/ast.d b/source/dub/internal/sdlang/ast.d new file mode 100644 index 0000000..deda488 --- /dev/null +++ b/source/dub/internal/sdlang/ast.d @@ -0,0 +1,1837 @@ +// SDLang-D +// Written in the D programming language. 
+ +module dub.internal.sdlang.ast; + +version (Have_sdlang_d) public import sdlang.ast; +else: + +import std.algorithm; +import std.array; +import std.conv; +import std.range; +import std.string; + +version(sdlangUnittest) +version(unittest) +{ + import std.stdio; + import std.exception; +} + +import dub.internal.sdlang.exception; +import dub.internal.sdlang.token; +import dub.internal.sdlang.util; + +class Attribute +{ + Value value; + Location location; + + private Tag _parent; + /// Get parent tag. To set a parent, attach this Attribute to its intended + /// parent tag by calling 'Tag.add(...)', or by passing it to + /// the parent tag's constructor. + @property Tag parent() + { + return _parent; + } + + private string _namespace; + @property string namespace() + { + return _namespace; + } + /// Not particularly efficient, but it works. + @property void namespace(string value) + { + if(_parent && _namespace != value) + { + // Remove + auto saveParent = _parent; + if(_parent) + this.remove(); + + // Change namespace + _namespace = value; + + // Re-add + if(saveParent) + saveParent.add(this); + } + else + _namespace = value; + } + + private string _name; + /// Not including namespace. Use 'fullName' if you want the namespace included. + @property string name() + { + return _name; + } + /// Not the most efficient, but it works. 
+ @property void name(string value) + { + if(_parent && _name != value) + { + _parent.updateId++; + + void removeFromGroupedLookup(string ns) + { + // Remove from _parent._attributes[ns] + auto sameNameAttrs = _parent._attributes[ns][_name]; + auto targetIndex = sameNameAttrs.countUntil(this); + _parent._attributes[ns][_name].removeIndex(targetIndex); + } + + // Remove from _parent._tags + removeFromGroupedLookup(_namespace); + removeFromGroupedLookup("*"); + + // Change name + _name = value; + + // Add to new locations in _parent._attributes + _parent._attributes[_namespace][_name] ~= this; + _parent._attributes["*"][_name] ~= this; + } + else + _name = value; + } + + @property string fullName() + { + return _namespace==""? _name : text(_namespace, ":", _name); + } + + this(string namespace, string name, Value value, Location location = Location(0, 0, 0)) + { + this._namespace = namespace; + this._name = name; + this.location = location; + this.value = value; + } + + this(string name, Value value, Location location = Location(0, 0, 0)) + { + this._namespace = ""; + this._name = name; + this.location = location; + this.value = value; + } + + /// Removes 'this' from its parent, if any. Returns 'this' for chaining. + /// Inefficient ATM, but it works. 
+ Attribute remove() + { + if(!_parent) + return this; + + void removeFromGroupedLookup(string ns) + { + // Remove from _parent._attributes[ns] + auto sameNameAttrs = _parent._attributes[ns][_name]; + auto targetIndex = sameNameAttrs.countUntil(this); + _parent._attributes[ns][_name].removeIndex(targetIndex); + } + + // Remove from _parent._attributes + removeFromGroupedLookup(_namespace); + removeFromGroupedLookup("*"); + + // Remove from _parent.allAttributes + auto allAttrsIndex = _parent.allAttributes.countUntil(this); + _parent.allAttributes.removeIndex(allAttrsIndex); + + // Remove from _parent.attributeIndicies + auto sameNamespaceAttrs = _parent.attributeIndicies[_namespace]; + auto attrIndiciesIndex = sameNamespaceAttrs.countUntil(allAttrsIndex); + _parent.attributeIndicies[_namespace].removeIndex(attrIndiciesIndex); + + // Fixup other indicies + foreach(ns, ref nsAttrIndicies; _parent.attributeIndicies) + foreach(k, ref v; nsAttrIndicies) + if(v > allAttrsIndex) + v--; + + _parent.removeNamespaceIfEmpty(_namespace); + _parent.updateId++; + _parent = null; + return this; + } + + override bool opEquals(Object o) + { + auto a = cast(Attribute)o; + if(!a) + return false; + + return + _namespace == a._namespace && + _name == a._name && + value == a.value; + } + + string toSDLString()() + { + Appender!string sink; + this.toSDLString(sink); + return sink.data; + } + + void toSDLString(Sink)(ref Sink sink) if(isOutputRange!(Sink,char)) + { + if(_namespace != "") + { + sink.put(_namespace); + sink.put(':'); + } + + sink.put(_name); + sink.put('='); + value.toSDLString(sink); + } +} + +class Tag +{ + Location location; + Value[] values; + + private Tag _parent; + /// Get parent tag. To set a parent, attach this Tag to its intended + /// parent tag by calling 'Tag.add(...)', or by passing it to + /// the parent tag's constructor. 
+ @property Tag parent() + { + return _parent; + } + + private string _namespace; + @property string namespace() + { + return _namespace; + } + /// Not particularly efficient, but it works. + @property void namespace(string value) + { + if(_parent && _namespace != value) + { + // Remove + auto saveParent = _parent; + if(_parent) + this.remove(); + + // Change namespace + _namespace = value; + + // Re-add + if(saveParent) + saveParent.add(this); + } + else + _namespace = value; + } + + private string _name; + /// Not including namespace. Use 'fullName' if you want the namespace included. + @property string name() + { + return _name; + } + /// Not the most efficient, but it works. + @property void name(string value) + { + if(_parent && _name != value) + { + _parent.updateId++; + + void removeFromGroupedLookup(string ns) + { + // Remove from _parent._tags[ns] + auto sameNameTags = _parent._tags[ns][_name]; + auto targetIndex = sameNameTags.countUntil(this); + _parent._tags[ns][_name].removeIndex(targetIndex); + } + + // Remove from _parent._tags + removeFromGroupedLookup(_namespace); + removeFromGroupedLookup("*"); + + // Change name + _name = value; + + // Add to new locations in _parent._tags + _parent._tags[_namespace][_name] ~= this; + _parent._tags["*"][_name] ~= this; + } + else + _name = value; + } + + /// This tag's name, including namespace if one exists. + @property string fullName() + { + return _namespace==""? _name : text(_namespace, ":", _name); + } + + // Tracks dirtiness. This is incremented every time a change is made which + // could invalidate existing ranges. This way, the ranges can detect when + // they've been invalidated. 
+ private size_t updateId=0; + + this(Tag parent = null) + { + if(parent) + parent.add(this); + } + + this( + string namespace, string name, + Value[] values=null, Attribute[] attributes=null, Tag[] children=null + ) + { + this(null, namespace, name, values, attributes, children); + } + + this( + Tag parent, string namespace, string name, + Value[] values=null, Attribute[] attributes=null, Tag[] children=null + ) + { + this._namespace = namespace; + this._name = name; + + if(parent) + parent.add(this); + + this.values = values; + this.add(attributes); + this.add(children); + } + + private Attribute[] allAttributes; // In same order as specified in SDL file. + private Tag[] allTags; // In same order as specified in SDL file. + private string[] allNamespaces; // In same order as specified in SDL file. + + private size_t[][string] attributeIndicies; // allAttributes[ attributes[namespace][i] ] + private size_t[][string] tagIndicies; // allTags[ tags[namespace][i] ] + + private Attribute[][string][string] _attributes; // attributes[namespace or "*"][name][i] + private Tag[][string][string] _tags; // tags[namespace or "*"][name][i] + + /// Adds a Value, Attribute, Tag (or array of such) as a member/child of this Tag. + /// Returns 'this' for chaining. + /// Throws 'SDLangValidationException' if trying to add an Attribute or Tag + /// that already has a parent. + Tag add(Value val) + { + values ~= val; + updateId++; + return this; + } + + ///ditto + Tag add(Value[] vals) + { + foreach(val; vals) + add(val); + + return this; + } + + ///ditto + Tag add(Attribute attr) + { + if(attr._parent) + { + throw new SDLangValidationException( + "Attribute is already attached to a parent tag. "~ + "Use Attribute.remove() before adding it to another tag." 
+ ); + } + + if(!allNamespaces.canFind(attr._namespace)) + allNamespaces ~= attr._namespace; + + attr._parent = this; + + allAttributes ~= attr; + attributeIndicies[attr._namespace] ~= allAttributes.length-1; + _attributes[attr._namespace][attr._name] ~= attr; + _attributes["*"] [attr._name] ~= attr; + + updateId++; + return this; + } + + ///ditto + Tag add(Attribute[] attrs) + { + foreach(attr; attrs) + add(attr); + + return this; + } + + ///ditto + Tag add(Tag tag) + { + if(tag._parent) + { + throw new SDLangValidationException( + "Tag is already attached to a parent tag. "~ + "Use Tag.remove() before adding it to another tag." + ); + } + + if(!allNamespaces.canFind(tag._namespace)) + allNamespaces ~= tag._namespace; + + tag._parent = this; + + allTags ~= tag; + tagIndicies[tag._namespace] ~= allTags.length-1; + _tags[tag._namespace][tag._name] ~= tag; + _tags["*"] [tag._name] ~= tag; + + updateId++; + return this; + } + + ///ditto + Tag add(Tag[] tags) + { + foreach(tag; tags) + add(tag); + + return this; + } + + /// Removes 'this' from its parent, if any. Returns 'this' for chaining. + /// Inefficient ATM, but it works. 
+ Tag remove() + { + if(!_parent) + return this; + + void removeFromGroupedLookup(string ns) + { + // Remove from _parent._tags[ns] + auto sameNameTags = _parent._tags[ns][_name]; + auto targetIndex = sameNameTags.countUntil(this); + _parent._tags[ns][_name].removeIndex(targetIndex); + } + + // Remove from _parent._tags + removeFromGroupedLookup(_namespace); + removeFromGroupedLookup("*"); + + // Remove from _parent.allTags + auto allTagsIndex = _parent.allTags.countUntil(this); + _parent.allTags.removeIndex(allTagsIndex); + + // Remove from _parent.tagIndicies + auto sameNamespaceTags = _parent.tagIndicies[_namespace]; + auto tagIndiciesIndex = sameNamespaceTags.countUntil(allTagsIndex); + _parent.tagIndicies[_namespace].removeIndex(tagIndiciesIndex); + + // Fixup other indicies + foreach(ns, ref nsTagIndicies; _parent.tagIndicies) + foreach(k, ref v; nsTagIndicies) + if(v > allTagsIndex) + v--; + + _parent.removeNamespaceIfEmpty(_namespace); + _parent.updateId++; + _parent = null; + return this; + } + + private void removeNamespaceIfEmpty(string namespace) + { + // If namespace has no attributes, remove it from attributeIndicies/_attributes + if(namespace in attributeIndicies && attributeIndicies[namespace].length == 0) + { + attributeIndicies.remove(namespace); + _attributes.remove(namespace); + } + + // If namespace has no tags, remove it from tagIndicies/_tags + if(namespace in tagIndicies && tagIndicies[namespace].length == 0) + { + tagIndicies.remove(namespace); + _tags.remove(namespace); + } + + // If namespace is now empty, remove it from allNamespaces + if( + namespace !in tagIndicies && + namespace !in attributeIndicies + ) + { + auto allNamespacesIndex = allNamespaces.length - allNamespaces.find(namespace).length; + allNamespaces = allNamespaces[0..allNamespacesIndex] ~ allNamespaces[allNamespacesIndex+1..$]; + } + } + + struct NamedMemberRange(T, string membersGrouped) + { + private Tag tag; + private string namespace; // "*" indicates "all namespaces" 
(ok since it's not a valid namespace name) + private string name; + private size_t updateId; // Tag's updateId when this range was created. + + this(Tag tag, string namespace, string name, size_t updateId) + { + this.tag = tag; + this.namespace = namespace; + this.name = name; + this.updateId = updateId; + frontIndex = 0; + + if( + namespace in mixin("tag."~membersGrouped) && + name in mixin("tag."~membersGrouped~"[namespace]") + ) + endIndex = mixin("tag."~membersGrouped~"[namespace][name].length"); + else + endIndex = 0; + } + + invariant() + { + assert( + this.updateId == tag.updateId, + "This range has been invalidated by a change to the tag." + ); + } + + @property bool empty() + { + return frontIndex == endIndex; + } + + private size_t frontIndex; + @property T front() + { + return this[0]; + } + void popFront() + { + if(empty) + throw new SDLangRangeException("Range is empty"); + + frontIndex++; + } + + private size_t endIndex; // One past the last element + @property T back() + { + return this[$-1]; + } + void popBack() + { + if(empty) + throw new SDLangRangeException("Range is empty"); + + endIndex--; + } + + alias length opDollar; + @property size_t length() + { + return endIndex - frontIndex; + } + + @property typeof(this) save() + { + auto r = typeof(this)(this.tag, this.namespace, this.name, this.updateId); + r.frontIndex = this.frontIndex; + r.endIndex = this.endIndex; + return r; + } + + typeof(this) opSlice() + { + return save(); + } + + typeof(this) opSlice(size_t start, size_t end) + { + auto r = save(); + r.frontIndex = this.frontIndex + start; + r.endIndex = this.frontIndex + end; + + if( + r.frontIndex > this.endIndex || + r.endIndex > this.endIndex || + r.frontIndex > r.endIndex + ) + throw new SDLangRangeException("Slice out of range"); + + return r; + } + + T opIndex(size_t index) + { + if(empty) + throw new SDLangRangeException("Range is empty"); + + return mixin("tag."~membersGrouped~"[namespace][name][frontIndex+index]"); + } + } + + 
struct MemberRange(T, string allMembers, string memberIndicies, string membersGrouped) + { + private Tag tag; + private string namespace; // "*" indicates "all namespaces" (ok since it's not a valid namespace name) + private bool isMaybe; + private size_t updateId; // Tag's updateId when this range was created. + private size_t initialEndIndex; + + this(Tag tag, string namespace, bool isMaybe) + { + this.tag = tag; + this.namespace = namespace; + this.updateId = tag.updateId; + this.isMaybe = isMaybe; + frontIndex = 0; + + if(namespace == "*") + initialEndIndex = mixin("tag."~allMembers~".length"); + else if(namespace in mixin("tag."~memberIndicies)) + initialEndIndex = mixin("tag."~memberIndicies~"[namespace].length"); + else + initialEndIndex = 0; + + endIndex = initialEndIndex; + } + + invariant() + { + assert( + this.updateId == tag.updateId, + "This range has been invalidated by a change to the tag." + ); + } + + @property bool empty() + { + return frontIndex == endIndex; + } + + private size_t frontIndex; + @property T front() + { + return this[0]; + } + void popFront() + { + if(empty) + throw new SDLangRangeException("Range is empty"); + + frontIndex++; + } + + private size_t endIndex; // One past the last element + @property T back() + { + return this[$-1]; + } + void popBack() + { + if(empty) + throw new SDLangRangeException("Range is empty"); + + endIndex--; + } + + alias length opDollar; + @property size_t length() + { + return endIndex - frontIndex; + } + + @property typeof(this) save() + { + auto r = typeof(this)(this.tag, this.namespace, this.isMaybe); + r.frontIndex = this.frontIndex; + r.endIndex = this.endIndex; + r.initialEndIndex = this.initialEndIndex; + r.updateId = this.updateId; + return r; + } + + typeof(this) opSlice() + { + return save(); + } + + typeof(this) opSlice(size_t start, size_t end) + { + auto r = save(); + r.frontIndex = this.frontIndex + start; + r.endIndex = this.frontIndex + end; + + if( + r.frontIndex > this.endIndex || + 
r.endIndex > this.endIndex || + r.frontIndex > r.endIndex + ) + throw new SDLangRangeException("Slice out of range"); + + return r; + } + + T opIndex(size_t index) + { + if(empty) + throw new SDLangRangeException("Range is empty"); + + if(namespace == "*") + return mixin("tag."~allMembers~"[ frontIndex+index ]"); + else + return mixin("tag."~allMembers~"[ tag."~memberIndicies~"[namespace][frontIndex+index] ]"); + } + + alias NamedMemberRange!(T,membersGrouped) ThisNamedMemberRange; + ThisNamedMemberRange opIndex(string name) + { + if(frontIndex != 0 || endIndex != initialEndIndex) + { + throw new SDLangRangeException( + "Cannot lookup tags/attributes by name on a subset of a range, "~ + "only across the entire tag. "~ + "Please make sure you haven't called popFront or popBack on this "~ + "range and that you aren't using a slice of the range." + ); + } + + if(!isMaybe && empty) + throw new SDLangRangeException("Range is empty"); + + if(!isMaybe && name !in this) + throw new SDLangRangeException(`No such `~T.stringof~` named: "`~name~`"`); + + return ThisNamedMemberRange(tag, namespace, name, updateId); + } + + bool opBinaryRight(string op)(string name) if(op=="in") + { + if(frontIndex != 0 || endIndex != initialEndIndex) + { + throw new SDLangRangeException( + "Cannot lookup tags/attributes by name on a subset of a range, "~ + "only across the entire tag. "~ + "Please make sure you haven't called popFront or popBack on this "~ + "range and that you aren't using a slice of the range." + ); + } + + return + namespace in mixin("tag."~membersGrouped) && + name in mixin("tag."~membersGrouped~"[namespace]") && + mixin("tag."~membersGrouped~"[namespace][name].length") > 0; + } + } + + struct NamespaceRange + { + private Tag tag; + private bool isMaybe; + private size_t updateId; // Tag's updateId when this range was created. 
+ + this(Tag tag, bool isMaybe) + { + this.tag = tag; + this.isMaybe = isMaybe; + this.updateId = tag.updateId; + frontIndex = 0; + endIndex = tag.allNamespaces.length; + } + + invariant() + { + assert( + this.updateId == tag.updateId, + "This range has been invalidated by a change to the tag." + ); + } + + @property bool empty() + { + return frontIndex == endIndex; + } + + private size_t frontIndex; + @property NamespaceAccess front() + { + return this[0]; + } + void popFront() + { + if(empty) + throw new SDLangRangeException("Range is empty"); + + frontIndex++; + } + + private size_t endIndex; // One past the last element + @property NamespaceAccess back() + { + return this[$-1]; + } + void popBack() + { + if(empty) + throw new SDLangRangeException("Range is empty"); + + endIndex--; + } + + alias length opDollar; + @property size_t length() + { + return endIndex - frontIndex; + } + + @property NamespaceRange save() + { + auto r = NamespaceRange(this.tag, this.isMaybe); + r.frontIndex = this.frontIndex; + r.endIndex = this.endIndex; + r.updateId = this.updateId; + return r; + } + + typeof(this) opSlice() + { + return save(); + } + + typeof(this) opSlice(size_t start, size_t end) + { + auto r = save(); + r.frontIndex = this.frontIndex + start; + r.endIndex = this.frontIndex + end; + + if( + r.frontIndex > this.endIndex || + r.endIndex > this.endIndex || + r.frontIndex > r.endIndex + ) + throw new SDLangRangeException("Slice out of range"); + + return r; + } + + NamespaceAccess opIndex(size_t index) + { + if(empty) + throw new SDLangRangeException("Range is empty"); + + auto namespace = tag.allNamespaces[frontIndex+index]; + return NamespaceAccess( + namespace, + AttributeRange(tag, namespace, isMaybe), + TagRange(tag, namespace, isMaybe) + ); + } + + NamespaceAccess opIndex(string namespace) + { + if(!isMaybe && empty) + throw new SDLangRangeException("Range is empty"); + + if(!isMaybe && namespace !in this) + throw new SDLangRangeException(`No such namespace: 
"`~namespace~`"`); + + return NamespaceAccess( + namespace, + AttributeRange(tag, namespace, isMaybe), + TagRange(tag, namespace, isMaybe) + ); + } + + /// Inefficient when range is a slice or has used popFront/popBack, but it works. + bool opBinaryRight(string op)(string namespace) if(op=="in") + { + if(frontIndex == 0 && endIndex == tag.allNamespaces.length) + { + return + namespace in tag.attributeIndicies || + namespace in tag.tagIndicies; + } + else + // Slower fallback method + return tag.allNamespaces[frontIndex..endIndex].canFind(namespace); + } + } + + struct NamespaceAccess + { + string name; + AttributeRange attributes; + TagRange tags; + } + + alias MemberRange!(Attribute, "allAttributes", "attributeIndicies", "_attributes") AttributeRange; + alias MemberRange!(Tag, "allTags", "tagIndicies", "_tags" ) TagRange; + static assert(isRandomAccessRange!AttributeRange); + static assert(isRandomAccessRange!TagRange); + static assert(isRandomAccessRange!NamespaceRange); + + /// Access all attributes that don't have a namespace + @property AttributeRange attributes() + { + return AttributeRange(this, "", false); + } + + /// Access all direct-child tags that don't have a namespace + @property TagRange tags() + { + return TagRange(this, "", false); + } + + /// Access all namespaces in this tag, and the attributes/tags within them. + @property NamespaceRange namespaces() + { + return NamespaceRange(this, false); + } + + /// Access all attributes and tags regardless of namespace. 
+ @property NamespaceAccess all() + { + // "*" isn't a valid namespace name, so we can use it to indicate "all namespaces" + return NamespaceAccess( + "*", + AttributeRange(this, "*", false), + TagRange(this, "*", false) + ); + } + + struct MaybeAccess + { + Tag tag; + + /// Access all attributes that don't have a namespace + @property AttributeRange attributes() + { + return AttributeRange(tag, "", true); + } + + /// Access all direct-child tags that don't have a namespace + @property TagRange tags() + { + return TagRange(tag, "", true); + } + + /// Access all namespaces in this tag, and the attributes/tags within them. + @property NamespaceRange namespaces() + { + return NamespaceRange(tag, true); + } + + /// Access all attributes and tags regardless of namespace. + @property NamespaceAccess all() + { + // "*" isn't a valid namespace name, so we can use it to indicate "all namespaces" + return NamespaceAccess( + "*", + AttributeRange(tag, "*", true), + TagRange(tag, "*", true) + ); + } + } + + /// Access 'attributes', 'tags', 'namespaces' and 'all' like normal, + /// except that looking up a non-existent name/namespace with + /// opIndex(string) results in an empty array instead of a thrown SDLangRangeException. + @property MaybeAccess maybe() + { + return MaybeAccess(this); + } + + override bool opEquals(Object o) + { + auto t = cast(Tag)o; + if(!t) + return false; + + if(_namespace != t._namespace || _name != t._name) + return false; + + if( + values .length != t.values .length || + allAttributes .length != t.allAttributes.length || + allNamespaces .length != t.allNamespaces.length || + allTags .length != t.allTags .length + ) + return false; + + if(values != t.values) + return false; + + if(allNamespaces != t.allNamespaces) + return false; + + if(allAttributes != t.allAttributes) + return false; + + // Ok because cycles are not allowed + //TODO: Actually check for or prevent cycles. + return allTags == t.allTags; + } + + /// Treats 'this' as the root tag. 
Note that root tags cannot have + /// values or attributes, and cannot be part of a namespace. + /// If this isn't a valid root tag, 'SDLangValidationException' will be thrown. + string toSDLDocument()(string indent="\t", int indentLevel=0) + { + Appender!string sink; + toSDLDocument(sink, indent, indentLevel); + return sink.data; + } + + ///ditto + void toSDLDocument(Sink)(ref Sink sink, string indent="\t", int indentLevel=0) + if(isOutputRange!(Sink,char)) + { + if(values.length > 0) + throw new SDLangValidationException("Root tags cannot have any values, only child tags."); + + if(allAttributes.length > 0) + throw new SDLangValidationException("Root tags cannot have any attributes, only child tags."); + + if(_namespace != "") + throw new SDLangValidationException("Root tags cannot have a namespace."); + + foreach(tag; allTags) + tag.toSDLString(sink, indent, indentLevel); + } + + /// Output this entire tag in SDL format. Does *not* treat 'this' as + /// a root tag. If you intend this to be the root of a standard SDL + /// document, use 'toSDLDocument' instead. 
+ string toSDLString()(string indent="\t", int indentLevel=0) + { + Appender!string sink; + toSDLString(sink, indent, indentLevel); + return sink.data; + } + + ///ditto + void toSDLString(Sink)(ref Sink sink, string indent="\t", int indentLevel=0) + if(isOutputRange!(Sink,char)) + { + if(_name == "" && values.length == 0) + throw new SDLangValidationException("Anonymous tags must have at least one value."); + + if(_name == "" && _namespace != "") + throw new SDLangValidationException("Anonymous tags cannot have a namespace."); + + // Indent + foreach(i; 0..indentLevel) + sink.put(indent); + + // Name + if(_namespace != "") + { + sink.put(_namespace); + sink.put(':'); + } + sink.put(_name); + + // Values + foreach(i, v; values) + { + // Omit the first space for anonymous tags + if(_name != "" || i > 0) + sink.put(' '); + + v.toSDLString(sink); + } + + // Attributes + foreach(attr; allAttributes) + { + sink.put(' '); + attr.toSDLString(sink); + } + + // Child tags + bool foundChild=false; + foreach(tag; allTags) + { + if(!foundChild) + { + sink.put(" {\n"); + foundChild = true; + } + + tag.toSDLString(sink, indent, indentLevel+1); + } + if(foundChild) + { + foreach(i; 0..indentLevel) + sink.put(indent); + + sink.put("}\n"); + } + else + sink.put("\n"); + } + + /// Not the most efficient, but it works. 
+ string toDebugString() + { + import std.algorithm : sort; + + Appender!string buf; + + buf.put("\n"); + buf.put("Tag "); + if(_namespace != "") + { + buf.put("["); + buf.put(_namespace); + buf.put("]"); + } + buf.put("'%s':\n".format(_name)); + + // Values + foreach(val; values) + buf.put(" (%s): %s\n".format(.toString(val.type), val)); + + // Attributes + foreach(attrNamespace; _attributes.keys.sort()) + if(attrNamespace != "*") + foreach(attrName; _attributes[attrNamespace].keys.sort()) + foreach(attr; _attributes[attrNamespace][attrName]) + { + string namespaceStr; + if(attr._namespace != "") + namespaceStr = "["~attr._namespace~"]"; + + buf.put( + " %s%s(%s): %s\n".format( + namespaceStr, attr._name, .toString(attr.value.type), attr.value + ) + ); + } + + // Children + foreach(tagNamespace; _tags.keys.sort()) + if(tagNamespace != "*") + foreach(tagName; _tags[tagNamespace].keys.sort()) + foreach(tag; _tags[tagNamespace][tagName]) + buf.put( tag.toDebugString().replace("\n", "\n ") ); + + return buf.data; + } +} + +version(sdlangUnittest) +{ + private void testRandomAccessRange(R, E)(R range, E[] expected, bool function(E, E) equals=null) + { + static assert(isRandomAccessRange!R); + static assert(is(ElementType!R == E)); + static assert(hasLength!R); + static assert(!isInfinite!R); + + assert(range.length == expected.length); + if(range.length == 0) + { + assert(range.empty); + return; + } + + static bool defaultEquals(E e1, E e2) + { + return e1 == e2; + } + if(equals is null) + equals = &defaultEquals; + + assert(equals(range.front, expected[0])); + assert(equals(range.front, expected[0])); // Ensure consistent result from '.front' + assert(equals(range.front, expected[0])); // Ensure consistent result from '.front' + + assert(equals(range.back, expected[$-1])); + assert(equals(range.back, expected[$-1])); // Ensure consistent result from '.back' + assert(equals(range.back, expected[$-1])); // Ensure consistent result from '.back' + + // Forward iteration + 
auto original = range.save; + auto r2 = range.save; + foreach(i; 0..expected.length) + { + //trace("Forward iteration: ", i); + + // Test length/empty + assert(range.length == expected.length - i); + assert(range.length == r2.length); + assert(!range.empty); + assert(!r2.empty); + + // Test front + assert(equals(range.front, expected[i])); + assert(equals(range.front, r2.front)); + + // Test back + assert(equals(range.back, expected[$-1])); + assert(equals(range.back, r2.back)); + + // Test opIndex(0) + assert(equals(range[0], expected[i])); + assert(equals(range[0], r2[0])); + + // Test opIndex($-1) + assert(equals(range[$-1], expected[$-1])); + assert(equals(range[$-1], r2[$-1])); + + // Test popFront + range.popFront(); + assert(range.length == r2.length - 1); + r2.popFront(); + assert(range.length == r2.length); + } + assert(range.empty); + assert(r2.empty); + assert(original.length == expected.length); + + // Backwards iteration + range = original.save; + r2 = original.save; + foreach(i; iota(0, expected.length).retro()) + { + //trace("Backwards iteration: ", i); + + // Test length/empty + assert(range.length == i+1); + assert(range.length == r2.length); + assert(!range.empty); + assert(!r2.empty); + + // Test front + assert(equals(range.front, expected[0])); + assert(equals(range.front, r2.front)); + + // Test back + assert(equals(range.back, expected[i])); + assert(equals(range.back, r2.back)); + + // Test opIndex(0) + assert(equals(range[0], expected[0])); + assert(equals(range[0], r2[0])); + + // Test opIndex($-1) + assert(equals(range[$-1], expected[i])); + assert(equals(range[$-1], r2[$-1])); + + // Test popBack + range.popBack(); + assert(range.length == r2.length - 1); + r2.popBack(); + assert(range.length == r2.length); + } + assert(range.empty); + assert(r2.empty); + assert(original.length == expected.length); + + // Random access + range = original.save; + r2 = original.save; + foreach(i; 0..expected.length) + { + //trace("Random access: ", i); + + 
// Test length/empty + assert(range.length == expected.length); + assert(range.length == r2.length); + assert(!range.empty); + assert(!r2.empty); + + // Test front + assert(equals(range.front, expected[0])); + assert(equals(range.front, r2.front)); + + // Test back + assert(equals(range.back, expected[$-1])); + assert(equals(range.back, r2.back)); + + // Test opIndex(i) + assert(equals(range[i], expected[i])); + assert(equals(range[i], r2[i])); + } + assert(!range.empty); + assert(!r2.empty); + assert(original.length == expected.length); + } +} + +version(sdlangUnittest) +unittest +{ + import sdlang.parser; + writeln("Unittesting sdlang ast..."); + stdout.flush(); + + Tag root; + root = parseSource(""); + testRandomAccessRange(root.attributes, cast( Attribute[])[]); + testRandomAccessRange(root.tags, cast( Tag[])[]); + testRandomAccessRange(root.namespaces, cast(Tag.NamespaceAccess[])[]); + + root = parseSource(` + blue 3 "Lee" isThree=true + blue 5 "Chan" 12345 isThree=false + stuff:orange 1 2 3 2 1 + stuff:square points=4 dimensions=2 points="Still four" + stuff:triangle data:points=3 data:dimensions=2 + nothing + namespaces small:A=1 med:A=2 big:A=3 small:B=10 big:B=30 + + people visitor:a=1 b=2 { + chiyo "Small" "Flies?" 
nemesis="Car" score=100 + yukari + visitor:sana + tomo + visitor:hayama + } + `); + + auto blue3 = new Tag( + null, "", "blue", + [ Value(3), Value("Lee") ], + [ new Attribute("isThree", Value(true)) ], + null + ); + auto blue5 = new Tag( + null, "", "blue", + [ Value(5), Value("Chan"), Value(12345) ], + [ new Attribute("isThree", Value(false)) ], + null + ); + auto orange = new Tag( + null, "stuff", "orange", + [ Value(1), Value(2), Value(3), Value(2), Value(1) ], + null, + null + ); + auto square = new Tag( + null, "stuff", "square", + null, + [ + new Attribute("points", Value(4)), + new Attribute("dimensions", Value(2)), + new Attribute("points", Value("Still four")), + ], + null + ); + auto triangle = new Tag( + null, "stuff", "triangle", + null, + [ + new Attribute("data", "points", Value(3)), + new Attribute("data", "dimensions", Value(2)), + ], + null + ); + auto nothing = new Tag( + null, "", "nothing", + null, null, null + ); + auto namespaces = new Tag( + null, "", "namespaces", + null, + [ + new Attribute("small", "A", Value(1)), + new Attribute("med", "A", Value(2)), + new Attribute("big", "A", Value(3)), + new Attribute("small", "B", Value(10)), + new Attribute("big", "B", Value(30)), + ], + null + ); + auto chiyo = new Tag( + null, "", "chiyo", + [ Value("Small"), Value("Flies?") ], + [ + new Attribute("nemesis", Value("Car")), + new Attribute("score", Value(100)), + ], + null + ); + auto chiyo_ = new Tag( + null, "", "chiyo_", + [ Value("Small"), Value("Flies?") ], + [ + new Attribute("nemesis", Value("Car")), + new Attribute("score", Value(100)), + ], + null + ); + auto yukari = new Tag( + null, "", "yukari", + null, null, null + ); + auto sana = new Tag( + null, "visitor", "sana", + null, null, null + ); + auto sana_ = new Tag( + null, "visitor", "sana_", + null, null, null + ); + auto sanaVisitor_ = new Tag( + null, "visitor_", "sana_", + null, null, null + ); + auto tomo = new Tag( + null, "", "tomo", + null, null, null + ); + auto hayama = new 
Tag( + null, "visitor", "hayama", + null, null, null + ); + auto people = new Tag( + null, "", "people", + null, + [ + new Attribute("visitor", "a", Value(1)), + new Attribute("b", Value(2)), + ], + [chiyo, yukari, sana, tomo, hayama] + ); + + assert(blue3 .opEquals( blue3 )); + assert(blue5 .opEquals( blue5 )); + assert(orange .opEquals( orange )); + assert(square .opEquals( square )); + assert(triangle .opEquals( triangle )); + assert(nothing .opEquals( nothing )); + assert(namespaces .opEquals( namespaces )); + assert(people .opEquals( people )); + assert(chiyo .opEquals( chiyo )); + assert(yukari .opEquals( yukari )); + assert(sana .opEquals( sana )); + assert(tomo .opEquals( tomo )); + assert(hayama .opEquals( hayama )); + + assert(!blue3.opEquals(orange)); + assert(!blue3.opEquals(people)); + assert(!blue3.opEquals(sana)); + assert(!blue3.opEquals(blue5)); + assert(!blue5.opEquals(blue3)); + + alias Tag.NamespaceAccess NSA; + static bool namespaceEquals(NSA n1, NSA n2) + { + return n1.name == n2.name; + } + + testRandomAccessRange(root.attributes, cast(Attribute[])[]); + testRandomAccessRange(root.tags, [blue3, blue5, nothing, namespaces, people]); + testRandomAccessRange(root.namespaces, [NSA(""), NSA("stuff")], &namespaceEquals); + testRandomAccessRange(root.namespaces[0].tags, [blue3, blue5, nothing, namespaces, people]); + testRandomAccessRange(root.namespaces[1].tags, [orange, square, triangle]); + assert("" in root.namespaces); + assert("stuff" in root.namespaces); + assert("foobar" !in root.namespaces); + testRandomAccessRange(root.namespaces[ ""].tags, [blue3, blue5, nothing, namespaces, people]); + testRandomAccessRange(root.namespaces["stuff"].tags, [orange, square, triangle]); + testRandomAccessRange(root.all.attributes, cast(Attribute[])[]); + testRandomAccessRange(root.all.tags, [blue3, blue5, orange, square, triangle, nothing, namespaces, people]); + testRandomAccessRange(root.all.tags[], [blue3, blue5, orange, square, triangle, nothing, 
namespaces, people]); + testRandomAccessRange(root.all.tags[3..6], [square, triangle, nothing]); + assert("blue" in root.tags); + assert("nothing" in root.tags); + assert("people" in root.tags); + assert("orange" !in root.tags); + assert("square" !in root.tags); + assert("foobar" !in root.tags); + assert("blue" in root.all.tags); + assert("nothing" in root.all.tags); + assert("people" in root.all.tags); + assert("orange" in root.all.tags); + assert("square" in root.all.tags); + assert("foobar" !in root.all.tags); + assert("orange" in root.namespaces["stuff"].tags); + assert("square" in root.namespaces["stuff"].tags); + assert("square" in root.namespaces["stuff"].tags); + assert("foobar" !in root.attributes); + assert("foobar" !in root.all.attributes); + assert("foobar" !in root.namespaces["stuff"].attributes); + assert("blue" !in root.attributes); + assert("blue" !in root.all.attributes); + assert("blue" !in root.namespaces["stuff"].attributes); + testRandomAccessRange(root.tags["nothing"], [nothing]); + testRandomAccessRange(root.tags["blue"], [blue3, blue5]); + testRandomAccessRange(root.namespaces["stuff"].tags["orange"], [orange]); + testRandomAccessRange(root.all.tags["nothing"], [nothing]); + testRandomAccessRange(root.all.tags["blue"], [blue3, blue5]); + testRandomAccessRange(root.all.tags["orange"], [orange]); + + assertThrown!SDLangRangeException(root.tags["foobar"]); + assertThrown!SDLangRangeException(root.all.tags["foobar"]); + assertThrown!SDLangRangeException(root.attributes["foobar"]); + assertThrown!SDLangRangeException(root.all.attributes["foobar"]); + + // DMD Issue #12585 causes a segfault in these two tests when using 2.064 or 2.065, + // so work around it. 
+ //assertThrown!SDLangRangeException(root.namespaces["foobar"].tags["foobar"]); + //assertThrown!SDLangRangeException(root.namespaces["foobar"].attributes["foobar"]); + bool didCatch = false; + try + auto x = root.namespaces["foobar"].tags["foobar"]; + catch(SDLangRangeException e) + didCatch = true; + assert(didCatch); + + didCatch = false; + try + auto x = root.namespaces["foobar"].attributes["foobar"]; + catch(SDLangRangeException e) + didCatch = true; + assert(didCatch); + + testRandomAccessRange(root.maybe.tags["nothing"], [nothing]); + testRandomAccessRange(root.maybe.tags["blue"], [blue3, blue5]); + testRandomAccessRange(root.maybe.namespaces["stuff"].tags["orange"], [orange]); + testRandomAccessRange(root.maybe.all.tags["nothing"], [nothing]); + testRandomAccessRange(root.maybe.all.tags["blue"], [blue3, blue5]); + testRandomAccessRange(root.maybe.all.tags["blue"][], [blue3, blue5]); + testRandomAccessRange(root.maybe.all.tags["blue"][0..1], [blue3]); + testRandomAccessRange(root.maybe.all.tags["blue"][1..2], [blue5]); + testRandomAccessRange(root.maybe.all.tags["orange"], [orange]); + testRandomAccessRange(root.maybe.tags["foobar"], cast(Tag[])[]); + testRandomAccessRange(root.maybe.all.tags["foobar"], cast(Tag[])[]); + testRandomAccessRange(root.maybe.namespaces["foobar"].tags["foobar"], cast(Tag[])[]); + testRandomAccessRange(root.maybe.attributes["foobar"], cast(Attribute[])[]); + testRandomAccessRange(root.maybe.all.attributes["foobar"], cast(Attribute[])[]); + testRandomAccessRange(root.maybe.namespaces["foobar"].attributes["foobar"], cast(Attribute[])[]); + + testRandomAccessRange(blue3.attributes, [ new Attribute("isThree", Value(true)) ]); + testRandomAccessRange(blue3.tags, cast(Tag[])[]); + testRandomAccessRange(blue3.namespaces, [NSA("")], &namespaceEquals); + testRandomAccessRange(blue3.all.attributes, [ new Attribute("isThree", Value(true)) ]); + testRandomAccessRange(blue3.all.tags, cast(Tag[])[]); + + testRandomAccessRange(blue5.attributes, 
[ new Attribute("isThree", Value(false)) ]); + testRandomAccessRange(blue5.tags, cast(Tag[])[]); + testRandomAccessRange(blue5.namespaces, [NSA("")], &namespaceEquals); + testRandomAccessRange(blue5.all.attributes, [ new Attribute("isThree", Value(false)) ]); + testRandomAccessRange(blue5.all.tags, cast(Tag[])[]); + + testRandomAccessRange(orange.attributes, cast(Attribute[])[]); + testRandomAccessRange(orange.tags, cast(Tag[])[]); + testRandomAccessRange(orange.namespaces, cast(NSA[])[], &namespaceEquals); + testRandomAccessRange(orange.all.attributes, cast(Attribute[])[]); + testRandomAccessRange(orange.all.tags, cast(Tag[])[]); + + testRandomAccessRange(square.attributes, [ + new Attribute("points", Value(4)), + new Attribute("dimensions", Value(2)), + new Attribute("points", Value("Still four")), + ]); + testRandomAccessRange(square.tags, cast(Tag[])[]); + testRandomAccessRange(square.namespaces, [NSA("")], &namespaceEquals); + testRandomAccessRange(square.all.attributes, [ + new Attribute("points", Value(4)), + new Attribute("dimensions", Value(2)), + new Attribute("points", Value("Still four")), + ]); + testRandomAccessRange(square.all.tags, cast(Tag[])[]); + + testRandomAccessRange(triangle.attributes, cast(Attribute[])[]); + testRandomAccessRange(triangle.tags, cast(Tag[])[]); + testRandomAccessRange(triangle.namespaces, [NSA("data")], &namespaceEquals); + testRandomAccessRange(triangle.namespaces[0].attributes, [ + new Attribute("data", "points", Value(3)), + new Attribute("data", "dimensions", Value(2)), + ]); + assert("data" in triangle.namespaces); + assert("foobar" !in triangle.namespaces); + testRandomAccessRange(triangle.namespaces["data"].attributes, [ + new Attribute("data", "points", Value(3)), + new Attribute("data", "dimensions", Value(2)), + ]); + testRandomAccessRange(triangle.all.attributes, [ + new Attribute("data", "points", Value(3)), + new Attribute("data", "dimensions", Value(2)), + ]); + testRandomAccessRange(triangle.all.tags, 
cast(Tag[])[]); + + testRandomAccessRange(nothing.attributes, cast(Attribute[])[]); + testRandomAccessRange(nothing.tags, cast(Tag[])[]); + testRandomAccessRange(nothing.namespaces, cast(NSA[])[], &namespaceEquals); + testRandomAccessRange(nothing.all.attributes, cast(Attribute[])[]); + testRandomAccessRange(nothing.all.tags, cast(Tag[])[]); + + testRandomAccessRange(namespaces.attributes, cast(Attribute[])[]); + testRandomAccessRange(namespaces.tags, cast(Tag[])[]); + testRandomAccessRange(namespaces.namespaces, [NSA("small"), NSA("med"), NSA("big")], &namespaceEquals); + testRandomAccessRange(namespaces.namespaces[], [NSA("small"), NSA("med"), NSA("big")], &namespaceEquals); + testRandomAccessRange(namespaces.namespaces[1..2], [NSA("med")], &namespaceEquals); + testRandomAccessRange(namespaces.namespaces[0].attributes, [ + new Attribute("small", "A", Value(1)), + new Attribute("small", "B", Value(10)), + ]); + testRandomAccessRange(namespaces.namespaces[1].attributes, [ + new Attribute("med", "A", Value(2)), + ]); + testRandomAccessRange(namespaces.namespaces[2].attributes, [ + new Attribute("big", "A", Value(3)), + new Attribute("big", "B", Value(30)), + ]); + testRandomAccessRange(namespaces.namespaces[1..2][0].attributes, [ + new Attribute("med", "A", Value(2)), + ]); + assert("small" in namespaces.namespaces); + assert("med" in namespaces.namespaces); + assert("big" in namespaces.namespaces); + assert("foobar" !in namespaces.namespaces); + assert("small" !in namespaces.namespaces[1..2]); + assert("med" in namespaces.namespaces[1..2]); + assert("big" !in namespaces.namespaces[1..2]); + assert("foobar" !in namespaces.namespaces[1..2]); + testRandomAccessRange(namespaces.namespaces["small"].attributes, [ + new Attribute("small", "A", Value(1)), + new Attribute("small", "B", Value(10)), + ]); + testRandomAccessRange(namespaces.namespaces["med"].attributes, [ + new Attribute("med", "A", Value(2)), + ]); + 
testRandomAccessRange(namespaces.namespaces["big"].attributes, [ + new Attribute("big", "A", Value(3)), + new Attribute("big", "B", Value(30)), + ]); + testRandomAccessRange(namespaces.all.attributes, [ + new Attribute("small", "A", Value(1)), + new Attribute("med", "A", Value(2)), + new Attribute("big", "A", Value(3)), + new Attribute("small", "B", Value(10)), + new Attribute("big", "B", Value(30)), + ]); + testRandomAccessRange(namespaces.all.attributes[], [ + new Attribute("small", "A", Value(1)), + new Attribute("med", "A", Value(2)), + new Attribute("big", "A", Value(3)), + new Attribute("small", "B", Value(10)), + new Attribute("big", "B", Value(30)), + ]); + testRandomAccessRange(namespaces.all.attributes[2..4], [ + new Attribute("big", "A", Value(3)), + new Attribute("small", "B", Value(10)), + ]); + testRandomAccessRange(namespaces.all.tags, cast(Tag[])[]); + assert("A" !in namespaces.attributes); + assert("B" !in namespaces.attributes); + assert("foobar" !in namespaces.attributes); + assert("A" in namespaces.all.attributes); + assert("B" in namespaces.all.attributes); + assert("foobar" !in namespaces.all.attributes); + assert("A" in namespaces.namespaces["small"].attributes); + assert("B" in namespaces.namespaces["small"].attributes); + assert("foobar" !in namespaces.namespaces["small"].attributes); + assert("A" in namespaces.namespaces["med"].attributes); + assert("B" !in namespaces.namespaces["med"].attributes); + assert("foobar" !in namespaces.namespaces["med"].attributes); + assert("A" in namespaces.namespaces["big"].attributes); + assert("B" in namespaces.namespaces["big"].attributes); + assert("foobar" !in namespaces.namespaces["big"].attributes); + assert("foobar" !in namespaces.tags); + assert("foobar" !in namespaces.all.tags); + assert("foobar" !in namespaces.namespaces["small"].tags); + assert("A" !in namespaces.tags); + assert("A" !in namespaces.all.tags); + assert("A" !in namespaces.namespaces["small"].tags); + 
testRandomAccessRange(namespaces.namespaces["small"].attributes["A"], [ + new Attribute("small", "A", Value(1)), + ]); + testRandomAccessRange(namespaces.namespaces["med"].attributes["A"], [ + new Attribute("med", "A", Value(2)), + ]); + testRandomAccessRange(namespaces.namespaces["big"].attributes["A"], [ + new Attribute("big", "A", Value(3)), + ]); + testRandomAccessRange(namespaces.all.attributes["A"], [ + new Attribute("small", "A", Value(1)), + new Attribute("med", "A", Value(2)), + new Attribute("big", "A", Value(3)), + ]); + testRandomAccessRange(namespaces.all.attributes["B"], [ + new Attribute("small", "B", Value(10)), + new Attribute("big", "B", Value(30)), + ]); + + testRandomAccessRange(chiyo.attributes, [ + new Attribute("nemesis", Value("Car")), + new Attribute("score", Value(100)), + ]); + testRandomAccessRange(chiyo.tags, cast(Tag[])[]); + testRandomAccessRange(chiyo.namespaces, [NSA("")], &namespaceEquals); + testRandomAccessRange(chiyo.all.attributes, [ + new Attribute("nemesis", Value("Car")), + new Attribute("score", Value(100)), + ]); + testRandomAccessRange(chiyo.all.tags, cast(Tag[])[]); + + testRandomAccessRange(yukari.attributes, cast(Attribute[])[]); + testRandomAccessRange(yukari.tags, cast(Tag[])[]); + testRandomAccessRange(yukari.namespaces, cast(NSA[])[], &namespaceEquals); + testRandomAccessRange(yukari.all.attributes, cast(Attribute[])[]); + testRandomAccessRange(yukari.all.tags, cast(Tag[])[]); + + testRandomAccessRange(sana.attributes, cast(Attribute[])[]); + testRandomAccessRange(sana.tags, cast(Tag[])[]); + testRandomAccessRange(sana.namespaces, cast(NSA[])[], &namespaceEquals); + testRandomAccessRange(sana.all.attributes, cast(Attribute[])[]); + testRandomAccessRange(sana.all.tags, cast(Tag[])[]); + + testRandomAccessRange(people.attributes, [new Attribute("b", Value(2))]); + testRandomAccessRange(people.tags, [chiyo, yukari, tomo]); + testRandomAccessRange(people.namespaces, [NSA("visitor"), NSA("")], &namespaceEquals); + 
testRandomAccessRange(people.namespaces[0].attributes, [new Attribute("visitor", "a", Value(1))]); + testRandomAccessRange(people.namespaces[1].attributes, [new Attribute("b", Value(2))]); + testRandomAccessRange(people.namespaces[0].tags, [sana, hayama]); + testRandomAccessRange(people.namespaces[1].tags, [chiyo, yukari, tomo]); + assert("visitor" in people.namespaces); + assert("" in people.namespaces); + assert("foobar" !in people.namespaces); + testRandomAccessRange(people.namespaces["visitor"].attributes, [new Attribute("visitor", "a", Value(1))]); + testRandomAccessRange(people.namespaces[ ""].attributes, [new Attribute("b", Value(2))]); + testRandomAccessRange(people.namespaces["visitor"].tags, [sana, hayama]); + testRandomAccessRange(people.namespaces[ ""].tags, [chiyo, yukari, tomo]); + testRandomAccessRange(people.all.attributes, [ + new Attribute("visitor", "a", Value(1)), + new Attribute("b", Value(2)), + ]); + testRandomAccessRange(people.all.tags, [chiyo, yukari, sana, tomo, hayama]); + + people.attributes["b"][0].name = "b_"; + people.namespaces["visitor"].attributes["a"][0].name = "a_"; + people.tags["chiyo"][0].name = "chiyo_"; + people.namespaces["visitor"].tags["sana"][0].name = "sana_"; + + assert("b_" in people.attributes); + assert("a_" in people.namespaces["visitor"].attributes); + assert("chiyo_" in people.tags); + assert("sana_" in people.namespaces["visitor"].tags); + + assert(people.attributes["b_"][0] == new Attribute("b_", Value(2))); + assert(people.namespaces["visitor"].attributes["a_"][0] == new Attribute("visitor", "a_", Value(1))); + assert(people.tags["chiyo_"][0] == chiyo_); + assert(people.namespaces["visitor"].tags["sana_"][0] == sana_); + + assert("b" !in people.attributes); + assert("a" !in people.namespaces["visitor"].attributes); + assert("chiyo" !in people.tags); + assert("sana" !in people.namespaces["visitor"].tags); + + assert(people.maybe.attributes["b"].length == 0); + 
assert(people.maybe.namespaces["visitor"].attributes["a"].length == 0); + assert(people.maybe.tags["chiyo"].length == 0); + assert(people.maybe.namespaces["visitor"].tags["sana"].length == 0); + + people.tags["tomo"][0].remove(); + people.namespaces["visitor"].tags["hayama"][0].remove(); + people.tags["chiyo_"][0].remove(); + testRandomAccessRange(people.tags, [yukari]); + testRandomAccessRange(people.namespaces, [NSA("visitor"), NSA("")], &namespaceEquals); + testRandomAccessRange(people.namespaces[0].tags, [sana_]); + testRandomAccessRange(people.namespaces[1].tags, [yukari]); + assert("visitor" in people.namespaces); + assert("" in people.namespaces); + assert("foobar" !in people.namespaces); + testRandomAccessRange(people.namespaces["visitor"].tags, [sana_]); + testRandomAccessRange(people.namespaces[ ""].tags, [yukari]); + testRandomAccessRange(people.all.tags, [yukari, sana_]); + + people.attributes["b_"][0].namespace = "_"; + people.namespaces["visitor"].attributes["a_"][0].namespace = "visitor_"; + assert("_" in people.namespaces); + assert("visitor_" in people.namespaces); + assert("" in people.namespaces); + assert("visitor" in people.namespaces); + people.namespaces["visitor"].tags["sana_"][0].namespace = "visitor_"; + assert("_" in people.namespaces); + assert("visitor_" in people.namespaces); + assert("" in people.namespaces); + assert("visitor" !in people.namespaces); + + assert(people.namespaces["_" ].attributes["b_"][0] == new Attribute("_", "b_", Value(2))); + assert(people.namespaces["visitor_"].attributes["a_"][0] == new Attribute("visitor_", "a_", Value(1))); + assert(people.namespaces["visitor_"].tags["sana_"][0] == sanaVisitor_); + + people.tags["yukari"][0].remove(); + people.namespaces["visitor_"].tags["sana_"][0].remove(); + people.namespaces["visitor_"].attributes["a_"][0].namespace = "visitor"; + people.namespaces["_"].attributes["b_"][0].namespace = ""; + testRandomAccessRange(people.tags, cast(Tag[])[]); + 
testRandomAccessRange(people.namespaces, [NSA("visitor"), NSA("")], &namespaceEquals); + testRandomAccessRange(people.namespaces[0].tags, cast(Tag[])[]); + testRandomAccessRange(people.namespaces[1].tags, cast(Tag[])[]); + assert("visitor" in people.namespaces); + assert("" in people.namespaces); + assert("foobar" !in people.namespaces); + testRandomAccessRange(people.namespaces["visitor"].tags, cast(Tag[])[]); + testRandomAccessRange(people.namespaces[ ""].tags, cast(Tag[])[]); + testRandomAccessRange(people.all.tags, cast(Tag[])[]); + + people.namespaces["visitor"].attributes["a_"][0].remove(); + testRandomAccessRange(people.attributes, [new Attribute("b_", Value(2))]); + testRandomAccessRange(people.namespaces, [NSA("")], &namespaceEquals); + testRandomAccessRange(people.namespaces[0].attributes, [new Attribute("b_", Value(2))]); + assert("visitor" !in people.namespaces); + assert("" in people.namespaces); + assert("foobar" !in people.namespaces); + testRandomAccessRange(people.namespaces[""].attributes, [new Attribute("b_", Value(2))]); + testRandomAccessRange(people.all.attributes, [ + new Attribute("b_", Value(2)), + ]); + + people.attributes["b_"][0].remove(); + testRandomAccessRange(people.attributes, cast(Attribute[])[]); + testRandomAccessRange(people.namespaces, cast(NSA[])[], &namespaceEquals); + assert("visitor" !in people.namespaces); + assert("" !in people.namespaces); + assert("foobar" !in people.namespaces); + testRandomAccessRange(people.all.attributes, cast(Attribute[])[]); +} + +// Regression test, issue #11: https://github.com/Abscissa/SDLang-D/issues/11 +version(sdlangUnittest) +unittest +{ + import sdlang.parser; + writeln("ast: Regression test issue #11..."); + stdout.flush(); + + auto root = parseSource( +`// +a`); + + assert("a" in root.tags); + + root = parseSource( +`// +parent { + child +} +`); + + auto child = new Tag( + null, "", "child", + null, null, null + ); + + assert("parent" in root.tags); + assert("child" !in root.tags); + 
testRandomAccessRange(root.tags["parent"][0].tags, [child]); + assert("child" in root.tags["parent"][0].tags); +} diff --git a/source/dub/internal/sdlang/exception.d b/source/dub/internal/sdlang/exception.d new file mode 100644 index 0000000..931285a --- /dev/null +++ b/source/dub/internal/sdlang/exception.d @@ -0,0 +1,45 @@ +// SDLang-D +// Written in the D programming language. + +module dub.internal.sdlang.exception; + +version (Have_sdlang_d) public import sdlang.exception; +else: + +import std.exception; +import std.string; + +import dub.internal.sdlang.util; + +abstract class SDLangException : Exception +{ + this(string msg) { super(msg); } +} + +class SDLangParseException : SDLangException +{ + Location location; + bool hasLocation; + + this(string msg) + { + hasLocation = false; + super(msg); + } + + this(Location location, string msg) + { + hasLocation = true; + super("%s: %s".format(location.toString(), msg)); + } +} + +class SDLangValidationException : SDLangException +{ + this(string msg) { super(msg); } +} + +class SDLangRangeException : SDLangException +{ + this(string msg) { super(msg); } +} diff --git a/source/dub/internal/sdlang/lexer.d b/source/dub/internal/sdlang/lexer.d new file mode 100644 index 0000000..2295ed7 --- /dev/null +++ b/source/dub/internal/sdlang/lexer.d @@ -0,0 +1,2074 @@ +// SDLang-D +// Written in the D programming language. 
+ +module dub.internal.sdlang.lexer; + +version (Have_sdlang_d) public import sdlang.lexer; +else: + +import std.algorithm; +import std.array; +import std.base64; +import std.bigint; +import std.conv; +import std.datetime; +import std.file; +import std.traits; +import std.typecons; +import std.uni; +import std.utf; +import std.variant; + +import dub.internal.sdlang.exception; +import dub.internal.sdlang.symbol; +import dub.internal.sdlang.token; +import dub.internal.sdlang.util; + +alias dub.internal.sdlang.util.startsWith startsWith; + +Token[] lexFile(string filename) +{ + auto source = cast(string)read(filename); + return lexSource(source, filename); +} + +Token[] lexSource(string source, string filename=null) +{ + auto lexer = scoped!Lexer(source, filename); + + // Can't use 'std.array.array(Range)' because 'lexer' is scoped + // and therefore cannot have its reference copied. + Appender!(Token[]) tokens; + foreach(tok; lexer) + tokens.put(tok); + + return tokens.data; +} + +// Kind of a poor-man's yield, but fast. +// Only to be used inside Lexer.popFront (and Lexer.this). +private template accept(string symbolName) +{ + static assert(symbolName != "Value", "Value symbols must also take a value."); + enum accept = acceptImpl!(symbolName, "null"); +} +private template accept(string symbolName, string value) +{ + static assert(symbolName == "Value", "Only a Value symbol can take a value."); + enum accept = acceptImpl!(symbolName, value); +} +private template accept(string symbolName, string value, string startLocation, string endLocation) +{ + static assert(symbolName == "Value", "Only a Value symbol can take a value."); + enum accept = (" + { + _front = makeToken!"~symbolName.stringof~"; + _front.value = "~value~"; + _front.location = "~(startLocation==""? "tokenStart" : startLocation)~"; + _front.data = source[ + "~(startLocation==""? "tokenStart.index" : startLocation)~" + .. + "~(endLocation==""? 
"location.index" : endLocation)~" + ]; + return; + } + ").replace("\n", ""); +} +private template acceptImpl(string symbolName, string value) +{ + enum acceptImpl = (" + { + _front = makeToken!"~symbolName.stringof~"; + _front.value = "~value~"; + return; + } + ").replace("\n", ""); +} + +class Lexer +{ + string source; + string filename; + Location location; /// Location of current character in source + + private dchar ch; // Current character + private dchar nextCh; // Lookahead character + private size_t nextPos; // Position of lookahead character (an index into source) + private bool hasNextCh; // If false, then there's no more lookahead, just EOF + private size_t posAfterLookahead; // Position after lookahead character (an index into source) + + private Location tokenStart; // The starting location of the token being lexed + + // Length so far of the token being lexed, not including current char + private size_t tokenLength; // Length in UTF-8 code units + private size_t tokenLength32; // Length in UTF-32 code units + + // Slight kludge: + // If a numeric fragment is found after a Date (separated by arbitrary + // whitespace), it could be the "hours" part of a DateTime, or it could + // be a separate numeric literal that simply follows a plain Date. If the + // latter, then the Date must be emitted, but numeric fragment that was + // found after it needs to be saved for the the lexer's next iteration. + // + // It's a slight kludge, and could instead be implemented as a slightly + // kludgey parser hack, but it's the only situation where SDL's lexing + // needs to lookahead more than one character, so this is good enough. 
+ private struct LookaheadTokenInfo + { + bool exists = false; + string numericFragment = ""; + bool isNegative = false; + Location tokenStart; + } + private LookaheadTokenInfo lookaheadTokenInfo; + + this(string source=null, string filename=null) + { + this.filename = filename; + this.source = source; + + _front = Token(symbol!"Error", Location()); + lookaheadTokenInfo = LookaheadTokenInfo.init; + + if( source.startsWith( ByteOrderMarks[BOM.UTF8] ) ) + { + source = source[ ByteOrderMarks[BOM.UTF8].length .. $ ]; + this.source = source; + } + + foreach(bom; ByteOrderMarks) + if( source.startsWith(bom) ) + error(Location(filename,0,0,0), "SDL spec only supports UTF-8, not UTF-16 or UTF-32"); + + if(source == "") + mixin(accept!"EOF"); + + // Prime everything + hasNextCh = true; + nextCh = source.decode(posAfterLookahead); + advanceChar(ErrorOnEOF.Yes); + location = Location(filename, 0, 0, 0); + popFront(); + } + + @property bool empty() + { + return _front.symbol == symbol!"EOF"; + } + + Token _front; + @property Token front() + { + return _front; + } + + @property bool isEOF() + { + return location.index == source.length && !lookaheadTokenInfo.exists; + } + + private void error(string msg) + { + error(location, msg); + } + + private void error(Location loc, string msg) + { + throw new SDLangParseException(loc, "Error: "~msg); + } + + private Token makeToken(string symbolName)() + { + auto tok = Token(symbol!symbolName, tokenStart); + tok.data = tokenData; + return tok; + } + + private @property string tokenData() + { + return source[ tokenStart.index .. 
location.index ]; + } + + /// Check the lookahead character + private bool lookahead(dchar ch) + { + return hasNextCh && nextCh == ch; + } + + private bool lookahead(bool function(dchar) condition) + { + return hasNextCh && condition(nextCh); + } + + private static bool isNewline(dchar ch) + { + return ch == '\n' || ch == '\r' || ch == lineSep || ch == paraSep; + } + + /// Returns the length of the newline sequence, or zero if the current + /// character is not a newline + /// + /// Note that there are only single character sequences and the two + /// character sequence `\r\n` as used on Windows. + private size_t isAtNewline() + { + if(ch == '\n' || ch == lineSep || ch == paraSep) return 1; + else if(ch == '\r') return lookahead('\n') ? 2 : 1; + else return 0; + } + + /// Is 'ch' a valid base 64 character? + private bool isBase64(dchar ch) + { + if(ch >= 'A' && ch <= 'Z') + return true; + + if(ch >= 'a' && ch <= 'z') + return true; + + if(ch >= '0' && ch <= '9') + return true; + + return ch == '+' || ch == '/' || ch == '='; + } + + /// Is the current character one that's allowed + /// immediately *after* an int/float literal? + private bool isEndOfNumber() + { + if(isEOF) + return true; + + return !isDigit(ch) && ch != ':' && ch != '_' && !isAlpha(ch); + } + + /// Is current character the last one in an ident? + private bool isEndOfIdentCached = false; + private bool _isEndOfIdent; + private bool isEndOfIdent() + { + if(!isEndOfIdentCached) + { + if(!hasNextCh) + _isEndOfIdent = true; + else + _isEndOfIdent = !isIdentChar(nextCh); + + isEndOfIdentCached = true; + } + + return _isEndOfIdent; + } + + /// Is 'ch' a character that's allowed *somewhere* in an identifier? + private bool isIdentChar(dchar ch) + { + if(isAlpha(ch)) + return true; + + else if(isNumber(ch)) + return true; + + else + return + ch == '-' || + ch == '_' || + ch == '.' 
|| + ch == '$'; + } + + private bool isDigit(dchar ch) + { + return ch >= '0' && ch <= '9'; + } + + private enum KeywordResult + { + Accept, // Keyword is matched + Continue, // Keyword is not matched *yet* + Failed, // Keyword doesn't match + } + private KeywordResult checkKeyword(dstring keyword32) + { + // Still within length of keyword + if(tokenLength32 < keyword32.length) + { + if(ch == keyword32[tokenLength32]) + return KeywordResult.Continue; + else + return KeywordResult.Failed; + } + + // At position after keyword + else if(tokenLength32 == keyword32.length) + { + if(isEOF || !isIdentChar(ch)) + { + debug assert(tokenData == to!string(keyword32)); + return KeywordResult.Accept; + } + else + return KeywordResult.Failed; + } + + assert(0, "Fell off end of keyword to check"); + } + + enum ErrorOnEOF { No, Yes } + + /// Advance one code point. + private void advanceChar(ErrorOnEOF errorOnEOF) + { + if(auto cnt = isAtNewline()) + { + if (cnt == 1) + location.line++; + location.col = 0; + } + else + location.col++; + + location.index = nextPos; + + nextPos = posAfterLookahead; + ch = nextCh; + + if(!hasNextCh) + { + if(errorOnEOF == ErrorOnEOF.Yes) + error("Unexpected end of file"); + + return; + } + + tokenLength32++; + tokenLength = location.index - tokenStart.index; + + if(nextPos == source.length) + { + nextCh = dchar.init; + hasNextCh = false; + return; + } + + nextCh = source.decode(posAfterLookahead); + isEndOfIdentCached = false; + } + + /// Advances the specified amount of characters + private void advanceChar(size_t count, ErrorOnEOF errorOnEOF) + { + while(count-- > 0) + advanceChar(errorOnEOF); + } + + void popFront() + { + // -- Main Lexer ------------- + + eatWhite(); + + if(isEOF) + mixin(accept!"EOF"); + + tokenStart = location; + tokenLength = 0; + tokenLength32 = 0; + isEndOfIdentCached = false; + + if(lookaheadTokenInfo.exists) + { + tokenStart = lookaheadTokenInfo.tokenStart; + + auto prevLATokenInfo = lookaheadTokenInfo; + 
lookaheadTokenInfo = LookaheadTokenInfo.init; + lexNumeric(prevLATokenInfo); + return; + } + + if(ch == '=') + { + advanceChar(ErrorOnEOF.No); + mixin(accept!"="); + } + + else if(ch == '{') + { + advanceChar(ErrorOnEOF.No); + mixin(accept!"{"); + } + + else if(ch == '}') + { + advanceChar(ErrorOnEOF.No); + mixin(accept!"}"); + } + + else if(ch == ':') + { + advanceChar(ErrorOnEOF.No); + mixin(accept!":"); + } + + else if(ch == ';') + { + advanceChar(ErrorOnEOF.No); + mixin(accept!"EOL"); + } + + else if(auto cnt = isAtNewline()) + { + advanceChar(cnt, ErrorOnEOF.No); + mixin(accept!"EOL"); + } + + else if(isAlpha(ch) || ch == '_') + lexIdentKeyword(); + + else if(ch == '"') + lexRegularString(); + + else if(ch == '`') + lexRawString(); + + else if(ch == '\'') + lexCharacter(); + + else if(ch == '[') + lexBinary(); + + else if(ch == '-' || ch == '.' || isDigit(ch)) + lexNumeric(); + + else + { + advanceChar(ErrorOnEOF.No); + error("Syntax error"); + } + } + + /// Lex Ident or Keyword + private void lexIdentKeyword() + { + assert(isAlpha(ch) || ch == '_'); + + // Keyword + struct Key + { + dstring name; + Value value; + bool failed = false; + } + static Key[5] keywords; + static keywordsInited = false; + if(!keywordsInited) + { + // Value (as a std.variant-based type) can't be statically inited + keywords[0] = Key("true", Value(true )); + keywords[1] = Key("false", Value(false)); + keywords[2] = Key("on", Value(true )); + keywords[3] = Key("off", Value(false)); + keywords[4] = Key("null", Value(null )); + keywordsInited = true; + } + + foreach(ref key; keywords) + key.failed = false; + + auto numKeys = keywords.length; + + do + { + foreach(ref key; keywords) + if(!key.failed) + { + final switch(checkKeyword(key.name)) + { + case KeywordResult.Accept: + mixin(accept!("Value", "key.value")); + + case KeywordResult.Continue: + break; + + case KeywordResult.Failed: + key.failed = true; + numKeys--; + break; + } + } + + if(numKeys == 0) + { + lexIdent(); + return; + } + 
+ advanceChar(ErrorOnEOF.No); + + } while(!isEOF); + + foreach(ref key; keywords) + if(!key.failed) + if(key.name.length == tokenLength32+1) + mixin(accept!("Value", "key.value")); + + mixin(accept!"Ident"); + } + + /// Lex Ident + private void lexIdent() + { + if(tokenLength == 0) + assert(isAlpha(ch) || ch == '_'); + + while(!isEOF && isIdentChar(ch)) + advanceChar(ErrorOnEOF.No); + + mixin(accept!"Ident"); + } + + /// Lex regular string + private void lexRegularString() + { + assert(ch == '"'); + + Appender!string buf; + size_t spanStart = nextPos; + + // Doesn't include current character + void updateBuf() + { + if(location.index == spanStart) + return; + + buf.put( source[spanStart..location.index] ); + } + + advanceChar(ErrorOnEOF.Yes); + while(ch != '"') + { + if(ch == '\\') + { + updateBuf(); + + bool wasEscSequence = true; + if(hasNextCh) + { + switch(nextCh) + { + case 'n': buf.put('\n'); break; + case 'r': buf.put('\r'); break; + case 't': buf.put('\t'); break; + case '"': buf.put('\"'); break; + case '\\': buf.put('\\'); break; + default: wasEscSequence = false; break; + } + } + + if(wasEscSequence) + { + advanceChar(ErrorOnEOF.Yes); + spanStart = nextPos; + } + else + { + eatWhite(false); + spanStart = location.index; + } + } + + else if(isNewline(ch)) + error("Unescaped newlines are only allowed in raw strings, not regular strings."); + + advanceChar(ErrorOnEOF.Yes); + } + + updateBuf(); + advanceChar(ErrorOnEOF.No); // Skip closing double-quote + mixin(accept!("Value", "buf.data")); + } + + /// Lex raw string + private void lexRawString() + { + assert(ch == '`'); + + do + advanceChar(ErrorOnEOF.Yes); + while(ch != '`'); + + advanceChar(ErrorOnEOF.No); // Skip closing back-tick + mixin(accept!("Value", "tokenData[1..$-1]")); + } + + /// Lex character literal + private void lexCharacter() + { + assert(ch == '\''); + advanceChar(ErrorOnEOF.Yes); // Skip opening single-quote + + dchar value; + if(ch == '\\') + { + advanceChar(ErrorOnEOF.Yes); // Skip 
escape backslash + switch(ch) + { + case 'n': value = '\n'; break; + case 'r': value = '\r'; break; + case 't': value = '\t'; break; + case '\'': value = '\''; break; + case '\\': value = '\\'; break; + default: error("Invalid escape sequence."); + } + } + else if(isNewline(ch)) + error("Newline not alowed in character literal."); + else + value = ch; + advanceChar(ErrorOnEOF.Yes); // Skip the character itself + + if(ch == '\'') + advanceChar(ErrorOnEOF.No); // Skip closing single-quote + else + error("Expected closing single-quote."); + + mixin(accept!("Value", "value")); + } + + /// Lex base64 binary literal + private void lexBinary() + { + assert(ch == '['); + advanceChar(ErrorOnEOF.Yes); + + void eatBase64Whitespace() + { + while(!isEOF && isWhite(ch)) + { + if(isNewline(ch)) + advanceChar(ErrorOnEOF.Yes); + + if(!isEOF && isWhite(ch)) + eatWhite(); + } + } + + eatBase64Whitespace(); + + // Iterates all valid base64 characters, ending at ']'. + // Skips all whitespace. Throws on invalid chars. + struct Base64InputRange + { + Lexer lexer; + private bool isInited = false; + private int numInputCharsMod4 = 0; + + @property bool empty() + { + if(lexer.ch == ']') + { + if(numInputCharsMod4 != 0) + lexer.error("Length of Base64 encoding must be a multiple of 4. ("~to!string(numInputCharsMod4)~")"); + + return true; + } + + return false; + } + + @property dchar front() + { + return lexer.ch; + } + + void popFront() + { + auto lex = lexer; + + if(!isInited) + { + if(lexer.isBase64(lexer.ch)) + { + numInputCharsMod4++; + numInputCharsMod4 %= 4; + } + + isInited = true; + } + + lex.advanceChar(lex.ErrorOnEOF.Yes); + + eatBase64Whitespace(); + + if(lex.isEOF) + lex.error("Unexpected end of file."); + + if(lex.ch != ']') + { + if(!lex.isBase64(lex.ch)) + lex.error("Invalid character in base64 binary literal."); + + numInputCharsMod4++; + numInputCharsMod4 %= 4; + } + } + } + + // This is a slow ugly hack. 
It's necessary because Base64.decode + // currently requires the source to have known length. + //TODO: Remove this when DMD issue #9543 is fixed. + dchar[] tmpBuf = array(Base64InputRange(this)); + + Appender!(ubyte[]) outputBuf; + // Ugly workaround for DMD issue #9102 + //TODO: Remove this when DMD #9102 is fixed + struct OutputBuf + { + void put(ubyte ch) + { + outputBuf.put(ch); + } + } + + try + //Base64.decode(Base64InputRange(this), OutputBuf()); + Base64.decode(tmpBuf, OutputBuf()); + + //TODO: Starting with dmd 2.062, this should be a Base64Exception + catch(Exception e) + error("Invalid character in base64 binary literal."); + + advanceChar(ErrorOnEOF.No); // Skip ']' + mixin(accept!("Value", "outputBuf.data")); + } + + private BigInt toBigInt(bool isNegative, string absValue) + { + auto num = BigInt(absValue); + assert(num >= 0); + + if(isNegative) + num = -num; + + return num; + } + + /// Lex [0-9]+, but without emitting a token. + /// This is used by the other numeric parsing functions. + private string lexNumericFragment() + { + if(!isDigit(ch)) + error("Expected a digit 0-9."); + + auto spanStart = location.index; + + do + { + advanceChar(ErrorOnEOF.No); + } while(!isEOF && isDigit(ch)); + + return source[spanStart..location.index]; + } + + /// Lex anything that starts with 0-9 or '-'. Ints, floats, dates, etc. + private void lexNumeric(LookaheadTokenInfo laTokenInfo = LookaheadTokenInfo.init) + { + bool isNegative; + string firstFragment; + if(laTokenInfo.exists) + { + firstFragment = laTokenInfo.numericFragment; + isNegative = laTokenInfo.isNegative; + } + else + { + assert(ch == '-' || ch == '.' || isDigit(ch)); + + // Check for negative + isNegative = ch == '-'; + if(isNegative) + advanceChar(ErrorOnEOF.Yes); + + // Some floating point with omitted leading zero? + if(ch == '.') + { + lexFloatingPoint(""); + return; + } + + firstFragment = lexNumericFragment(); + } + + // Long integer (64-bit signed)? 
+ if(ch == 'L' || ch == 'l') + { + advanceChar(ErrorOnEOF.No); + + // BigInt(long.min) is a workaround for DMD issue #9548 + auto num = toBigInt(isNegative, firstFragment); + if(num < BigInt(long.min) || num > long.max) + error(tokenStart, "Value doesn't fit in 64-bit signed long integer: "~to!string(num)); + + mixin(accept!("Value", "num.toLong()")); + } + + // Float (32-bit signed)? + else if(ch == 'F' || ch == 'f') + { + auto value = to!float(tokenData); + advanceChar(ErrorOnEOF.No); + mixin(accept!("Value", "value")); + } + + // Double float (64-bit signed) with suffix? + else if((ch == 'D' || ch == 'd') && !lookahead(':') + ) + { + auto value = to!double(tokenData); + advanceChar(ErrorOnEOF.No); + mixin(accept!("Value", "value")); + } + + // Decimal (128+ bits signed)? + else if( + (ch == 'B' || ch == 'b') && + (lookahead('D') || lookahead('d')) + ) + { + auto value = to!real(tokenData); + advanceChar(ErrorOnEOF.No); + advanceChar(ErrorOnEOF.No); + mixin(accept!("Value", "value")); + } + + // Some floating point? + else if(ch == '.') + lexFloatingPoint(firstFragment); + + // Some date? + else if(ch == '/' && hasNextCh && isDigit(nextCh)) + lexDate(isNegative, firstFragment); + + // Some time span? + else if(ch == ':' || ch == 'd') + lexTimeSpan(isNegative, firstFragment); + + // Integer (32-bit signed)? + else if(isEndOfNumber()) + { + auto num = toBigInt(isNegative, firstFragment); + if(num < int.min || num > int.max) + error(tokenStart, "Value doesn't fit in 32-bit signed integer: "~to!string(num)); + + mixin(accept!("Value", "num.toInt()")); + } + + // Invalid suffix + else + error("Invalid integer suffix."); + } + + /// Lex any floating-point literal (after the initial numeric fragment was lexed) + private void lexFloatingPoint(string firstPart) + { + assert(ch == '.'); + advanceChar(ErrorOnEOF.No); + + auto secondPart = lexNumericFragment(); + + try + { + // Double float (64-bit signed) with suffix? 
+ if(ch == 'D' || ch == 'd') + { + auto value = to!double(tokenData); + advanceChar(ErrorOnEOF.No); + mixin(accept!("Value", "value")); + } + + // Float (32-bit signed)? + else if(ch == 'F' || ch == 'f') + { + auto value = to!float(tokenData); + advanceChar(ErrorOnEOF.No); + mixin(accept!("Value", "value")); + } + + // Decimal (128+ bits signed)? + else if(ch == 'B' || ch == 'b') + { + auto value = to!real(tokenData); + advanceChar(ErrorOnEOF.Yes); + + if(!isEOF && (ch == 'D' || ch == 'd')) + { + advanceChar(ErrorOnEOF.No); + if(isEndOfNumber()) + mixin(accept!("Value", "value")); + } + + error("Invalid floating point suffix."); + } + + // Double float (64-bit signed) without suffix? + else if(isEOF || !isIdentChar(ch)) + { + auto value = to!double(tokenData); + mixin(accept!("Value", "value")); + } + + // Invalid suffix + else + error("Invalid floating point suffix."); + } + catch(ConvException e) + error("Invalid floating point literal."); + } + + private Date makeDate(bool isNegative, string yearStr, string monthStr, string dayStr) + { + BigInt biTmp; + + biTmp = BigInt(yearStr); + if(isNegative) + biTmp = -biTmp; + if(biTmp < int.min || biTmp > int.max) + error(tokenStart, "Date's year is out of range. 
(Must fit within a 32-bit signed int.)"); + auto year = biTmp.toInt(); + + biTmp = BigInt(monthStr); + if(biTmp < 1 || biTmp > 12) + error(tokenStart, "Date's month is out of range."); + auto month = biTmp.toInt(); + + biTmp = BigInt(dayStr); + if(biTmp < 1 || biTmp > 31) + error(tokenStart, "Date's month is out of range."); + auto day = biTmp.toInt(); + + return Date(year, month, day); + } + + private DateTimeFrac makeDateTimeFrac( + bool isNegative, Date date, string hourStr, string minuteStr, + string secondStr, string millisecondStr + ) + { + BigInt biTmp; + + biTmp = BigInt(hourStr); + if(biTmp < int.min || biTmp > int.max) + error(tokenStart, "Datetime's hour is out of range."); + auto numHours = biTmp.toInt(); + + biTmp = BigInt(minuteStr); + if(biTmp < 0 || biTmp > int.max) + error(tokenStart, "Datetime's minute is out of range."); + auto numMinutes = biTmp.toInt(); + + int numSeconds = 0; + if(secondStr != "") + { + biTmp = BigInt(secondStr); + if(biTmp < 0 || biTmp > int.max) + error(tokenStart, "Datetime's second is out of range."); + numSeconds = biTmp.toInt(); + } + + int millisecond = 0; + if(millisecondStr != "") + { + biTmp = BigInt(millisecondStr); + if(biTmp < 0 || biTmp > int.max) + error(tokenStart, "Datetime's millisecond is out of range."); + millisecond = biTmp.toInt(); + + if(millisecondStr.length == 1) + millisecond *= 100; + else if(millisecondStr.length == 2) + millisecond *= 10; + } + + Duration fracSecs = millisecond.msecs; + + auto offset = hours(numHours) + minutes(numMinutes) + seconds(numSeconds); + + if(isNegative) + { + offset = -offset; + fracSecs = -fracSecs; + } + + return DateTimeFrac(DateTime(date) + offset, fracSecs); + } + + private Duration makeDuration( + bool isNegative, string dayStr, + string hourStr, string minuteStr, string secondStr, + string millisecondStr + ) + { + BigInt biTmp; + + long day = 0; + if(dayStr != "") + { + biTmp = BigInt(dayStr); + if(biTmp < long.min || biTmp > long.max) + error(tokenStart, "Time 
span's day is out of range."); + day = biTmp.toLong(); + } + + biTmp = BigInt(hourStr); + if(biTmp < long.min || biTmp > long.max) + error(tokenStart, "Time span's hour is out of range."); + auto hour = biTmp.toLong(); + + biTmp = BigInt(minuteStr); + if(biTmp < long.min || biTmp > long.max) + error(tokenStart, "Time span's minute is out of range."); + auto minute = biTmp.toLong(); + + biTmp = BigInt(secondStr); + if(biTmp < long.min || biTmp > long.max) + error(tokenStart, "Time span's second is out of range."); + auto second = biTmp.toLong(); + + long millisecond = 0; + if(millisecondStr != "") + { + biTmp = BigInt(millisecondStr); + if(biTmp < long.min || biTmp > long.max) + error(tokenStart, "Time span's millisecond is out of range."); + millisecond = biTmp.toLong(); + + if(millisecondStr.length == 1) + millisecond *= 100; + else if(millisecondStr.length == 2) + millisecond *= 10; + } + + auto duration = + dur!"days" (day) + + dur!"hours" (hour) + + dur!"minutes"(minute) + + dur!"seconds"(second) + + dur!"msecs" (millisecond); + + if(isNegative) + duration = -duration; + + return duration; + } + + // This has to reproduce some weird corner case behaviors from the + // original Java version of SDL. So some of this may seem weird. + private Nullable!Duration getTimeZoneOffset(string str) + { + if(str.length < 2) + return Nullable!Duration(); // Unknown timezone + + if(str[0] != '+' && str[0] != '-') + return Nullable!Duration(); // Unknown timezone + + auto isNegative = str[0] == '-'; + + string numHoursStr; + string numMinutesStr; + if(str[1] == ':') + { + numMinutesStr = str[1..$]; + numHoursStr = ""; + } + else + { + numMinutesStr = str.find(':'); + numHoursStr = str[1 .. 
$-numMinutesStr.length]; + } + + long numHours = 0; + long numMinutes = 0; + bool isUnknown = false; + try + { + switch(numHoursStr.length) + { + case 0: + if(numMinutesStr.length == 3) + { + numHours = 0; + numMinutes = to!long(numMinutesStr[1..$]); + } + else + isUnknown = true; + break; + + case 1: + case 2: + if(numMinutesStr.length == 0) + { + numHours = to!long(numHoursStr); + numMinutes = 0; + } + else if(numMinutesStr.length == 3) + { + numHours = to!long(numHoursStr); + numMinutes = to!long(numMinutesStr[1..$]); + } + else + isUnknown = true; + break; + + default: + if(numMinutesStr.length == 0) + { + // Yes, this is correct + numHours = 0; + numMinutes = to!long(numHoursStr[1..$]); + } + else + isUnknown = true; + break; + } + } + catch(ConvException e) + isUnknown = true; + + if(isUnknown) + return Nullable!Duration(); // Unknown timezone + + auto timeZoneOffset = hours(numHours) + minutes(numMinutes); + if(isNegative) + timeZoneOffset = -timeZoneOffset; + + // Timezone valid + return Nullable!Duration(timeZoneOffset); + } + + /// Lex date or datetime (after the initial numeric fragment was lexed) + private void lexDate(bool isDateNegative, string yearStr) + { + assert(ch == '/'); + + // Lex months + advanceChar(ErrorOnEOF.Yes); // Skip '/' + auto monthStr = lexNumericFragment(); + + // Lex days + if(ch != '/') + error("Invalid date format: Missing days."); + advanceChar(ErrorOnEOF.Yes); // Skip '/' + auto dayStr = lexNumericFragment(); + + auto date = makeDate(isDateNegative, yearStr, monthStr, dayStr); + + if(!isEndOfNumber() && ch != '/') + error("Dates cannot have suffixes."); + + // Date? 
+ if(isEOF) + mixin(accept!("Value", "date")); + + auto endOfDate = location; + + while( + !isEOF && + ( ch == '\\' || ch == '/' || (isWhite(ch) && !isNewline(ch)) ) + ) + { + if(ch == '\\' && hasNextCh && isNewline(nextCh)) + { + advanceChar(ErrorOnEOF.Yes); + if(isAtNewline()) + advanceChar(ErrorOnEOF.Yes); + advanceChar(ErrorOnEOF.No); + } + + eatWhite(); + } + + // Date? + if(isEOF || (!isDigit(ch) && ch != '-')) + mixin(accept!("Value", "date", "", "endOfDate.index")); + + auto startOfTime = location; + + // Is time negative? + bool isTimeNegative = ch == '-'; + if(isTimeNegative) + advanceChar(ErrorOnEOF.Yes); + + // Lex hours + auto hourStr = ch == '.'? "" : lexNumericFragment(); + + // Lex minutes + if(ch != ':') + { + // No minutes found. Therefore we had a plain Date followed + // by a numeric literal, not a DateTime. + lookaheadTokenInfo.exists = true; + lookaheadTokenInfo.numericFragment = hourStr; + lookaheadTokenInfo.isNegative = isTimeNegative; + lookaheadTokenInfo.tokenStart = startOfTime; + mixin(accept!("Value", "date", "", "endOfDate.index")); + } + advanceChar(ErrorOnEOF.Yes); // Skip ':' + auto minuteStr = lexNumericFragment(); + + // Lex seconds, if exists + string secondStr; + if(ch == ':') + { + advanceChar(ErrorOnEOF.Yes); // Skip ':' + secondStr = lexNumericFragment(); + } + + // Lex milliseconds, if exists + string millisecondStr; + if(ch == '.') + { + advanceChar(ErrorOnEOF.Yes); // Skip '.' 
+ millisecondStr = lexNumericFragment(); + } + + auto dateTimeFrac = makeDateTimeFrac(isTimeNegative, date, hourStr, minuteStr, secondStr, millisecondStr); + + // Lex zone, if exists + if(ch == '-') + { + advanceChar(ErrorOnEOF.Yes); // Skip '-' + auto timezoneStart = location; + + if(!isAlpha(ch)) + error("Invalid timezone format."); + + while(!isEOF && !isWhite(ch)) + advanceChar(ErrorOnEOF.No); + + auto timezoneStr = source[timezoneStart.index..location.index]; + if(timezoneStr.startsWith("GMT")) + { + auto isoPart = timezoneStr["GMT".length..$]; + auto offset = getTimeZoneOffset(isoPart); + + if(offset.isNull()) + { + // Unknown time zone + mixin(accept!("Value", "DateTimeFracUnknownZone(dateTimeFrac.dateTime, dateTimeFrac.fracSecs, timezoneStr)")); + } + else + { + auto timezone = new immutable SimpleTimeZone(offset.get()); + static if (__VERSION__ >= 2067) auto fsecs = dateTimeFrac.fracSecs; + else auto fsecs = FracSec.from!"hnsecs"(dateTimeFrac.fracSecs.total!"hnsecs"); + mixin(accept!("Value", "SysTime(dateTimeFrac.dateTime, fsecs, timezone)")); + } + } + + try + { + auto timezone = PosixTimeZone.getTimeZone(timezoneStr); + if(timezone) { + static if (__VERSION__ >= 2067) auto fsecs = dateTimeFrac.fracSecs; + else auto fsecs = FracSec.from!"hnsecs"(dateTimeFrac.fracSecs.total!"hnsecs"); + mixin(accept!("Value", "SysTime(dateTimeFrac.dateTime, fsecs, timezone)")); + } + } + catch(TimeException e) + { + // Time zone not found. So just move along to "Unknown time zone" below. 
+ } + + // Unknown time zone + mixin(accept!("Value", "DateTimeFracUnknownZone(dateTimeFrac.dateTime, dateTimeFrac.fracSecs, timezoneStr)")); + } + + if(!isEndOfNumber()) + error("Date-Times cannot have suffixes."); + + mixin(accept!("Value", "dateTimeFrac")); + } + + /// Lex time span (after the initial numeric fragment was lexed) + private void lexTimeSpan(bool isNegative, string firstPart) + { + assert(ch == ':' || ch == 'd'); + + string dayStr = ""; + string hourStr; + + // Lexed days? + bool hasDays = ch == 'd'; + if(hasDays) + { + dayStr = firstPart; + advanceChar(ErrorOnEOF.Yes); // Skip 'd' + + // Lex hours + if(ch != ':') + error("Invalid time span format: Missing hours."); + advanceChar(ErrorOnEOF.Yes); // Skip ':' + hourStr = lexNumericFragment(); + } + else + hourStr = firstPart; + + // Lex minutes + if(ch != ':') + error("Invalid time span format: Missing minutes."); + advanceChar(ErrorOnEOF.Yes); // Skip ':' + auto minuteStr = lexNumericFragment(); + + // Lex seconds + if(ch != ':') + error("Invalid time span format: Missing seconds."); + advanceChar(ErrorOnEOF.Yes); // Skip ':' + auto secondStr = lexNumericFragment(); + + // Lex milliseconds, if exists + string millisecondStr = ""; + if(ch == '.') + { + advanceChar(ErrorOnEOF.Yes); // Skip '.' 
+ millisecondStr = lexNumericFragment(); + } + + if(!isEndOfNumber()) + error("Time spans cannot have suffixes."); + + auto duration = makeDuration(isNegative, dayStr, hourStr, minuteStr, secondStr, millisecondStr); + mixin(accept!("Value", "duration")); + } + + /// Advances past whitespace and comments + private void eatWhite(bool allowComments=true) + { + // -- Comment/Whitepace Lexer ------------- + + enum State + { + normal, + lineComment, // Got "#" or "//" or "--", Eating everything until newline + blockComment, // Got "/*", Eating everything until "*/" + } + + if(isEOF) + return; + + Location commentStart; + State state = State.normal; + bool consumeNewlines = false; + bool hasConsumedNewline = false; + while(true) + { + final switch(state) + { + case State.normal: + + if(ch == '\\') + { + commentStart = location; + consumeNewlines = true; + hasConsumedNewline = false; + } + + else if(ch == '#') + { + if(!allowComments) + return; + + commentStart = location; + state = State.lineComment; + continue; + } + + else if(ch == '/' || ch == '-') + { + commentStart = location; + if(lookahead(ch)) + { + if(!allowComments) + return; + + advanceChar(ErrorOnEOF.No); + state = State.lineComment; + continue; + } + else if(ch == '/' && lookahead('*')) + { + if(!allowComments) + return; + + advanceChar(ErrorOnEOF.No); + state = State.blockComment; + continue; + } + else + return; // Done + } + else if(isAtNewline()) + { + if(consumeNewlines) + hasConsumedNewline = true; + else + return; // Done + } + else if(!isWhite(ch)) + { + if(consumeNewlines) + { + if(hasConsumedNewline) + return; // Done + else + error("Only whitespace can come between a line-continuation backslash and the following newline."); + } + else + return; // Done + } + + break; + + case State.lineComment: + if(lookahead(&isNewline)) + state = State.normal; + break; + + case State.blockComment: + if(ch == '*' && lookahead('/')) + { + advanceChar(ErrorOnEOF.No); + state = State.normal; + } + break; + } + + 
advanceChar(ErrorOnEOF.No); + if(isEOF) + { + // Reached EOF + + if(consumeNewlines && !hasConsumedNewline) + error("Missing newline after line-continuation backslash."); + + else if(state == State.blockComment) + error(commentStart, "Unterminated block comment."); + + else + return; // Done, reached EOF + } + } + } +} + +version(sdlangUnittest) +{ + import std.stdio; + + private auto loc = Location("filename", 0, 0, 0); + private auto loc2 = Location("a", 1, 1, 1); + + unittest + { + assert([Token(symbol!"EOL",loc) ] == [Token(symbol!"EOL",loc) ] ); + assert([Token(symbol!"EOL",loc,Value(7),"A")] == [Token(symbol!"EOL",loc2,Value(7),"B")] ); + } + + private int numErrors = 0; + private void testLex(string source, Token[] expected, bool test_locations = false, string file=__FILE__, size_t line=__LINE__) + { + Token[] actual; + try + actual = lexSource(source, "filename"); + catch(SDLangParseException e) + { + numErrors++; + stderr.writeln(file, "(", line, "): testLex failed on: ", source); + stderr.writeln(" Expected:"); + stderr.writeln(" ", expected); + stderr.writeln(" Actual: SDLangParseException thrown:"); + stderr.writeln(" ", e.msg); + return; + } + + bool is_same = actual == expected; + if (is_same && test_locations) { + is_same = actual.map!(t => t.location).equal(expected.map!(t => t.location)); + } + + if(!is_same) + { + numErrors++; + stderr.writeln(file, "(", line, "): testLex failed on: ", source); + stderr.writeln(" Expected:"); + stderr.writeln(" ", expected); + stderr.writeln(" Actual:"); + stderr.writeln(" ", actual); + + if(expected.length > 1 || actual.length > 1) + { + stderr.writeln(" expected.length: ", expected.length); + stderr.writeln(" actual.length: ", actual.length); + + if(actual.length == expected.length) + foreach(i; 0..actual.length) + if(actual[i] != expected[i]) + { + stderr.writeln(" Unequal at index #", i, ":"); + stderr.writeln(" Expected:"); + stderr.writeln(" ", expected[i]); + stderr.writeln(" Actual:"); + stderr.writeln(" 
", actual[i]); + } + } + } + } + + private void testLexThrows(string file=__FILE__, size_t line=__LINE__)(string source) + { + bool hadException = false; + Token[] actual; + try + actual = lexSource(source, "filename"); + catch(SDLangParseException e) + hadException = true; + + if(!hadException) + { + numErrors++; + stderr.writeln(file, "(", line, "): testLex failed on: ", source); + stderr.writeln(" Expected SDLangParseException"); + stderr.writeln(" Actual:"); + stderr.writeln(" ", actual); + } + } +} + +version(sdlangUnittest) +unittest +{ + writeln("Unittesting sdlang lexer..."); + stdout.flush(); + + testLex("", []); + testLex(" ", []); + testLex("\\\n", []); + testLex("/*foo*/", []); + testLex("/* multiline \n comment */", []); + testLex("/* * */", []); + testLexThrows("/* "); + + testLex(":", [ Token(symbol!":", loc) ]); + testLex("=", [ Token(symbol!"=", loc) ]); + testLex("{", [ Token(symbol!"{", loc) ]); + testLex("}", [ Token(symbol!"}", loc) ]); + testLex(";", [ Token(symbol!"EOL",loc) ]); + testLex("\n", [ Token(symbol!"EOL",loc) ]); + + testLex("foo", [ Token(symbol!"Ident",loc,Value(null),"foo") ]); + testLex("_foo", [ Token(symbol!"Ident",loc,Value(null),"_foo") ]); + testLex("foo.bar", [ Token(symbol!"Ident",loc,Value(null),"foo.bar") ]); + testLex("foo-bar", [ Token(symbol!"Ident",loc,Value(null),"foo-bar") ]); + testLex("foo.", [ Token(symbol!"Ident",loc,Value(null),"foo.") ]); + testLex("foo-", [ Token(symbol!"Ident",loc,Value(null),"foo-") ]); + testLexThrows(".foo"); + + testLex("foo bar", [ + Token(symbol!"Ident",loc,Value(null),"foo"), + Token(symbol!"Ident",loc,Value(null),"bar"), + ]); + testLex("foo \\ \n \n bar", [ + Token(symbol!"Ident",loc,Value(null),"foo"), + Token(symbol!"Ident",loc,Value(null),"bar"), + ]); + testLex("foo \\ \n \\ \n bar", [ + Token(symbol!"Ident",loc,Value(null),"foo"), + Token(symbol!"Ident",loc,Value(null),"bar"), + ]); + testLexThrows("foo \\ "); + testLexThrows("foo \\ bar"); + testLexThrows("foo \\ \n \\ "); 
+ testLexThrows("foo \\ \n \\ bar"); + + testLex("foo : = { } ; \n bar \n", [ + Token(symbol!"Ident",loc,Value(null),"foo"), + Token(symbol!":",loc), + Token(symbol!"=",loc), + Token(symbol!"{",loc), + Token(symbol!"}",loc), + Token(symbol!"EOL",loc), + Token(symbol!"EOL",loc), + Token(symbol!"Ident",loc,Value(null),"bar"), + Token(symbol!"EOL",loc), + ]); + + testLexThrows("<"); + testLexThrows("*"); + testLexThrows(`\`); + + // Integers + testLex( "7", [ Token(symbol!"Value",loc,Value(cast( int) 7)) ]); + testLex( "-7", [ Token(symbol!"Value",loc,Value(cast( int)-7)) ]); + testLex( "7L", [ Token(symbol!"Value",loc,Value(cast(long) 7)) ]); + testLex( "7l", [ Token(symbol!"Value",loc,Value(cast(long) 7)) ]); + testLex("-7L", [ Token(symbol!"Value",loc,Value(cast(long)-7)) ]); + testLex( "0", [ Token(symbol!"Value",loc,Value(cast( int) 0)) ]); + testLex( "-0", [ Token(symbol!"Value",loc,Value(cast( int) 0)) ]); + + testLex("7/**/", [ Token(symbol!"Value",loc,Value(cast( int) 7)) ]); + testLex("7#", [ Token(symbol!"Value",loc,Value(cast( int) 7)) ]); + + testLex("7 A", [ + Token(symbol!"Value",loc,Value(cast(int)7)), + Token(symbol!"Ident",loc,Value( null),"A"), + ]); + testLexThrows("7A"); + testLexThrows("-A"); + testLexThrows(`-""`); + + testLex("7;", [ + Token(symbol!"Value",loc,Value(cast(int)7)), + Token(symbol!"EOL",loc), + ]); + + // Floats + testLex("1.2F" , [ Token(symbol!"Value",loc,Value(cast( float)1.2)) ]); + testLex("1.2f" , [ Token(symbol!"Value",loc,Value(cast( float)1.2)) ]); + testLex("1.2" , [ Token(symbol!"Value",loc,Value(cast(double)1.2)) ]); + testLex("1.2D" , [ Token(symbol!"Value",loc,Value(cast(double)1.2)) ]); + testLex("1.2d" , [ Token(symbol!"Value",loc,Value(cast(double)1.2)) ]); + testLex("1.2BD", [ Token(symbol!"Value",loc,Value(cast( real)1.2)) ]); + testLex("1.2bd", [ Token(symbol!"Value",loc,Value(cast( real)1.2)) ]); + testLex("1.2Bd", [ Token(symbol!"Value",loc,Value(cast( real)1.2)) ]); + testLex("1.2bD", [ 
Token(symbol!"Value",loc,Value(cast( real)1.2)) ]); + + testLex(".2F" , [ Token(symbol!"Value",loc,Value(cast( float)0.2)) ]); + testLex(".2" , [ Token(symbol!"Value",loc,Value(cast(double)0.2)) ]); + testLex(".2D" , [ Token(symbol!"Value",loc,Value(cast(double)0.2)) ]); + testLex(".2BD", [ Token(symbol!"Value",loc,Value(cast( real)0.2)) ]); + + testLex("-1.2F" , [ Token(symbol!"Value",loc,Value(cast( float)-1.2)) ]); + testLex("-1.2" , [ Token(symbol!"Value",loc,Value(cast(double)-1.2)) ]); + testLex("-1.2D" , [ Token(symbol!"Value",loc,Value(cast(double)-1.2)) ]); + testLex("-1.2BD", [ Token(symbol!"Value",loc,Value(cast( real)-1.2)) ]); + + testLex("-.2F" , [ Token(symbol!"Value",loc,Value(cast( float)-0.2)) ]); + testLex("-.2" , [ Token(symbol!"Value",loc,Value(cast(double)-0.2)) ]); + testLex("-.2D" , [ Token(symbol!"Value",loc,Value(cast(double)-0.2)) ]); + testLex("-.2BD", [ Token(symbol!"Value",loc,Value(cast( real)-0.2)) ]); + + testLex( "0.0" , [ Token(symbol!"Value",loc,Value(cast(double)0.0)) ]); + testLex( "0.0F" , [ Token(symbol!"Value",loc,Value(cast( float)0.0)) ]); + testLex( "0.0BD", [ Token(symbol!"Value",loc,Value(cast( real)0.0)) ]); + testLex("-0.0" , [ Token(symbol!"Value",loc,Value(cast(double)0.0)) ]); + testLex("-0.0F" , [ Token(symbol!"Value",loc,Value(cast( float)0.0)) ]); + testLex("-0.0BD", [ Token(symbol!"Value",loc,Value(cast( real)0.0)) ]); + testLex( "7F" , [ Token(symbol!"Value",loc,Value(cast( float)7.0)) ]); + testLex( "7D" , [ Token(symbol!"Value",loc,Value(cast(double)7.0)) ]); + testLex( "7BD" , [ Token(symbol!"Value",loc,Value(cast( real)7.0)) ]); + testLex( "0F" , [ Token(symbol!"Value",loc,Value(cast( float)0.0)) ]); + testLex( "0D" , [ Token(symbol!"Value",loc,Value(cast(double)0.0)) ]); + testLex( "0BD" , [ Token(symbol!"Value",loc,Value(cast( real)0.0)) ]); + testLex("-0F" , [ Token(symbol!"Value",loc,Value(cast( float)0.0)) ]); + testLex("-0D" , [ Token(symbol!"Value",loc,Value(cast(double)0.0)) ]); + testLex("-0BD" , 
[ Token(symbol!"Value",loc,Value(cast( real)0.0)) ]); + + testLex("1.2 F", [ + Token(symbol!"Value",loc,Value(cast(double)1.2)), + Token(symbol!"Ident",loc,Value( null),"F"), + ]); + testLexThrows("1.2A"); + testLexThrows("1.2B"); + testLexThrows("1.2BDF"); + + testLex("1.2;", [ + Token(symbol!"Value",loc,Value(cast(double)1.2)), + Token(symbol!"EOL",loc), + ]); + + testLex("1.2F;", [ + Token(symbol!"Value",loc,Value(cast(float)1.2)), + Token(symbol!"EOL",loc), + ]); + + testLex("1.2BD;", [ + Token(symbol!"Value",loc,Value(cast(real)1.2)), + Token(symbol!"EOL",loc), + ]); + + // Booleans and null + testLex("true", [ Token(symbol!"Value",loc,Value( true)) ]); + testLex("false", [ Token(symbol!"Value",loc,Value(false)) ]); + testLex("on", [ Token(symbol!"Value",loc,Value( true)) ]); + testLex("off", [ Token(symbol!"Value",loc,Value(false)) ]); + testLex("null", [ Token(symbol!"Value",loc,Value( null)) ]); + + testLex("TRUE", [ Token(symbol!"Ident",loc,Value(null),"TRUE") ]); + testLex("true ", [ Token(symbol!"Value",loc,Value(true)) ]); + testLex("true ", [ Token(symbol!"Value",loc,Value(true)) ]); + testLex("tru", [ Token(symbol!"Ident",loc,Value(null),"tru") ]); + testLex("truX", [ Token(symbol!"Ident",loc,Value(null),"truX") ]); + testLex("trueX", [ Token(symbol!"Ident",loc,Value(null),"trueX") ]); + + // Raw Backtick Strings + testLex("`hello world`", [ Token(symbol!"Value",loc,Value(`hello world` )) ]); + testLex("` hello world `", [ Token(symbol!"Value",loc,Value(` hello world ` )) ]); + testLex("`hello \\t world`", [ Token(symbol!"Value",loc,Value(`hello \t world`)) ]); + testLex("`hello \\n world`", [ Token(symbol!"Value",loc,Value(`hello \n world`)) ]); + testLex("`hello \n world`", [ Token(symbol!"Value",loc,Value("hello \n world")) ]); + testLex("`hello \r\n world`", [ Token(symbol!"Value",loc,Value("hello \r\n world")) ]); + testLex("`hello \"world\"`", [ Token(symbol!"Value",loc,Value(`hello "world"` )) ]); + + testLexThrows("`foo"); + 
testLexThrows("`"); + + // Double-Quote Strings + testLex(`"hello world"`, [ Token(symbol!"Value",loc,Value("hello world" )) ]); + testLex(`" hello world "`, [ Token(symbol!"Value",loc,Value(" hello world " )) ]); + testLex(`"hello \t world"`, [ Token(symbol!"Value",loc,Value("hello \t world")) ]); + testLex(`"hello \n world"`, [ Token(symbol!"Value",loc,Value("hello \n world")) ]); + testLex("\"hello \\\n world\"", [ Token(symbol!"Value",loc,Value("hello world" )) ]); + testLex("\"hello \\ \n world\"", [ Token(symbol!"Value",loc,Value("hello world" )) ]); + testLex("\"hello \\ \n\n world\"", [ Token(symbol!"Value",loc,Value("hello world" )) ]); + testLex(`"\"hello world\""`, [ Token(symbol!"Value",loc,Value(`"hello world"` )) ]); + + testLexThrows("\"hello \n world\""); + testLexThrows(`"foo`); + testLexThrows(`"`); + + // Characters + testLex("'a'", [ Token(symbol!"Value",loc,Value(cast(dchar) 'a')) ]); + testLex("'\\n'", [ Token(symbol!"Value",loc,Value(cast(dchar)'\n')) ]); + testLex("'\\t'", [ Token(symbol!"Value",loc,Value(cast(dchar)'\t')) ]); + testLex("'\t'", [ Token(symbol!"Value",loc,Value(cast(dchar)'\t')) ]); + testLex("'\\''", [ Token(symbol!"Value",loc,Value(cast(dchar)'\'')) ]); + testLex(`'\\'`, [ Token(symbol!"Value",loc,Value(cast(dchar)'\\')) ]); + + testLexThrows("'a"); + testLexThrows("'aa'"); + testLexThrows("''"); + testLexThrows("'\\\n'"); + testLexThrows("'\n'"); + testLexThrows(`'\`); + testLexThrows(`'\'`); + testLexThrows("'"); + + // Unicode + testLex("日本語", [ Token(symbol!"Ident",loc,Value(null), "日本語") ]); + testLex("`おはよう、日本。`", [ Token(symbol!"Value",loc,Value(`おはよう、日本。`)) ]); + testLex(`"おはよう、日本。"`, [ Token(symbol!"Value",loc,Value(`おはよう、日本。`)) ]); + testLex("'月'", [ Token(symbol!"Value",loc,Value("月"d.dup[0])) ]); + + // Base64 Binary + testLex("[aGVsbG8gd29ybGQ=]", [ Token(symbol!"Value",loc,Value(cast(ubyte[])"hello world".dup))]); + testLex("[ aGVsbG8gd29ybGQ= ]", [ Token(symbol!"Value",loc,Value(cast(ubyte[])"hello 
world".dup))]); + testLex("[\n aGVsbG8g \n \n d29ybGQ= \n]", [ Token(symbol!"Value",loc,Value(cast(ubyte[])"hello world".dup))]); + + testLexThrows("[aGVsbG8gd29ybGQ]"); // Ie: Not multiple of 4 + testLexThrows("[ aGVsbG8gd29ybGQ ]"); + + // Date + testLex( "1999/12/5", [ Token(symbol!"Value",loc,Value(Date( 1999, 12, 5))) ]); + testLex( "2013/2/22", [ Token(symbol!"Value",loc,Value(Date( 2013, 2, 22))) ]); + testLex("-2013/2/22", [ Token(symbol!"Value",loc,Value(Date(-2013, 2, 22))) ]); + + testLexThrows("7/"); + testLexThrows("2013/2/22a"); + testLexThrows("2013/2/22f"); + + testLex("1999/12/5\n", [ + Token(symbol!"Value",loc,Value(Date(1999, 12, 5))), + Token(symbol!"EOL",loc), + ]); + + // DateTime, no timezone + testLex( "2013/2/22 07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); + testLex( "2013/2/22 \t 07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); + testLex( "2013/2/22/*foo*/07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); + testLex( "2013/2/22 /*foo*/ \\\n /*bar*/ 07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); + testLex( "2013/2/22 /*foo*/ \\\n\n \n /*bar*/ 07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); + testLex( "2013/2/22 /*foo*/ \\\n\\\n \\\n /*bar*/ 07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); + testLex( "2013/2/22/*foo*/\\\n/*bar*/07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0)))) ]); + testLex("-2013/2/22 07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime(-2013, 2, 22, 7, 53, 0)))) ]); + testLex( "2013/2/22 -07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) - hours(7) - minutes(53)))) ]); + testLex("-2013/2/22 -07:53", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime(-2013, 2, 22, 0, 
0, 0) - hours(7) - minutes(53)))) ]); + testLex( "2013/2/22 07:53:34", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 34)))) ]); + testLex( "2013/2/22 07:53:34.123", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 34), FracSec.from!"msecs"(123)))) ]); + testLex( "2013/2/22 07:53:34.12", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 34), FracSec.from!"msecs"(120)))) ]); + testLex( "2013/2/22 07:53:34.1", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 34), FracSec.from!"msecs"(100)))) ]); + testLex( "2013/2/22 07:53.123", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 7, 53, 0), FracSec.from!"msecs"(123)))) ]); + + testLex( "2013/2/22 34:65", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) + hours(34) + minutes(65) + seconds( 0)))) ]); + testLex( "2013/2/22 34:65:77.123", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) + hours(34) + minutes(65) + seconds(77), FracSec.from!"msecs"(123)))) ]); + testLex( "2013/2/22 34:65.123", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) + hours(34) + minutes(65) + seconds( 0), FracSec.from!"msecs"(123)))) ]); + + testLex( "2013/2/22 -34:65", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) - hours(34) - minutes(65) - seconds( 0)))) ]); + testLex( "2013/2/22 -34:65:77.123", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) - hours(34) - minutes(65) - seconds(77), FracSec.from!"msecs"(-123)))) ]); + testLex( "2013/2/22 -34:65.123", [ Token(symbol!"Value",loc,Value(DateTimeFrac(DateTime( 2013, 2, 22, 0, 0, 0) - hours(34) - minutes(65) - seconds( 0), FracSec.from!"msecs"(-123)))) ]); + + testLexThrows("2013/2/22 07:53a"); + testLexThrows("2013/2/22 07:53f"); + testLexThrows("2013/2/22 07:53:34.123a"); + testLexThrows("2013/2/22 07:53:34.123f"); + 
testLexThrows("2013/2/22a 07:53"); + + testLex(`2013/2/22 "foo"`, [ + Token(symbol!"Value",loc,Value(Date(2013, 2, 22))), + Token(symbol!"Value",loc,Value("foo")), + ]); + + testLex("2013/2/22 07", [ + Token(symbol!"Value",loc,Value(Date(2013, 2, 22))), + Token(symbol!"Value",loc,Value(cast(int)7)), + ]); + + testLex("2013/2/22 1.2F", [ + Token(symbol!"Value",loc,Value(Date(2013, 2, 22))), + Token(symbol!"Value",loc,Value(cast(float)1.2)), + ]); + + testLex("2013/2/22 .2F", [ + Token(symbol!"Value",loc,Value(Date(2013, 2, 22))), + Token(symbol!"Value",loc,Value(cast(float)0.2)), + ]); + + testLex("2013/2/22 -1.2F", [ + Token(symbol!"Value",loc,Value(Date(2013, 2, 22))), + Token(symbol!"Value",loc,Value(cast(float)-1.2)), + ]); + + testLex("2013/2/22 -.2F", [ + Token(symbol!"Value",loc,Value(Date(2013, 2, 22))), + Token(symbol!"Value",loc,Value(cast(float)-0.2)), + ]); + + // DateTime, with known timezone + testLex( "2013/2/22 07:53-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 0), new immutable SimpleTimeZone( hours(0) )))) ]); + testLex("-2013/2/22 07:53-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime(-2013, 2, 22, 7, 53, 0), new immutable SimpleTimeZone( hours(0) )))) ]); + testLex( "2013/2/22 -07:53-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 0, 0, 0) - hours(7) - minutes(53), new immutable SimpleTimeZone( hours(0) )))) ]); + testLex("-2013/2/22 -07:53-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime(-2013, 2, 22, 0, 0, 0) - hours(7) - minutes(53), new immutable SimpleTimeZone( hours(0) )))) ]); + testLex( "2013/2/22 07:53-GMT+02:10", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 0), new immutable SimpleTimeZone( hours(2)+minutes(10))))) ]); + testLex( "2013/2/22 07:53-GMT-05:30", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 0), new immutable SimpleTimeZone(-hours(5)-minutes(30))))) ]); + testLex( "2013/2/22 
07:53:34-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 34), new immutable SimpleTimeZone( hours(0) )))) ]); + testLex( "2013/2/22 07:53:34-GMT+02:10", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 34), new immutable SimpleTimeZone( hours(2)+minutes(10))))) ]); + testLex( "2013/2/22 07:53:34-GMT-05:30", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 34), new immutable SimpleTimeZone(-hours(5)-minutes(30))))) ]); + testLex( "2013/2/22 07:53:34.123-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 34), FracSec.from!"msecs"(123), new immutable SimpleTimeZone( hours(0) )))) ]); + testLex( "2013/2/22 07:53:34.123-GMT+02:10", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 34), FracSec.from!"msecs"(123), new immutable SimpleTimeZone( hours(2)+minutes(10))))) ]); + testLex( "2013/2/22 07:53:34.123-GMT-05:30", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 34), FracSec.from!"msecs"(123), new immutable SimpleTimeZone(-hours(5)-minutes(30))))) ]); + testLex( "2013/2/22 07:53.123-GMT+00:00", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 0), FracSec.from!"msecs"(123), new immutable SimpleTimeZone( hours(0) )))) ]); + testLex( "2013/2/22 07:53.123-GMT+02:10", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 0), FracSec.from!"msecs"(123), new immutable SimpleTimeZone( hours(2)+minutes(10))))) ]); + testLex( "2013/2/22 07:53.123-GMT-05:30", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 7, 53, 0), FracSec.from!"msecs"(123), new immutable SimpleTimeZone(-hours(5)-minutes(30))))) ]); + + testLex( "2013/2/22 -34:65-GMT-05:30", [ Token(symbol!"Value",loc,Value(SysTime(DateTime( 2013, 2, 22, 0, 0, 0) - hours(34) - minutes(65) - seconds( 0), new immutable SimpleTimeZone(-hours(5)-minutes(30))))) ]); + + // DateTime, with Java SDL's occasionally weird 
interpretation of some + // "not quite ISO" variations of the "GMT with offset" timezone strings. + Token testTokenSimpleTimeZone(Duration d) + { + auto dateTime = DateTime(2013, 2, 22, 7, 53, 0); + auto tz = new immutable SimpleTimeZone(d); + return Token( symbol!"Value", loc, Value(SysTime(dateTime,tz)) ); + } + Token testTokenUnknownTimeZone(string tzName) + { + auto dateTime = DateTime(2013, 2, 22, 7, 53, 0); + auto frac = FracSec.from!"msecs"(0); + return Token( symbol!"Value", loc, Value(DateTimeFracUnknownZone(dateTime,frac,tzName)) ); + } + testLex("2013/2/22 07:53-GMT+", [ testTokenUnknownTimeZone("GMT+") ]); + testLex("2013/2/22 07:53-GMT+:", [ testTokenUnknownTimeZone("GMT+:") ]); + testLex("2013/2/22 07:53-GMT+:3", [ testTokenUnknownTimeZone("GMT+:3") ]); + testLex("2013/2/22 07:53-GMT+:03", [ testTokenSimpleTimeZone(minutes(3)) ]); + testLex("2013/2/22 07:53-GMT+:003", [ testTokenUnknownTimeZone("GMT+:003") ]); + + testLex("2013/2/22 07:53-GMT+4", [ testTokenSimpleTimeZone(hours(4)) ]); + testLex("2013/2/22 07:53-GMT+4:", [ testTokenUnknownTimeZone("GMT+4:") ]); + testLex("2013/2/22 07:53-GMT+4:3", [ testTokenUnknownTimeZone("GMT+4:3") ]); + testLex("2013/2/22 07:53-GMT+4:03", [ testTokenSimpleTimeZone(hours(4)+minutes(3)) ]); + testLex("2013/2/22 07:53-GMT+4:003", [ testTokenUnknownTimeZone("GMT+4:003") ]); + + testLex("2013/2/22 07:53-GMT+04", [ testTokenSimpleTimeZone(hours(4)) ]); + testLex("2013/2/22 07:53-GMT+04:", [ testTokenUnknownTimeZone("GMT+04:") ]); + testLex("2013/2/22 07:53-GMT+04:3", [ testTokenUnknownTimeZone("GMT+04:3") ]); + testLex("2013/2/22 07:53-GMT+04:03", [ testTokenSimpleTimeZone(hours(4)+minutes(3)) ]); + testLex("2013/2/22 07:53-GMT+04:03abc", [ testTokenUnknownTimeZone("GMT+04:03abc") ]); + testLex("2013/2/22 07:53-GMT+04:003", [ testTokenUnknownTimeZone("GMT+04:003") ]); + + testLex("2013/2/22 07:53-GMT+004", [ testTokenSimpleTimeZone(minutes(4)) ]); + testLex("2013/2/22 07:53-GMT+004:", [ 
testTokenUnknownTimeZone("GMT+004:") ]); + testLex("2013/2/22 07:53-GMT+004:3", [ testTokenUnknownTimeZone("GMT+004:3") ]); + testLex("2013/2/22 07:53-GMT+004:03", [ testTokenUnknownTimeZone("GMT+004:03") ]); + testLex("2013/2/22 07:53-GMT+004:003", [ testTokenUnknownTimeZone("GMT+004:003") ]); + + testLex("2013/2/22 07:53-GMT+0004", [ testTokenSimpleTimeZone(minutes(4)) ]); + testLex("2013/2/22 07:53-GMT+0004:", [ testTokenUnknownTimeZone("GMT+0004:") ]); + testLex("2013/2/22 07:53-GMT+0004:3", [ testTokenUnknownTimeZone("GMT+0004:3") ]); + testLex("2013/2/22 07:53-GMT+0004:03", [ testTokenUnknownTimeZone("GMT+0004:03") ]); + testLex("2013/2/22 07:53-GMT+0004:003", [ testTokenUnknownTimeZone("GMT+0004:003") ]); + + testLex("2013/2/22 07:53-GMT+00004", [ testTokenSimpleTimeZone(minutes(4)) ]); + testLex("2013/2/22 07:53-GMT+00004:", [ testTokenUnknownTimeZone("GMT+00004:") ]); + testLex("2013/2/22 07:53-GMT+00004:3", [ testTokenUnknownTimeZone("GMT+00004:3") ]); + testLex("2013/2/22 07:53-GMT+00004:03", [ testTokenUnknownTimeZone("GMT+00004:03") ]); + testLex("2013/2/22 07:53-GMT+00004:003", [ testTokenUnknownTimeZone("GMT+00004:003") ]); + + // DateTime, with unknown timezone + testLex( "2013/2/22 07:53-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime( 2013, 2, 22, 7, 53, 0), FracSec.from!"msecs"( 0), "Bogus/Foo")), "2013/2/22 07:53-Bogus/Foo") ]); + testLex("-2013/2/22 07:53-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime(-2013, 2, 22, 7, 53, 0), FracSec.from!"msecs"( 0), "Bogus/Foo"))) ]); + testLex( "2013/2/22 -07:53-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime( 2013, 2, 22, 0, 0, 0) - hours(7) - minutes(53), FracSec.from!"msecs"( 0), "Bogus/Foo"))) ]); + testLex("-2013/2/22 -07:53-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime(-2013, 2, 22, 0, 0, 0) - hours(7) - minutes(53), FracSec.from!"msecs"( 0), "Bogus/Foo"))) ]); + testLex( 
"2013/2/22 07:53:34-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime( 2013, 2, 22, 7, 53, 34), FracSec.from!"msecs"( 0), "Bogus/Foo"))) ]); + testLex( "2013/2/22 07:53:34.123-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime( 2013, 2, 22, 7, 53, 34), FracSec.from!"msecs"(123), "Bogus/Foo"))) ]); + testLex( "2013/2/22 07:53.123-Bogus/Foo", [ Token(symbol!"Value",loc,Value(DateTimeFracUnknownZone(DateTime( 2013, 2, 22, 7, 53, 0), FracSec.from!"msecs"(123), "Bogus/Foo"))) ]); + + // Time Span + testLex( "12:14:42", [ Token(symbol!"Value",loc,Value( days( 0)+hours(12)+minutes(14)+seconds(42)+msecs( 0))) ]); + testLex("-12:14:42", [ Token(symbol!"Value",loc,Value(-days( 0)-hours(12)-minutes(14)-seconds(42)-msecs( 0))) ]); + testLex( "00:09:12", [ Token(symbol!"Value",loc,Value( days( 0)+hours( 0)+minutes( 9)+seconds(12)+msecs( 0))) ]); + testLex( "00:00:01.023", [ Token(symbol!"Value",loc,Value( days( 0)+hours( 0)+minutes( 0)+seconds( 1)+msecs( 23))) ]); + testLex( "23d:05:21:23.532", [ Token(symbol!"Value",loc,Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs(532))) ]); + testLex( "23d:05:21:23.53", [ Token(symbol!"Value",loc,Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs(530))) ]); + testLex( "23d:05:21:23.5", [ Token(symbol!"Value",loc,Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs(500))) ]); + testLex("-23d:05:21:23.532", [ Token(symbol!"Value",loc,Value(-days(23)-hours( 5)-minutes(21)-seconds(23)-msecs(532))) ]); + testLex("-23d:05:21:23.5", [ Token(symbol!"Value",loc,Value(-days(23)-hours( 5)-minutes(21)-seconds(23)-msecs(500))) ]); + testLex( "23d:05:21:23", [ Token(symbol!"Value",loc,Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs( 0))) ]); + + testLexThrows("12:14:42a"); + testLexThrows("23d:05:21:23.532a"); + testLexThrows("23d:05:21:23.532f"); + + // Combination + testLex("foo. 
7", [ + Token(symbol!"Ident",loc,Value( null),"foo."), + Token(symbol!"Value",loc,Value(cast(int)7)) + ]); + + testLex(` + namespace:person "foo" "bar" 1 23L name.first="ひとみ" name.last="Smith" { + namespace:age 37; namespace:favorite_color "blue" // comment + somedate 2013/2/22 07:53 -- comment + + inventory /* comment */ { + socks + } + } + `, + [ + Token(symbol!"EOL",loc,Value(null),"\n"), + + Token(symbol!"Ident", loc, Value( null ), "namespace"), + Token(symbol!":", loc, Value( null ), ":"), + Token(symbol!"Ident", loc, Value( null ), "person"), + Token(symbol!"Value", loc, Value( "foo" ), `"foo"`), + Token(symbol!"Value", loc, Value( "bar" ), `"bar"`), + Token(symbol!"Value", loc, Value( cast( int) 1 ), "1"), + Token(symbol!"Value", loc, Value( cast(long)23 ), "23L"), + Token(symbol!"Ident", loc, Value( null ), "name.first"), + Token(symbol!"=", loc, Value( null ), "="), + Token(symbol!"Value", loc, Value( "ひとみ" ), `"ひとみ"`), + Token(symbol!"Ident", loc, Value( null ), "name.last"), + Token(symbol!"=", loc, Value( null ), "="), + Token(symbol!"Value", loc, Value( "Smith" ), `"Smith"`), + Token(symbol!"{", loc, Value( null ), "{"), + Token(symbol!"EOL", loc, Value( null ), "\n"), + + Token(symbol!"Ident", loc, Value( null ), "namespace"), + Token(symbol!":", loc, Value( null ), ":"), + Token(symbol!"Ident", loc, Value( null ), "age"), + Token(symbol!"Value", loc, Value( cast(int)37 ), "37"), + Token(symbol!"EOL", loc, Value( null ), ";"), + Token(symbol!"Ident", loc, Value( null ), "namespace"), + Token(symbol!":", loc, Value( null ), ":"), + Token(symbol!"Ident", loc, Value( null ), "favorite_color"), + Token(symbol!"Value", loc, Value( "blue" ), `"blue"`), + Token(symbol!"EOL", loc, Value( null ), "\n"), + + Token(symbol!"Ident", loc, Value( null ), "somedate"), + Token(symbol!"Value", loc, Value( DateTimeFrac(DateTime(2013, 2, 22, 7, 53, 0)) ), "2013/2/22 07:53"), + Token(symbol!"EOL", loc, Value( null ), "\n"), + Token(symbol!"EOL", loc, Value( null ), 
"\n"), + + Token(symbol!"Ident", loc, Value(null), "inventory"), + Token(symbol!"{", loc, Value(null), "{"), + Token(symbol!"EOL", loc, Value(null), "\n"), + + Token(symbol!"Ident", loc, Value(null), "socks"), + Token(symbol!"EOL", loc, Value(null), "\n"), + + Token(symbol!"}", loc, Value(null), "}"), + Token(symbol!"EOL", loc, Value(null), "\n"), + + Token(symbol!"}", loc, Value(null), "}"), + Token(symbol!"EOL", loc, Value(null), "\n"), + ]); + + if(numErrors > 0) + stderr.writeln(numErrors, " failed test(s)"); +} + +version(sdlangUnittest) +unittest +{ + writeln("lexer: Regression test issue #8..."); + stdout.flush(); + + testLex(`"\n \n"`, [ Token(symbol!"Value",loc,Value("\n \n"),`"\n \n"`) ]); + testLex(`"\t\t"`, [ Token(symbol!"Value",loc,Value("\t\t"),`"\t\t"`) ]); + testLex(`"\n\n"`, [ Token(symbol!"Value",loc,Value("\n\n"),`"\n\n"`) ]); +} + +version(sdlangUnittest) +unittest +{ + writeln("lexer: Regression test issue #11..."); + stdout.flush(); + + void test(string input) + { + testLex( + input, + [ + Token(symbol!"EOL", loc, Value(null), "\n"), + Token(symbol!"Ident",loc,Value(null), "a") + ] + ); + } + + test("//X\na"); + test("//\na"); + test("--\na"); + test("#\na"); +} + +version(sdlangUnittest) +unittest +{ + writeln("lexer: Regression test issue #28..."); + stdout.flush(); + + enum offset = 1; // workaround for an of-by-one error for line numbers + testLex("test", [ + Token(symbol!"Ident", Location("filename", 0, 0, 0), Value(null), "test") + ], true); + testLex("\ntest", [ + Token(symbol!"EOL", Location("filename", 0, 0, 0), Value(null), "\n"), + Token(symbol!"Ident", Location("filename", 1, 0, 1), Value(null), "test") + ], true); + testLex("\rtest", [ + Token(symbol!"EOL", Location("filename", 0, 0, 0), Value(null), "\r"), + Token(symbol!"Ident", Location("filename", 1, 0, 1), Value(null), "test") + ], true); + testLex("\r\ntest", [ + Token(symbol!"EOL", Location("filename", 0, 0, 0), Value(null), "\r\n"), + Token(symbol!"Ident", 
Location("filename", 1, 0, 2), Value(null), "test") + ], true); + testLex("\r\n\ntest", [ + Token(symbol!"EOL", Location("filename", 0, 0, 0), Value(null), "\r\n"), + Token(symbol!"EOL", Location("filename", 1, 0, 2), Value(null), "\n"), + Token(symbol!"Ident", Location("filename", 2, 0, 3), Value(null), "test") + ], true); + testLex("\r\r\ntest", [ + Token(symbol!"EOL", Location("filename", 0, 0, 0), Value(null), "\r"), + Token(symbol!"EOL", Location("filename", 1, 0, 1), Value(null), "\r\n"), + Token(symbol!"Ident", Location("filename", 2, 0, 3), Value(null), "test") + ], true); +} diff --git a/source/dub/internal/sdlang/package.d b/source/dub/internal/sdlang/package.d new file mode 100644 index 0000000..7f1c67a --- /dev/null +++ b/source/dub/internal/sdlang/package.d @@ -0,0 +1,135 @@ +// SDLang-D +// Written in the D programming language. + +/++ +$(H2 SDLang-D v0.9.3) + +Library for parsing and generating SDL (Simple Declarative Language). + +Import this module to use SDLang-D as a library. + +For the list of officially supported compiler versions, see the +$(LINK2 https://github.com/Abscissa/SDLang-D/blob/master/.travis.yml, .travis.yml) +file included with your version of SDLang-D. + +Links: +$(UL + $(LI $(LINK2 https://github.com/Abscissa/SDLang-D, SDLang-D Homepage) ) + $(LI $(LINK2 http://semitwist.com/sdlang-d, SDLang-D API Reference (latest version) ) ) + $(LI $(LINK2 http://semitwist.com/sdlang-d-docs, SDLang-D API Reference (earlier versions) ) ) + $(LI $(LINK2 http://sdl.ikayzo.org/display/SDL/Language+Guide, Official SDL Site) [$(LINK2 http://semitwist.com/sdl-mirror/Language+Guide.html, mirror)] ) +) + +Authors: Nick Sabalausky ("Abscissa") http://semitwist.com/contact +Copyright: +Copyright (C) 2012-2015 Nick Sabalausky. 
+ +License: $(LINK2 https://github.com/Abscissa/SDLang-D/blob/master/LICENSE.txt, zlib/libpng) ++/ + +module dub.internal.sdlang; + +version (Have_sdlang_d) public import sdlang; +else: + +import std.array; +import std.datetime; +import std.file; +import std.stdio; + +import dub.internal.sdlang.ast; +import dub.internal.sdlang.exception; +import dub.internal.sdlang.lexer; +import dub.internal.sdlang.parser; +import dub.internal.sdlang.symbol; +import dub.internal.sdlang.token; +import dub.internal.sdlang.util; + +// Expose main public API +public import dub.internal.sdlang.ast : Attribute, Tag; +public import dub.internal.sdlang.exception; +public import dub.internal.sdlang.parser : parseFile, parseSource; +public import dub.internal.sdlang.token : Value, Token, DateTimeFrac, DateTimeFracUnknownZone; +public import dub.internal.sdlang.util : sdlangVersion, Location; + +version(sdlangUnittest) + void main() {} + +version(sdlangTestApp) +{ + int main(string[] args) + { + if( + args.length != 3 || + (args[1] != "lex" && args[1] != "parse" && args[1] != "to-sdl") + ) + { + stderr.writeln("SDLang-D v", sdlangVersion); + stderr.writeln("Usage: sdlang [lex|parse|to-sdl] filename.sdl"); + return 1; + } + + auto filename = args[2]; + + try + { + if(args[1] == "lex") + doLex(filename); + else if(args[1] == "parse") + doParse(filename); + else + doToSDL(filename); + } + catch(SDLangParseException e) + { + stderr.writeln(e.msg); + return 1; + } + + return 0; + } + + void doLex(string filename) + { + auto source = cast(string)read(filename); + auto lexer = new Lexer(source, filename); + + foreach(tok; lexer) + { + // Value + string value; + if(tok.symbol == symbol!"Value") + value = tok.value.hasValue? toString(tok.value.type) : "{null}"; + + value = value==""? 
"\t" : "("~value~":"~tok.value.toString()~") "; + + // Data + auto data = tok.data.replace("\n", "").replace("\r", ""); + if(data != "") + data = "\t|"~tok.data~"|"; + + // Display + writeln( + tok.location.toString, ":\t", + tok.symbol.name, value, + data + ); + + if(tok.symbol.name == "Error") + break; + } + } + + void doParse(string filename) + { + auto root = parseFile(filename); + stdout.rawWrite(root.toDebugString()); + writeln(); + } + + void doToSDL(string filename) + { + auto root = parseFile(filename); + stdout.rawWrite(root.toSDLDocument()); + } +} diff --git a/source/dub/internal/sdlang/parser.d b/source/dub/internal/sdlang/parser.d new file mode 100644 index 0000000..8ccf119 --- /dev/null +++ b/source/dub/internal/sdlang/parser.d @@ -0,0 +1,554 @@ +// SDLang-D +// Written in the D programming language. + +module dub.internal.sdlang.parser; + +version (Have_sdlang_d) public import sdlang.parser; +else: + +import std.file; + +import dub.internal.libInputVisitor; + +import dub.internal.sdlang.ast; +import dub.internal.sdlang.exception; +import dub.internal.sdlang.lexer; +import dub.internal.sdlang.symbol; +import dub.internal.sdlang.token; +import dub.internal.sdlang.util; + +/// Returns root tag. +Tag parseFile(string filename) +{ + auto source = cast(string)read(filename); + return parseSource(source, filename); +} + +/// Returns root tag. The optional 'filename' parameter can be included +/// so that the SDL document's filename (if any) can be displayed with +/// any syntax error messages. +Tag parseSource(string source, string filename=null) +{ + auto lexer = new Lexer(source, filename); + auto parser = DOMParser(lexer); + return parser.parseRoot(); +} + +/++ +Parses an SDL document using StAX/Pull-style. Returns an InputRange with +element type ParserEvent. + +The pullParseFile version reads a file and parses it, while pullParseSource +parses a string passed in. 
The optional 'filename' parameter in pullParseSource +can be included so that the SDL document's filename (if any) can be displayed +with any syntax error messages. + +Warning! The FileStartEvent and FileEndEvent events *might* be removed later. +See $(LINK https://github.com/Abscissa/SDLang-D/issues/17) + +Example: +------------------ +parent 12 attr="q" { + childA 34 + childB 56 +} +lastTag +------------------ + +The ParserEvent sequence emitted for that SDL document would be as +follows (indented for readability): +------------------ +FileStartEvent + TagStartEvent (parent) + ValueEvent (12) + AttributeEvent (attr, "q") + TagStartEvent (childA) + ValueEvent (34) + TagEndEvent + TagStartEvent (childB) + ValueEvent (56) + TagEndEvent + TagEndEvent + TagStartEvent (lastTag) + TagEndEvent +FileEndEvent +------------------ + +Example: +------------------ +foreach(event; pullParseFile("stuff.sdl")) +{ + import std.stdio; + + if(event.peek!FileStartEvent()) + writeln("FileStartEvent, starting! "); + + else if(event.peek!FileEndEvent()) + writeln("FileEndEvent, done! 
"); + + else if(auto e = event.peek!TagStartEvent()) + writeln("TagStartEvent: ", e.namespace, ":", e.name, " @ ", e.location); + + else if(event.peek!TagEndEvent()) + writeln("TagEndEvent"); + + else if(auto e = event.peek!ValueEvent()) + writeln("ValueEvent: ", e.value); + + else if(auto e = event.peek!AttributeEvent()) + writeln("AttributeEvent: ", e.namespace, ":", e.name, "=", e.value); + + else // Shouldn't happen + throw new Exception("Received unknown parser event"); +} +------------------ ++/ +auto pullParseFile(string filename) +{ + auto source = cast(string)read(filename); + return parseSource(source, filename); +} + +///ditto +auto pullParseSource(string source, string filename=null) +{ + auto lexer = new Lexer(source, filename); + auto parser = PullParser(lexer); + return inputVisitor!ParserEvent( parser ); +} + +/// The element of the InputRange returned by pullParseFile and pullParseSource: +alias ParserEvent = std.variant.Algebraic!( + FileStartEvent, + FileEndEvent, + TagStartEvent, + TagEndEvent, + ValueEvent, + AttributeEvent, +); + +/// Event: Start of file +struct FileStartEvent +{ + Location location; +} + +/// Event: End of file +struct FileEndEvent +{ + Location location; +} + +/// Event: Start of tag +struct TagStartEvent +{ + Location location; + string namespace; + string name; +} + +/// Event: End of tag +struct TagEndEvent +{ + //Location location; +} + +/// Event: Found a Value in the current tag +struct ValueEvent +{ + Location location; + Value value; +} + +/// Event: Found an Attribute in the current tag +struct AttributeEvent +{ + Location location; + string namespace; + string name; + Value value; +} + +// The actual pull parser +private struct PullParser +{ + private Lexer lexer; + + private struct IDFull + { + string namespace; + string name; + } + + private void error(string msg) + { + error(lexer.front.location, msg); + } + + private void error(Location loc, string msg) + { + throw new SDLangParseException(loc, "Error: "~msg); 
+ } + + private InputVisitor!(PullParser, ParserEvent) v; + + void visit(InputVisitor!(PullParser, ParserEvent) v) + { + this.v = v; + parseRoot(); + } + + private void emit(Event)(Event event) + { + v.yield( ParserEvent(event) ); + } + + /// ::= EOF (Lookaheads: Anything) + private void parseRoot() + { + //trace("Starting parse of file: ", lexer.filename); + //trace(__FUNCTION__, ": ::= EOF (Lookaheads: Anything)"); + + auto startLocation = Location(lexer.filename, 0, 0, 0); + emit( FileStartEvent(startLocation) ); + + parseTags(); + + auto token = lexer.front; + if(!token.matches!"EOF"()) + error("Expected end-of-file, not " ~ token.symbol.name); + + emit( FileEndEvent(token.location) ); + } + + /// ::= (Lookaheads: Ident Value) + /// | EOL (Lookaheads: EOL) + /// | {empty} (Lookaheads: Anything else, except '{') + void parseTags() + { + //trace("Enter ", __FUNCTION__); + while(true) + { + auto token = lexer.front; + if(token.matches!"Ident"() || token.matches!"Value"()) + { + //trace(__FUNCTION__, ": ::= (Lookaheads: Ident Value)"); + parseTag(); + continue; + } + else if(token.matches!"EOL"()) + { + //trace(__FUNCTION__, ": ::= EOL (Lookaheads: EOL)"); + lexer.popFront(); + continue; + } + else if(token.matches!"{"()) + { + error("Anonymous tags must have at least one value. 
They cannot just have children and attributes only."); + } + else + { + //trace(__FUNCTION__, ": ::= {empty} (Lookaheads: Anything else, except '{')"); + break; + } + } + } + + /// + /// ::= (Lookaheads: Ident) + /// | (Lookaheads: Value) + void parseTag() + { + auto token = lexer.front; + if(token.matches!"Ident"()) + { + //trace(__FUNCTION__, ": ::= (Lookaheads: Ident)"); + //trace("Found tag named: ", tag.fullName); + auto id = parseIDFull(); + emit( TagStartEvent(token.location, id.namespace, id.name) ); + } + else if(token.matches!"Value"()) + { + //trace(__FUNCTION__, ": ::= (Lookaheads: Value)"); + //trace("Found anonymous tag."); + emit( TagStartEvent(token.location, null, null) ); + } + else + error("Expected tag name or value, not " ~ token.symbol.name); + + if(lexer.front.matches!"="()) + error("Anonymous tags must have at least one value. They cannot just have attributes and children only."); + + parseValues(); + parseAttributes(); + parseOptChild(); + parseTagTerminator(); + + emit( TagEndEvent() ); + } + + /// ::= Ident (Lookaheads: Ident) + IDFull parseIDFull() + { + auto token = lexer.front; + if(token.matches!"Ident"()) + { + //trace(__FUNCTION__, ": ::= Ident (Lookaheads: Ident)"); + lexer.popFront(); + return parseIDSuffix(token.data); + } + else + { + error("Expected namespace or identifier, not " ~ token.symbol.name); + assert(0); + } + } + + /// + /// ::= ':' Ident (Lookaheads: ':') + /// ::= {empty} (Lookaheads: Anything else) + IDFull parseIDSuffix(string firstIdent) + { + auto token = lexer.front; + if(token.matches!":"()) + { + //trace(__FUNCTION__, ": ::= ':' Ident (Lookaheads: ':')"); + lexer.popFront(); + token = lexer.front; + if(token.matches!"Ident"()) + { + lexer.popFront(); + return IDFull(firstIdent, token.data); + } + else + { + error("Expected name, not " ~ token.symbol.name); + assert(0); + } + } + else + { + //trace(__FUNCTION__, ": ::= {empty} (Lookaheads: Anything else)"); + return IDFull("", firstIdent); + } + } + + /// + 
/// ::= Value (Lookaheads: Value) + /// | {empty} (Lookaheads: Anything else) + void parseValues() + { + while(true) + { + auto token = lexer.front; + if(token.matches!"Value"()) + { + //trace(__FUNCTION__, ": ::= Value (Lookaheads: Value)"); + parseValue(); + continue; + } + else + { + //trace(__FUNCTION__, ": ::= {empty} (Lookaheads: Anything else)"); + break; + } + } + } + + /// Handle Value terminals that aren't part of an attribute + void parseValue() + { + auto token = lexer.front; + if(token.matches!"Value"()) + { + //trace(__FUNCTION__, ": (Handle Value terminals that aren't part of an attribute)"); + auto value = token.value; + //trace("In tag '", parent.fullName, "', found value: ", value); + emit( ValueEvent(token.location, value) ); + + lexer.popFront(); + } + else + error("Expected value, not "~token.symbol.name); + } + + /// + /// ::= (Lookaheads: Ident) + /// | {empty} (Lookaheads: Anything else) + void parseAttributes() + { + while(true) + { + auto token = lexer.front; + if(token.matches!"Ident"()) + { + //trace(__FUNCTION__, ": ::= (Lookaheads: Ident)"); + parseAttribute(); + continue; + } + else + { + //trace(__FUNCTION__, ": ::= {empty} (Lookaheads: Anything else)"); + break; + } + } + } + + /// ::= '=' Value (Lookaheads: Ident) + void parseAttribute() + { + //trace(__FUNCTION__, ": ::= '=' Value (Lookaheads: Ident)"); + auto token = lexer.front; + if(!token.matches!"Ident"()) + error("Expected attribute name, not "~token.symbol.name); + + auto id = parseIDFull(); + + token = lexer.front; + if(!token.matches!"="()) + error("Expected '=' after attribute name, not "~token.symbol.name); + + lexer.popFront(); + token = lexer.front; + if(!token.matches!"Value"()) + error("Expected attribute value, not "~token.symbol.name); + + //trace("In tag '", parent.fullName, "', found attribute '", attr.fullName, "'"); + emit( AttributeEvent(token.location, id.namespace, id.name, token.value) ); + + lexer.popFront(); + } + + /// + /// ::= '{' EOL '}' (Lookaheads: 
'{') + /// | {empty} (Lookaheads: Anything else) + void parseOptChild() + { + auto token = lexer.front; + if(token.matches!"{") + { + //trace(__FUNCTION__, ": ::= '{' EOL '}' (Lookaheads: '{')"); + lexer.popFront(); + token = lexer.front; + if(!token.matches!"EOL"()) + error("Expected newline or semicolon after '{', not "~token.symbol.name); + + lexer.popFront(); + parseTags(); + + token = lexer.front; + if(!token.matches!"}"()) + error("Expected '}' after child tags, not "~token.symbol.name); + lexer.popFront(); + } + else + { + //trace(__FUNCTION__, ": ::= {empty} (Lookaheads: Anything else)"); + // Do nothing, no error. + } + } + + /// + /// ::= EOL (Lookahead: EOL) + /// | {empty} (Lookahead: EOF) + void parseTagTerminator() + { + auto token = lexer.front; + if(token.matches!"EOL") + { + //trace(__FUNCTION__, ": ::= EOL (Lookahead: EOL)"); + lexer.popFront(); + } + else if(token.matches!"EOF") + { + //trace(__FUNCTION__, ": ::= {empty} (Lookahead: EOF)"); + // Do nothing + } + else + error("Expected end of tag (newline, semicolon or end-of-file), not " ~ token.symbol.name); + } +} + +private struct DOMParser +{ + Lexer lexer; + + Tag parseRoot() + { + auto currTag = new Tag(null, null, "root"); + currTag.location = Location(lexer.filename, 0, 0, 0); + + auto parser = PullParser(lexer); + auto eventRange = inputVisitor!ParserEvent( parser ); + foreach(event; eventRange) + { + if(auto e = event.peek!TagStartEvent()) + { + auto newTag = new Tag(currTag, e.namespace, e.name); + newTag.location = e.location; + + currTag = newTag; + } + else if(event.peek!TagEndEvent()) + { + currTag = currTag.parent; + + if(!currTag) + parser.error("Internal Error: Received an extra TagEndEvent"); + } + else if(auto e = event.peek!ValueEvent()) + { + currTag.add(e.value); + } + else if(auto e = event.peek!AttributeEvent()) + { + auto attr = new Attribute(e.namespace, e.name, e.value, e.location); + currTag.add(attr); + } + else if(event.peek!FileStartEvent()) + { + // Do nothing + } 
+ else if(event.peek!FileEndEvent()) + { + // There shouldn't be another parent. + if(currTag.parent) + parser.error("Internal Error: Unexpected end of file, not enough TagEndEvent"); + } + else + parser.error("Internal Error: Received unknown parser event"); + } + + return currTag; + } +} + +// Other parser tests are part of the AST's tests over in the ast module. + +// Regression test, issue #16: https://github.com/Abscissa/SDLang-D/issues/16 +version(sdlangUnittest) +unittest +{ + import std.stdio; + writeln("parser: Regression test issue #16..."); + stdout.flush(); + + // Shouldn't crash + foreach(event; pullParseSource(`tag "data"`)) + { + event.peek!FileStartEvent(); + } +} + +// Regression test, issue #31: https://github.com/Abscissa/SDLang-D/issues/31 +// "Escape sequence results in range violation error" +version(sdlangUnittest) +unittest +{ + import std.stdio; + writeln("parser: Regression test issue #31..."); + stdout.flush(); + + // Shouldn't get a Range violation + parseSource(`test "\"foo\""`); +} diff --git a/source/dub/internal/sdlang/symbol.d b/source/dub/internal/sdlang/symbol.d new file mode 100644 index 0000000..04de244 --- /dev/null +++ b/source/dub/internal/sdlang/symbol.d @@ -0,0 +1,64 @@ +// SDLang-D +// Written in the D programming language. + +module dub.internal.sdlang.symbol; + +version (Have_sdlang_d) public import sdlang.symbol; +else: + +import std.algorithm; + +static immutable validSymbolNames = [ + "Error", + "EOF", + "EOL", + + ":", + "=", + "{", + "}", + + "Ident", + "Value", +]; + +/// Use this to create a Symbol. Ex: symbol!"Value" or symbol!"=" +/// Invalid names (such as symbol!"FooBar") are rejected at compile-time. +template symbol(string name) +{ + static assert(validSymbolNames.find(name), "Invalid Symbol: '"~name~"'"); + immutable symbol = _symbol(name); +} + +private Symbol _symbol(string name) +{ + return Symbol(name); +} + +/// Symbol is essentially the "type" of a Token. +/// Token is like an instance of a Symbol. 
+/// +/// This only represents terminals. Nonterminal tokens aren't +/// constructed since the AST is built directly during parsing. +/// +/// You can't create a Symbol directly. Instead, use the 'symbol' +/// template. +struct Symbol +{ + private string _name; + @property string name() + { + return _name; + } + + @disable this(); + private this(string name) + { + this._name = name; + } + + string toString() + { + return _name; + } +} diff --git a/source/dub/internal/sdlang/token.d b/source/dub/internal/sdlang/token.d new file mode 100644 index 0000000..b5f8f4a --- /dev/null +++ b/source/dub/internal/sdlang/token.d @@ -0,0 +1,532 @@ +// SDLang-D +// Written in the D programming language. + +module dub.internal.sdlang.token; + +version (Have_sdlang_d) public import sdlang.token; +else: + +import std.array; +import std.base64; +import std.conv; +import std.datetime; +import std.range; +import std.string; +import std.typetuple; +import std.variant; + +import dub.internal.sdlang.symbol; +import dub.internal.sdlang.util; + +/// DateTime doesn't support milliseconds, but SDL's "Date Time" type does. +/// So this is needed for any SDL "Date Time" that doesn't include a time zone. +struct DateTimeFrac +{ + this(DateTime dt, Duration fs) { this.dateTime = dt; this.fracSecs = fs; } + this(DateTime dt, FracSec fs) { this.dateTime = dt; this.fracSecs = fs.hnsecs.hnsecs; } + + DateTime dateTime; + Duration fracSecs; + deprecated("Use fracSecs instead.") { + @property FracSec fracSec() const { return FracSec.from!"hnsecs"(fracSecs.total!"hnsecs"); } + @property void fracSec(FracSec v) { fracSecs = v.hnsecs.hnsecs; } + } +} + +/++ +If a "Date Time" literal in the SDL file has a time zone that's not found in +your system, you get one of these instead of a SysTime. (Because it's +impossible to indicate "unknown time zone" with 'std.datetime.TimeZone'.) 
+ +The difference between this and 'DateTimeFrac' is that 'DateTimeFrac' +indicates that no time zone was specified in the SDL at all, whereas +'DateTimeFracUnknownZone' indicates that a time zone was specified but +data for it could not be found on your system. ++/ +struct DateTimeFracUnknownZone +{ + DateTime dateTime; + Duration fracSecs; + deprecated("Use fracSecs instead.") { + @property FracSec fracSec() { return FracSec.from!"hnsecs"(fracSecs.total!"hnsecs"); } + @property void fracSec(FracSec v) { fracSecs = v.hnsecs.hnsecs; } + } + string timeZone; + + bool opEquals(const DateTimeFracUnknownZone b) const + { + return opEquals(b); + } + bool opEquals(ref const DateTimeFracUnknownZone b) const + { + return + this.dateTime == b.dateTime && + this.fracSecs == b.fracSecs && + this.timeZone == b.timeZone; + } +} + +/++ +SDL's datatypes map to D's datatypes as described below. +Most are straightforward, but take special note of the date/time-related types. + +Boolean: bool +Null: typeof(null) +Unicode Character: dchar +Double-Quote Unicode String: string +Raw Backtick Unicode String: string +Integer (32 bits signed): int +Long Integer (64 bits signed): long +Float (32 bits signed): float +Double Float (64 bits signed): double +Decimal (128+ bits signed): real +Binary (standard Base64): ubyte[] +Time Span: Duration + +Date (with no time at all): Date +Date Time (no timezone): DateTimeFrac +Date Time (with a known timezone): SysTime +Date Time (with an unknown timezone): DateTimeFracUnknownZone ++/ +alias TypeTuple!( + bool, + string, dchar, + int, long, + float, double, real, + Date, DateTimeFrac, SysTime, DateTimeFracUnknownZone, Duration, + ubyte[], + typeof(null), +) ValueTypes; + +alias Algebraic!( ValueTypes ) Value; ///ditto + +template isSDLSink(T) +{ + enum isSink = + isOutputRange!T && + is(ElementType!(T)[] == string); +} + +string toSDLString(T)(T value) if( + is( T : Value ) || + is( T : bool ) || + is( T : string ) || + is( T : dchar ) || + is( T : 
int ) || + is( T : long ) || + is( T : float ) || + is( T : double ) || + is( T : real ) || + is( T : Date ) || + is( T : DateTimeFrac ) || + is( T : SysTime ) || + is( T : DateTimeFracUnknownZone ) || + is( T : Duration ) || + is( T : ubyte[] ) || + is( T : typeof(null) ) +) +{ + Appender!string sink; + toSDLString(value, sink); + return sink.data; +} + +void toSDLString(Sink)(Value value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + foreach(T; ValueTypes) + { + if(value.type == typeid(T)) + { + toSDLString( value.get!T(), sink ); + return; + } + } + + throw new Exception("Internal SDLang-D error: Unhandled type of Value. Contains: "~value.toString()); +} + +void toSDLString(Sink)(typeof(null) value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + sink.put("null"); +} + +void toSDLString(Sink)(bool value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + sink.put(value? "true" : "false"); +} + +//TODO: Figure out how to properly handle strings/chars containing lineSep or paraSep +void toSDLString(Sink)(string value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + sink.put('"'); + + // This loop is UTF-safe + foreach(char ch; value) + { + if (ch == '\n') sink.put(`\n`); + else if(ch == '\r') sink.put(`\r`); + else if(ch == '\t') sink.put(`\t`); + else if(ch == '\"') sink.put(`\"`); + else if(ch == '\\') sink.put(`\\`); + else + sink.put(ch); + } + + sink.put('"'); +} + +void toSDLString(Sink)(dchar value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + sink.put('\''); + + if (value == '\n') sink.put(`\n`); + else if(value == '\r') sink.put(`\r`); + else if(value == '\t') sink.put(`\t`); + else if(value == '\'') sink.put(`\'`); + else if(value == '\\') sink.put(`\\`); + else + sink.put(value); + + sink.put('\''); +} + +void toSDLString(Sink)(int value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + sink.put( "%s".format(value) ); +} + +void toSDLString(Sink)(long value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + sink.put( "%sL".format(value) ); 
+} + +void toSDLString(Sink)(float value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + sink.put( "%.10sF".format(value) ); +} + +void toSDLString(Sink)(double value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + sink.put( "%.30sD".format(value) ); +} + +void toSDLString(Sink)(real value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + sink.put( "%.30sBD".format(value) ); +} + +void toSDLString(Sink)(Date value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + sink.put(to!string(value.year)); + sink.put('/'); + sink.put(to!string(cast(int)value.month)); + sink.put('/'); + sink.put(to!string(value.day)); +} + +void toSDLString(Sink)(DateTimeFrac value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + toSDLString(value.dateTime.date, sink); + sink.put(' '); + sink.put("%.2s".format(value.dateTime.hour)); + sink.put(':'); + sink.put("%.2s".format(value.dateTime.minute)); + + if(value.dateTime.second != 0) + { + sink.put(':'); + sink.put("%.2s".format(value.dateTime.second)); + } + + if(value.fracSecs.total!"msecs" != 0) + { + sink.put('.'); + sink.put("%.3s".format(value.fracSecs.total!"msecs")); + } +} + +void toSDLString(Sink)(SysTime value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + static if (__VERSION__ >= 2067) + auto dateTimeFrac = DateTimeFrac(cast(DateTime)value, value.fracSecs); + else + auto dateTimeFrac = DateTimeFrac(cast(DateTime)value, value.fracSec); + toSDLString(dateTimeFrac, sink); + + sink.put("-"); + + auto tzString = value.timezone.name; + + // If name didn't exist, try abbreviation. + // Note that according to std.datetime docs, on Windows the + // stdName/dstName may not be properly abbreviated. + version(Windows) {} else + if(tzString == "") + { + auto tz = value.timezone; + auto stdTime = value.stdTime; + + if(tz.hasDST()) + tzString = tz.dstInEffect(stdTime)? 
tz.dstName : tz.stdName; + else + tzString = tz.stdName; + } + + if(tzString == "") + { + auto offset = value.timezone.utcOffsetAt(value.stdTime); + sink.put("GMT"); + + if(offset < seconds(0)) + { + sink.put("-"); + offset = -offset; + } + else + sink.put("+"); + + long hours, minutes; + static if (__VERSION__ >= 2066) + offset.split!("hours", "minutes")(hours, minutes); + else { + hours = offset.hours; + minutes = offset.minutes; + } + + sink.put("%.2s".format(hours)); + sink.put(":"); + sink.put("%.2s".format(minutes)); + } + else + sink.put(tzString); +} + +void toSDLString(Sink)(DateTimeFracUnknownZone value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + auto dateTimeFrac = DateTimeFrac(value.dateTime, value.fracSecs); + toSDLString(dateTimeFrac, sink); + + sink.put("-"); + sink.put(value.timeZone); +} + +void toSDLString(Sink)(Duration value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + if(value < seconds(0)) + { + sink.put("-"); + value = -value; + } + + auto days = value.total!"days"(); + if(days != 0) + { + sink.put("%s".format(days)); + sink.put("d:"); + } + + long hours, minutes, seconds, msecs; + static if (__VERSION__ >= 2066) + value.split!("hours", "minutes", "seconds", "msecs")(hours, minutes, seconds, msecs); + else { + hours = value.hours; + minutes = value.minutes; + seconds = value.seconds; + msecs = value.fracSec.msecs; + } + + sink.put("%.2s".format(hours)); + sink.put(':'); + sink.put("%.2s".format(minutes)); + sink.put(':'); + sink.put("%.2s".format(seconds)); + + if(msecs != 0) + { + sink.put('.'); + sink.put("%.3s".format(msecs)); + } +} + +void toSDLString(Sink)(ubyte[] value, ref Sink sink) if(isOutputRange!(Sink,char)) +{ + sink.put('['); + sink.put( Base64.encode(value) ); + sink.put(']'); +} + +/// This only represents terminals. Nonterminals aren't +/// constructed since the AST is directly built during parsing. 
+struct Token +{ + Symbol symbol = dub.internal.sdlang.symbol.symbol!"Error"; /// The "type" of this token + Location location; + Value value; /// Only valid when 'symbol' is symbol!"Value", otherwise null + string data; /// Original text from source + + @disable this(); + this(Symbol symbol, Location location, Value value=Value(null), string data=null) + { + this.symbol = symbol; + this.location = location; + this.value = value; + this.data = data; + } + + /// Tokens with differing symbols are always unequal. + /// Tokens with differing values are always unequal. + /// Tokens with differing Value types are always unequal. + /// Member 'location' is always ignored for comparison. + /// Member 'data' is ignored for comparison *EXCEPT* when the symbol is Ident. + bool opEquals(Token b) + { + return opEquals(b); + } + bool opEquals(ref Token b) ///ditto + { + if( + this.symbol != b.symbol || + this.value.type != b.value.type || + this.value != b.value + ) + return false; + + if(this.symbol == .symbol!"Ident") + return this.data == b.data; + + return true; + } + + bool matches(string symbolName)() + { + return this.symbol == .symbol!symbolName; + } +} + +version(sdlangUnittest) +unittest +{ + import std.stdio; + writeln("Unittesting sdlang token..."); + stdout.flush(); + + auto loc = Location("", 0, 0, 0); + auto loc2 = Location("a", 1, 1, 1); + + assert(Token(symbol!"EOL",loc) == Token(symbol!"EOL",loc )); + assert(Token(symbol!"EOL",loc) == Token(symbol!"EOL",loc2)); + assert(Token(symbol!":", loc) == Token(symbol!":", loc )); + assert(Token(symbol!"EOL",loc) != Token(symbol!":", loc )); + assert(Token(symbol!"EOL",loc,Value(null),"\n") == Token(symbol!"EOL",loc,Value(null),"\n")); + + assert(Token(symbol!"EOL",loc,Value(null),"\n") == Token(symbol!"EOL",loc,Value(null),";" )); + assert(Token(symbol!"EOL",loc,Value(null),"A" ) == Token(symbol!"EOL",loc,Value(null),"B" )); + assert(Token(symbol!":", loc,Value(null),"A" ) == Token(symbol!":", loc,Value(null),"BB")); + 
assert(Token(symbol!"EOL",loc,Value(null),"A" ) != Token(symbol!":", loc,Value(null),"A" )); + + assert(Token(symbol!"Ident",loc,Value(null),"foo") == Token(symbol!"Ident",loc,Value(null),"foo")); + assert(Token(symbol!"Ident",loc,Value(null),"foo") != Token(symbol!"Ident",loc,Value(null),"BAR")); + + assert(Token(symbol!"Value",loc,Value(null),"foo") == Token(symbol!"Value",loc, Value(null),"foo")); + assert(Token(symbol!"Value",loc,Value(null),"foo") == Token(symbol!"Value",loc2,Value(null),"foo")); + assert(Token(symbol!"Value",loc,Value(null),"foo") == Token(symbol!"Value",loc, Value(null),"BAR")); + assert(Token(symbol!"Value",loc,Value( 7),"foo") == Token(symbol!"Value",loc, Value( 7),"BAR")); + assert(Token(symbol!"Value",loc,Value( 7),"foo") != Token(symbol!"Value",loc, Value( "A"),"foo")); + assert(Token(symbol!"Value",loc,Value( 7),"foo") != Token(symbol!"Value",loc, Value( 2),"foo")); + assert(Token(symbol!"Value",loc,Value(cast(int)7)) != Token(symbol!"Value",loc, Value(cast(long)7))); + assert(Token(symbol!"Value",loc,Value(cast(float)1.2)) != Token(symbol!"Value",loc, Value(cast(double)1.2))); +} + +version(sdlangUnittest) +unittest +{ + import std.stdio; + writeln("Unittesting sdlang Value.toSDLString()..."); + stdout.flush(); + + // Bool and null + assert(Value(null ).toSDLString() == "null"); + assert(Value(true ).toSDLString() == "true"); + assert(Value(false).toSDLString() == "false"); + + // Base64 Binary + assert(Value(cast(ubyte[])"hello world".dup).toSDLString() == "[aGVsbG8gd29ybGQ=]"); + + // Integer + assert(Value(cast( int) 7).toSDLString() == "7"); + assert(Value(cast( int)-7).toSDLString() == "-7"); + assert(Value(cast( int) 0).toSDLString() == "0"); + + assert(Value(cast(long) 7).toSDLString() == "7L"); + assert(Value(cast(long)-7).toSDLString() == "-7L"); + assert(Value(cast(long) 0).toSDLString() == "0L"); + + // Floating point + assert(Value(cast(float) 1.5).toSDLString() == "1.5F"); + assert(Value(cast(float)-1.5).toSDLString() == 
"-1.5F"); + assert(Value(cast(float) 0).toSDLString() == "0F"); + + assert(Value(cast(double) 1.5).toSDLString() == "1.5D"); + assert(Value(cast(double)-1.5).toSDLString() == "-1.5D"); + assert(Value(cast(double) 0).toSDLString() == "0D"); + + assert(Value(cast(real) 1.5).toSDLString() == "1.5BD"); + assert(Value(cast(real)-1.5).toSDLString() == "-1.5BD"); + assert(Value(cast(real) 0).toSDLString() == "0BD"); + + // String + assert(Value("hello" ).toSDLString() == `"hello"`); + assert(Value(" hello ").toSDLString() == `" hello "`); + assert(Value("" ).toSDLString() == `""`); + assert(Value("hello \r\n\t\"\\ world").toSDLString() == `"hello \r\n\t\"\\ world"`); + assert(Value("日本語").toSDLString() == `"日本語"`); + + // Chars + assert(Value(cast(dchar) 'A').toSDLString() == `'A'`); + assert(Value(cast(dchar)'\r').toSDLString() == `'\r'`); + assert(Value(cast(dchar)'\n').toSDLString() == `'\n'`); + assert(Value(cast(dchar)'\t').toSDLString() == `'\t'`); + assert(Value(cast(dchar)'\'').toSDLString() == `'\''`); + assert(Value(cast(dchar)'\\').toSDLString() == `'\\'`); + assert(Value(cast(dchar) '月').toSDLString() == `'月'`); + + // Date + assert(Value(Date( 2004,10,31)).toSDLString() == "2004/10/31"); + assert(Value(Date(-2004,10,31)).toSDLString() == "-2004/10/31"); + + // DateTimeFrac w/o Frac + assert(Value(DateTimeFrac(DateTime(2004,10,31, 14,30,15))).toSDLString() == "2004/10/31 14:30:15"); + assert(Value(DateTimeFrac(DateTime(2004,10,31, 1, 2, 3))).toSDLString() == "2004/10/31 01:02:03"); + assert(Value(DateTimeFrac(DateTime(-2004,10,31, 14,30,15))).toSDLString() == "-2004/10/31 14:30:15"); + + // DateTimeFrac w/ Frac + assert(Value(DateTimeFrac(DateTime(2004,10,31, 14,30,15), FracSec.from!"msecs"(123))).toSDLString() == "2004/10/31 14:30:15.123"); + assert(Value(DateTimeFrac(DateTime(2004,10,31, 14,30,15), FracSec.from!"msecs"(120))).toSDLString() == "2004/10/31 14:30:15.120"); + assert(Value(DateTimeFrac(DateTime(2004,10,31, 14,30,15), 
FracSec.from!"msecs"(100))).toSDLString() == "2004/10/31 14:30:15.100"); + assert(Value(DateTimeFrac(DateTime(2004,10,31, 14,30,15), FracSec.from!"msecs"( 12))).toSDLString() == "2004/10/31 14:30:15.012"); + assert(Value(DateTimeFrac(DateTime(2004,10,31, 14,30,15), FracSec.from!"msecs"( 1))).toSDLString() == "2004/10/31 14:30:15.001"); + assert(Value(DateTimeFrac(DateTime(-2004,10,31, 14,30,15), FracSec.from!"msecs"(123))).toSDLString() == "-2004/10/31 14:30:15.123"); + + // DateTimeFracUnknownZone + assert(Value(DateTimeFracUnknownZone(DateTime(2004,10,31, 14,30,15), FracSec.from!"msecs"(123), "Foo/Bar")).toSDLString() == "2004/10/31 14:30:15.123-Foo/Bar"); + + // SysTime + assert(Value(SysTime(DateTime(2004,10,31, 14,30,15), new immutable SimpleTimeZone( hours(0) ))).toSDLString() == "2004/10/31 14:30:15-GMT+00:00"); + assert(Value(SysTime(DateTime(2004,10,31, 1, 2, 3), new immutable SimpleTimeZone( hours(0) ))).toSDLString() == "2004/10/31 01:02:03-GMT+00:00"); + assert(Value(SysTime(DateTime(2004,10,31, 14,30,15), new immutable SimpleTimeZone( hours(2)+minutes(10) ))).toSDLString() == "2004/10/31 14:30:15-GMT+02:10"); + assert(Value(SysTime(DateTime(2004,10,31, 14,30,15), new immutable SimpleTimeZone(-hours(5)-minutes(30) ))).toSDLString() == "2004/10/31 14:30:15-GMT-05:30"); + assert(Value(SysTime(DateTime(2004,10,31, 14,30,15), new immutable SimpleTimeZone( hours(2)+minutes( 3) ))).toSDLString() == "2004/10/31 14:30:15-GMT+02:03"); + assert(Value(SysTime(DateTime(2004,10,31, 14,30,15), FracSec.from!"msecs"(123), new immutable SimpleTimeZone( hours(0) ))).toSDLString() == "2004/10/31 14:30:15.123-GMT+00:00"); + + // Duration + assert( "12:14:42" == Value( days( 0)+hours(12)+minutes(14)+seconds(42)+msecs( 0)).toSDLString()); + assert("-12:14:42" == Value(-days( 0)-hours(12)-minutes(14)-seconds(42)-msecs( 0)).toSDLString()); + assert( "00:09:12" == Value( days( 0)+hours( 0)+minutes( 9)+seconds(12)+msecs( 0)).toSDLString()); + assert( "00:00:01.023" == Value( 
days( 0)+hours( 0)+minutes( 0)+seconds( 1)+msecs( 23)).toSDLString()); + assert( "23d:05:21:23.532" == Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs(532)).toSDLString()); + assert( "23d:05:21:23.530" == Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs(530)).toSDLString()); + assert( "23d:05:21:23.500" == Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs(500)).toSDLString()); + assert("-23d:05:21:23.532" == Value(-days(23)-hours( 5)-minutes(21)-seconds(23)-msecs(532)).toSDLString()); + assert("-23d:05:21:23.500" == Value(-days(23)-hours( 5)-minutes(21)-seconds(23)-msecs(500)).toSDLString()); + assert( "23d:05:21:23" == Value( days(23)+hours( 5)+minutes(21)+seconds(23)+msecs( 0)).toSDLString()); +} diff --git a/source/dub/internal/sdlang/util.d b/source/dub/internal/sdlang/util.d new file mode 100644 index 0000000..c2e2ac7 --- /dev/null +++ b/source/dub/internal/sdlang/util.d @@ -0,0 +1,105 @@ +// SDLang-D +// Written in the D programming language. + +module dub.internal.sdlang.util; + +version (Have_sdlang_d) public import sdlang.util; +else: + +import std.algorithm; +import std.datetime; +import std.stdio; +import std.string; + +import dub.internal.sdlang.token; + +enum sdlangVersion = "0.9.1"; + +alias immutable(ubyte)[] ByteString; + +auto startsWith(T)(string haystack, T needle) + if( is(T:ByteString) || is(T:string) ) +{ + return std.algorithm.startsWith( cast(ByteString)haystack, cast(ByteString)needle ); +} + +struct Location +{ + string file; /// Filename (including path) + int line; /// Zero-indexed + int col; /// Zero-indexed, Tab counts as 1 + size_t index; /// Index into the source + + this(int line, int col, int index) + { + this.line = line; + this.col = col; + this.index = index; + } + + this(string file, int line, int col, int index) + { + this.file = file; + this.line = line; + this.col = col; + this.index = index; + } + + string toString() + { + return "%s(%s:%s)".format(file, line+1, col+1); + } +} + +void removeIndex(E)(ref 
E[] arr, ptrdiff_t index) +{ + arr = arr[0..index] ~ arr[index+1..$]; +} + +void trace(string file=__FILE__, size_t line=__LINE__, TArgs...)(TArgs args) +{ + version(sdlangTrace) + { + writeln(file, "(", line, "): ", args); + stdout.flush(); + } +} + +string toString(TypeInfo ti) +{ + if (ti == typeid( bool )) return "bool"; + else if(ti == typeid( string )) return "string"; + else if(ti == typeid( dchar )) return "dchar"; + else if(ti == typeid( int )) return "int"; + else if(ti == typeid( long )) return "long"; + else if(ti == typeid( float )) return "float"; + else if(ti == typeid( double )) return "double"; + else if(ti == typeid( real )) return "real"; + else if(ti == typeid( Date )) return "Date"; + else if(ti == typeid( DateTimeFrac )) return "DateTimeFrac"; + else if(ti == typeid( DateTimeFracUnknownZone )) return "DateTimeFracUnknownZone"; + else if(ti == typeid( SysTime )) return "SysTime"; + else if(ti == typeid( Duration )) return "Duration"; + else if(ti == typeid( ubyte[] )) return "ubyte[]"; + else if(ti == typeid( typeof(null) )) return "null"; + + return "{unknown}"; +} + +enum BOM { + UTF8, /// UTF-8 + UTF16LE, /// UTF-16 (little-endian) + UTF16BE, /// UTF-16 (big-endian) + UTF32LE, /// UTF-32 (little-endian) + UTF32BE, /// UTF-32 (big-endian) +} + +enum NBOM = __traits(allMembers, BOM).length; +immutable ubyte[][NBOM] ByteOrderMarks = +[ + [0xEF, 0xBB, 0xBF], //UTF8 + [0xFF, 0xFE], //UTF16LE + [0xFE, 0xFF], //UTF16BE + [0xFF, 0xFE, 0x00, 0x00], //UTF32LE + [0x00, 0x00, 0xFE, 0xFF] //UTF32BE +]; diff --git a/source/dub/internal/utils.d b/source/dub/internal/utils.d index de7ebc6..e1e2175 100644 --- a/source/dub/internal/utils.d +++ b/source/dub/internal/utils.d @@ -11,28 +11,31 @@ import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.data.json; import dub.internal.vibecompat.inet.url; +import dub.compilers.buildsettings : BuildSettings; import dub.version_; // todo: cleanup imports. 
-import std.algorithm : startsWith; +import core.thread; +import std.algorithm : canFind, startsWith; import std.array; import std.conv; import std.exception; import std.file; import std.process; import std.string; +import std.traits : isIntegral; import std.typecons; import std.zip; version(DubUseCurl) import std.net.curl; +private Path[] temporary_files; + Path getTempDir() { return Path(std.file.tempDir()); } -private Path[] temporary_files; - Path getTempFile(string prefix, string extension = null) { import std.uuid : randomUUID; @@ -42,11 +45,55 @@ return path; } +/** + Obtain a lock for a file at the given path. If the file cannot be locked + within the given duration, an exception is thrown. The file will be created + if it does not yet exist. Deleting the file is not safe as another process + could create a new file with the same name. + The returned lock will get unlocked upon destruction. + + Params: + path = path to file that gets locked + timeout = duration after which locking failed + Returns: + The locked file or an Exception on timeout. +*/ +auto lockFile(string path, Duration timeout) +{ + import std.datetime, std.stdio : File; + import std.algorithm : move; + + // Just a wrapper to hide (and destruct) the locked File. + static struct LockFile + { + // The Lock can't be unlinked as someone could try to lock an already + // opened fd while a new file with the same name gets created. + // Exclusive filesystem locks (O_EXCL, mkdir) could be deleted but + // aren't automatically freed when a process terminates, see #1149. 
+ private File f; + } + + auto file = File(path, "w"); + auto t0 = Clock.currTime(); + auto dur = 1.msecs; + while (true) + { + if (file.tryLock()) + return LockFile(move(file)); + enforce(Clock.currTime() - t0 < timeout, "Failed to lock '"~path~"'."); + if (dur < 1024.msecs) // exponentially increase sleep time + dur *= 2; + Thread.sleep(dur); + } +} + static ~this() { foreach (path; temporary_files) { - std.file.remove(path.toNativeString()); + auto spath = path.toNativeString(); + if (spath.exists) + std.file.remove(spath); } } @@ -61,7 +108,7 @@ import std.random; auto fname = p ~ format("__dub_write_test_%08X", uniform(0, uint.max)); if (create_if_missing && !exists(p.toNativeString())) mkdirRecurse(p.toNativeString()); - try openFile(fname, FileMode.CreateTrunc).close(); + try openFile(fname, FileMode.createTrunc).close(); catch (Exception) return false; remove(fname.toNativeString()); return true; @@ -69,14 +116,14 @@ Json jsonFromFile(Path file, bool silent_fail = false) { if( silent_fail && !existsFile(file) ) return Json.emptyObject; - auto f = openFile(file.toNativeString(), FileMode.Read); + auto f = openFile(file.toNativeString(), FileMode.read); scope(exit) f.close(); auto text = stripUTF8Bom(cast(string)f.readAll()); return parseJsonString(text, file.toNativeString()); } Json jsonFromZip(Path zip, string filename) { - auto f = openFile(zip, FileMode.Read); + auto f = openFile(zip, FileMode.read); ubyte[] b = new ubyte[cast(size_t)f.size]; f.rawRead(b); f.close(); @@ -87,11 +134,27 @@ void writeJsonFile(Path path, Json json) { - auto f = openFile(path, FileMode.CreateTrunc); + auto f = openFile(path, FileMode.createTrunc); scope(exit) f.close(); f.writePrettyJsonString(json); } +/// Performs a write->delete->rename sequence to atomically "overwrite" the destination file +void atomicWriteJsonFile(Path path, Json json) +{ + import std.random : uniform; + auto tmppath = path[0 .. 
$-1] ~ format("%s.%s.tmp", path.head, uniform(0, int.max)); + auto f = openFile(tmppath, FileMode.createTrunc); + scope (failure) { + f.close(); + removeFile(tmppath); + } + f.writePrettyJsonString(json); + f.close(); + if (existsFile(path)) removeFile(path); + moveFile(tmppath, path); +} + bool isPathFromZip(string p) { enforce(p.length > 0); return p[$-1] == '/'; @@ -103,18 +166,71 @@ return fi.isDirectory; } +void runCommand(string command, string[string] env = null) +{ + runCommands((&command)[0 .. 1], env); +} + void runCommands(in string[] commands, string[string] env = null) { + import std.stdio : stdin, stdout, stderr, File; + + version(Windows) enum nullFile = "NUL"; + else version(Posix) enum nullFile = "/dev/null"; + else static assert(0); + + auto childStdout = stdout; + auto childStderr = stderr; + auto config = Config.retainStdout | Config.retainStderr; + + // Disable child's stdout/stderr depending on LogLevel + auto logLevel = getLogLevel(); + if(logLevel >= LogLevel.warn) + childStdout = File(nullFile, "w"); + if(logLevel >= LogLevel.none) + childStderr = File(nullFile, "w"); + foreach(cmd; commands){ logDiagnostic("Running %s", cmd); Pid pid; - if( env !is null ) pid = spawnShell(cmd, env); - else pid = spawnShell(cmd); + pid = spawnShell(cmd, stdin, childStdout, childStderr, env, config); auto exitcode = pid.wait(); enforce(exitcode == 0, "Command failed with exit code "~to!string(exitcode)); } } +version(DubUseCurl) { + /++ + Exception thrown on HTTP request failures, e.g. 404 Not Found. + +/ + static if (__VERSION__ <= 2075) class HTTPStatusException : CurlException + { + /++ + Params: + status = The HTTP status code. + msg = The message for the exception. + file = The file where the exception occurred. + line = The line number where the exception occurred. + next = The previous exception in the chain of exceptions, if any. 
+ +/ + @safe pure nothrow + this( + int status, + string msg, + string file = __FILE__, + size_t line = __LINE__, + Throwable next = null) + { + this.status = status; + super(msg, file, line, next); + } + + int status; /// The HTTP status code + } +} else version (Have_vibe_d_http) { + public import vibe.http.common : HTTPStatusException; +} + /** Downloads a file from the specified URL. @@ -127,10 +243,19 @@ auto conn = HTTP(); setupHTTPClient(conn); logDebug("Storing %s...", url); - std.net.curl.download(url, filename, conn); - enforce(conn.statusLine.code < 400, - format("Failed to download %s: %s %s", - url, conn.statusLine.code, conn.statusLine.reason)); + static if (__VERSION__ <= 2075) + { + try + std.net.curl.download(url, filename, conn); + catch (CurlException e) + { + if (e.msg.canFind("404")) + throw new HTTPStatusException(404, e.msg); + throw e; + } + } + else + std.net.curl.download(url, filename, conn); } else version (Have_vibe_d) { import vibe.inet.urltransfer; vibe.inet.urltransfer.download(url, filename); @@ -148,16 +273,24 @@ auto conn = HTTP(); setupHTTPClient(conn); logDebug("Getting %s...", url); - auto ret = cast(ubyte[])get(url, conn); - enforce(conn.statusLine.code < 400, - format("Failed to GET %s: %s %s", - url, conn.statusLine.code, conn.statusLine.reason)); - return ret; + static if (__VERSION__ <= 2075) + { + try + return cast(ubyte[])get(url, conn); + catch (CurlException e) + { + if (e.msg.canFind("404")) + throw new HTTPStatusException(404, e.msg); + throw e; + } + } + else + return cast(ubyte[])get(url, conn); } else version (Have_vibe_d) { import vibe.inet.urltransfer; import vibe.stream.operations; ubyte[] ret; - download(url, (scope input) { ret = input.readAll(); }); + vibe.inet.urltransfer.download(url, (scope input) { ret = input.readAll(); }); return ret; } else assert(false); } @@ -192,6 +325,9 @@ auto proxy = environment.get("http_proxy", null); if (proxy.length) conn.proxy = proxy; + auto noProxy = 
environment.get("no_proxy", null); + if (noProxy.length) conn.handle.set(CurlOption.noproxy, noProxy); + conn.addRequestHeader("User-Agent", "dub/"~getDUBVersion()~" (std.net.curl; +https://github.com/rejectedsoftware/dub)"); } } @@ -253,3 +389,145 @@ return strings.partition3!((a, b) => a.length + threshold < b.length)(input)[1] .schwartzSort!(p => levenshteinDistance(input.toUpper, p.toUpper)); } + +/** + If T is a bitfield-style enum, this function returns a string range + listing the names of all members included in the given value. + + Example: + --------- + enum Bits { + none = 0, + a = 1<<0, + b = 1<<1, + c = 1<<2, + a_c = a | c, + } + + assert( bitFieldNames(Bits.none).equals(["none"]) ); + assert( bitFieldNames(Bits.a).equals(["a"]) ); + assert( bitFieldNames(Bits.a_c).equals(["a", "c", "a_c"]) ); + --------- + */ +auto bitFieldNames(T)(T value) if(is(T==enum) && isIntegral!T) +{ + import std.algorithm : filter, map; + import std.conv : to; + import std.traits : EnumMembers; + + return [ EnumMembers!(T) ] + .filter!(member => member==0? value==0 : (value & member) == member) + .map!(member => to!string(member)); +} + + +bool isIdentChar(dchar ch) +{ + import std.ascii : isAlphaNum; + return isAlphaNum(ch) || ch == '_'; +} + +string stripDlangSpecialChars(string s) +{ + import std.array : appender; + auto ret = appender!string(); + foreach(ch; s) + ret.put(isIdentChar(ch) ? 
ch : '_'); + return ret.data; +} + +string determineModuleName(BuildSettings settings, Path file, Path base_path) +{ + import std.algorithm : map; + + assert(base_path.absolute); + if (!file.absolute) file = base_path ~ file; + + size_t path_skip = 0; + foreach (ipath; settings.importPaths.map!(p => Path(p))) { + if (!ipath.absolute) ipath = base_path ~ ipath; + assert(!ipath.empty); + if (file.startsWith(ipath) && ipath.length > path_skip) + path_skip = ipath.length; + } + + enforce(path_skip > 0, + format("Source file '%s' not found in any import path.", file.toNativeString())); + + auto mpath = file[path_skip .. file.length]; + auto ret = appender!string; + + //search for module keyword in file + string moduleName = getModuleNameFromFile(file.to!string); + + if(moduleName.length) return moduleName; + + //create module name from path + foreach (i; 0 .. mpath.length) { + import std.path; + auto p = mpath[i].toString(); + if (p == "package.d") break; + if (i > 0) ret ~= "."; + if (i+1 < mpath.length) ret ~= p; + else ret ~= p.baseName(".d"); + } + + return ret.data; +} + +/** + * Search for module keyword in D Code + */ +string getModuleNameFromContent(string content) { + import std.regex; + import std.string; + + content = content.strip; + if (!content.length) return null; + + static bool regex_initialized = false; + static Regex!char comments_pattern, module_pattern; + + if (!regex_initialized) { + comments_pattern = regex(`//[^\r\n]*\r?\n?|/\*.*?\*/|/\+.*\+/`, "g"); + module_pattern = regex(`module\s+([\w\.]+)\s*;`, "g"); + regex_initialized = true; + } + + content = replaceAll(content, comments_pattern, " "); + auto result = matchFirst(content, module_pattern); + + if (!result.empty) return result[1]; + + return null; +} + +unittest { + assert(getModuleNameFromContent("") == ""); + assert(getModuleNameFromContent("module myPackage.myModule;") == "myPackage.myModule"); + assert(getModuleNameFromContent("module \t\n myPackage.myModule \t\r\n;") == 
"myPackage.myModule"); + assert(getModuleNameFromContent("// foo\nmodule bar;") == "bar"); + assert(getModuleNameFromContent("/*\nfoo\n*/\nmodule bar;") == "bar"); + assert(getModuleNameFromContent("/+\nfoo\n+/\nmodule bar;") == "bar"); + assert(getModuleNameFromContent("/***\nfoo\n***/\nmodule bar;") == "bar"); + assert(getModuleNameFromContent("/+++\nfoo\n+++/\nmodule bar;") == "bar"); + assert(getModuleNameFromContent("// module foo;\nmodule bar;") == "bar"); + assert(getModuleNameFromContent("/* module foo; */\nmodule bar;") == "bar"); + assert(getModuleNameFromContent("/+ module foo; +/\nmodule bar;") == "bar"); + assert(getModuleNameFromContent("/+ /+ module foo; +/ +/\nmodule bar;") == "bar"); + assert(getModuleNameFromContent("// module foo;\nmodule bar; // module foo;") == "bar"); + assert(getModuleNameFromContent("// module foo;\nmodule// module foo;\nbar//module foo;\n;// module foo;") == "bar"); + assert(getModuleNameFromContent("/* module foo; */\nmodule/*module foo;*/bar/*module foo;*/;") == "bar", getModuleNameFromContent("/* module foo; */\nmodule/*module foo;*/bar/*module foo;*/;")); + assert(getModuleNameFromContent("/+ /+ module foo; +/ module foo; +/ module bar;") == "bar"); + //assert(getModuleNameFromContent("/+ /+ module foo; +/ module foo; +/ module bar/++/;") == "bar"); // nested comments require a context-free parser! 
+} + +/** + * Search for module keyword in file + */ +string getModuleNameFromFile(string filePath) { + string fileContent = filePath.readText; + + logDiagnostic("Get module name from path: " ~ filePath); + return getModuleNameFromContent(fileContent); +} diff --git a/source/dub/internal/vibecompat/core/file.d b/source/dub/internal/vibecompat/core/file.d index 1ca6a25..0dcbe14 100644 --- a/source/dub/internal/vibecompat/core/file.d +++ b/source/dub/internal/vibecompat/core/file.d @@ -17,7 +17,7 @@ import std.exception; import std.file; import std.path; -static import std.stream; +import std.stdio; import std.string; import std.utf; @@ -25,50 +25,50 @@ /* Add output range support to File */ struct RangeFile { - std.stream.File file; +@safe: + std.stdio.File file; - void put(in ubyte[] bytes) { file.writeExact(bytes.ptr, bytes.length); } - void put(in char[] str) { put(cast(ubyte[])str); } - void put(char ch) { put((&ch)[0 .. 1]); } + void put(in ubyte[] bytes) @trusted { file.rawWrite(bytes); } + void put(in char[] str) { put(cast(const(ubyte)[])str); } + void put(char ch) @trusted { put((&ch)[0 .. 1]); } void put(dchar ch) { char[4] chars; put(chars[0 .. 
encode(chars, ch)]); } ubyte[] readAll() { - file.seek(0, std.stream.SeekPos.End); - auto sz = file.position; + auto sz = this.size; enforce(sz <= size_t.max, "File is too big to read to memory."); - file.seek(0, std.stream.SeekPos.Set); + () @trusted { file.seek(0, SEEK_SET); } (); auto ret = new ubyte[cast(size_t)sz]; - file.readExact(ret.ptr, ret.length); + rawRead(ret); return ret; } - void rawRead(ubyte[] dst) { file.readExact(dst.ptr, dst.length); } + void rawRead(ubyte[] dst) @trusted { enforce(file.rawRead(dst).length == dst.length, "Failed to readall bytes from file."); } void write(string str) { put(str); } - void close() { file.close(); } - void flush() { file.flush(); } - @property ulong size() { return file.size; } + void close() @trusted { file.close(); } + void flush() @trusted { file.flush(); } + @property ulong size() @trusted { return file.size; } } /** Opens a file stream with the specified mode. */ -RangeFile openFile(Path path, FileMode mode = FileMode.Read) +RangeFile openFile(Path path, FileMode mode = FileMode.read) { - std.stream.FileMode fmode; + string fmode; final switch(mode){ - case FileMode.Read: fmode = std.stream.FileMode.In; break; - case FileMode.ReadWrite: fmode = std.stream.FileMode.Out; break; - case FileMode.CreateTrunc: fmode = std.stream.FileMode.OutNew; break; - case FileMode.Append: fmode = std.stream.FileMode.Append; break; + case FileMode.read: fmode = "rb"; break; + case FileMode.readWrite: fmode = "r+b"; break; + case FileMode.createTrunc: fmode = "wb"; break; + case FileMode.append: fmode = "ab"; break; } - auto ret = new std.stream.File(path.toNativeString(), fmode); + auto ret = std.stdio.File(path.toNativeString(), fmode); assert(ret.isOpen); return RangeFile(ret); } /// ditto -RangeFile openFile(string path, FileMode mode = FileMode.Read) +RangeFile openFile(string path, FileMode mode = FileMode.read) { return openFile(Path(path), mode); } @@ -113,20 +113,26 @@ removeFile(to); } - .copy(from.toNativeString(), 
to.toNativeString()); - - // try to preserve ownership/permissions in Posix - version (Posix) { - import core.sys.posix.sys.stat; - import core.sys.posix.unistd; - import std.utf; - auto cspath = toUTFz!(const(char)*)(from.toNativeString()); - auto cdpath = toUTFz!(const(char)*)(to.toNativeString()); - stat_t st; - enforce(stat(cspath, &st) == 0, "Failed to get attributes of source file."); - if (chown(cdpath, st.st_uid, st.st_gid) != 0) - st.st_mode &= ~(S_ISUID | S_ISGID); - chmod(cdpath, st.st_mode); + static if (is(PreserveAttributes)) + { + .copy(from.toNativeString(), to.toNativeString(), PreserveAttributes.yes); + } + else + { + .copy(from.toNativeString(), to.toNativeString()); + // try to preserve ownership/permissions in Posix + version (Posix) { + import core.sys.posix.sys.stat; + import core.sys.posix.unistd; + import std.utf; + auto cspath = toUTFz!(const(char)*)(from.toNativeString()); + auto cdpath = toUTFz!(const(char)*)(to.toNativeString()); + stat_t st; + enforce(stat(cspath, &st) == 0, "Failed to get attributes of source file."); + if (chown(cdpath, st.st_uid, st.st_gid) != 0) + st.st_mode &= ~(S_ISUID | S_ISGID); + chmod(cdpath, st.st_mode); + } } } /// ditto @@ -140,8 +146,14 @@ // guess whether 2 files are identical, ignores filename and content private bool sameFile(Path a, Path b) { - static assert(__traits(allMembers, FileInfo)[0] == "name"); - return getFileInfo(a).tupleof[1 .. $] == getFileInfo(b).tupleof[1 .. $]; + version (Posix) { + auto st_a = std.file.DirEntry(a.toNativeString).statBuf; + auto st_b = std.file.DirEntry(b.toNativeString).statBuf; + return st_a == st_b; + } else { + static assert(__traits(allMembers, FileInfo)[0] == "name"); + return getFileInfo(a).tupleof[1 .. $] == getFileInfo(b).tupleof[1 .. 
$]; + } } /** @@ -152,7 +164,7 @@ if (existsFile(to)) { enforce(overwrite, "Destination file already exists."); if (auto fe = collectException!FileException(removeFile(to))) { - version (Windows) if (sameFile(from, to)) return; + if (sameFile(from, to)) return; throw fe; } } @@ -298,13 +310,13 @@ */ enum FileMode { /// The file is opened read-only. - Read, + read, /// The file is opened for read-write random access. - ReadWrite, + readWrite, /// The file is truncated if it exists and created otherwise and the opened for read-write access. - CreateTrunc, + createTrunc, /// The file is opened for appending data to it and created if it does not exist. - Append + append } /** diff --git a/source/dub/internal/vibecompat/core/log.d b/source/dub/internal/vibecompat/core/log.d index 5d0c705..e48569e 100644 --- a/source/dub/internal/vibecompat/core/log.d +++ b/source/dub/internal/vibecompat/core/log.d @@ -66,18 +66,18 @@ txt.reserve(256); formattedWrite(txt, fmt, args); - auto threadid = cast(ulong)cast(void*)Thread.getThis(); - auto fiberid = cast(ulong)cast(void*)Fiber.getThis(); + auto threadid = () @trusted { return cast(ulong)cast(void*)Thread.getThis(); } (); + auto fiberid = () @trusted { return cast(ulong)cast(void*)Fiber.getThis(); } (); threadid ^= threadid >> 32; fiberid ^= fiberid >> 32; - if( level >= s_minLevel ){ - if (level == LogLevel.info) { - stdout.writeln(txt.data); - stdout.flush(); - } else { - stderr.writeln(txt.data); - stderr.flush(); + if (level >= s_minLevel) { + File output; + if (level == LogLevel.info) () @trusted { output = stdout; } (); + else () @trusted { output = stderr; } (); + if (output.isOpen) { + output.writeln(txt.data); + output.flush(); } } } catch( Exception e ){ diff --git a/source/dub/internal/vibecompat/data/json.d b/source/dub/internal/vibecompat/data/json.d index 22e12f1..a2705a3 100644 --- a/source/dub/internal/vibecompat/data/json.d +++ b/source/dub/internal/vibecompat/data/json.d @@ -1,20 +1,36 @@ +/** + JSON 
serialization and value handling. + + This module provides the Json struct for reading, writing and manipulating + JSON values. De(serialization) of arbitrary D types is also supported and + is recommended for handling JSON in performance sensitive applications. + + Copyright: © 2012-2015 RejectedSoftware e.K. + License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. + Authors: Sönke Ludwig +*/ module dub.internal.vibecompat.data.json; -version (Have_vibe_d) public import vibe.data.json; +version (Have_vibe_d_data) public import vibe.data.json; else: import dub.internal.vibecompat.data.utils; +public import dub.internal.vibecompat.data.serialization; + +public import std.json : JSONException; +import std.algorithm : equal, min; import std.array; import std.conv; import std.datetime; import std.exception; import std.format; -import std.string; import std.range; +import std.string : format; import std.traits; version = JsonLineNumbers; +version = VibeJsonFieldNames; /******************************************************************************/ @@ -28,52 +44,62 @@ behave mostly like values in ECMA script in the way that you can transparently perform operations on them. However, strict typechecking is done, so that operations between differently typed JSON values will throw - an exception. Additionally, an explicit cast or using get!() or to!() is + a JSONException. Additionally, an explicit cast or using get!() or to!() is required to convert a JSON value to the corresponding static D type. */ struct Json { private { - union { - bool m_bool; - long m_int; - double m_float; - string m_string; - Json[] m_array; - Json[string] m_object; - }; + // putting all fields in a union results in many false pointers leading to + // memory leaks and, worse, std.algorithm.swap triggering an assertion + // because of internal pointers. This crude workaround seems to fix + // the issues. 
+ void*[2] m_data; + ref inout(T) getDataAs(T)() inout { static assert(T.sizeof <= m_data.sizeof); return *cast(inout(T)*)m_data.ptr; } + @property ref inout(long) m_int() inout { return getDataAs!long(); } + @property ref inout(double) m_float() inout { return getDataAs!double(); } + @property ref inout(bool) m_bool() inout { return getDataAs!bool(); } + @property ref inout(string) m_string() inout { return getDataAs!string(); } + @property ref inout(Json[string]) m_object() inout { return getDataAs!(Json[string])(); } + @property ref inout(Json[]) m_array() inout { return getDataAs!(Json[])(); } + Type m_type = Type.undefined; - uint m_magic = 0x1337f00d; // workaround for Appender bug - string m_name; + + version (VibeJsonFieldNames) { + uint m_magic = 0x1337f00d; // works around Appender bug (DMD BUG 10690/10859/11357) + string m_name; + string m_fileName; + } } /** Represents the run time type of a JSON value. */ enum Type { - /// A non-existent value in a JSON object - undefined, - /// Null value - null_, - /// Boolean value - bool_, - /// 64-bit integer value - int_, - /// 64-bit floating point value - float_, - /// UTF-8 string - string, - /// Array of JSON values - array, - /// JSON object aka. dictionary from string to Json - object + undefined, /// A non-existent value in a JSON object + null_, /// Null value + bool_, /// Boolean value + int_, /// 64-bit integer value + float_, /// 64-bit floating point value + string, /// UTF-8 string + array, /// Array of JSON values + object, /// JSON object aka. 
dictionary from string to Json + + Undefined = undefined, /// Compatibility alias - will be deprecated soon + Null = null_, /// Compatibility alias - will be deprecated soon + Bool = bool_, /// Compatibility alias - will be deprecated soon + Int = int_, /// Compatibility alias - will be deprecated soon + Float = float_, /// Compatibility alias - will be deprecated soon + String = string, /// Compatibility alias - will be deprecated soon + Array = array, /// Compatibility alias - will be deprecated soon + Object = object /// Compatibility alias - will be deprecated soon } - /// New JSON value of Type.undefined + /// New JSON value of Type.Undefined static @property Json undefined() { return Json(); } - /// New JSON value of Type.object + /// New JSON value of Type.Object static @property Json emptyObject() { return Json(cast(Json[string])null); } - /// New JSON value of Type.array + /// New JSON value of Type.Array static @property Json emptyArray() { return Json(cast(Json[])null); } version(JsonLineNumbers) int line; @@ -85,7 +111,17 @@ /// ditto this(bool v) { m_type = Type.bool_; m_bool = v; } /// ditto - this(int v) { m_type = Type.int_; m_int = v; } + this(byte v) { this(cast(long)v); } + /// ditto + this(ubyte v) { this(cast(long)v); } + /// ditto + this(short v) { this(cast(long)v); } + /// ditto + this(ushort v) { this(cast(long)v); } + /// ditto + this(int v) { this(cast(long)v); } + /// ditto + this(uint v) { this(cast(long)v); } /// ditto this(long v) { m_type = Type.int_; m_int = v; } /// ditto @@ -100,7 +136,8 @@ /** Allows assignment of D values to a JSON value. 
*/ - ref Json opAssign(Json v){ + ref Json opAssign(Json v) + { m_type = v.m_type; final switch(m_type){ case Type.undefined: m_string = null; break; @@ -109,14 +146,8 @@ case Type.int_: m_int = v.m_int; break; case Type.float_: m_float = v.m_float; break; case Type.string: m_string = v.m_string; break; - case Type.array: - m_array = v.m_array; - if (m_magic == 0x1337f00d) { foreach (ref av; m_array) av.m_name = m_name; } else m_name = null; - break; - case Type.object: - m_object = v.m_object; - if (m_magic == 0x1337f00d) { foreach (k, ref av; m_object) av.m_name = m_name ~ "." ~ k; } else m_name = null; - break; + case Type.array: opAssign(v.m_array); break; + case Type.object: opAssign(v.m_object); break; } return this; } @@ -137,7 +168,7 @@ { m_type = Type.array; m_array = v; - if (m_magic == 0x1337f00d) foreach (ref av; m_array) av.m_name = m_name; + version (VibeJsonFieldNames) { if (m_magic == 0x1337f00d) { foreach (idx, ref av; m_array) av.m_name = format("%s[%s]", m_name, idx); } else m_name = null; } return v; } /// ditto @@ -145,44 +176,113 @@ { m_type = Type.object; m_object = v; - if (m_magic == 0x1337f00d) foreach (k, ref av; m_object) av.m_name = m_name ~ "." ~ k; + version (VibeJsonFieldNames) { if (m_magic == 0x1337f00d) { foreach (key, ref av; m_object) av.m_name = format("%s.%s", m_name, key); } else m_name = null; } return v; } /** + Allows removal of values from Type.Object Json objects. + */ + void remove(string item) { checkType!(Json[string])(); m_object.remove(item); } + + /** The current type id of this JSON object. */ @property Type type() const { return m_type; } /** + Clones a JSON value recursively. 
+ */ + Json clone() + const { + final switch (m_type) { + case Type.undefined: return Json.undefined; + case Type.null_: return Json(null); + case Type.bool_: return Json(m_bool); + case Type.int_: return Json(m_int); + case Type.float_: return Json(m_float); + case Type.string: return Json(m_string); + case Type.array: + auto ret = Json.emptyArray; + foreach (v; this) ret ~= v.clone(); + return ret; + case Type.object: + auto ret = Json.emptyObject; + foreach (string name, v; this) ret[name] = v.clone(); + return ret; + } + } + + /** + Check whether the JSON object contains the given key and if yes, + return a pointer to the corresponding object, otherwise return `null`. + */ + inout(Json*) opBinaryRight(string op : "in")(string key) inout { + checkType!(Json[string])(); + return key in m_object; + } + + /** Allows direct indexing of array typed JSON values. */ ref inout(Json) opIndex(size_t idx) inout { checkType!(Json[])(); return m_array[idx]; } + /// + unittest { + Json value = Json.emptyArray; + value ~= 1; + value ~= true; + value ~= "foo"; + assert(value[0] == 1); + assert(value[1] == true); + assert(value[2] == "foo"); + } + + /** Allows direct indexing of object typed JSON values using a string as the key. 
*/ - const(Json) opIndex(string key) const { + const(Json) opIndex(string key) + const { checkType!(Json[string])(); if( auto pv = key in m_object ) return *pv; Json ret = Json.undefined; ret.m_string = key; + version (VibeJsonFieldNames) ret.m_name = format("%s.%s", m_name, key); return ret; } /// ditto - ref Json opIndex(string key){ + ref Json opIndex(string key) + { checkType!(Json[string])(); if( auto pv = key in m_object ) return *pv; - m_object[key] = Json(); - m_object[key].m_type = Type.undefined; // DMDBUG: AAs are teh $H1T!!!11 + if (m_object is null) { + m_object = ["": Json.init]; + m_object.remove(""); + } + m_object[key] = Json.init; + assert(m_object !is null); + assert(key in m_object, "Failed to insert key '"~key~"' into AA!?"); + m_object[key].m_type = Type.undefined; // DMDBUG: AAs are the $H1T!!!11 assert(m_object[key].type == Type.undefined); - m_object[key].m_name = m_name ~ "." ~ key; m_object[key].m_string = key; + version (VibeJsonFieldNames) m_object[key].m_name = format("%s.%s", m_name, key); return m_object[key]; } + /// + unittest { + Json value = Json.emptyObject; + value["a"] = 1; + value["b"] = true; + value["c"] = "foo"; + assert(value["a"] == 1); + assert(value["b"] == true); + assert(value["c"] == "foo"); + } + /** Returns a slice of a JSON array. */ @@ -191,16 +291,11 @@ inout(Json[]) opSlice(size_t from, size_t to) inout { checkType!(Json[])(); return m_array[from .. to]; } /** - Removes an entry from an object. - */ - void remove(string item) { checkType!(Json[string])(); m_object.remove(item); } - - /** Returns the number of entries of string, array or object typed JSON values. 
*/ @property size_t length() const { - checkType!(string, Json[], Json[string]); + checkType!(string, Json[], Json[string])("property length"); switch(m_type){ case Type.string: return m_string.length; case Type.array: return m_array.length; @@ -214,7 +309,7 @@ */ int opApply(int delegate(ref Json obj) del) { - checkType!(Json[], Json[string]); + checkType!(Json[], Json[string])("opApply"); if( m_type == Type.array ){ foreach( ref v; m_array ) if( auto ret = del(v) ) @@ -231,7 +326,7 @@ /// ditto int opApply(int delegate(ref const Json obj) del) const { - checkType!(Json[], Json[string]); + checkType!(Json[], Json[string])("opApply"); if( m_type == Type.array ){ foreach( ref v; m_array ) if( auto ret = del(v) ) @@ -248,7 +343,7 @@ /// ditto int opApply(int delegate(ref size_t idx, ref Json obj) del) { - checkType!(Json[]); + checkType!(Json[])("opApply"); foreach( idx, ref v; m_array ) if( auto ret = del(idx, v) ) return ret; @@ -257,7 +352,7 @@ /// ditto int opApply(int delegate(ref size_t idx, ref const Json obj) del) const { - checkType!(Json[]); + checkType!(Json[])("opApply"); foreach( idx, ref v; m_array ) if( auto ret = del(idx, v) ) return ret; @@ -266,7 +361,7 @@ /// ditto int opApply(int delegate(ref string idx, ref Json obj) del) { - checkType!(Json[string]); + checkType!(Json[string])("opApply"); foreach( idx, ref v; m_object ) if( v.type != Type.undefined ) if( auto ret = del(idx, v) ) @@ -276,7 +371,7 @@ /// ditto int opApply(int delegate(ref string idx, ref const Json obj) del) const { - checkType!(Json[string]); + checkType!(Json[string])("opApply"); foreach( idx, ref v; m_object ) if( v.type != Type.undefined ) if( auto ret = del(idx, v) ) @@ -286,23 +381,46 @@ /** Converts the JSON value to the corresponding D type - types must match exactly. 
+ + Available_Types: + $(UL + $(LI `bool` (`Type.bool_`)) + $(LI `double` (`Type.float_`)) + $(LI `float` (Converted from `double`)) + $(LI `long` (`Type.int_`)) + $(LI `ulong`, `int`, `uint`, `short`, `ushort`, `byte`, `ubyte` (Converted from `long`)) + $(LI `string` (`Type.string`)) + $(LI `Json[]` (`Type.array`)) + $(LI `Json[string]` (`Type.object`)) + ) + + See_Also: `opt`, `to`, `deserializeJson` */ inout(T) opCast(T)() inout { return get!T; } /// ditto @property inout(T) get(T)() inout { checkType!T(); - static if( is(T == bool) ) return m_bool; - else static if( is(T == double) ) return m_float; - else static if( is(T == float) ) return cast(T)m_float; - else static if( is(T == long) ) return m_int; - else static if( is(T : long) ){ enforce(m_int <= T.max && m_int >= T.min); return cast(T)m_int; } - else static if( is(T == string) ) return m_string; - else static if( is(T == Json[]) ) return m_array; - else static if( is(T == Json[string]) ) return m_object; - else static assert("JSON can only be casted to (bool, long, double, string, Json[] or Json[string]. Not "~T.stringof~"."); + static if (is(T == bool)) return m_bool; + else static if (is(T == double)) return m_float; + else static if (is(T == float)) return cast(T)m_float; + else static if (is(T == long)) return m_int; + else static if (is(T == ulong)) return cast(ulong)m_int; + else static if (is(T : long)){ enforceJson(m_int <= T.max && m_int >= T.min, "Integer conversion out of bounds error", m_fileName, line); return cast(T)m_int; } + else static if (is(T == string)) return m_string; + else static if (is(T == Json[])) return m_array; + else static if (is(T == Json[string])) return m_object; + else static assert("JSON can only be cast to (bool, long, double, string, Json[] or Json[string]. Not "~T.stringof~"."); } - /// ditto + + /** + Returns the native type for this JSON if it matches the current runtime type. 
+ + If the runtime type does not match the given native type, the 'def' parameter is returned + instead. + + See_Also: `get` + */ @property const(T) opt(T)(const(T) def = T.init) const { if( typeId!T != m_type ) return def; @@ -316,7 +434,13 @@ } /** - Converts the JSON value to the corresponding D type - types are converted as neccessary. + Converts the JSON value to the corresponding D type - types are converted as necessary. + + Automatically performs conversions between strings and numbers. See + `get` for the list of available types. For converting/deserializing + JSON to complex data types see `deserializeJson`. + + See_Also: `get`, `deserializeJson` */ @property inout(T) to(T)() inout { @@ -391,7 +515,7 @@ default: return Json(["value": this]); case Type.object: return m_object; } - } else static assert("JSON can only be casted to (bool, long, double, string, Json[] or Json[string]. Not "~T.stringof~"."); + } else static assert("JSON can only be cast to (bool, long, double, string, Json[] or Json[string]. Not "~T.stringof~"."); } /** @@ -415,7 +539,7 @@ checkType!bool(); return Json(~m_bool); } else static if( op == "+" || op == "-" || op == "++" || op == "--" ){ - checkType!(long, double); + checkType!(long, double)("unary "~op); if( m_type == Type.int_ ) mixin("return Json("~op~"m_int);"); else if( m_type == Type.float_ ) mixin("return Json("~op~"m_float);"); else assert(false); @@ -425,7 +549,7 @@ /** Performs binary operations between JSON values. - The two JSON values must be of the same run time type or an exception + The two JSON values must be of the same run time type or a JSONException will be thrown. Only the operations listed are allowed for each of the types. 
@@ -436,46 +560,47 @@ $(DT Float) $(DD +, -, *, /, %) $(DT String) $(DD ~) $(DT Array) $(DD ~) - $(DT Object) $(DD none) + $(DT Object) $(DD in) ) */ Json opBinary(string op)(ref const(Json) other) const { - enforce(m_type == other.m_type, "Binary operation '"~op~"' between "~.to!string(m_type)~" and "~.to!string(other.m_type)~" JSON objects."); + enforceJson(m_type == other.m_type, "Binary operation '"~op~"' between "~.to!string(m_type)~" and "~.to!string(other.m_type)~" JSON objects."); static if( op == "&&" ){ - checkType!(bool)("'&&'"); other.checkType!(bool)("'&&'"); + checkType!(bool)(op); return Json(m_bool && other.m_bool); } else static if( op == "||" ){ - checkType!(bool)("'||'"); other.checkType!(bool)("'||'"); + checkType!(bool)(op); return Json(m_bool || other.m_bool); } else static if( op == "+" ){ - checkType!(double, long)("'+'"); other.checkType!(double, long)("'+'"); - if( m_type == Type.int_ ) return Json(m_int + other.m_int); + checkType!(long, double)(op); + if( m_type == Type.Int ) return Json(m_int + other.m_int); else if( m_type == Type.float_ ) return Json(m_float + other.m_float); else assert(false); } else static if( op == "-" ){ - checkType!(double, long)("'-'"); other.checkType!(double, long)("'-'"); - if( m_type == Type.int_ ) return Json(m_int - other.m_int); + checkType!(long, double)(op); + if( m_type == Type.Int ) return Json(m_int - other.m_int); else if( m_type == Type.float_ ) return Json(m_float - other.m_float); else assert(false); } else static if( op == "*" ){ - checkType!(double, long)("'*'"); other.checkType!(double, long)("'*'"); - if( m_type == Type.int_ ) return Json(m_int * other.m_int); + checkType!(long, double)(op); + if( m_type == Type.Int ) return Json(m_int * other.m_int); else if( m_type == Type.float_ ) return Json(m_float * other.m_float); else assert(false); } else static if( op == "/" ){ - checkType!(double, long)("'/'"); other.checkType!(double, long)("'/'"); - if( m_type == Type.int_ ) return Json(m_int / 
other.m_int); + checkType!(long, double)(op); + if( m_type == Type.Int ) return Json(m_int / other.m_int); else if( m_type == Type.float_ ) return Json(m_float / other.m_float); else assert(false); } else static if( op == "%" ){ - checkType!(double, long)("'%'"); other.checkType!(double, long)("'%'"); - if( m_type == Type.int_ ) return Json(m_int % other.m_int); + checkType!(long, double)(op); + if( m_type == Type.Int ) return Json(m_int % other.m_int); else if( m_type == Type.float_ ) return Json(m_float % other.m_float); else assert(false); } else static if( op == "~" ){ - checkType!(string)("'~'"); other.checkType!(string)("'~'"); + checkType!(string, Json[])(op); if( m_type == Type.string ) return Json(m_string ~ other.m_string); + else if (m_type == Type.array) return Json(m_array ~ other.m_array); else assert(false); } else static assert("Unsupported operator '"~op~"' for type JSON."); } @@ -484,7 +609,7 @@ if( op == "~" ) { static if( op == "~" ){ - checkType!(string, Json[])("'~'"); other.checkType!(string, Json[])("'~'"); + checkType!(string, Json[])(op); if( m_type == Type.string ) return Json(m_string ~ other.m_string); else if( m_type == Type.array ) return Json(m_array ~ other.m_array); else assert(false); @@ -492,35 +617,43 @@ } /// ditto void opOpAssign(string op)(Json other) - if( op == "+" || op == "-" || op == "*" ||op == "/" || op == "%" ) + if (op == "+" || op == "-" || op == "*" || op == "/" || op == "%" || op =="~") { - enforce(m_type == other.m_type, "Binary operation '"~op~"' between "~.to!string(m_type)~" and "~.to!string(other.m_type)~" JSON objects."); + enforceJson(m_type == other.m_type || op == "~" && m_type == Type.array, + "Binary operation '"~op~"=' between "~.to!string(m_type)~" and "~.to!string(other.m_type)~" JSON objects."); static if( op == "+" ){ if( m_type == Type.int_ ) m_int += other.m_int; else if( m_type == Type.float_ ) m_float += other.m_float; - else enforce(false, "'+' only allowed for scalar types, not 
"~.to!string(m_type)~"."); + else enforceJson(false, "'+=' only allowed for scalar types, not "~.to!string(m_type)~"."); } else static if( op == "-" ){ if( m_type == Type.int_ ) m_int -= other.m_int; else if( m_type == Type.float_ ) m_float -= other.m_float; - else enforce(false, "'-' only allowed for scalar types, not "~.to!string(m_type)~"."); + else enforceJson(false, "'-=' only allowed for scalar types, not "~.to!string(m_type)~"."); } else static if( op == "*" ){ if( m_type == Type.int_ ) m_int *= other.m_int; else if( m_type == Type.float_ ) m_float *= other.m_float; - else enforce(false, "'*' only allowed for scalar types, not "~.to!string(m_type)~"."); + else enforceJson(false, "'*=' only allowed for scalar types, not "~.to!string(m_type)~"."); } else static if( op == "/" ){ if( m_type == Type.int_ ) m_int /= other.m_int; else if( m_type == Type.float_ ) m_float /= other.m_float; - else enforce(false, "'/' only allowed for scalar types, not "~.to!string(m_type)~"."); + else enforceJson(false, "'/=' only allowed for scalar types, not "~.to!string(m_type)~"."); } else static if( op == "%" ){ if( m_type == Type.int_ ) m_int %= other.m_int; else if( m_type == Type.float_ ) m_float %= other.m_float; - else enforce(false, "'%' only allowed for scalar types, not "~.to!string(m_type)~"."); - } /*else static if( op == "~" ){ - if( m_type == Type.string ) m_string ~= other.m_string; - else if( m_type == Type.array ) m_array ~= other.m_array; - else enforce(false, "'%' only allowed for scalar types, not "~.to!string(m_type)~"."); - }*/ else static assert("Unsupported operator '"~op~"' for type JSON."); - assert(false); + else enforceJson(false, "'%=' only allowed for scalar types, not "~.to!string(m_type)~"."); + } else static if( op == "~" ){ + if (m_type == Type.string) m_string ~= other.m_string; + else if (m_type == Type.array) { + if (other.m_type == Type.array) m_array ~= other.m_array; + else appendArrayElement(other); + } else enforceJson(false, "'~=' only 
allowed for string and array types, not "~.to!string(m_type)~"."); + } else static assert("Unsupported operator '"~op~"=' for type JSON."); + } + /// ditto + void opOpAssign(string op, T)(T other) + if (!is(T == Json) && is(typeof(Json(other)))) + { + opOpAssign!op(Json(other)); } /// ditto Json opBinary(string op)(bool other) const { checkType!bool(); mixin("return Json(m_bool "~op~" other);"); } @@ -531,11 +664,7 @@ /// ditto Json opBinary(string op)(string other) const { checkType!string(); mixin("return Json(m_string "~op~" other);"); } /// ditto - Json opBinary(string op)(Json other) - if (op == "~") { - if (m_type == Type.array) return Json(m_array ~ other); - else return Json(this ~ other); - } + Json opBinary(string op)(Json[] other) { checkType!(Json[])(); mixin("return Json(m_array "~op~" other);"); } /// ditto Json opBinaryRight(string op)(bool other) const { checkType!bool(); mixin("return Json(other "~op~" m_bool);"); } /// ditto @@ -552,13 +681,17 @@ if( pv.type == Type.undefined ) return null; return pv; } + /// ditto + Json opBinaryRight(string op)(Json[] other) { checkType!(Json[])(); mixin("return Json(other "~op~" m_array);"); } /** - Allows to access existing fields of a JSON object using dot syntax. - */ - @property const(Json) opDispatch(string prop)() const { return opIndex(prop); } - /// ditto - @property ref Json opDispatch(string prop)() { return opIndex(prop); } + * The append operator will append arrays. This method always appends it's argument as an array element, so nested arrays can be created. + */ + void appendArrayElement(Json element) + { + enforceJson(m_type == Type.array, "'appendArrayElement' only allowed for array types, not "~.to!string(m_type)~"."); + m_array ~= element; + } /** Compares two JSON values for equality. 
@@ -588,6 +721,8 @@ /// ditto bool opEquals(bool v) const { return m_type == Type.bool_ && m_bool == v; } /// ditto + bool opEquals(int v) const { return m_type == Type.int_ && m_int == v; } + /// ditto bool opEquals(long v) const { return m_type == Type.int_ && m_int == v; } /// ditto bool opEquals(double v) const { return m_type == Type.float_ && m_float == v; } @@ -616,12 +751,12 @@ case Type.string: return m_string < other.m_string ? -1 : m_string == other.m_string ? 0 : 1; case Type.array: return m_array < other.m_array ? -1 : m_array == other.m_array ? 0 : 1; case Type.object: - enforce(false, "JSON objects cannot be compared."); + enforceJson(false, "JSON objects cannot be compared."); assert(false); } } - + alias opDollar = length; /** Returns the type id corresponding to the given D type. @@ -667,7 +802,7 @@ --- Params: - level = Specifies the base amount of indentation for the output. Indentation is always + level = Specifies the base amount of indentation for the output. Indentation is always done using tab characters. 
See_Also: writePrettyJsonString, toString @@ -679,32 +814,34 @@ return ret.data; } - private void checkType(T...)(string op = null) + private void checkType(TYPES...)(string op = null) const { - bool found = false; - foreach (t; T) if (m_type == typeId!t) found = true; - if (found) return; - if (T.length == 1) { - throw new Exception(format("Got %s - expected %s.", this.displayName, typeId!(T[0]).to!string)); - } else { - string types; - foreach (t; T) { - if (types.length) types ~= ", "; - types ~= typeId!t.to!string; - } - throw new Exception(format("Got %s - expected one of %s.", this.displayName, types)); - } - } + bool matched = false; + foreach (T; TYPES) if (m_type == typeId!T) matched = true; + if (matched) return; - private @property string displayName() - const { - if (m_name.length) return m_name ~ " of type " ~ m_type.to!string(); - else return "JSON of type " ~ m_type.to!string(); + string name; + version (VibeJsonFieldNames) { + if (m_name.length) name = m_name ~ " of type " ~ m_type.to!string; + else name = "JSON of type " ~ m_type.to!string; + } else name = "JSON of type " ~ m_type.to!string; + + string expected; + static if (TYPES.length == 1) expected = typeId!(TYPES[0]).to!string; + else { + foreach (T; TYPES) { + if (expected.length > 0) expected ~= ", "; + expected ~= typeId!T.to!string; + } + } + + enforceJson(op.length > 0, format("Got %s, expected %s.", name, expected), m_fileName, line); + enforceJson(false, format("Got %s, expected %s for %s.", name, expected, op), m_fileName, line); } /*invariant() { - assert(m_type >= Type.undefined && m_type <= Type.object); + assert(m_type >= Type.Undefined && m_type <= Type.Object); }*/ } @@ -719,44 +856,43 @@ The range is shrunk during parsing, leaving any remaining text that is not part of the JSON contents. - Throws an Exception if any parsing error occured. + Throws a JSONException if any parsing error occurred. 
*/ -Json parseJson(R)(ref R range, string filename, int* line) +Json parseJson(R)(ref R range, int* line = null, string filename = null) if( is(R == string) ) { - import std.algorithm : min; + import std.string : startsWith; - assert(line !is null); Json ret; enforceJson(!range.empty, "JSON string is empty.", filename, 0); skipWhitespace(range, line); - version(JsonLineNumbers){ + version(JsonLineNumbers) { import dub.internal.vibecompat.core.log; int curline = line ? *line : 0; } switch( range.front ){ case 'f': - enforceJson(range[1 .. $].startsWith("alse"), "Expected 'false', got '"~range[0 .. min($, 5)]~"'.", filename, *line); + enforceJson(range[1 .. $].startsWith("alse"), "Expected 'false', got '"~range[0 .. min(5, $)]~"'.", filename, line); range.popFrontN(5); ret = false; break; case 'n': - enforceJson(range[1 .. $].startsWith("ull"), "Expected 'null', got '"~range[0 .. min($, 4)]~"'.", filename, *line); + enforceJson(range[1 .. $].startsWith("ull"), "Expected 'null', got '"~range[0 .. min(4, $)]~"'.", filename, line); range.popFrontN(4); ret = null; break; case 't': - enforceJson(range[1 .. $].startsWith("rue"), "Expected 'true', got '"~range[0 .. min($, 4)]~"'.", filename, *line); + enforceJson(range[1 .. $].startsWith("rue"), "Expected 'true', got '"~range[0 .. min(4, $)]~"'.", filename, line); range.popFrontN(4); ret = true; break; - case '0': .. case '9'+1: + case '0': .. 
case '9': case '-': bool is_float; - auto num = skipNumber(range, is_float, filename, *line); + auto num = skipNumber(range, is_float, filename, line); if( is_float ) ret = to!double(num); else ret = to!long(num); break; @@ -766,13 +902,15 @@ case '[': Json[] arr; range.popFront(); - while(true) { + while (true) { skipWhitespace(range, line); - enforceJson(!range.empty, "Missing ']' before EOF.", filename, *line); + enforceJson(!range.empty, "Missing ']' before EOF.", filename, line); if(range.front == ']') break; - arr ~= parseJson(range, filename, line); + arr ~= parseJson(range, line, filename); skipWhitespace(range, line); - enforceJson(!range.empty && (range.front == ',' || range.front == ']'), "Expected ']' or ','.", filename, *line); + enforceJson(!range.empty, "Missing ']' before EOF.", filename, line); + enforceJson(range.front == ',' || range.front == ']', + format("Expected ']' or ',' - got '%s'.", range.front), filename, line); if( range.front == ']' ) break; else range.popFront(); } @@ -782,44 +920,51 @@ case '{': Json[string] obj; range.popFront(); - while(true) { + while (true) { skipWhitespace(range, line); - enforceJson(!range.empty, "Missing '}' before EOF.", filename, *line); + enforceJson(!range.empty, "Missing '}' before EOF.", filename, line); if(range.front == '}') break; string key = skipJsonString(range, filename, line); skipWhitespace(range, line); - enforceJson(range.startsWith(":"), "Expected ':' for key '" ~ key ~ "'", filename, *line); + enforceJson(range.startsWith(":"), "Expected ':' for key '" ~ key ~ "'", filename, line); range.popFront(); skipWhitespace(range, line); - Json itm = parseJson(range, filename, line); + Json itm = parseJson(range, line, filename); obj[key] = itm; skipWhitespace(range, line); - enforceJson(!range.empty && (range.front == ',' || range.front == '}'), "Expected '}' or ',' - got '"~range[0]~"'.", filename, *line); - if( range.front == '}' ) break; + enforceJson(!range.empty, "Missing '}' before EOF.", 
filename, line); + enforceJson(range.front == ',' || range.front == '}', + format("Expected '}' or ',' - got '%s'.", range.front), filename, line); + if (range.front == '}') break; else range.popFront(); } range.popFront(); ret = obj; break; default: - enforceJson(false, "Expected valid json token, got '"~range[0 .. min($, 12)]~"'.", filename, *line); + enforceJson(false, format("Expected valid JSON token, got '%s'.", range[0 .. min(12, $)]), filename, line); + assert(false); } assert(ret.type != Json.Type.undefined); version(JsonLineNumbers) ret.line = curline; + ret.m_fileName = filename; return ret; } /** Parses the given JSON string and returns the corresponding Json object. - Throws an Exception if any parsing error occurs. + Throws a JSONException if any parsing error occurs. */ Json parseJsonString(string str, string filename = null) { + import std.string : strip; + + auto strcopy = str; int line = 0; - auto ret = parseJson(str, filename, &line); - enforceJson(str.strip().length == 0, "Expected end of string after JSON value, not '"~str.strip()~"'.", filename, line); + auto ret = parseJson(strcopy, &line, filename); + enforceJson(strcopy.strip().length == 0, "Expected end of string after JSON value.", filename, line); return ret; } @@ -833,8 +978,22 @@ assert(parseJsonString("[1, 2, 3]") == Json([Json(1), Json(2), Json(3)])); assert(parseJsonString("{\"a\": 1}") == Json(["a": Json(1)])); assert(parseJsonString(`"\\\/\b\f\n\r\t\u1234"`).get!string == "\\/\b\f\n\r\t\u1234"); + auto json = parseJsonString(`{"hey": "This is @à test éhééhhéhéé !%/??*&?\ud83d\udcec"}`); + assert(json.toPrettyString() == parseJsonString(json.toPrettyString()).toPrettyString()); } +unittest { + import std.string : endsWith; + + try parseJsonString(`{"a": 1`); + catch (Exception e) assert(e.msg.endsWith("Missing '}' before EOF.")); + try parseJsonString(`{"a": 1 x`); + catch (Exception e) assert(e.msg.endsWith("Expected '}' or ',' - got 'x'.")); + try parseJsonString(`[1`); + catch 
(Exception e) assert(e.msg.endsWith("Missing ']' before EOF.")); + try parseJsonString(`[1 x`); + catch (Exception e) assert(e.msg.endsWith("Expected ']' or ',' - got 'x'.")); +} /** Serializes the given value to JSON. @@ -842,16 +1001,16 @@ The following types of values are supported: $(DL - $(DT Json) $(DD Used as-is) - $(DT null) $(DD Converted to Json.Type.null_) - $(DT bool) $(DD Converted to Json.Type.bool_) - $(DT float, double) $(DD Converted to Json.Type.Double) - $(DT short, ushort, int, uint, long, ulong) $(DD Converted to Json.Type.int_) - $(DT string) $(DD Converted to Json.Type.string) - $(DT T[]) $(DD Converted to Json.Type.array) - $(DT T[string]) $(DD Converted to Json.Type.object) - $(DT struct) $(DD Converted to Json.Type.object) - $(DT class) $(DD Converted to Json.Type.object or Json.Type.null_) + $(DT `Json`) $(DD Used as-is) + $(DT `null`) $(DD Converted to `Json.Type.null_`) + $(DT `bool`) $(DD Converted to `Json.Type.bool_`) + $(DT `float`, `double`) $(DD Converted to `Json.Type.float_`) + $(DT `short`, `ushort`, `int`, `uint`, `long`, `ulong`) $(DD Converted to `Json.Type.int_`) + $(DT `string`) $(DD Converted to `Json.Type.string`) + $(DT `T[]`) $(DD Converted to `Json.Type.array`) + $(DT `T[string]`) $(DD Converted to `Json.Type.object`) + $(DT `struct`) $(DD Converted to `Json.Type.object`) + $(DT `class`) $(DD Converted to `Json.Type.object` or `Json.Type.null_`) ) All entries of an array or an associative array, as well as all R/W properties and @@ -872,55 +1031,150 @@ --- The methods will have to be defined in pairs. The first pair that is implemented by - the type will be used for serialization (i.e. toJson overrides toString). + the type will be used for serialization (i.e. `toJson` overrides `toString`). 
+ + See_Also: `deserializeJson`, `vibe.data.serialization` */ Json serializeToJson(T)(T value) { + version (VibeOldSerialization) { + return serializeToJsonOld(value); + } else { + return serialize!JsonSerializer(value); + } +} +/// ditto +void serializeToJson(R, T)(R destination, T value) + if (isOutputRange!(R, char) || isOutputRange!(R, ubyte)) +{ + serialize!(JsonStringSerializer!R)(value, destination); +} +/// ditto +string serializeToJsonString(T)(T value) +{ + auto ret = appender!string; + serializeToJson(ret, value); + return ret.data; +} + +/// +unittest { + struct Foo { + int number; + string str; + } + + Foo f; + f.number = 12; + f.str = "hello"; + + string json = serializeToJsonString(f); + assert(json == `{"number":12,"str":"hello"}`); + + Json jsonval = serializeToJson(f); + assert(jsonval.type == Json.Type.object); + assert(jsonval["number"] == Json(12)); + assert(jsonval["str"] == Json("hello")); +} + + +/** + Serializes the given value to a pretty printed JSON string. + + See_also: `serializeToJson`, `vibe.data.serialization` +*/ +void serializeToPrettyJson(R, T)(R destination, T value) + if (isOutputRange!(R, char) || isOutputRange!(R, ubyte)) +{ + serialize!(JsonStringSerializer!(R, true))(value, destination); +} +/// ditto +string serializeToPrettyJson(T)(T value) +{ + auto ret = appender!string; + serializeToPrettyJson(ret, value); + return ret.data; +} + +/// +unittest { + struct Foo { + int number; + string str; + } + + Foo f; + f.number = 12; + f.str = "hello"; + + string json = serializeToPrettyJson(f); + assert(json == +`{ + "number": 12, + "str": "hello" +}`); +} + + +/// private +Json serializeToJsonOld(T)(T value) +{ + import vibe.internal.meta.traits; + alias TU = Unqual!T; - static if( is(TU == Json) ) return value; - else static if( is(TU == typeof(null)) ) return Json(null); - else static if( is(TU == bool) ) return Json(value); - else static if( is(TU == float) ) return Json(cast(double)value); - else static if( is(TU == double) ) 
return Json(value); - else static if( is(TU == DateTime) ) return Json(value.toISOExtString()); - else static if( is(TU == SysTime) ) return Json(value.toISOExtString()); - else static if( is(TU : long) ) return Json(cast(long)value); - else static if( is(TU == string) ) return Json(value); - else static if( isArray!T ){ + static if (is(TU == Json)) return value; + else static if (is(TU == typeof(null))) return Json(null); + else static if (is(TU == bool)) return Json(value); + else static if (is(TU == float)) return Json(cast(double)value); + else static if (is(TU == double)) return Json(value); + else static if (is(TU == DateTime)) return Json(value.toISOExtString()); + else static if (is(TU == SysTime)) return Json(value.toISOExtString()); + else static if (is(TU == Date)) return Json(value.toISOExtString()); + else static if (is(TU : long)) return Json(cast(long)value); + else static if (is(TU : string)) return Json(value); + else static if (isArray!T) { auto ret = new Json[value.length]; - foreach( i; 0 .. value.length ) + foreach (i; 0 .. 
value.length) ret[i] = serializeToJson(value[i]); return Json(ret); - } else static if( isAssociativeArray!TU ){ + } else static if (isAssociativeArray!TU) { Json[string] ret; - foreach( string key, value; value ) - ret[key] = serializeToJson(value); + alias TK = KeyType!T; + foreach (key, value; value) { + static if(is(TK == string)) { + ret[key] = serializeToJson(value); + } else static if (is(TK == enum)) { + ret[to!string(key)] = serializeToJson(value); + } else static if (isStringSerializable!(TK)) { + ret[key.toString()] = serializeToJson(value); + } else static assert("AA key type %s not supported for JSON serialization."); + } return Json(ret); - } else static if( isJsonSerializable!TU ){ + } else static if (isJsonSerializable!TU) { return value.toJson(); - } else static if( isStringSerializable!TU ){ + } else static if (isStringSerializable!TU) { return Json(value.toString()); - } else static if( is(TU == struct) ){ + } else static if (is(TU == struct)) { Json[string] ret; - foreach( m; __traits(allMembers, T) ){ - static if( isRWField!(TU, m) ){ + foreach (m; __traits(allMembers, T)) { + static if (isRWField!(TU, m)) { auto mv = __traits(getMember, value, m); ret[underscoreStrip(m)] = serializeToJson(mv); } } return Json(ret); - } else static if( is(TU == class) ){ - if( value is null ) return Json(null); + } else static if(is(TU == class)) { + if (value is null) return Json(null); Json[string] ret; - foreach( m; __traits(allMembers, T) ){ - static if( isRWField!(TU, m) ){ + foreach (m; __traits(allMembers, T)) { + static if (isRWField!(TU, m)) { auto mv = __traits(getMember, value, m); ret[underscoreStrip(m)] = serializeToJson(mv); } } return Json(ret); - } else static if( isPointer!TU ){ - if( value is null ) return Json(null); + } else static if (isPointer!TU) { + if (value is null) return Json(null); return serializeToJson(*value); } else { static assert(false, "Unsupported type '"~T.stringof~"' for JSON serialization."); @@ -931,7 +1185,9 @@ /** 
Deserializes a JSON value into the destination variable. - The same types as for serializeToJson() are supported and handled inversely. + The same types as for `serializeToJson()` are supported and handled inversely. + + See_Also: `serializeToJson`, `serializeToJsonString`, `vibe.data.serialization` */ void deserializeJson(T)(ref T dst, Json src) { @@ -940,52 +1196,82 @@ /// ditto T deserializeJson(T)(Json src) { - static if( is(T == Json) ) return src; - else static if( is(T == typeof(null)) ){ return null; } - else static if( is(T == bool) ) return src.get!bool; - else static if( is(T == float) ) return src.to!float; // since doubles are frequently serialized without - else static if( is(T == double) ) return src.to!double; // a decimal point, we allow conversions here - else static if( is(T == DateTime) ) return DateTime.fromISOExtString(src.get!string); - else static if( is(T == SysTime) ) return SysTime.fromISOExtString(src.get!string); - else static if( is(T : long) ) return cast(T)src.get!long; - else static if( is(T == string) ) return src.get!string; - else static if( isArray!T ){ + version (VibeOldSerialization) { + return deserializeJsonOld!T(src); + } else { + return deserialize!(JsonSerializer, T)(src); + } +} +/// ditto +T deserializeJson(T, R)(R input) + if (isInputRange!R && !is(R == Json)) +{ + return deserialize!(JsonStringSerializer!R, T)(input); +} + +/// private +T deserializeJsonOld(T)(Json src) +{ + import vibe.internal.meta.traits; + + static if( is(T == struct) || isSomeString!T || isIntegral!T || isFloatingPoint!T ) + if( src.type == Json.Type.null_ ) return T.init; + static if (is(T == Json)) return src; + else static if (is(T == typeof(null))) { return null; } + else static if (is(T == bool)) return src.get!bool; + else static if (is(T == float)) return src.to!float; // since doubles are frequently serialized without + else static if (is(T == double)) return src.to!double; // a decimal point, we allow conversions here + else static if 
(is(T == DateTime)) return DateTime.fromISOExtString(src.get!string); + else static if (is(T == SysTime)) return SysTime.fromISOExtString(src.get!string); + else static if (is(T == Date)) return Date.fromISOExtString(src.get!string); + else static if (is(T : long)) return cast(T)src.get!long; + else static if (is(T : string)) return cast(T)src.get!string; + else static if (isArray!T) { alias TV = typeof(T.init[0]) ; auto dst = new Unqual!TV[src.length]; - foreach( size_t i, v; src ) + foreach (size_t i, v; src) dst[i] = deserializeJson!(Unqual!TV)(v); - return dst; - } else static if( isAssociativeArray!T ){ + return cast(T)dst; + } else static if( isAssociativeArray!T ) { alias TV = typeof(T.init.values[0]) ; - Unqual!TV[string] dst; - foreach( string key, value; src ) - dst[key] = deserializeJson!(Unqual!TV)(value); + alias TK = KeyType!T; + Unqual!TV[TK] dst; + foreach (string key, value; src) { + static if (is(TK == string)) { + dst[key] = deserializeJson!(Unqual!TV)(value); + } else static if (is(TK == enum)) { + dst[to!(TK)(key)] = deserializeJson!(Unqual!TV)(value); + } else static if (isStringSerializable!TK) { + auto dsk = TK.fromString(key); + dst[dsk] = deserializeJson!(Unqual!TV)(value); + } else static assert("AA key type %s not supported for JSON serialization."); + } return dst; - } else static if( isJsonSerializable!T ){ + } else static if (isJsonSerializable!T) { return T.fromJson(src); - } else static if( isStringSerializable!T ){ + } else static if (isStringSerializable!T) { return T.fromString(src.get!string); - } else static if( is(T == struct) ){ + } else static if (is(T == struct)) { T dst; - foreach( m; __traits(allMembers, T) ){ - static if( isRWPlainField!(T, m) || isRWField!(T, m) ){ + foreach (m; __traits(allMembers, T)) { + static if (isRWPlainField!(T, m) || isRWField!(T, m)) { alias TM = typeof(__traits(getMember, dst, m)) ; __traits(getMember, dst, m) = deserializeJson!TM(src[underscoreStrip(m)]); } } return dst; - } else static if( 
is(T == class) ){ - if( src.type == Json.Type.null_ ) return null; + } else static if (is(T == class)) { + if (src.type == Json.Type.null_) return null; auto dst = new T; - foreach( m; __traits(allMembers, T) ){ - static if( isRWPlainField!(T, m) || isRWField!(T, m) ){ + foreach (m; __traits(allMembers, T)) { + static if (isRWPlainField!(T, m) || isRWField!(T, m)) { alias TM = typeof(__traits(getMember, dst, m)) ; __traits(getMember, dst, m) = deserializeJson!TM(src[underscoreStrip(m)]); } } return dst; - } else static if( isPointer!T ){ - if( src.type == Json.Type.null_ ) return null; + } else static if (isPointer!T) { + if (src.type == Json.Type.null_) return null; alias TD = typeof(*T.init) ; dst = new TD; *dst = deserializeJson!TD(src); @@ -995,10 +1281,24 @@ } } +/// +unittest { + struct Foo { + int number; + string str; + } + + Foo f = deserializeJson!Foo(`{"number": 12, "str": "hello"}`); + assert(f.number == 12); + assert(f.str == "hello"); +} + unittest { import std.stdio; - static struct S { float a; double b; bool c; int d; string e; byte f; ubyte g; long h; ulong i; float[] j; } - immutable S t = {1.5, -3.0, true, int.min, "Test", -128, 255, long.min, ulong.max, [1.1, 1.2, 1.3]}; + enum Foo : string { k = "test" } + enum Boo : int { l = 5 } + static struct S { float a; double b; bool c; int d; string e; byte f; ubyte g; long h; ulong i; float[] j; Foo k; Boo l; } + immutable S t = {1.5, -3.0, true, int.min, "Test", -128, 255, long.min, ulong.max, [1.1, 1.2, 1.3], Foo.k, Boo.l}; S u; deserializeJson(u, serializeToJson(t)); assert(t.a == u.a); @@ -1011,6 +1311,49 @@ assert(t.h == u.h); assert(t.i == u.i); assert(t.j == u.j); + assert(t.k == u.k); + assert(t.l == u.l); +} + +unittest +{ + assert(uint.max == serializeToJson(uint.max).deserializeJson!uint); + assert(ulong.max == serializeToJson(ulong.max).deserializeJson!ulong); +} + +unittest { + static struct A { int value; static A fromJson(Json val) { return A(val.get!int); } Json toJson() const { return 
Json(value); } } + static struct C { int value; static C fromString(string val) { return C(val.to!int); } string toString() const { return value.to!string; } } + static struct D { int value; } + + assert(serializeToJson(const A(123)) == Json(123)); + assert(serializeToJson(A(123)) == Json(123)); + assert(serializeToJson(const C(123)) == Json("123")); + assert(serializeToJson(C(123)) == Json("123")); + assert(serializeToJson(const D(123)) == serializeToJson(["value": 123])); + assert(serializeToJson(D(123)) == serializeToJson(["value": 123])); +} + +unittest { + auto d = Date(2001,1,1); + deserializeJson(d, serializeToJson(Date.init)); + assert(d == Date.init); + deserializeJson(d, serializeToJson(Date(2001,1,1))); + assert(d == Date(2001,1,1)); + struct S { immutable(int)[] x; } + S s; + deserializeJson(s, serializeToJson(S([1,2,3]))); + assert(s == S([1,2,3])); + struct T { + @optional S s; + @optional int i; + @optional float f_; // underscore strip feature + @optional double d; + @optional string str; + } + auto t = T(S([1,2,3])); + deserializeJson(t, parseJsonString(`{ "s" : null, "i" : null, "f" : null, "d" : null, "str" : null }`)); + assert(text(t) == text(T())); } unittest { @@ -1034,13 +1377,357 @@ assert(c.b == d.b); } +unittest { + static struct C { int value; static C fromString(string val) { return C(val.to!int); } string toString() const { return value.to!string; } } + enum Color { Red, Green, Blue } + { + static class T { + string[Color] enumIndexedMap; + string[C] stringableIndexedMap; + this() { + enumIndexedMap = [ Color.Red : "magenta", Color.Blue : "deep blue" ]; + stringableIndexedMap = [ C(42) : "forty-two" ]; + } + } + + T original = new T; + original.enumIndexedMap[Color.Green] = "olive"; + T other; + deserializeJson(other, serializeToJson(original)); + assert(serializeToJson(other) == serializeToJson(original)); + } + { + static struct S { + string[Color] enumIndexedMap; + string[C] stringableIndexedMap; + } + + S *original = new S; + 
original.enumIndexedMap = [ Color.Red : "magenta", Color.Blue : "deep blue" ]; + original.enumIndexedMap[Color.Green] = "olive"; + original.stringableIndexedMap = [ C(42) : "forty-two" ]; + S other; + deserializeJson(other, serializeToJson(original)); + assert(serializeToJson(other) == serializeToJson(original)); + } +} + +unittest { + import std.typecons : Nullable; + + struct S { Nullable!int a, b; } + S s; + s.a = 2; + + auto j = serializeToJson(s); + assert(j["a"].type == Json.Type.int_); + assert(j["b"].type == Json.Type.null_); + + auto t = deserializeJson!S(j); + assert(!t.a.isNull() && t.a == 2); + assert(t.b.isNull()); +} + +unittest { // #840 + int[2][2] nestedArray = 1; + assert(nestedArray.serializeToJson.deserializeJson!(typeof(nestedArray)) == nestedArray); +} + + +/** + Serializer for a plain Json representation. + + See_Also: vibe.data.serialization.serialize, vibe.data.serialization.deserialize, serializeToJson, deserializeJson +*/ +struct JsonSerializer { + template isJsonBasicType(T) { enum isJsonBasicType = isNumeric!T || isBoolean!T || is(T == string) || is(T == typeof(null)) || isJsonSerializable!T; } + + template isSupportedValueType(T) { enum isSupportedValueType = isJsonBasicType!T || is(T == Json); } + + private { + Json m_current; + Json[] m_compositeStack; + } + + this(Json data) { m_current = data; } + + @disable this(this); + + // + // serialization + // + Json getSerializedResult() { return m_current; } + void beginWriteDictionary(T)() { m_compositeStack ~= Json.emptyObject; } + void endWriteDictionary(T)() { m_current = m_compositeStack[$-1]; m_compositeStack.length--; } + void beginWriteDictionaryEntry(T)(string name) {} + void endWriteDictionaryEntry(T)(string name) { m_compositeStack[$-1][name] = m_current; } + + void beginWriteArray(T)(size_t) { m_compositeStack ~= Json.emptyArray; } + void endWriteArray(T)() { m_current = m_compositeStack[$-1]; m_compositeStack.length--; } + void beginWriteArrayEntry(T)(size_t) {} + void 
endWriteArrayEntry(T)(size_t) { m_compositeStack[$-1].appendArrayElement(m_current); } + + void writeValue(T)(T value) + { + static if (is(T == Json)) m_current = value; + else static if (isJsonSerializable!T) m_current = value.toJson(); + else m_current = Json(value); + } + + void writeValue(T)(in Json value) if (is(T == Json)) + { + m_current = value.clone; + } + + // + // deserialization + // + void readDictionary(T)(scope void delegate(string) field_handler) + { + enforceJson(m_current.type == Json.Type.object, "Expected JSON object, got "~m_current.type.to!string); + auto old = m_current; + foreach (string key, value; m_current) { + m_current = value; + field_handler(key); + } + m_current = old; + } + + void readArray(T)(scope void delegate(size_t) size_callback, scope void delegate() entry_callback) + { + enforceJson(m_current.type == Json.Type.array, "Expected JSON array, got "~m_current.type.to!string); + auto old = m_current; + size_callback(m_current.length); + foreach (ent; old) { + m_current = ent; + entry_callback(); + } + m_current = old; + } + + T readValue(T)() + { + static if (is(T == Json)) return m_current; + else static if (isJsonSerializable!T) return T.fromJson(m_current); + else static if (is(T == float) || is(T == double)) { + if (m_current.type == Json.Type.undefined) return T.nan; + return m_current.type == Json.Type.float_ ? cast(T)m_current.get!double : cast(T)m_current.get!long; + } + else { + return m_current.get!T(); + } + } + + bool tryReadNull() { return m_current.type == Json.Type.null_; } +} + + +/** + Serializer for a range based plain JSON string representation. 
+ + See_Also: vibe.data.serialization.serialize, vibe.data.serialization.deserialize, serializeToJson, deserializeJson +*/ +struct JsonStringSerializer(R, bool pretty = false) + if (isInputRange!R || isOutputRange!(R, char)) +{ + private { + R m_range; + size_t m_level = 0; + } + + template isJsonBasicType(T) { enum isJsonBasicType = isNumeric!T || isBoolean!T || is(T == string) || is(T == typeof(null)) || isJsonSerializable!T; } + + template isSupportedValueType(T) { enum isSupportedValueType = isJsonBasicType!T || is(T == Json); } + + this(R range) + { + m_range = range; + } + + @disable this(this); + + // + // serialization + // + static if (isOutputRange!(R, char)) { + private { + bool m_firstInComposite; + } + + void getSerializedResult() {} + + void beginWriteDictionary(T)() { startComposite(); m_range.put('{'); } + void endWriteDictionary(T)() { endComposite(); m_range.put("}"); } + void beginWriteDictionaryEntry(T)(string name) + { + startCompositeEntry(); + m_range.put('"'); + m_range.jsonEscape(name); + static if (pretty) m_range.put(`": `); + else m_range.put(`":`); + } + void endWriteDictionaryEntry(T)(string name) {} + + void beginWriteArray(T)(size_t) { startComposite(); m_range.put('['); } + void endWriteArray(T)() { endComposite(); m_range.put(']'); } + void beginWriteArrayEntry(T)(size_t) { startCompositeEntry(); } + void endWriteArrayEntry(T)(size_t) {} + + void writeValue(T)(in T value) + { + static if (is(T == typeof(null))) m_range.put("null"); + else static if (is(T == bool)) m_range.put(value ? 
"true" : "false"); + else static if (is(T : long)) m_range.formattedWrite("%s", value); + else static if (is(T : real)) m_range.formattedWrite("%.16g", value); + else static if (is(T == string)) { + m_range.put('"'); + m_range.jsonEscape(value); + m_range.put('"'); + } + else static if (is(T == Json)) m_range.writeJsonString(value); + else static if (isJsonSerializable!T) m_range.writeJsonString!(R, pretty)(value.toJson(), m_level); + else static assert(false, "Unsupported type: " ~ T.stringof); + } + + private void startComposite() + { + static if (pretty) m_level++; + m_firstInComposite = true; + } + + private void startCompositeEntry() + { + if (!m_firstInComposite) { + m_range.put(','); + } else { + m_firstInComposite = false; + } + static if (pretty) indent(); + } + + private void endComposite() + { + static if (pretty) { + m_level--; + if (!m_firstInComposite) indent(); + } + m_firstInComposite = false; + } + + private void indent() + { + m_range.put('\n'); + foreach (i; 0 .. m_level) m_range.put('\t'); + } + } + + // + // deserialization + // + static if (isInputRange!(R)) { + private { + int m_line = 0; + } + + void readDictionary(T)(scope void delegate(string) entry_callback) + { + m_range.skipWhitespace(&m_line); + enforceJson(!m_range.empty && m_range.front == '{', "Expecting object."); + m_range.popFront(); + bool first = true; + while(true) { + m_range.skipWhitespace(&m_line); + enforceJson(!m_range.empty, "Missing '}'."); + if (m_range.front == '}') { + m_range.popFront(); + break; + } else if (!first) { + enforceJson(m_range.front == ',', "Expecting ',' or '}', not '"~m_range.front.to!string~"'."); + m_range.popFront(); + m_range.skipWhitespace(&m_line); + } else first = false; + + auto name = m_range.skipJsonString(null, &m_line); + + m_range.skipWhitespace(&m_line); + enforceJson(!m_range.empty && m_range.front == ':', "Expecting ':', not '"~m_range.front.to!string~"'."); + m_range.popFront(); + + entry_callback(name); + } + } + + void 
readArray(T)(scope void delegate(size_t) size_callback, scope void delegate() entry_callback) + { + m_range.skipWhitespace(&m_line); + enforceJson(!m_range.empty && m_range.front == '[', "Expecting array."); + m_range.popFront(); + bool first = true; + while(true) { + m_range.skipWhitespace(&m_line); + enforceJson(!m_range.empty, "Missing ']'."); + if (m_range.front == ']') { + m_range.popFront(); + break; + } else if (!first) { + enforceJson(m_range.front == ',', "Expecting ',' or ']'."); + m_range.popFront(); + } else first = false; + + entry_callback(); + } + } + + T readValue(T)() + { + m_range.skipWhitespace(&m_line); + static if (is(T == typeof(null))) { enforceJson(m_range.take(4).equal("null"), "Expecting 'null'."); return null; } + else static if (is(T == bool)) { + bool ret = m_range.front == 't'; + string expected = ret ? "true" : "false"; + foreach (ch; expected) { + enforceJson(m_range.front == ch, "Expecting 'true' or 'false'."); + m_range.popFront(); + } + return ret; + } else static if (is(T : long)) { + bool is_float; + auto num = m_range.skipNumber(is_float, null, &m_line); + enforceJson(!is_float, "Expecting integer number."); + return to!T(num); + } else static if (is(T : real)) { + bool is_float; + auto num = m_range.skipNumber(is_float); + return to!T(num); + } + else static if (is(T == string)) return m_range.skipJsonString(null, &m_line); + else static if (is(T == Json)) return m_range.parseJson(&m_line); + else static if (isJsonSerializable!T) return T.fromJson(m_range.parseJson(&m_line)); + else static assert(false, "Unsupported type: " ~ T.stringof); + } + + bool tryReadNull() + { + m_range.skipWhitespace(&m_line); + if (m_range.front != 'n') return false; + foreach (ch; "null") { + enforceJson(m_range.front == ch, "Expecting 'null'."); + m_range.popFront(); + } + assert(m_range.empty || m_range.front != 'l'); + return true; + } + } +} + + /** Writes the given JSON object as a JSON string into the destination range. 
This function will convert the given JSON value to a string without adding any white space between tokens (no newlines, no indentation and no padding). - The output size is thus minizized, at the cost of bad human readability. + The output size is thus minimized, at the cost of bad human readability. Params: dst = References the string output range to which the result is written. @@ -1048,7 +1735,7 @@ See_Also: Json.toString, writePrettyJsonString */ -void writeJsonString(R)(ref R dst, in Json json) +void writeJsonString(R, bool pretty = false)(ref R dst, in Json json, size_t level = 0) // if( isOutputRange!R && is(ElementEncodingType!R == char) ) { final switch( json.type ){ @@ -1056,40 +1743,132 @@ case Json.Type.null_: dst.put("null"); break; case Json.Type.bool_: dst.put(cast(bool)json ? "true" : "false"); break; case Json.Type.int_: formattedWrite(dst, "%d", json.get!long); break; - case Json.Type.float_: formattedWrite(dst, "%.16g", json.get!double); break; + case Json.Type.float_: + auto d = json.get!double; + if (d != d) + dst.put("undefined"); // JSON has no NaN value so set null + else + formattedWrite(dst, "%.16g", json.get!double); + break; case Json.Type.string: - dst.put("\""); + dst.put('\"'); jsonEscape(dst, cast(string)json); - dst.put("\""); + dst.put('\"'); break; case Json.Type.array: - dst.put("["); + dst.put('['); bool first = true; - foreach( ref const Json e; json ){ - if( e.type == Json.Type.undefined ) continue; + foreach (ref const Json e; json) { if( !first ) dst.put(","); first = false; - writeJsonString(dst, e); + static if (pretty) { + dst.put('\n'); + foreach (tab; 0 .. level+1) dst.put('\t'); + } + if (e.type == Json.Type.undefined) dst.put("null"); + else writeJsonString!(R, pretty)(dst, e, level+1); } - dst.put("]"); + static if (pretty) { + if (json.length > 0) { + dst.put('\n'); + foreach (tab; 0 .. 
level) dst.put('\t'); + } + } + dst.put(']'); break; case Json.Type.object: - dst.put("{"); + dst.put('{'); bool first = true; foreach( string k, ref const Json e; json ){ if( e.type == Json.Type.undefined ) continue; - if( !first ) dst.put(","); + if( !first ) dst.put(','); first = false; - dst.put("\""); + static if (pretty) { + dst.put('\n'); + foreach (tab; 0 .. level+1) dst.put('\t'); + } + dst.put('\"'); jsonEscape(dst, k); - dst.put("\":"); - writeJsonString(dst, e); + dst.put(pretty ? `": ` : `":`); + writeJsonString!(R, pretty)(dst, e, level+1); } - dst.put("}"); + static if (pretty) { + if (json.length > 0) { + dst.put('\n'); + foreach (tab; 0 .. level) dst.put('\t'); + } + } + dst.put('}'); break; } } +unittest { + auto a = Json.emptyObject; + a["a"] = Json.emptyArray; + a["b"] = Json.emptyArray; + a["b"] ~= Json(1); + a["b"] ~= Json.emptyObject; + + assert(a.toString() == `{"a":[],"b":[1,{}]}` || a.toString == `{"b":[1,{}],"a":[]}`); + assert(a.toPrettyString() == +`{ + "a": [], + "b": [ + 1, + {} + ] +}` || a.toPrettyString() == +`{ + "b": [ + 1, + {} + ], + "a": [] +}`); +} + +unittest { // #735 + auto a = Json.emptyArray; + a ~= "a"; + a ~= Json(); + a ~= "b"; + a ~= null; + a ~= "c"; + assert(a.toString() == `["a",null,"b",null,"c"]`); +} + +unittest { + auto a = Json.emptyArray; + a ~= Json(1); + a ~= Json(2); + a ~= Json(3); + a ~= Json(4); + a ~= Json(5); + + auto b = Json(a[0..a.length]); + assert(a == b); + + auto c = Json(a[0..$]); + assert(a == c); + assert(b == c); + + auto d = [Json(1),Json(2),Json(3)]; + assert(d == a[0..a.length-2]); + assert(d == a[0..$-2]); +} + +unittest { + auto j = Json(double.init); + + assert(j.toString == "undefined"); // A double nan should serialize to undefined + j = 17.04f; + assert(j.toString == "17.04"); // A proper double should serialize correctly + + double d; + deserializeJson(d, Json.undefined); // Json.undefined should deserialize to nan + assert(d != d); +} /** Writes the given JSON object as a 
prettified JSON string into the destination range. @@ -1098,71 +1877,74 @@ Params: dst = References the string output range to which the result is written. json = Specifies the JSON value that is to be stringified. - level = Specifies the base amount of indentation for the output. Indentation is always - done using tab characters. + level = Specifies the base amount of indentation for the output. Indentation is always + done using tab characters. See_Also: Json.toPrettyString, writeJsonString */ void writePrettyJsonString(R)(ref R dst, in Json json, int level = 0) // if( isOutputRange!R && is(ElementEncodingType!R == char) ) { - final switch( json.type ){ - case Json.Type.undefined: dst.put("undefined"); break; - case Json.Type.null_: dst.put("null"); break; - case Json.Type.bool_: dst.put(cast(bool)json ? "true" : "false"); break; - case Json.Type.int_: formattedWrite(dst, "%d", json.get!long); break; - case Json.Type.float_: formattedWrite(dst, "%.16g", json.get!double); break; - case Json.Type.string: - dst.put("\""); - jsonEscape(dst, cast(string)json); - dst.put("\""); - break; - case Json.Type.array: - dst.put("["); - bool first = true; - foreach( e; json ){ - if( e.type == Json.Type.undefined ) continue; - if( !first ) dst.put(","); - first = false; - dst.put("\n"); - foreach( tab; 0 .. level+1 ) dst.put('\t'); - writePrettyJsonString(dst, e, level+1); - } - if( json.length > 0 ) { - dst.put('\n'); - foreach( tab; 0 .. level ) dst.put('\t'); - } - dst.put("]"); - break; - case Json.Type.object: - dst.put("{"); - bool first = true; - foreach( string k, e; json ){ - if( e.type == Json.Type.undefined ) continue; - if( !first ) dst.put(","); - dst.put("\n"); - first = false; - foreach( tab; 0 .. level+1 ) dst.put('\t'); - dst.put("\""); - jsonEscape(dst, k); - dst.put("\": "); - writePrettyJsonString(dst, e, level+1); - } - if( json.length > 0 ) { - dst.put('\n'); - foreach( tab; 0 .. 
level ) dst.put('\t'); - } - dst.put("}"); - break; - } + writeJsonString!(R, true)(dst, json, level); } -/// private -private void jsonEscape(R)(ref R dst, string s) + +/** + Helper function that escapes all Unicode characters in a JSON string. +*/ +string convertJsonToASCII(string json) { - foreach( ch; s ){ - switch(ch){ - default: dst.put(ch); break; + auto ret = appender!string; + jsonEscape!true(ret, json); + return ret.data; +} + + +/// private +private void jsonEscape(bool escape_unicode = false, R)(ref R dst, string s) +{ + for (size_t pos = 0; pos < s.length; pos++) { + immutable(char) ch = s[pos]; + + switch (ch) { + default: + static if (escape_unicode) { + if (ch > 0x20 && ch < 0x80) dst.put(ch); + else { + import std.utf : decode; + char[13] buf; + int len; + dchar codepoint = decode(s, pos); + import core.stdc.stdio : sprintf; + /* codepoint is in BMP */ + if(codepoint < 0x10000) + { + sprintf(&buf[0], "\\u%04X", codepoint); + len = 6; + } + /* not in BMP -> construct a UTF-16 surrogate pair */ + else + { + int first, last; + + codepoint -= 0x10000; + first = 0xD800 | ((codepoint & 0xffc00) >> 10); + last = 0xDC00 | (codepoint & 0x003ff); + + sprintf(&buf[0], "\\u%04X\\u%04X", first, last); + len = 12; + } + + pos -= 1; + foreach (i; 0 .. 
len) + dst.put(buf[i]); + + } + } else { + if (ch < 0x20) dst.formattedWrite("\\u%04X", ch); + else dst.put(ch); + } + break; case '\\': dst.put("\\\\"); break; case '\r': dst.put("\\r"); break; case '\n': dst.put("\\n"); break; @@ -1173,7 +1955,7 @@ } /// private -private string jsonUnescape(R)(ref R range) +private string jsonUnescape(R)(ref R range, string filename, int* line) { auto ret = appender!string(); while(!range.empty){ @@ -1182,9 +1964,9 @@ case '"': return ret.data; case '\\': range.popFront(); - enforce(!range.empty, "Unterminated string escape sequence."); + enforceJson(!range.empty, "Unterminated string escape sequence.", filename, line); switch(range.front){ - default: enforce("Invalid string escape sequence."); break; + default: enforceJson(false, "Invalid string escape sequence.", filename, line); break; case '"': ret.put('\"'); range.popFront(); break; case '\\': ret.put('\\'); range.popFront(); break; case '/': ret.put('/'); range.popFront(); break; @@ -1194,17 +1976,39 @@ case 'r': ret.put('\r'); range.popFront(); break; case 't': ret.put('\t'); range.popFront(); break; case 'u': - range.popFront(); - dchar uch = 0; - foreach( i; 0 .. 4 ){ - uch *= 16; - enforce(!range.empty, "Unicode sequence must be '\\uXXXX'."); - auto dc = range.front; + + dchar decode_unicode_escape() { + enforceJson(range.front == 'u'); range.popFront(); - if( dc >= '0' && dc <= '9' ) uch += dc - '0'; - else if( dc >= 'a' && dc <= 'f' ) uch += dc - 'a' + 10; - else if( dc >= 'A' && dc <= 'F' ) uch += dc - 'A' + 10; - else enforce(false, "Unicode sequence must be '\\uXXXX'."); + dchar uch = 0; + foreach( i; 0 .. 
4 ){ + uch *= 16; + enforceJson(!range.empty, "Unicode sequence must be '\\uXXXX'.", filename, line); + auto dc = range.front; + range.popFront(); + + if( dc >= '0' && dc <= '9' ) uch += dc - '0'; + else if( dc >= 'a' && dc <= 'f' ) uch += dc - 'a' + 10; + else if( dc >= 'A' && dc <= 'F' ) uch += dc - 'A' + 10; + else enforceJson(false, "Unicode sequence must be '\\uXXXX'.", filename, line); + } + return uch; + } + + auto uch = decode_unicode_escape(); + + if(0xD800 <= uch && uch <= 0xDBFF) { + /* surrogate pair */ + range.popFront(); // backslash '\' + auto uch2 = decode_unicode_escape(); + enforceJson(0xDC00 <= uch2 && uch2 <= 0xDFFF, "invalid Unicode", filename, line); + { + /* valid second surrogate */ + uch = + ((uch - 0xD800) << 10) + + (uch2 - 0xDC00) + + 0x10000; + } } ret.put(uch); break; @@ -1219,12 +2023,14 @@ return ret.data; } -private string skipNumber(ref string s, out bool is_float, string filename, int line) +/// private +private string skipNumber(R)(ref R s, out bool is_float, string filename, int* line) { + // TODO: make this work with input ranges size_t idx = 0; is_float = false; - if( s[idx] == '-' ) idx++; - if( s[idx] == '0' ) idx++; + if (s[idx] == '-') idx++; + if (s[idx] == '0') idx++; else { enforceJson(isDigit(s[idx++]), "Digit expected at beginning of number.", filename, line); while( idx < s.length && isDigit(s[idx]) ) idx++; @@ -1240,7 +2046,7 @@ idx++; is_float = true; if( idx < s.length && (s[idx] == '+' || s[idx] == '-') ) idx++; - enforceJson(idx < s.length && isDigit(s[idx]), "Expected exponent." ~ s[0 .. idx], filename, line); + enforceJson( idx < s.length && isDigit(s[idx]), "Expected exponent." ~ s[0 .. 
idx], filename, line); idx++; while( idx < s.length && isDigit(s[idx]) ) idx++; } @@ -1250,39 +2056,40 @@ return ret; } -private string skipJsonString(ref string s, string filename, int* line = null) +/// private +private string skipJsonString(R)(ref R s, string filename, int* line) { - enforceJson(s.length >= 2, "Too small for a string: '" ~ s ~ "'", filename, *line); - enforceJson(s[0] == '\"', "Expected string, not '" ~ s ~ "'", filename, *line); - s = s[1 .. $]; - string ret = jsonUnescape(s); - enforce(s.length > 0 && s[0] == '\"', "Unterminated string literal.", filename, *line); - s = s[1 .. $]; + // TODO: count or disallow any newlines inside of the string + enforceJson(!s.empty && s.front == '"', "Expected '\"' to start string.", filename, line); + s.popFront(); + string ret = jsonUnescape(s, filename, line); + enforceJson(!s.empty && s.front == '"', "Expected '\"' to terminate string.", filename, line); + s.popFront(); return ret; } -private void skipWhitespace(ref string s, int* line = null) +/// private +private void skipWhitespace(R)(ref R s, int* line = null) { - while( s.length > 0 ){ - switch( s[0] ){ + while (!s.empty) { + switch (s.front) { default: return; - case ' ', '\t': s = s[1 .. $]; break; + case ' ', '\t': s.popFront(); break; case '\n': - s = s[1 .. $]; - if( s.length > 0 && s[0] == '\r' ) s = s[1 .. $]; - if( line ) (*line)++; + s.popFront(); + if (!s.empty && s.front == '\r') s.popFront(); + if (line) (*line)++; break; case '\r': - s = s[1 .. $]; - if( s.length > 0 && s[0] == '\n' ) s = s[1 .. $]; - if( line ) (*line)++; + s.popFront(); + if (!s.empty && s.front == '\n') s.popFront(); + if (line) (*line)++; break; } } } -/// private -private bool isDigit(T)(T ch){ return ch >= '0' && ch <= '9'; } +private bool isDigit(dchar ch) { return ch >= '0' && ch <= '9'; } private string underscoreStrip(string field_name) { @@ -1290,13 +2097,23 @@ else return field_name[0 .. 
$-1]; } -private template isJsonSerializable(T) { enum isJsonSerializable = is(typeof(T.init.toJson()) == Json) && is(typeof(T.fromJson(Json())) == T); } -package template isStringSerializable(T) { enum isStringSerializable = is(typeof(T.init.toString()) == string) && is(typeof(T.fromString("")) == T); } +/// private +package template isJsonSerializable(T) { enum isJsonSerializable = is(typeof(T.init.toJson()) == Json) && is(typeof(T.fromJson(Json())) == T); } -private void enforceJson(string filename = __FILE__, int line = __LINE__)(bool cond, lazy string message, string err_file, int err_line) +private void enforceJson(string file = __FILE__, size_t line = __LINE__)(bool cond, lazy string message = "JSON exception") { - if (!cond) { - auto err_msg = format("%s(%s): Error: %s", err_file, err_line, message); - throw new Exception(err_msg, filename, line); - } + static if (__VERSION__ >= 2065) enforceEx!JSONException(cond, message, file, line); + else if (!cond) throw new JSONException(message); +} + +private void enforceJson(string file = __FILE__, size_t line = __LINE__)(bool cond, lazy string message, string err_file, int err_line) +{ + auto errmsg() { return format("%s(%s): Error: %s", err_file, err_line+1, message); } + static if (__VERSION__ >= 2065) enforceEx!JSONException(cond, errmsg, file, line); + else if (!cond) throw new JSONException(errmsg); +} + +private void enforceJson(string file = __FILE__, size_t line = __LINE__)(bool cond, lazy string message, string err_file, int* err_line) +{ + enforceJson!(file, line)(cond, message, err_file, err_line ? *err_line : -1); } diff --git a/source/dub/internal/vibecompat/data/serialization.d b/source/dub/internal/vibecompat/data/serialization.d new file mode 100644 index 0000000..ccbb115 --- /dev/null +++ b/source/dub/internal/vibecompat/data/serialization.d @@ -0,0 +1,1307 @@ +/** + Generic serialization framework. 
+ + This module provides general means for implementing (de-)serialization with + a standardized behavior. + + Supported_types: + The following rules are applied in order when serializing or + deserializing a certain type: + + $(OL + $(LI An `enum` type is serialized as its raw value, except if + `@byName` is used, in which case the name of the enum value + is serialized.) + $(LI Any type that is specifically supported by the serializer + is directly serialized. For example, the BSON serializer + supports `BsonObjectID` directly.) + $(LI Arrays and tuples (`std.typecons.Tuple`) are serialized + using the array serialization functions where each element is + serialized again according to these rules.) + $(LI Associative arrays are serialized similar to arrays. The key + type of the AA must satisfy the `isStringSerializable` trait + and will always be serialized as a string.) + $(LI Any `Nullable!T` will be serialized as either `null`, or + as the contained value (subject to these rules again).) + $(LI Any `BitFlags!T` value will be serialized as `T[]`) + $(LI Types satisfying the `isPolicySerializable` trait for the + supplied `Policy` will be serialized as the value returned + by the policy `toRepresentation` function (again subject to + these rules).) + $(LI Types satisfying the `isCustomSerializable` trait will be + serialized as the value returned by their `toRepresentation` + method (again subject to these rules).) + $(LI Types satisfying the `isISOExtStringSerializable` trait will be + serialized as a string, as returned by their `toISOExtString` + method. This causes types such as `SysTime` to be serialized + as strings.) + $(LI Types satisfying the `isStringSerializable` trait will be + serialized as a string, as returned by their `toString` + method.) + $(LI Struct and class types by default will be serialized as + associative arrays, where the key is the name of the + corresponding field (can be overridden using the `@name` + attribute). 
If the struct/class is annotated with `@asArray`, + it will instead be serialized as a flat array of values in the + order of declaration. Null class references will be serialized + as `null`.) + $(LI Pointer types will be serialized as either `null`, or as + the value they point to.) + $(LI Built-in integers and floating point values, as well as + boolean values will be converted to strings, if the serializer + doesn't support them directly.) + ) + + Note that no aliasing detection is performed, so that pointers, class + references and arrays referencing the same memory will be serialized + as multiple copies. When in turn deserializing the data, they will also + end up as separate copies in memory. + + Serializer_implementation: + Serializers are implemented in terms of a struct with template methods that + get called by the serialization framework: + + --- + struct ExampleSerializer { + enum isSupportedValueType(T) = is(T == string) || is(T == typeof(null)); + + // serialization + auto getSerializedResult(); + void beginWriteDictionary(T)(); + void endWriteDictionary(T)(); + void beginWriteDictionaryEntry(T)(string name); + void endWriteDictionaryEntry(T)(string name); + void beginWriteArray(T)(size_t length); + void endWriteArray(T)(); + void beginWriteArrayEntry(T)(size_t index); + void endWriteArrayEntry(T)(size_t index); + void writeValue(T)(T value); + + // deserialization + void readDictionary(T)(scope void delegate(string) entry_callback); + void readArray(T)(scope void delegate(size_t) size_callback, scope void delegate() entry_callback); + T readValue(T)(); + bool tryReadNull(); + } + --- + + Copyright: © 2013-2014 rejectedsoftware e.K. + License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. 
+ Authors: Sönke Ludwig +*/ +module dub.internal.vibecompat.data.serialization; + +version (Have_vibe_d_data) public import vibe.data.serialization; +else: + +import dub.internal.vibecompat.data.utils; + +import std.array : Appender, appender; +import std.conv : to; +import std.exception : enforce; +import std.traits; +import std.typetuple; + + +/** + Serializes a value with the given serializer. + + The serializer must have a value result for the first form + to work. Otherwise, use the range based form. + + See_Also: `vibe.data.json.JsonSerializer`, `vibe.data.json.JsonStringSerializer`, `vibe.data.bson.BsonSerializer` +*/ +auto serialize(Serializer, T, ARGS...)(T value, ARGS args) +{ + auto serializer = Serializer(args); + serialize(serializer, value); + return serializer.getSerializedResult(); +} +/// ditto +void serialize(Serializer, T)(ref Serializer serializer, T value) +{ + serializeImpl!(Serializer, DefaultPolicy, T)(serializer, value); +} + +/** Note that there is a convenience function `vibe.data.json.serializeToJson` + that can be used instead of manually invoking `serialize`. +*/ +unittest { + import dub.internal.vibecompat.data.json; + + struct Test { + int value; + string text; + } + + Test test; + test.value = 12; + test.text = "Hello"; + + Json serialized = serialize!JsonSerializer(test); + assert(serialized["value"].get!int == 12); + assert(serialized["text"].get!string == "Hello"); +} + +unittest { + import dub.internal.vibecompat.data.json; + + // Make sure that immutable(char[]) works just like string + // (i.e., immutable(char)[]). + immutable key = "answer"; + auto ints = [key: 42]; + auto serialized = serialize!JsonSerializer(ints); + assert(serialized[key].get!int == 42); +} + +/** + Serializes a value with the given serializer, representing values according to `Policy` when possible. + + The serializer must have a value result for the first form + to work. Otherwise, use the range based form. 
+ + See_Also: `vibe.data.json.JsonSerializer`, `vibe.data.json.JsonStringSerializer`, `vibe.data.bson.BsonSerializer` +*/ +auto serializeWithPolicy(Serializer, alias Policy, T, ARGS...)(T value, ARGS args) +{ + auto serializer = Serializer(args); + serializeWithPolicy!(Serializer, Policy)(serializer, value); + return serializer.getSerializedResult(); +} +/// ditto +void serializeWithPolicy(Serializer, alias Policy, T)(ref Serializer serializer, T value) +{ + serializeImpl!(Serializer, Policy, T)(serializer, value); +} +/// +version (unittest) +{ + template SizePol(T) + { + import std.conv; + import std.array; + + string toRepresentation(T value) { + return to!string(value.x) ~ "x" ~ to!string(value.y); + } + + T fromRepresentation(string value) { + string[] fields = value.split('x'); + alias fieldT = typeof(T.x); + auto x = to!fieldT(fields[0]); + auto y = to!fieldT(fields[1]); + return T(x, y); + } + } +} + +/// +static if (__VERSION__ >= 2065) unittest { + import dub.internal.vibecompat.data.json; + + static struct SizeI { + int x; + int y; + } + SizeI sizeI = SizeI(1,2); + Json serializedI = serializeWithPolicy!(JsonSerializer, SizePol)(sizeI); + assert(serializedI.get!string == "1x2"); + + static struct SizeF { + float x; + float y; + } + SizeF sizeF = SizeF(0.1f,0.2f); + Json serializedF = serializeWithPolicy!(JsonSerializer, SizePol)(sizeF); + assert(serializedF.get!string == "0.1x0.2"); +} + + +/** + Deserializes and returns a serialized value. + + serialized_data can be either an input range or a value containing + the serialized data, depending on the type of serializer used. 
+ + See_Also: `vibe.data.json.JsonSerializer`, `vibe.data.json.JsonStringSerializer`, `vibe.data.bson.BsonSerializer` +*/ +T deserialize(Serializer, T, ARGS...)(ARGS args) +{ + auto deserializer = Serializer(args); + return deserializeImpl!(T, DefaultPolicy, Serializer)(deserializer); +} + +/** Note that there is a convenience function `vibe.data.json.deserializeJson` + that can be used instead of manually invoking `deserialize`. +*/ +unittest { + import dub.internal.vibecompat.data.json; + + struct Test { + int value; + string text; + } + + Json serialized = Json.emptyObject; + serialized["value"] = 12; + serialized["text"] = "Hello"; + + Test test = deserialize!(JsonSerializer, Test)(serialized); + assert(test.value == 12); + assert(test.text == "Hello"); +} + +/** + Deserializes and returns a serialized value, interpreting values according to `Policy` when possible. + + serialized_data can be either an input range or a value containing + the serialized data, depending on the type of serializer used. 
+ + See_Also: `vibe.data.json.JsonSerializer`, `vibe.data.json.JsonStringSerializer`, `vibe.data.bson.BsonSerializer` +*/ +T deserializeWithPolicy(Serializer, alias Policy, T, ARGS...)(ARGS args) +{ + auto deserializer = Serializer(args); + return deserializeImpl!(T, Policy, Serializer)(deserializer); +} + +/// +static if (__VERSION__ >= 2065) unittest { + import dub.internal.vibecompat.data.json; + + static struct SizeI { + int x; + int y; + } + + Json serializedI = "1x2"; + SizeI sizeI = deserializeWithPolicy!(JsonSerializer, SizePol, SizeI)(serializedI); + assert(sizeI.x == 1); + assert(sizeI.y == 2); + + static struct SizeF { + float x; + float y; + } + Json serializedF = "0.1x0.2"; + SizeF sizeF = deserializeWithPolicy!(JsonSerializer, SizePol, SizeF)(serializedF); + assert(sizeF.x == 0.1f); + assert(sizeF.y == 0.2f); +} + +private void serializeImpl(Serializer, alias Policy, T, ATTRIBUTES...)(ref Serializer serializer, T value) +{ + import std.typecons : Nullable, Tuple, tuple; + static if (__VERSION__ >= 2067) import std.typecons : BitFlags; + + static assert(Serializer.isSupportedValueType!string, "All serializers must support string values."); + static assert(Serializer.isSupportedValueType!(typeof(null)), "All serializers must support null values."); + + alias TU = Unqual!T; + + static if (is(TU == enum)) { + static if (hasAttributeL!(ByNameAttribute, ATTRIBUTES)) { + serializeImpl!(Serializer, Policy, string)(serializer, value.to!string()); + } else { + serializeImpl!(Serializer, Policy, OriginalType!TU)(serializer, cast(OriginalType!TU)value); + } + } else static if (Serializer.isSupportedValueType!TU) { + static if (is(TU == typeof(null))) serializer.writeValue!TU(null); + else serializer.writeValue!TU(value); + } else static if (/*isInstanceOf!(Tuple, TU)*/is(T == Tuple!TPS, TPS...)) { + static if (TU.Types.length == 1) { + serializeImpl!(Serializer, Policy, typeof(value[0]), ATTRIBUTES)(serializer, value[0]); + } else { + 
serializer.beginWriteArray!TU(value.length); + foreach (i, TV; T.Types) { + serializer.beginWriteArrayEntry!TV(i); + serializeImpl!(Serializer, Policy, TV, ATTRIBUTES)(serializer, value[i]); + serializer.endWriteArrayEntry!TV(i); + } + serializer.endWriteArray!TU(); + } + } else static if (isArray!TU) { + alias TV = typeof(value[0]); + serializer.beginWriteArray!TU(value.length); + foreach (i, ref el; value) { + serializer.beginWriteArrayEntry!TV(i); + serializeImpl!(Serializer, Policy, TV, ATTRIBUTES)(serializer, el); + serializer.endWriteArrayEntry!TV(i); + } + serializer.endWriteArray!TU(); + } else static if (isAssociativeArray!TU) { + alias TK = KeyType!TU; + alias TV = ValueType!TU; + static if (__traits(compiles, serializer.beginWriteDictionary!TU(0))) { + auto nfields = value.length; + serializer.beginWriteDictionary!TU(nfields); + } else { + serializer.beginWriteDictionary!TU(); + } + foreach (key, ref el; value) { + string keyname; + static if (is(TK : string)) keyname = key; + else static if (is(TK : real) || is(TK : long) || is(TK == enum)) keyname = key.to!string; + else static if (isStringSerializable!TK) keyname = key.toString(); + else static assert(false, "Associative array keys must be strings, numbers, enums, or have toString/fromString methods."); + serializer.beginWriteDictionaryEntry!TV(keyname); + serializeImpl!(Serializer, Policy, TV, ATTRIBUTES)(serializer, el); + serializer.endWriteDictionaryEntry!TV(keyname); + } + static if (__traits(compiles, serializer.endWriteDictionary!TU(0))) { + serializer.endWriteDictionary!TU(nfields); + } else { + serializer.endWriteDictionary!TU(); + } + } else static if (/*isInstanceOf!(Nullable, TU)*/is(T == Nullable!TPS, TPS...)) { + if (value.isNull()) serializeImpl!(Serializer, Policy, typeof(null))(serializer, null); + else serializeImpl!(Serializer, Policy, typeof(value.get()), ATTRIBUTES)(serializer, value.get()); + } else static if (__VERSION__ >= 2067 && is(T == BitFlags!E, E)) { + size_t cnt = 0; + 
foreach (v; EnumMembers!E) + if (value & v) + cnt++; + + serializer.beginWriteArray!(E[])(cnt); + cnt = 0; + foreach (v; EnumMembers!E) + if (value & v) { + serializer.beginWriteArrayEntry!E(cnt); + serializeImpl!(Serializer, Policy, E, ATTRIBUTES)(serializer, v); + serializer.endWriteArrayEntry!E(cnt); + cnt++; + } + serializer.endWriteArray!(E[])(); + } else static if (isPolicySerializable!(Policy, TU)) { + alias CustomType = typeof(Policy!TU.toRepresentation(TU.init)); + serializeImpl!(Serializer, Policy, CustomType, ATTRIBUTES)(serializer, Policy!TU.toRepresentation(value)); + } else static if (isCustomSerializable!TU) { + alias CustomType = typeof(T.init.toRepresentation()); + serializeImpl!(Serializer, Policy, CustomType, ATTRIBUTES)(serializer, value.toRepresentation()); + } else static if (isISOExtStringSerializable!TU) { + serializer.writeValue(value.toISOExtString()); + } else static if (isStringSerializable!TU) { + serializer.writeValue(value.toString()); + } else static if (is(TU == struct) || is(TU == class)) { + static if (!hasSerializableFields!TU) + pragma(msg, "Serializing composite type "~T.stringof~" which has no serializable fields"); + static if (is(TU == class)) { + if (value is null) { + serializeImpl!(Serializer, Policy, typeof(null))(serializer, null); + return; + } + } + static if (hasAttributeL!(AsArrayAttribute, ATTRIBUTES)) { + enum nfields = getExpandedFieldCount!(TU, SerializableFields!TU); + serializer.beginWriteArray!TU(nfields); + foreach (mname; SerializableFields!TU) { + alias TMS = TypeTuple!(typeof(__traits(getMember, value, mname))); + foreach (j, TM; TMS) { + alias TA = TypeTuple!(__traits(getAttributes, TypeTuple!(__traits(getMember, T, mname))[j])); + serializer.beginWriteArrayEntry!TM(j); + serializeImpl!(Serializer, Policy, TM, TA)(serializer, tuple(__traits(getMember, value, mname))[j]); + serializer.endWriteArrayEntry!TM(j); + } + } + serializer.endWriteArray!TU(); + } else { + static if (__traits(compiles, 
serializer.beginWriteDictionary!TU(0))) { + enum nfields = getExpandedFieldCount!(TU, SerializableFields!TU); + serializer.beginWriteDictionary!TU(nfields); + } else { + serializer.beginWriteDictionary!TU(); + } + foreach (mname; SerializableFields!TU) { + alias TM = TypeTuple!(typeof(__traits(getMember, value, mname))); + static if (TM.length == 1) { + alias TA = TypeTuple!(__traits(getAttributes, __traits(getMember, T, mname))); + enum name = getAttribute!(TU, mname, NameAttribute)(NameAttribute(underscoreStrip(mname))).name; + auto vt = __traits(getMember, value, mname); + serializer.beginWriteDictionaryEntry!(typeof(vt))(name); + serializeImpl!(Serializer, Policy, typeof(vt), TA)(serializer, vt); + serializer.endWriteDictionaryEntry!(typeof(vt))(name); + } else { + alias TA = TypeTuple!(); // FIXME: support attributes for tuples somehow + enum name = underscoreStrip(mname); + auto vt = tuple(__traits(getMember, value, mname)); + serializer.beginWriteDictionaryEntry!(typeof(vt))(name); + serializeImpl!(Serializer, Policy, typeof(vt), TA)(serializer, vt); + serializer.endWriteDictionaryEntry!(typeof(vt))(name); + } + } + static if (__traits(compiles, serializer.endWriteDictionary!TU(0))) { + serializer.endWriteDictionary!TU(nfields); + } else { + serializer.endWriteDictionary!TU(); + } + } + } else static if (isPointer!TU) { + if (value is null) { + serializer.writeValue(null); + return; + } + serializeImpl!(Serializer, Policy, PointerTarget!TU)(serializer, *value); + } else static if (is(TU == bool) || is(TU : real) || is(TU : long)) { + serializeImpl!(Serializer, Policy, string)(serializer, to!string(value)); + } else static assert(false, "Unsupported serialization type: " ~ T.stringof); +} + + +private T deserializeImpl(T, alias Policy, Serializer, ATTRIBUTES...)(ref Serializer deserializer) +{ + import std.typecons : Nullable; + static if (__VERSION__ >= 2067) import std.typecons : BitFlags; + + static assert(Serializer.isSupportedValueType!string, "All 
serializers must support string values."); + static assert(Serializer.isSupportedValueType!(typeof(null)), "All serializers must support null values."); + + static if (is(T == enum)) { + static if (hasAttributeL!(ByNameAttribute, ATTRIBUTES)) { + return deserializeImpl!(string, Policy, Serializer)(deserializer).to!T(); + } else { + return cast(T)deserializeImpl!(OriginalType!T, Policy, Serializer)(deserializer); + } + } else static if (Serializer.isSupportedValueType!T) { + return deserializer.readValue!T(); + } else static if (isStaticArray!T) { + alias TV = typeof(T.init[0]); + T ret; + size_t i = 0; + deserializer.readArray!T((sz) { assert(sz == 0 || sz == T.length); }, { + assert(i < T.length); + ret[i++] = deserializeImpl!(TV, Policy, Serializer, ATTRIBUTES)(deserializer); + }); + return ret; + } else static if (isDynamicArray!T) { + alias TV = typeof(T.init[0]); + //auto ret = appender!T(); + T ret; // Cannot use appender because of DMD BUG 10690/10859/11357 + deserializer.readArray!T((sz) { ret.reserve(sz); }, () { + ret ~= deserializeImpl!(TV, Policy, Serializer, ATTRIBUTES)(deserializer); + }); + return ret;//cast(T)ret.data; + } else static if (isAssociativeArray!T) { + alias TK = KeyType!T; + alias TV = ValueType!T; + T ret; + deserializer.readDictionary!T((name) { + TK key; + static if (is(TK == string)) key = name; + else static if (is(TK : real) || is(TK : long) || is(TK == enum)) key = name.to!TK; + else static if (isStringSerializable!TK) key = TK.fromString(name); + else static assert(false, "Associative array keys must be strings, numbers, enums, or have toString/fromString methods."); + ret[key] = deserializeImpl!(TV, Policy, Serializer, ATTRIBUTES)(deserializer); + }); + return ret; + } else static if (isInstanceOf!(Nullable, T)) { + if (deserializer.tryReadNull()) return T.init; + return T(deserializeImpl!(typeof(T.init.get()), Policy, Serializer, ATTRIBUTES)(deserializer)); + } else static if (__VERSION__ >= 2067 && is(T == BitFlags!E, E)) { + 
T ret; + deserializer.readArray!(E[])((sz) {}, { + ret |= deserializeImpl!(E, Policy, Serializer, ATTRIBUTES)(deserializer); + }); + return ret; + } else static if (isPolicySerializable!(Policy, T)) { + alias CustomType = typeof(Policy!T.toRepresentation(T.init)); + return Policy!T.fromRepresentation(deserializeImpl!(CustomType, Policy, Serializer, ATTRIBUTES)(deserializer)); + } else static if (isCustomSerializable!T) { + alias CustomType = typeof(T.init.toRepresentation()); + return T.fromRepresentation(deserializeImpl!(CustomType, Policy, Serializer, ATTRIBUTES)(deserializer)); + } else static if (isISOExtStringSerializable!T) { + return T.fromISOExtString(deserializer.readValue!string()); + } else static if (isStringSerializable!T) { + return T.fromString(deserializer.readValue!string()); + } else static if (is(T == struct) || is(T == class)) { + static if (is(T == class)) { + if (deserializer.tryReadNull()) return null; + } + + bool[__traits(allMembers, T).length] set; + string name; + T ret; + static if (is(T == class)) ret = new T; + + static if (hasAttributeL!(AsArrayAttribute, ATTRIBUTES)) { + size_t idx = 0; + deserializer.readArray!T((sz){}, { + static if (hasSerializableFields!T) { + switch (idx++) { + default: break; + foreach (i, mname; SerializableFields!T) { + alias TM = typeof(__traits(getMember, ret, mname)); + alias TA = TypeTuple!(__traits(getAttributes, __traits(getMember, ret, mname))); + case i: + static if (hasAttribute!(OptionalAttribute, __traits(getMember, T, mname))) + if (deserializer.tryReadNull()) return; + set[i] = true; + __traits(getMember, ret, mname) = deserializeImpl!(TM, Policy, Serializer, TA)(deserializer); + break; + } + } + } else { + pragma(msg, "Deserializing composite type "~T.stringof~" which has no serializable fields."); + } + }); + } else { + deserializer.readDictionary!T((name) { + static if (hasSerializableFields!T) { + switch (name) { + default: break; + foreach (i, mname; SerializableFields!T) { + alias TM = 
typeof(__traits(getMember, ret, mname)); + alias TA = TypeTuple!(__traits(getAttributes, __traits(getMember, ret, mname))); + enum fname = getAttribute!(T, mname, NameAttribute)(NameAttribute(underscoreStrip(mname))).name; + case fname: + static if (hasAttribute!(OptionalAttribute, __traits(getMember, T, mname))) + if (deserializer.tryReadNull()) return; + set[i] = true; + __traits(getMember, ret, mname) = deserializeImpl!(TM, Policy, Serializer, TA)(deserializer); + break; + } + } + } else { + pragma(msg, "Deserializing composite type "~T.stringof~" which has no serializable fields."); + } + }); + } + foreach (i, mname; SerializableFields!T) + static if (!hasAttribute!(OptionalAttribute, __traits(getMember, T, mname))) + enforce(set[i], "Missing non-optional field '"~mname~"' of type '"~T.stringof~"'."); + return ret; + } else static if (isPointer!T) { + if (deserializer.tryReadNull()) return null; + alias PT = PointerTarget!T; + auto ret = new PT; + *ret = deserializeImpl!(PT, Policy, Serializer)(deserializer); + return ret; + } else static if (is(T == bool) || is(T : real) || is(T : long)) { + return to!T(deserializeImpl!(string, Policy, Serializer)(deserializer)); + } else static assert(false, "Unsupported serialization type: " ~ T.stringof); +} + + +/** + Attribute for overriding the field name during (de-)serialization. +*/ +NameAttribute name(string name) +{ + return NameAttribute(name); +} +/// +unittest { + struct Test { + @name("screen-size") int screenSize; + } +} + + +/** + Attribute marking a field as optional during deserialization. +*/ +@property OptionalAttribute optional() +{ + return OptionalAttribute(); +} +/// +unittest { + struct Test { + // does not need to be present during deserialization + @optional int screenSize = 100; + } +} + + +/** + Attribute for marking non-serialized fields. 
+*/ +@property IgnoreAttribute ignore() +{ + return IgnoreAttribute(); +} +/// +unittest { + struct Test { + // is neither serialized not deserialized + @ignore int screenSize; + } +} + + +/** + Attribute for forcing serialization of enum fields by name instead of by value. +*/ +@property ByNameAttribute byName() +{ + return ByNameAttribute(); +} +/// +unittest { + enum Color { + red, + green, + blue + } + + struct Test { + // serialized as an int (e.g. 1 for Color.green) + Color color; + // serialized as a string (e.g. "green" for Color.green) + @byName Color namedColor; + // serialized as array of ints + Color[] colorArray; + // serialized as array of strings + @byName Color[] namedColorArray; + } +} + + +/** + Attribute for representing a struct/class as an array instead of an object. + + Usually structs and class objects are serialized as dictionaries mapping + from field name to value. Using this attribute, they will be serialized + as a flat array instead. Note that changing the layout will make any + already serialized data mismatch when this attribute is used. +*/ +@property AsArrayAttribute asArray() +{ + return AsArrayAttribute(); +} +/// +unittest { + struct Fields { + int f1; + string f2; + double f3; + } + + struct Test { + // serialized as name:value pairs ["f1": int, "f2": string, "f3": double] + Fields object; + // serialized as a sequential list of values [int, string, double] + @asArray Fields array; + } + + import dub.internal.vibecompat.data.json; + static assert(is(typeof(serializeToJson(Test())))); +} + + +/// +enum FieldExistence +{ + missing, + exists, + defer +} + +/// User defined attribute (not intended for direct use) +struct NameAttribute { string name; } +/// ditto +struct OptionalAttribute {} +/// ditto +struct IgnoreAttribute {} +/// ditto +struct ByNameAttribute {} +/// ditto +struct AsArrayAttribute {} + +/** + Checks if a given type has a custom serialization representation. 
+ + A class or struct type is custom serializable if it defines a pair of + `toRepresentation`/`fromRepresentation` methods. Any class or + struct type that has this trait will be serialized by using the return + value of its `toRepresentation` method instead of the original value. + + This trait has precedence over `isISOExtStringSerializable` and + `isStringSerializable`. +*/ +template isCustomSerializable(T) +{ + enum bool isCustomSerializable = is(typeof(T.init.toRepresentation())) && is(typeof(T.fromRepresentation(T.init.toRepresentation())) == T); +} +/// +unittest { + // represented as a single uint when serialized + static struct S { + ushort x, y; + + uint toRepresentation() const { return x + (y << 16); } + static S fromRepresentation(uint i) { return S(i & 0xFFFF, i >> 16); } + } + + static assert(isCustomSerializable!S); +} + + +/** + Checks if a given type has an ISO extended string serialization representation. + + A class or struct type is ISO extended string serializable if it defines a + pair of `toISOExtString`/`fromISOExtString` methods. Any class or + struct type that has this trait will be serialized by using the return + value of its `toISOExtString` method instead of the original value. + + This is mainly useful for supporting serialization of the date/time + types in `std.datetime`. + + This trait has precedence over `isStringSerializable`. 
+*/ +template isISOExtStringSerializable(T) +{ + enum bool isISOExtStringSerializable = is(typeof(T.init.toISOExtString()) == string) && is(typeof(T.fromISOExtString("")) == T); +} +/// +unittest { + import std.datetime; + + static assert(isISOExtStringSerializable!DateTime); + static assert(isISOExtStringSerializable!SysTime); + + // represented as an ISO extended string when serialized + static struct S { + // dummy example implementations + string toISOExtString() const { return ""; } + static S fromISOExtString(string s) { return S.init; } + } + + static assert(isISOExtStringSerializable!S); +} + + +/** + Checks if a given type has a string serialization representation. + + A class or struct type is string serializable if it defines a pair of + `toString`/`fromString` methods. Any class or struct type that + has this trait will be serialized by using the return value of it's + `toString` method instead of the original value. +*/ +template isStringSerializable(T) +{ + enum bool isStringSerializable = is(typeof(T.init.toString()) == string) && is(typeof(T.fromString("")) == T); +} +/// +unittest { + import std.conv; + + // represented as the boxed value when serialized + static struct Box(T) { + T value; + } + + template BoxPol(S) + { + auto toRepresentation(S s) { + return s.value; + } + + S fromRepresentation(typeof(S.init.value) v) { + return S(v); + } + } + static assert(isPolicySerializable!(BoxPol, Box!int)); +} + +private template DefaultPolicy(T) +{ +} + +/** + Checks if a given policy supports custom serialization for a given type. + + A class or struct type is custom serializable according to a policy if + the policy defines a pair of `toRepresentation`/`fromRepresentation` + functions. Any class or struct type that has this trait for the policy supplied to + `serializeWithPolicy` will be serialized by using the return value of the + policy `toRepresentation` function instead of the original value. 
+ + This trait has precedence over `isCustomSerializable`, + `isISOExtStringSerializable` and `isStringSerializable`. + + See_Also: `vibe.data.serialization.serializeWithPolicy` +*/ +template isPolicySerializable(alias Policy, T) +{ + enum bool isPolicySerializable = is(typeof(Policy!T.toRepresentation(T.init))) && + is(typeof(Policy!T.fromRepresentation(Policy!T.toRepresentation(T.init))) == T); +} +/// +unittest { + import std.conv; + + // represented as a string when serialized + static struct S { + int value; + + // dummy example implementations + string toString() const { return value.to!string(); } + static S fromString(string s) { return S(s.to!int()); } + } + + static assert(isStringSerializable!S); +} + +/** + Chains serialization policy. + + Constructs a serialization policy that given a type `T` will apply the + first compatible policy `toRepresentation` and `fromRepresentation` + functions. Policies are evaluated left-to-right according to + `isPolicySerializable`. + + See_Also: `vibe.data.serialization.serializeWithPolicy` +*/ +template ChainedPolicy(alias Primary, Fallbacks...) +{ + static if (Fallbacks.length == 0) { + alias ChainedPolicy = Primary; + } else { + alias ChainedPolicy = ChainedPolicy!(ChainedPolicyImpl!(Primary, Fallbacks[0]), Fallbacks[1..$]); + } +} +/// +unittest { + import std.conv; + + // To be represented as the boxed value when serialized + static struct Box(T) { + T value; + } + // Also to berepresented as the boxed value when serialized, but has + // a different way to access the value. 
+ static struct Box2(T) { + private T v; + ref T get() { + return v; + } + } + template BoxPol(S) + { + auto toRepresentation(S s) { + return s.value; + } + + S fromRepresentation(typeof(toRepresentation(S.init)) v) { + return S(v); + } + } + template Box2Pol(S) + { + auto toRepresentation(S s) { + return s.get(); + } + + S fromRepresentation(typeof(toRepresentation(S.init)) v) { + S s; + s.get() = v; + return s; + } + } + alias ChainPol = ChainedPolicy!(BoxPol, Box2Pol); + static assert(!isPolicySerializable!(BoxPol, Box2!int)); + static assert(!isPolicySerializable!(Box2Pol, Box!int)); + static assert(isPolicySerializable!(ChainPol, Box!int)); + static assert(isPolicySerializable!(ChainPol, Box2!int)); +} + +private template ChainedPolicyImpl(alias Primary, alias Fallback) +{ + template Pol(T) + { + static if (isPolicySerializable!(Primary, T)) { + alias toRepresentation = Primary!T.toRepresentation; + alias fromRepresentation = Primary!T.fromRepresentation; + } else { + alias toRepresentation = Fallback!T.toRepresentation; + alias fromRepresentation = Fallback!T.fromRepresentation; + } + } + alias ChainedPolicyImpl = Pol; +} + +private template hasAttribute(T, alias decl) { enum hasAttribute = findFirstUDA!(T, decl).found; } + +unittest { + @asArray int i1; + static assert(hasAttribute!(AsArrayAttribute, i1)); + int i2; + static assert(!hasAttribute!(AsArrayAttribute, i2)); +} + +private template hasAttributeL(T, ATTRIBUTES...) { + static if (ATTRIBUTES.length == 1) { + enum hasAttributeL = is(typeof(ATTRIBUTES[0]) == T); + } else static if (ATTRIBUTES.length > 1) { + enum hasAttributeL = hasAttributeL!(T, ATTRIBUTES[0 .. $/2]) || hasAttributeL!(T, ATTRIBUTES[$/2 .. 
$]); + } else { + enum hasAttributeL = false; + } +} + +unittest { + static assert(hasAttributeL!(AsArrayAttribute, byName, asArray)); + static assert(!hasAttributeL!(AsArrayAttribute, byName)); +} + +private static T getAttribute(TT, string mname, T)(T default_value) +{ + enum val = findFirstUDA!(T, __traits(getMember, TT, mname)); + static if (val.found) return val.value; + else return default_value; +} + +private string underscoreStrip(string field_name) +{ + if( field_name.length < 1 || field_name[$-1] != '_' ) return field_name; + else return field_name[0 .. $-1]; +} + + +private template hasSerializableFields(T, size_t idx = 0) +{ + enum hasSerializableFields = SerializableFields!(T).length > 0; + /*static if (idx < __traits(allMembers, T).length) { + enum mname = __traits(allMembers, T)[idx]; + static if (!isRWPlainField!(T, mname) && !isRWField!(T, mname)) enum hasSerializableFields = hasSerializableFields!(T, idx+1); + else static if (hasAttribute!(IgnoreAttribute, __traits(getMember, T, mname))) enum hasSerializableFields = hasSerializableFields!(T, idx+1); + else enum hasSerializableFields = true; + } else enum hasSerializableFields = false;*/ +} + +private template SerializableFields(COMPOSITE) +{ + alias SerializableFields = FilterSerializableFields!(COMPOSITE, __traits(allMembers, COMPOSITE)); +} + +private template FilterSerializableFields(COMPOSITE, FIELDS...) +{ + static if (FIELDS.length > 1) { + alias FilterSerializableFields = TypeTuple!( + FilterSerializableFields!(COMPOSITE, FIELDS[0 .. $/2]), + FilterSerializableFields!(COMPOSITE, FIELDS[$/2 .. 
$])); + } else static if (FIELDS.length == 1) { + alias T = COMPOSITE; + enum mname = FIELDS[0]; + static if (isRWPlainField!(T, mname) || isRWField!(T, mname)) { + alias Tup = TypeTuple!(__traits(getMember, COMPOSITE, FIELDS[0])); + static if (Tup.length != 1) { + alias FilterSerializableFields = TypeTuple!(mname); + } else { + static if (!hasAttribute!(IgnoreAttribute, __traits(getMember, T, mname))) + alias FilterSerializableFields = TypeTuple!(mname); + else alias FilterSerializableFields = TypeTuple!(); + } + } else alias FilterSerializableFields = TypeTuple!(); + } else alias FilterSerializableFields = TypeTuple!(); +} + +private size_t getExpandedFieldCount(T, FIELDS...)() +{ + size_t ret = 0; + foreach (F; FIELDS) ret += TypeTuple!(__traits(getMember, T, F)).length; + return ret; +} + +/******************************************************************************/ +/* General serialization unit testing */ +/******************************************************************************/ + +version (unittest) { + private struct TestSerializer { + import std.array, std.conv, std.string; + + string result; + + enum isSupportedValueType(T) = is(T == string) || is(T == typeof(null)) || is(T == float) || is (T == int); + + string getSerializedResult() { return result; } + void beginWriteDictionary(T)() { result ~= "D("~T.mangleof~"){"; } + void endWriteDictionary(T)() { result ~= "}D("~T.mangleof~")"; } + void beginWriteDictionaryEntry(T)(string name) { result ~= "DE("~T.mangleof~","~name~")("; } + void endWriteDictionaryEntry(T)(string name) { result ~= ")DE("~T.mangleof~","~name~")"; } + void beginWriteArray(T)(size_t length) { result ~= "A("~T.mangleof~")["~length.to!string~"]["; } + void endWriteArray(T)() { result ~= "]A("~T.mangleof~")"; } + void beginWriteArrayEntry(T)(size_t i) { result ~= "AE("~T.mangleof~","~i.to!string~")("; } + void endWriteArrayEntry(T)(size_t i) { result ~= ")AE("~T.mangleof~","~i.to!string~")"; } + void writeValue(T)(T value) { + 
if (is(T == typeof(null))) result ~= "null"; + else { + assert(isSupportedValueType!T); + result ~= "V("~T.mangleof~")("~value.to!string~")"; + } + } + + // deserialization + void readDictionary(T)(scope void delegate(string) entry_callback) + { + skip("D("~T.mangleof~"){"); + while (result.startsWith("DE(")) { + result = result[3 .. $]; + auto idx = result.indexOf(','); + auto idx2 = result.indexOf(")("); + assert(idx > 0 && idx2 > idx); + auto t = result[0 .. idx]; + auto n = result[idx+1 .. idx2]; + result = result[idx2+2 .. $]; + entry_callback(n); + skip(")DE("~t~","~n~")"); + } + skip("}D("~T.mangleof~")"); + } + + void readArray(T)(scope void delegate(size_t) size_callback, scope void delegate() entry_callback) + { + skip("A("~T.mangleof~")["); + auto bidx = result.indexOf("]["); + assert(bidx > 0); + auto cnt = result[0 .. bidx].to!size_t; + result = result[bidx+2 .. $]; + + size_t i = 0; + while (result.startsWith("AE(")) { + result = result[3 .. $]; + auto idx = result.indexOf(','); + auto idx2 = result.indexOf(")("); + assert(idx > 0 && idx2 > idx); + auto t = result[0 .. idx]; + auto n = result[idx+1 .. idx2]; + result = result[idx2+2 .. $]; + assert(n == i.to!string); + entry_callback(); + skip(")AE("~t~","~n~")"); + i++; + } + skip("]A("~T.mangleof~")"); + + assert(i == cnt); + } + + T readValue(T)() + { + skip("V("~T.mangleof~")("); + auto idx = result.indexOf(')'); + assert(idx >= 0); + auto ret = result[0 .. idx].to!T; + result = result[idx+1 .. $]; + return ret; + } + + void skip(string prefix) + { + assert(result.startsWith(prefix), result); + result = result[prefix.length .. $]; + } + + bool tryReadNull() + { + if (result.startsWith("null")) { + result = result[4 .. 
$]; + return true; + } else return false; + } + } +} + +unittest { // basic serialization behavior + import std.typecons : Nullable; + + static void test(T)(T value, string expected) { + assert(serialize!TestSerializer(value) == expected, serialize!TestSerializer(value)); + static if (isPointer!T) { + if (value) assert(*deserialize!(TestSerializer, T)(expected) == *value); + else assert(deserialize!(TestSerializer, T)(expected) is null); + } else static if (is(T == Nullable!U, U)) { + if (value.isNull()) assert(deserialize!(TestSerializer, T)(expected).isNull); + else assert(deserialize!(TestSerializer, T)(expected) == value); + } else assert(deserialize!(TestSerializer, T)(expected) == value); + } + + test("hello", "V(Aya)(hello)"); + test(12, "V(i)(12)"); + test(12.0, "V(Aya)(12)"); + test(12.0f, "V(f)(12)"); + assert(serialize!TestSerializer(null) == "null"); + test(["hello", "world"], "A(AAya)[2][AE(Aya,0)(V(Aya)(hello))AE(Aya,0)AE(Aya,1)(V(Aya)(world))AE(Aya,1)]A(AAya)"); + test(["hello": "world"], "D(HAyaAya){DE(Aya,hello)(V(Aya)(world))DE(Aya,hello)}D(HAyaAya)"); + test(cast(int*)null, "null"); + int i = 42; + test(&i, "V(i)(42)"); + Nullable!int j; + test(j, "null"); + j = 42; + test(j, "V(i)(42)"); +} + +unittest { // basic user defined types + static struct S { string f; } + enum Sm = S.mangleof; + auto s = S("hello"); + enum s_ser = "D("~Sm~"){DE(Aya,f)(V(Aya)(hello))DE(Aya,f)}D("~Sm~")"; + assert(serialize!TestSerializer(s) == s_ser, serialize!TestSerializer(s)); + assert(deserialize!(TestSerializer, S)(s_ser) == s); + + static class C { string f; } + enum Cm = C.mangleof; + C c; + assert(serialize!TestSerializer(c) == "null"); + c = new C; + c.f = "hello"; + enum c_ser = "D("~Cm~"){DE(Aya,f)(V(Aya)(hello))DE(Aya,f)}D("~Cm~")"; + assert(serialize!TestSerializer(c) == c_ser); + assert(deserialize!(TestSerializer, C)(c_ser).f == c.f); + + enum E { hello, world } + assert(serialize!TestSerializer(E.hello) == "V(i)(0)"); + 
assert(serialize!TestSerializer(E.world) == "V(i)(1)"); +} + +unittest { // tuple serialization + import std.typecons : Tuple; + + static struct S(T...) { T f; } + enum Sm = S!(int, string).mangleof; + enum Tum = Tuple!(int, string).mangleof; + auto s = S!(int, string)(42, "hello"); + assert(serialize!TestSerializer(s) == + "D("~Sm~"){DE("~Tum~",f)(A("~Tum~")[2][AE(i,0)(V(i)(42))AE(i,0)AE(Aya,1)(V(Aya)(hello))AE(Aya,1)]A("~Tum~"))DE("~Tum~",f)}D("~Sm~")"); + + static struct T { @asArray S!(int, string) g; } + enum Tm = T.mangleof; + auto t = T(s); + assert(serialize!TestSerializer(t) == + "D("~Tm~"){DE("~Sm~",g)(A("~Sm~")[2][AE(i,0)(V(i)(42))AE(i,0)AE(Aya,1)(V(Aya)(hello))AE(Aya,1)]A("~Sm~"))DE("~Sm~",g)}D("~Tm~")"); +} + +unittest { // testing the various UDAs + enum E { hello, world } + enum Em = E.mangleof; + static struct S { + @byName E e; + @ignore int i; + @optional float f; + } + enum Sm = S.mangleof; + auto s = S(E.world, 42, 1.0f); + assert(serialize!TestSerializer(s) == + "D("~Sm~"){DE("~Em~",e)(V(Aya)(world))DE("~Em~",e)DE(f,f)(V(f)(1))DE(f,f)}D("~Sm~")"); +} + +unittest { // custom serialization support + // iso-ext + import std.datetime; + auto t = TimeOfDay(6, 31, 23); + assert(serialize!TestSerializer(t) == "V(Aya)(06:31:23)"); + auto d = Date(1964, 1, 23); + assert(serialize!TestSerializer(d) == "V(Aya)(1964-01-23)"); + auto dt = DateTime(d, t); + assert(serialize!TestSerializer(dt) == "V(Aya)(1964-01-23T06:31:23)"); + auto st = SysTime(dt, UTC()); + assert(serialize!TestSerializer(st) == "V(Aya)(1964-01-23T06:31:23Z)"); + + // string + struct S1 { int i; string toString() const { return "hello"; } static S1 fromString(string) { return S1.init; } } + struct S2 { int i; string toString() const { return "hello"; } } + enum S2m = S2.mangleof; + struct S3 { int i; static S3 fromString(string) { return S3.init; } } + enum S3m = S3.mangleof; + assert(serialize!TestSerializer(S1.init) == "V(Aya)(hello)"); + assert(serialize!TestSerializer(S2.init) == 
"D("~S2m~"){DE(i,i)(V(i)(0))DE(i,i)}D("~S2m~")"); + assert(serialize!TestSerializer(S3.init) == "D("~S3m~"){DE(i,i)(V(i)(0))DE(i,i)}D("~S3m~")"); + + // custom + struct C1 { int i; float toRepresentation() const { return 1.0f; } static C1 fromRepresentation(float f) { return C1.init; } } + struct C2 { int i; float toRepresentation() const { return 1.0f; } } + enum C2m = C2.mangleof; + struct C3 { int i; static C3 fromRepresentation(float f) { return C3.init; } } + enum C3m = C3.mangleof; + assert(serialize!TestSerializer(C1.init) == "V(f)(1)"); + assert(serialize!TestSerializer(C2.init) == "D("~C2m~"){DE(i,i)(V(i)(0))DE(i,i)}D("~C2m~")"); + assert(serialize!TestSerializer(C3.init) == "D("~C3m~"){DE(i,i)(V(i)(0))DE(i,i)}D("~C3m~")"); +} + +unittest // Testing corner case: member function returning by ref +{ + import dub.internal.vibecompat.data.json; + + static struct S + { + int i; + ref int foo() { return i; } + } + + static assert(__traits(compiles, { S().serializeToJson(); })); + static assert(__traits(compiles, { Json().deserializeJson!S(); })); + + auto s = S(1); + assert(s.serializeToJson().deserializeJson!S() == s); +} + +unittest // Testing corner case: Variadic template constructors and methods +{ + import dub.internal.vibecompat.data.json; + + static struct S + { + int i; + this(Args...)(Args args) {} + int foo(Args...)(Args args) { return i; } + ref int bar(Args...)(Args args) { return i; } + } + + static assert(__traits(compiles, { S().serializeToJson(); })); + static assert(__traits(compiles, { Json().deserializeJson!S(); })); + + auto s = S(1); + assert(s.serializeToJson().deserializeJson!S() == s); +} + +unittest // Make sure serializing through properties still works +{ + import dub.internal.vibecompat.data.json; + + static struct S + { + public int i; + private int privateJ; + + @property int j() { return privateJ; } + @property void j(int j) { privateJ = j; } + } + + auto s = S(1, 2); + assert(s.serializeToJson().deserializeJson!S() == s); +} + 
+static if (__VERSION__ >= 2067) +unittest { // test BitFlags serialization + import std.typecons : BitFlags; + + enum Flag { + a = 1<<0, + b = 1<<1, + c = 1<<2 + } + enum Flagm = Flag.mangleof; + + alias Flags = BitFlags!Flag; + enum Flagsm = Flags.mangleof; + + enum Fi_ser = "A(A"~Flagm~")[0][]A(A"~Flagm~")"; + assert(serialize!TestSerializer(Flags.init) == Fi_ser); + + enum Fac_ser = "A(A"~Flagm~")[2][AE("~Flagm~",0)(V(i)(1))AE("~Flagm~",0)AE("~Flagm~",1)(V(i)(4))AE("~Flagm~",1)]A(A"~Flagm~")"; + assert(serialize!TestSerializer(Flags(Flag.a, Flag.c)) == Fac_ser); + + struct S { @byName Flags f; } + enum Sm = S.mangleof; + enum Sac_ser = "D("~Sm~"){DE("~Flagsm~",f)(A(A"~Flagm~")[2][AE("~Flagm~",0)(V(Aya)(a))AE("~Flagm~",0)AE("~Flagm~",1)(V(Aya)(c))AE("~Flagm~",1)]A(A"~Flagm~"))DE("~Flagsm~",f)}D("~Sm~")"; + + assert(serialize!TestSerializer(S(Flags(Flag.a, Flag.c))) == Sac_ser); + + assert(deserialize!(TestSerializer, Flags)(Fi_ser) == Flags.init); + assert(deserialize!(TestSerializer, Flags)(Fac_ser) == Flags(Flag.a, Flag.c)); + assert(deserialize!(TestSerializer, S)(Sac_ser) == S(Flags(Flag.a, Flag.c))); +} diff --git a/source/dub/internal/vibecompat/data/utils.d b/source/dub/internal/vibecompat/data/utils.d index 5269fb3..a967455 100644 --- a/source/dub/internal/vibecompat/data/utils.d +++ b/source/dub/internal/vibecompat/data/utils.d @@ -7,24 +7,673 @@ */ module dub.internal.vibecompat.data.utils; +version (Have_vibe_d_data) {} +else: + public import std.traits; +/** + Checks if given type is a getter function type -template isRWPlainField(T, string M) + Returns: `true` if argument is a getter + */ +template isPropertyGetter(T...) 
+ if (T.length == 1) { - static if( !__traits(compiles, typeof(__traits(getMember, T, M))) ){ - enum isRWPlainField = false; - } else { - //pragma(msg, T.stringof~"."~M~":"~typeof(__traits(getMember, T, M)).stringof); - enum isRWPlainField = isRWField!(T, M) && __traits(compiles, *(&__traits(getMember, Tgen!T(), M)) = *(&__traits(getMember, Tgen!T(), M))); + import std.traits : functionAttributes, FunctionAttribute, ReturnType, + isSomeFunction; + static if (isSomeFunction!(T[0])) { + enum isPropertyGetter = + (functionAttributes!(T[0]) & FunctionAttribute.property) != 0 + && !is(ReturnType!T == void); + } + else + enum isPropertyGetter = false; +} + +/// +unittest +{ + interface Test + { + @property int getter(); + @property void setter(int); + int simple(); + } + + static assert(isPropertyGetter!(typeof(&Test.getter))); + static assert(!isPropertyGetter!(typeof(&Test.setter))); + static assert(!isPropertyGetter!(typeof(&Test.simple))); + static assert(!isPropertyGetter!int); +} + +/** + Checks if given type is a setter function type + + Returns: `true` if argument is a setter + */ +template isPropertySetter(T...) + if (T.length == 1) +{ + import std.traits : functionAttributes, FunctionAttribute, ReturnType, + isSomeFunction; + + static if (isSomeFunction!(T[0])) { + enum isPropertySetter = + (functionAttributes!(T) & FunctionAttribute.property) != 0 + && is(ReturnType!(T[0]) == void); + } + else + enum isPropertySetter = false; +} + +/// +unittest +{ + interface Test + { + @property int getter(); + @property void setter(int); + int simple(); + } + + static assert(isPropertySetter!(typeof(&Test.setter))); + static assert(!isPropertySetter!(typeof(&Test.getter))); + static assert(!isPropertySetter!(typeof(&Test.simple))); + static assert(!isPropertySetter!int); +} + +/** + Deduces single base interface for a type. Multiple interfaces + will result in compile-time error. + + Params: + T = interface or class type + + Returns: + T if it is an interface. 
If T is a class, interface it implements. +*/ +template baseInterface(T) + if (is(T == interface) || is(T == class)) +{ + import std.traits : InterfacesTuple; + + static if (is(T == interface)) { + alias baseInterface = T; + } + else + { + alias Ifaces = InterfacesTuple!T; + static assert ( + Ifaces.length == 1, + "Type must be either provided as an interface or implement only one interface" + ); + alias baseInterface = Ifaces[0]; } } -template isRWField(T, string M) +/// +unittest { - enum isRWField = __traits(compiles, __traits(getMember, Tgen!T(), M) = __traits(getMember, Tgen!T(), M)); - //pragma(msg, T.stringof~"."~M~": "~(isRWField?"1":"0")); + interface I1 { } + class A : I1 { } + interface I2 { } + class B : I1, I2 { } + + static assert (is(baseInterface!I1 == I1)); + static assert (is(baseInterface!A == I1)); + static assert (!is(typeof(baseInterface!B))); } -/// private -private T Tgen(T)(){ return T.init; } + +/** + Determins if a member is a public, non-static data field. +*/ +template isRWPlainField(T, string M) +{ + static if (!isRWField!(T, M)) enum isRWPlainField = false; + else { + //pragma(msg, T.stringof~"."~M~":"~typeof(__traits(getMember, T, M)).stringof); + enum isRWPlainField = __traits(compiles, *(&__traits(getMember, Tgen!T(), M)) = *(&__traits(getMember, Tgen!T(), M))); + } +} + +/** + Determines if a member is a public, non-static, de-facto data field. + + In addition to plain data fields, R/W properties are also accepted. 
+*/ +template isRWField(T, string M) +{ + import std.traits; + import std.typetuple; + + static void testAssign()() { + T t = void; + __traits(getMember, t, M) = __traits(getMember, t, M); + } + + // reject type aliases + static if (is(TypeTuple!(__traits(getMember, T, M)))) enum isRWField = false; + // reject non-public members + else static if (!isPublicMember!(T, M)) enum isRWField = false; + // reject static members + else static if (!isNonStaticMember!(T, M)) enum isRWField = false; + // reject non-typed members + else static if (!is(typeof(__traits(getMember, T, M)))) enum isRWField = false; + // reject void typed members (includes templates) + else static if (is(typeof(__traits(getMember, T, M)) == void)) enum isRWField = false; + // reject non-assignable members + else static if (!__traits(compiles, testAssign!()())) enum isRWField = false; + else static if (anySatisfy!(isSomeFunction, __traits(getMember, T, M))) { + // If M is a function, reject if not @property or returns by ref + private enum FA = functionAttributes!(__traits(getMember, T, M)); + enum isRWField = (FA & FunctionAttribute.property) != 0; + } else { + enum isRWField = true; + } +} + +unittest { + import std.algorithm; + + struct S { + alias a = int; // alias + int i; // plain RW field + enum j = 42; // manifest constant + static int k = 42; // static field + private int privateJ; // private RW field + + this(Args...)(Args args) {} + + // read-write property (OK) + @property int p1() { return privateJ; } + @property void p1(int j) { privateJ = j; } + // read-only property (NO) + @property int p2() { return privateJ; } + // write-only property (NO) + @property void p3(int value) { privateJ = value; } + // ref returning property (OK) + @property ref int p4() { return i; } + // parameter-less template property (OK) + @property ref int p5()() { return i; } + // not treated as a property by DMD, so not a field + @property int p6()() { return privateJ; } + @property void p6(int j)() { privateJ = 
j; } + + static @property int p7() { return k; } + static @property void p7(int value) { k = value; } + + ref int f1() { return i; } // ref returning function (no field) + + int f2(Args...)(Args args) { return i; } + + ref int f3(Args...)(Args args) { return i; } + + void someMethod() {} + + ref int someTempl()() { return i; } + } + + enum plainFields = ["i"]; + enum fields = ["i", "p1", "p4", "p5"]; + + foreach (mem; __traits(allMembers, S)) { + static if (isRWField!(S, mem)) static assert(fields.canFind(mem), mem~" detected as field."); + else static assert(!fields.canFind(mem), mem~" not detected as field."); + + static if (isRWPlainField!(S, mem)) static assert(plainFields.canFind(mem), mem~" not detected as plain field."); + else static assert(!plainFields.canFind(mem), mem~" not detected as plain field."); + } +} + +package T Tgen(T)(){ return T.init; } + + +/** + Tests if the protection of a member is public. +*/ +template isPublicMember(T, string M) +{ + import std.algorithm, std.typetuple : TypeTuple; + + static if (!__traits(compiles, TypeTuple!(__traits(getMember, T, M)))) enum isPublicMember = false; + else { + alias MEM = TypeTuple!(__traits(getMember, T, M)); + enum _prot = __traits(getProtection, MEM); + enum isPublicMember = _prot == "public" || _prot == "export"; + } +} + +unittest { + class C { + int a; + export int b; + protected int c; + private int d; + package int e; + void f() {} + static void g() {} + private void h() {} + private static void i() {} + } + + static assert (isPublicMember!(C, "a")); + static assert (isPublicMember!(C, "b")); + static assert (!isPublicMember!(C, "c")); + static assert (!isPublicMember!(C, "d")); + static assert (!isPublicMember!(C, "e")); + static assert (isPublicMember!(C, "f")); + static assert (isPublicMember!(C, "g")); + static assert (!isPublicMember!(C, "h")); + static assert (!isPublicMember!(C, "i")); + + struct S { + int a; + export int b; + private int d; + package int e; + } + static assert 
(isPublicMember!(S, "a")); + static assert (isPublicMember!(S, "b")); + static assert (!isPublicMember!(S, "d")); + static assert (!isPublicMember!(S, "e")); + + S s; + s.a = 21; + assert(s.a == 21); +} + +/** + Tests if a member requires $(D this) to be used. +*/ +template isNonStaticMember(T, string M) +{ + import std.typetuple; + import std.traits; + + alias MF = TypeTuple!(__traits(getMember, T, M)); + static if (M.length == 0) { + enum isNonStaticMember = false; + } else static if (anySatisfy!(isSomeFunction, MF)) { + enum isNonStaticMember = !__traits(isStaticFunction, MF); + } else { + enum isNonStaticMember = !__traits(compiles, (){ auto x = __traits(getMember, T, M); }()); + } +} + +unittest { // normal fields + struct S { + int a; + static int b; + enum c = 42; + void f(); + static void g(); + ref int h() { return a; } + static ref int i() { return b; } + } + static assert(isNonStaticMember!(S, "a")); + static assert(!isNonStaticMember!(S, "b")); + static assert(!isNonStaticMember!(S, "c")); + static assert(isNonStaticMember!(S, "f")); + static assert(!isNonStaticMember!(S, "g")); + static assert(isNonStaticMember!(S, "h")); + static assert(!isNonStaticMember!(S, "i")); +} + +unittest { // tuple fields + struct S(T...) { + T a; + static T b; + } + + alias T = S!(int, float); + auto p = T.b; + static assert(isNonStaticMember!(T, "a")); + static assert(!isNonStaticMember!(T, "b")); + + alias U = S!(); + static assert(!isNonStaticMember!(U, "a")); + static assert(!isNonStaticMember!(U, "b")); +} + + +/** + Tests if a Group of types is implicitly convertible to a Group of target types. 
+*/ +bool areConvertibleTo(alias TYPES, alias TARGET_TYPES)() + if (isGroup!TYPES && isGroup!TARGET_TYPES) +{ + static assert(TYPES.expand.length == TARGET_TYPES.expand.length); + foreach (i, V; TYPES.expand) + if (!is(V : TARGET_TYPES.expand[i])) + return false; + return true; +} + +/// Test if the type $(D DG) is a correct delegate for an opApply where the +/// key/index is of type $(D TKEY) and the value of type $(D TVALUE). +template isOpApplyDg(DG, TKEY, TVALUE) { + import std.traits; + static if (is(DG == delegate) && is(ReturnType!DG : int)) { + private alias PTT = ParameterTypeTuple!(DG); + private alias PSCT = ParameterStorageClassTuple!(DG); + private alias STC = ParameterStorageClass; + // Just a value + static if (PTT.length == 1) { + enum isOpApplyDg = (is(PTT[0] == TVALUE) && PSCT[0] == STC.ref_); + } else static if (PTT.length == 2) { + enum isOpApplyDg = (is(PTT[0] == TKEY) && PSCT[0] == STC.ref_) + && (is(PTT[1] == TKEY) && PSCT[1] == STC.ref_); + } else + enum isOpApplyDg = false; + } else { + enum isOpApplyDg = false; + } +} + +/** + TypeTuple which does not auto-expand. + + Useful when you need + to multiple several type tuples as different template argument + list parameters, without merging those. +*/ +template Group(T...) +{ + alias expand = T; +} + +/// +unittest +{ + alias group = Group!(int, double, string); + static assert (!is(typeof(group.length))); + static assert (group.expand.length == 3); + static assert (is(group.expand[1] == double)); +} + +/** +*/ +template isGroup(T...) 
+{ + static if (T.length != 1) enum isGroup = false; + else enum isGroup = + !is(T[0]) && is(typeof(T[0]) == void) // does not evaluate to something + && is(typeof(T[0].expand.length) : size_t) // expands to something with length + && !is(typeof(&(T[0].expand))); // expands to not addressable +} + +version (unittest) // NOTE: GDC complains about template definitions in unittest blocks +{ + import std.typetuple; + + alias group = Group!(int, double, string); + alias group2 = Group!(); + + template Fake(T...) + { + int[] expand; + } + alias fake = Fake!(int, double, string); + + alias fake2 = TypeTuple!(int, double, string); + + static assert (isGroup!group); + static assert (isGroup!group2); + static assert (!isGroup!fake); + static assert (!isGroup!fake2); +} + +/* Copied from Phobos as it is private there. + */ +private template isSame(ab...) + if (ab.length == 2) +{ + static if (is(ab[0]) && is(ab[1])) + { + enum isSame = is(ab[0] == ab[1]); + } + else static if (!is(ab[0]) && + !is(ab[1]) && + is(typeof(ab[0] == ab[1]) == bool) && + (ab[0] == ab[1])) + { + static if (!__traits(compiles, &ab[0]) || + !__traits(compiles, &ab[1])) + enum isSame = (ab[0] == ab[1]); + else + enum isSame = __traits(isSame, ab[0], ab[1]); + } + else + { + enum isSame = __traits(isSame, ab[0], ab[1]); + } +} + + +/** + Small convenience wrapper to find and extract certain UDA from given type. + Will stop on first element which is of required type. + + Params: + UDA = type or template to search for in UDA list + Symbol = symbol to query for UDA's + allow_types = if set to `false` considers attached `UDA` types an error + (only accepts instances/values) + + Returns: aggregated search result struct with 3 field. `value` aliases found UDA. + `found` is boolean flag for having a valid find. `index` is integer index in + attribute list this UDA was found at. 
+*/ +template findFirstUDA(alias UDA, alias Symbol, bool allow_types = false) if (!is(UDA)) +{ + enum findFirstUDA = findNextUDA!(UDA, Symbol, 0, allow_types); +} + +/// Ditto +template findFirstUDA(UDA, alias Symbol, bool allow_types = false) +{ + enum findFirstUDA = findNextUDA!(UDA, Symbol, 0, allow_types); +} + +private struct UdaSearchResult(alias UDA) +{ + alias value = UDA; + bool found = false; + long index = -1; +} + +/** + Small convenience wrapper to find and extract certain UDA from given type. + Will start at the given index and stop on the next element which is of required type. + + Params: + UDA = type or template to search for in UDA list + Symbol = symbol to query for UDA's + idx = 0-based index to start at. Should be positive, and under the total number of attributes. + allow_types = if set to `false` considers attached `UDA` types an error + (only accepts instances/values) + + Returns: aggregated search result struct with 3 field. `value` aliases found UDA. + `found` is boolean flag for having a valid find. `index` is integer index in + attribute list this UDA was found at. + */ +template findNextUDA(alias UDA, alias Symbol, long idx, bool allow_types = false) if (!is(UDA)) +{ + import std.traits : isInstanceOf; + import std.typetuple : TypeTuple; + + private alias udaTuple = TypeTuple!(__traits(getAttributes, Symbol)); + + static assert(idx >= 0, "Index given to findNextUDA can't be negative"); + static assert(idx <= udaTuple.length, "Index given to findNextUDA is above the number of attribute"); + + public template extract(size_t index, list...) 
+ { + static if (!list.length) enum extract = UdaSearchResult!(null)(false, -1); + else { + static if (is(list[0])) { + static if (is(UDA) && is(list[0] == UDA) || !is(UDA) && isInstanceOf!(UDA, list[0])) { + static assert (allow_types, "findNextUDA is designed to look up values, not types"); + enum extract = UdaSearchResult!(list[0])(true, index); + } else enum extract = extract!(index + 1, list[1..$]); + } else { + static if (is(UDA) && is(typeof(list[0]) == UDA) || !is(UDA) && isInstanceOf!(UDA, typeof(list[0]))) { + import vibe.internal.meta.traits : isPropertyGetter; + static if (isPropertyGetter!(list[0])) { + enum value = list[0]; + enum extract = UdaSearchResult!(value)(true, index); + } else enum extract = UdaSearchResult!(list[0])(true, index); + } else enum extract = extract!(index + 1, list[1..$]); + } + } + } + + enum findNextUDA = extract!(idx, udaTuple[idx .. $]); +} +/// ditto +template findNextUDA(UDA, alias Symbol, long idx, bool allow_types = false) +{ + import std.traits : isInstanceOf; + import std.typetuple : TypeTuple; + + private alias udaTuple = TypeTuple!(__traits(getAttributes, Symbol)); + + static assert(idx >= 0, "Index given to findNextUDA can't be negative"); + static assert(idx <= udaTuple.length, "Index given to findNextUDA is above the number of attribute"); + + public template extract(size_t index, list...) 
+ { + static if (!list.length) enum extract = UdaSearchResult!(null)(false, -1); + else { + static if (is(list[0])) { + static if (is(list[0] == UDA)) { + static assert (allow_types, "findNextUDA is designed to look up values, not types"); + enum extract = UdaSearchResult!(list[0])(true, index); + } else enum extract = extract!(index + 1, list[1..$]); + } else { + static if (is(typeof(list[0]) == UDA)) { + static if (isPropertyGetter!(list[0])) { + enum value = list[0]; + enum extract = UdaSearchResult!(value)(true, index); + } else enum extract = UdaSearchResult!(list[0])(true, index); + } else enum extract = extract!(index + 1, list[1..$]); + } + } + } + + enum findNextUDA = extract!(idx, udaTuple[idx .. $]); +} + + +/// +unittest +{ + struct Attribute { int x; } + + @("something", Attribute(42), Attribute(41)) + void symbol(); + + enum result0 = findNextUDA!(string, symbol, 0); + static assert (result0.found); + static assert (result0.index == 0); + static assert (result0.value == "something"); + + enum result1 = findNextUDA!(Attribute, symbol, 0); + static assert (result1.found); + static assert (result1.index == 1); + static assert (result1.value == Attribute(42)); + + enum result2 = findNextUDA!(int, symbol, 0); + static assert (!result2.found); + + enum result3 = findNextUDA!(Attribute, symbol, result1.index + 1); + static assert (result3.found); + static assert (result3.index == 2); + static assert (result3.value == Attribute(41)); +} + +unittest +{ + struct Attribute { int x; } + + @(Attribute) void symbol(); + + static assert (!is(findNextUDA!(Attribute, symbol, 0))); + + enum result0 = findNextUDA!(Attribute, symbol, 0, true); + static assert (result0.found); + static assert (result0.index == 0); + static assert (is(result0.value == Attribute)); +} + +unittest +{ + struct Attribute { int x; } + enum Dummy; + + @property static Attribute getter() + { + return Attribute(42); + } + + @Dummy @getter void symbol(); + + enum result0 = findNextUDA!(Attribute, 
symbol, 0); + static assert (result0.found); + static assert (result0.index == 1); + static assert (result0.value == Attribute(42)); +} + +/// Eager version of findNextUDA that represent all instances of UDA in a Tuple. +/// If one of the attribute is a type instead of an instance, compilation will fail. +template UDATuple(alias UDA, alias Sym) { + import std.typetuple : TypeTuple; + + private template extract(size_t maxSize, Founds...) + { + private alias LastFound = Founds[$ - 1]; + // No more to find + static if (!LastFound.found) + enum extract = Founds[0 .. $ - 1]; + else { + // For ease of use, this is a Tuple of UDA, not a tuple of UdaSearchResult!(...) + private alias Result = TypeTuple!(Founds[0 .. $ - 1], LastFound.value); + // We're at the last parameter + static if (LastFound.index == maxSize) + enum extract = Result; + else + enum extract = extract!(maxSize, Result, findNextUDA!(UDA, Sym, LastFound.index + 1)); + } + } + + private enum maxIndex = TypeTuple!(__traits(getAttributes, Sym)).length; + enum UDATuple = extract!(maxIndex, findNextUDA!(UDA, Sym, 0)); +} + +unittest +{ + import std.typetuple : TypeTuple; + + struct Attribute { int x; } + enum Dummy; + + @(Dummy, Attribute(21), Dummy, Attribute(42), Attribute(84)) void symbol() {} + @(Dummy, Attribute(21), Dummy, Attribute(42), Attribute) void wrong() {} + + alias Cmp = TypeTuple!(Attribute(21), Attribute(42), Attribute(84)); + static assert(Cmp == UDATuple!(Attribute, symbol)); + static assert(!is(UDATuple!(Attribute, wrong))); +} + +/// Avoid repeating the same error message again and again. +/// ---- +/// if (!__ctfe) +/// assert(0, onlyAsUda!func); +/// ---- +template onlyAsUda(string from /*= __FUNCTION__*/) +{ + // With default param, DMD think expression is void, even when writing 'enum string onlyAsUda = ...' 
+ enum onlyAsUda = from~" must only be used as an attribute - not called as a runtime function."; +} diff --git a/source/dub/internal/vibecompat/inet/path.d b/source/dub/internal/vibecompat/inet/path.d index 6b54292..f5bd81f 100644 --- a/source/dub/internal/vibecompat/inet/path.d +++ b/source/dub/internal/vibecompat/inet/path.d @@ -7,7 +7,7 @@ */ module dub.internal.vibecompat.inet.path; -version (Have_vibe_d) public import vibe.inet.path; +version (Have_vibe_d_core) public import vibe.inet.path; else: import std.algorithm; @@ -34,7 +34,8 @@ /// Constructs a Path object by parsing a path string. this(string pathstr) { - m_nodes = cast(immutable)splitPath(pathstr); + static if (__VERSION__ < 2066) m_nodes = splitPath(pathstr).idup; + else m_nodes = splitPath(pathstr); m_absolute = (pathstr.startsWith("/") || m_nodes.length > 0 && (m_nodes[0].toString().countUntil(':')>0 || m_nodes[0] == "\\")); m_endsWithSlash = pathstr.endsWith("/"); } @@ -149,7 +150,7 @@ /// Computes the relative path from `parentPath` to this path. Path relativeTo(const Path parentPath) const { - assert(this.absolute && parentPath.absolute); + assert(this.absolute && parentPath.absolute, "Determining relative path between non-absolute paths."); version(Windows){ // a path such as ..\C:\windows is not valid, so force the path to stay absolute in this case if( this.absolute && !this.empty && @@ -163,7 +164,9 @@ while( parentPath.length > nup && !startsWith(parentPath[0 .. parentPath.length-nup]) ){ nup++; } + assert(m_nodes.length >= parentPath.length - nup); Path ret = Path(null, false); + assert(m_nodes.length >= parentPath.length - nup); ret.m_endsWithSlash = true; foreach( i; 0 .. nup ) ret ~= ".."; ret ~= Path(m_nodes[parentPath.length-nup .. 
$], false); @@ -172,7 +175,7 @@ } /// The last entry of the path - @property ref immutable(PathEntry) head() const { enforce(m_nodes.length > 0); return m_nodes[$-1]; } + @property ref immutable(PathEntry) head() const { enforce(m_nodes.length > 0, "Getting head of empty path."); return m_nodes[$-1]; } /// The parent path @property Path parentPath() const { return this[0 .. length-1]; } @@ -274,14 +277,14 @@ } this(string str) - { + pure { assert(str.countUntil('/') < 0 && (str.countUntil('\\') < 0 || str.length == 1)); m_name = str; } - string toString() const { return m_name; } + string toString() const pure { return m_name; } - Path opBinary(string OP)(PathEntry rhs) const if( OP == "~" ) { return Path(cast(immutable)[this, rhs], false); } + Path opBinary(string OP)(PathEntry rhs) const if( OP == "~" ) { return Path([this, rhs], false); } bool opEquals(ref const PathEntry rhs) const { return m_name == rhs.m_name; } bool opEquals(PathEntry rhs) const { return m_name == rhs.m_name; } @@ -307,7 +310,7 @@ /// Splits up a path string into its elements/folders PathEntry[] splitPath(string path) -{ +pure { if( path.startsWith("/") || path.startsWith("\\") ) path = path[1 .. $]; if( path.empty ) return null; if( path.endsWith("/") || path.endsWith("\\") ) path = path[0 .. 
$-1]; @@ -457,3 +460,18 @@ } } } + +unittest { + assert(Path("/foo/bar/baz").relativeTo(Path("/foo")).toString == "bar/baz"); + assert(Path("/foo/bar/baz/").relativeTo(Path("/foo")).toString == "bar/baz/"); + assert(Path("/foo/bar").relativeTo(Path("/foo")).toString == "bar"); + assert(Path("/foo/bar/").relativeTo(Path("/foo")).toString == "bar/"); + assert(Path("/foo").relativeTo(Path("/foo/bar")).toString() == ".."); + assert(Path("/foo/").relativeTo(Path("/foo/bar")).toString() == "../"); + assert(Path("/foo/baz").relativeTo(Path("/foo/bar/baz")).toString() == "../../baz"); + assert(Path("/foo/baz/").relativeTo(Path("/foo/bar/baz")).toString() == "../../baz/"); + assert(Path("/foo/").relativeTo(Path("/foo/bar/baz")).toString() == "../../"); + assert(Path("/foo/").relativeTo(Path("/foo/bar/baz/mumpitz")).toString() == "../../../"); + assert(Path("/foo").relativeTo(Path("/foo")).toString() == ""); + assert(Path("/foo/").relativeTo(Path("/foo")).toString() == ""); +} diff --git a/source/dub/internal/vibecompat/inet/url.d b/source/dub/internal/vibecompat/inet/url.d index d9f021f..9115c9c 100644 --- a/source/dub/internal/vibecompat/inet/url.d +++ b/source/dub/internal/vibecompat/inet/url.d @@ -9,7 +9,7 @@ public import dub.internal.vibecompat.inet.path; -version (Have_vibe_d) public import vibe.inet.url; +version (Have_vibe_d_core) public import vibe.inet.url; else: import std.algorithm; @@ -107,7 +107,7 @@ } } - this.localURI = str; + this.localURI = (str == "") ? 
"/" : str; } /// ditto static URL parse(string url_string) @@ -277,4 +277,10 @@ assert(url.path.toString() == "/sub2/index.html", url.path.toString()); assert(url.queryString == "query", url.queryString); assert(url.anchor == "anchor", url.anchor); + + url = URL("http://localhost")~Path("packages"); + assert(url.toString() == "http://localhost/packages", url.toString()); + + url = URL("http://localhost/")~Path("packages"); + assert(url.toString() == "http://localhost/packages", url.toString()); } diff --git a/source/dub/package_.d b/source/dub/package_.d index 0f3d895..c499c6e 100644 --- a/source/dub/package_.d +++ b/source/dub/package_.d @@ -1,9 +1,9 @@ /** - Stuff with dependencies. + Contains high-level functionality for working with packages. - Copyright: © 2012-2013 Matthias Dondorff + Copyright: © 2012-2013 Matthias Dondorff, © 2012-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. - Authors: Matthias Dondorff + Authors: Matthias Dondorff, Sönke Ludwig, Martin Nowak, Nick Sabalausky */ module dub.package_; @@ -11,6 +11,7 @@ import dub.compilers.compiler; import dub.dependency; +import dub.description; import dub.recipe.json; import dub.recipe.sdl; @@ -27,114 +28,87 @@ import std.file; import std.range; import std.string; +import std.typecons : Nullable; +/// Lists the supported package recipe formats. +enum PackageFormat { + json, /// JSON based, using the ".json" file extension + sdl /// SDLang based, using the ".sdl" file extension +} -enum PackageFormat { json, sdl } -struct FilenameAndFormat -{ +struct FilenameAndFormat { string filename; PackageFormat format; } -struct PathAndFormat -{ - Path path; - PackageFormat format; - @property bool empty() { return path.empty; } - string toString() { return path.toString(); } -} -// Supported package descriptions in decreasing order of preference. +/// Supported package descriptions in decreasing order of preference. 
static immutable FilenameAndFormat[] packageInfoFiles = [ {"dub.json", PackageFormat.json}, - /*{"dub.sdl",PackageFormat.sdl},*/ + {"dub.sdl", PackageFormat.sdl}, {"package.json", PackageFormat.json} ]; +/// Returns a list of all recognized package recipe file names in descending order of precedence. @property string[] packageInfoFilenames() { return packageInfoFiles.map!(f => cast(string)f.filename).array; } +/// Returns the default package recile file name. @property string defaultPackageFilename() { return packageInfoFiles[0].filename; } -/** - Represents a package, including its sub packages - - Documentation of the dub.json can be found at - http://registry.vibed.org/package-format +/** Represents a package, including its sub packages. */ class Package { private { Path m_path; - PathAndFormat m_infoFile; + Path m_infoFile; PackageRecipe m_info; + PackageRecipe m_rawRecipe; Package m_parentPackage; } - static PathAndFormat findPackageFile(Path path) + /** Constructs a `Package` using an in-memory package recipe. + + Params: + json_recipe = The package recipe in JSON format + recipe = The package recipe in generic format + root = The directory in which the package resides (if any). + parent = Reference to the parent package, if the new package is a + sub package. + version_override = Optional version to associate to the package + instead of the one declared in the package recipe, or the one + determined by invoking the VCS (GIT currently). 
+ */ + this(Json json_recipe, Path root = Path(), Package parent = null, string version_override = "") { - foreach(file; packageInfoFiles) { - auto filename = path ~ file.filename; - if(existsFile(filename)) return PathAndFormat(filename, file.format); - } - return PathAndFormat(Path()); - } + import dub.recipe.json; - this(Path root, PathAndFormat infoFile = PathAndFormat(), Package parent = null, string versionOverride = "") - { - RawPackage raw_package; - m_infoFile = infoFile; - - try { - if(m_infoFile.empty) { - m_infoFile = findPackageFile(root); - if(m_infoFile.empty) throw new Exception("no package file was found, expected one of the following: "~to!string(packageInfoFiles)); - } - raw_package = rawPackageFromFile(m_infoFile); - } catch (Exception ex) throw ex;//throw new Exception(format("Failed to load package %s: %s", m_infoFile.toNativeString(), ex.msg)); - - enforce(raw_package !is null, format("Missing package description for package at %s", root.toNativeString())); - this(raw_package, root, parent, versionOverride); - } - - this(Json package_info, Path root = Path(), Package parent = null, string versionOverride = "") - { - this(new JsonPackage(package_info), root, parent, versionOverride); - } - - this(RawPackage raw_package, Path root = Path(), Package parent = null, string versionOverride = "") - { PackageRecipe recipe; + parseJson(recipe, json_recipe, parent ? parent.name : null); + this(recipe, root, parent, version_override); + } + /// ditto + this(PackageRecipe recipe, Path root = Path(), Package parent = null, string version_override = "") + { + // save the original recipe + m_rawRecipe = recipe.clone; - // parse the Package description - if(raw_package !is null) - { - scope(failure) logError("Failed to parse package description for %s %s in %s.", - raw_package.package_name, versionOverride.length ? versionOverride : raw_package.version_, - root.length ? root.toNativeString() : "remote location"); - raw_package.parseInto(recipe, parent ? 
parent.name : null); + if (!version_override.empty) + recipe.version_ = version_override; - if (!versionOverride.empty) - recipe.version_ = versionOverride; + // try to run git to determine the version of the package if no explicit version was given + if (recipe.version_.length == 0 && !parent) { + try recipe.version_ = determineVersionFromSCM(root); + catch (Exception e) logDebug("Failed to determine version by SCM: %s", e.msg); - // try to run git to determine the version of the package if no explicit version was given - if (recipe.version_.length == 0 && !parent) { - try recipe.version_ = determineVersionFromSCM(root); - catch (Exception e) logDebug("Failed to determine version by SCM: %s", e.msg); - - if (recipe.version_.length == 0) { - logDiagnostic("Note: Failed to determine version of package %s at %s. Assuming ~master.", recipe.name, this.path.toNativeString()); - // TODO: Assume unknown version here? - // recipe.version_ = Version.UNKNOWN.toString(); - recipe.version_ = Version.MASTER.toString(); - } else logDiagnostic("Determined package version using GIT: %s %s", recipe.name, recipe.version_); - } + if (recipe.version_.length == 0) { + logDiagnostic("Note: Failed to determine version of package %s at %s. Assuming ~master.", recipe.name, this.path.toNativeString()); + // TODO: Assume unknown version here? + // recipe.version_ = Version.unknown.toString(); + recipe.version_ = Version.masterBranch.toString(); + } else logDiagnostic("Determined package version using GIT: %s %s", recipe.name, recipe.version_); } - this(recipe, root, parent); - } - - this(PackageRecipe recipe, Path root = Path(), Package parent = null) - { m_parentPackage = parent; m_path = root; m_path.endsWithSlash = true; @@ -146,65 +120,181 @@ simpleLint(); } + /** Searches the given directory for package recipe files. + + Params: + directory = The directory to search + + Returns: + Returns the full path to the package file, if any was found. + Otherwise returns an empty path. 
+ */ + static Path findPackageFile(Path directory) + { + foreach (file; packageInfoFiles) { + auto filename = directory ~ file.filename; + if (existsFile(filename)) return filename; + } + return Path.init; + } + + /** Constructs a `Package` using a package that is physically present on the local file system. + + Params: + root = The directory in which the package resides. + recipe_file = Optional path to the package recipe file. If left + empty, the `root` directory will be searched for a recipe file. + parent = Reference to the parent package, if the new package is a + sub package. + version_override = Optional version to associate to the package + instead of the one declared in the package recipe, or the one + determined by invoking the VCS (GIT currently). + */ + static Package load(Path root, Path recipe_file = Path.init, Package parent = null, string version_override = "") + { + import dub.recipe.io; + + if (recipe_file.empty) recipe_file = findPackageFile(root); + + enforce(!recipe_file.empty, + "No package file found in %s, expected one of %s" + .format(root.toNativeString(), + packageInfoFiles.map!(f => cast(string)f.filename).join("/"))); + + auto recipe = readPackageRecipe(recipe_file, parent ? parent.name : null); + + auto ret = new Package(recipe, root, parent, version_override); + ret.m_infoFile = recipe_file; + return ret; + } + + /** Returns the qualified name of the package. + + The qualified name includes any possible parent package if this package + is a sub package. + */ @property string name() const { if (m_parentPackage) return m_parentPackage.name ~ ":" ~ m_info.name; else return m_info.name; } - @property string vers() const { return m_parentPackage ? 
m_parentPackage.vers : m_info.version_; } - @property Version ver() const { return Version(this.vers); } - @property void ver(Version ver) { assert(m_parentPackage is null); m_info.version_ = ver.toString(); } - @property ref inout(PackageRecipe) info() inout { return m_info; } + + /** Returns the directory in which the package resides. + + Note that this can be empty for packages that are not stored in the + local file system. + */ @property Path path() const { return m_path; } - @property Path packageInfoFilename() const { return m_infoFile.path; } - @property const(Dependency[string]) dependencies() const { return m_info.dependencies; } + + + /** Accesses the version associated with this package. + + Note that this is a shortcut to `this.recipe.version_`. + */ + @property Version version_() const { return m_parentPackage ? m_parentPackage.version_ : Version(m_info.version_); } + /// ditto + @property void version_(Version value) { assert(m_parentPackage is null); m_info.version_ = value.toString(); } + + /** Accesses the recipe contents of this package. + + The recipe contains any default values and configurations added by DUB. + To access the raw user recipe, use the `rawRecipe` property. + + See_Also: `rawRecipe` + */ + @property ref inout(PackageRecipe) recipe() inout { return m_info; } + + /** Accesses the original package recipe. + + The returned recipe matches exactly the contents of the original package + recipe. For the effective package recipe, augmented with DUB generated + default settings and configurations, use the `recipe` property. + + See_Also: `recipe` + */ + @property ref const(PackageRecipe) rawRecipe() const { return m_rawRecipe; } + + /** Returns the path to the package recipe file. + + Note that this can be empty for packages that are not stored in the + local file system. + */ + @property Path recipePath() const { return m_infoFile; } + + + /** Returns the base package of this package. 
+ + The base package is the root of the sub package hierarchy (i.e. the + topmost parent). This will be `null` for packages that are not sub + packages. + */ @property inout(Package) basePackage() inout { return m_parentPackage ? m_parentPackage.basePackage : this; } + + /** Returns the parent of this package. + + The parent package is the package that contains a sub package. This will + be `null` for packages that are not sub packages. + */ @property inout(Package) parentPackage() inout { return m_parentPackage; } + + /** Returns the list of all sub packages. + + Note that this is a shortcut for `this.recipe.subPackages`. + */ @property inout(SubPackage)[] subPackages() inout { return m_info.subPackages; } + /** Returns the list of all build configuration names. + + Configuration contents can be accessed using `this.recipe.configurations`. + */ @property string[] configurations() const { auto ret = appender!(string[])(); - foreach( ref config; m_info.configurations ) + foreach (ref config; m_info.configurations) ret.put(config.name); return ret.data; } - const(Dependency[string]) getDependencies(string config) - const { - Dependency[string] ret; - foreach (k, v; m_info.buildSettings.dependencies) - ret[k] = v; - foreach (ref conf; m_info.configurations) - if (conf.name == config) { - foreach (k, v; conf.buildSettings.dependencies) - ret[k] = v; - break; - } - return ret; - } + /** Writes the current recipe contents to a recipe file. - /** Overwrites the packge description file using the default filename with the current information. + The parameter-less overload writes to `this.path`, which must not be + empty. The default recipe file name will be used in this case. 
*/ void storeInfo() { - enforce(!ver.isUnknown, "Trying to store a package with an 'unknown' version, this is not supported."); - auto filename = m_path ~ defaultPackageFilename; - auto dstFile = openFile(filename.toNativeString(), FileMode.CreateTrunc); + storeInfo(m_path); + m_infoFile = m_path ~ defaultPackageFilename; + } + /// ditto + void storeInfo(Path path) + const { + enforce(!version_.isUnknown, "Trying to store a package with an 'unknown' version, this is not supported."); + auto filename = path ~ defaultPackageFilename; + auto dstFile = openFile(filename.toNativeString(), FileMode.createTrunc); scope(exit) dstFile.close(); dstFile.writePrettyJsonString(m_info.toJson()); - m_infoFile = PathAndFormat(filename); } - /*inout(Package) getSubPackage(string name, bool silent_fail = false) - inout { - foreach (p; m_info.subPackages) - if (p.package_ !is null && p.package_.name == this.name ~ ":" ~ name) - return p.package_; - enforce(silent_fail, format("Unknown sub package: %s:%s", this.name, name)); - return null; - }*/ + /** Returns the package recipe of a non-path-based sub package. + For sub packages that are declared within the package recipe of the + parent package, this function will return the corresponding recipe. Sub + packages declared using a path must be loaded manually (or using the + `PackageManager`). + */ + Nullable!PackageRecipe getInternalSubPackage(string name) + { + foreach (ref p; m_info.subPackages) + if (p.path.empty && p.recipe.name == name) + return Nullable!PackageRecipe(p.recipe); + return Nullable!PackageRecipe(); + } + + /** Searches for use of compiler-specific flags that have generic + alternatives. + + This will output a warning message for each such flag to the console. + */ void warnOnSpecialCompilerFlags() { // warn about use of special flags @@ -213,6 +303,16 @@ config.buildSettings.warnOnSpecialCompilerFlags(m_info.name, config.name); } + /** Retrieves a build settings template. 
+ + If no `config` is given, this returns the build settings declared at the + root level of the package recipe. Otherwise returns the settings + declared within the given configuration (excluding those at the root + level). + + Note that this is a shortcut to accessing `this.recipe.buildSettings` or + `this.recipe.configurations[].buildSettings`. + */ const(BuildSettingsTemplate) getBuildSettings(string config = null) const { if (config.length) { @@ -225,7 +325,13 @@ } } - /// Returns all BuildSettings for the given platform and config. + /** Returns all BuildSettings for the given platform and configuration. + + This will gather the effective build settings declared in tha package + recipe for when building on a particular platform and configuration. + Root build settings and configuration specific settings will be + merged. + */ BuildSettings getBuildSettings(in BuildPlatform platform, string config) const { BuildSettings ret; @@ -248,7 +354,12 @@ return ret; } - /// Returns the combination of all build settings for all configurations and platforms + /** Returns the combination of all build settings for all configurations + and platforms. + + This can be useful for IDEs to gather a list of all potentially used + files or settings. + */ BuildSettings getCombinedBuildSettings() const { BuildSettings ret; @@ -265,6 +376,13 @@ return ret; } + /** Adds build type specific settings to an existing set of build settings. + + This function searches the package recipe for overridden build types. If + none is found, the default build settings will be applied, if + `build_type` matches a default build type name. An exception is thrown + otherwise. 
+ */ void addBuildTypeSettings(ref BuildSettings settings, in BuildPlatform platform, string build_type) const { if (build_type == "$DFLAGS") { @@ -278,22 +396,32 @@ logDiagnostic("Using custom build type '%s'.", build_type); pbt.getPlatformSettings(settings, platform, this.path); } else { - with(BuildOptions) switch (build_type) { + with(BuildOption) switch (build_type) { default: throw new Exception(format("Unknown build type for %s: '%s'", this.name, build_type)); case "plain": break; case "debug": settings.addOptions(debugMode, debugInfo); break; case "release": settings.addOptions(releaseMode, optimize, inline); break; + case "release-debug": settings.addOptions(releaseMode, optimize, inline, debugInfo); break; case "release-nobounds": settings.addOptions(releaseMode, optimize, inline, noBoundsCheck); break; case "unittest": settings.addOptions(unittests, debugMode, debugInfo); break; - case "docs": settings.addOptions(syntaxOnly); settings.addDFlags("-c", "-Dddocs"); break; - case "ddox": settings.addOptions(syntaxOnly); settings.addDFlags("-c", "-Df__dummy.html", "-Xfdocs.json"); break; + case "docs": settings.addOptions(syntaxOnly, _docs); break; + case "ddox": settings.addOptions(syntaxOnly, _ddox); break; case "profile": settings.addOptions(profile, optimize, inline, debugInfo); break; + case "profile-gc": settings.addOptions(profileGC, debugInfo); break; case "cov": settings.addOptions(coverage, debugInfo); break; case "unittest-cov": settings.addOptions(unittests, coverage, debugMode, debugInfo); break; } } } + /** Returns the selected configuration for a certain dependency. + + If no configuration is specified in the package recipe, null will be + returned instead. + + FIXME: The `platform` parameter is currently ignored, as the + `"subConfigurations"` field doesn't support platform suffixes. 
+ */ string getSubConfiguration(string config, in Package dependency, in BuildPlatform platform) const { bool found = false; @@ -309,7 +437,15 @@ return null; } - /// Returns the default configuration to build for the given platform + /** Returns the default configuration to build for the given platform. + + This will return the first configuration that is applicable to the given + platform, or `null` if none is applicable. By default, only library + configurations will be returned. Setting `allow_non_library` to `true` + will also return executable configurations. + + See_Also: `getPlatformConfigurations` + */ string getDefaultConfiguration(in BuildPlatform platform, bool allow_non_library = false) const { foreach (ref conf; m_info.configurations) { @@ -320,93 +456,157 @@ return null; } - /// Returns a list of configurations suitable for the given platform - string[] getPlatformConfigurations(in BuildPlatform platform, bool is_main_package = false) + /** Returns a list of configurations suitable for the given platform. + + Params: + platform = The platform against which to match configurations + allow_non_library = If set to true, executable configurations will + also be included. + + See_Also: `getDefaultConfiguration` + */ + string[] getPlatformConfigurations(in BuildPlatform platform, bool allow_non_library = false) const { auto ret = appender!(string[]); foreach(ref conf; m_info.configurations){ if (!conf.matchesPlatform(platform)) continue; - if (!is_main_package && conf.buildSettings.targetType == TargetType.executable) continue; + if (!allow_non_library && conf.buildSettings.targetType == TargetType.executable) continue; ret ~= conf.name; } if (ret.data.length == 0) ret.put(null); return ret.data; } - /// Human readable information of this package and its dependencies. 
- string generateInfoString() const { - string s; - s ~= m_info.name ~ ", version '" ~ m_info.version_ ~ "'"; - s ~= "\n Dependencies:"; - foreach(string p, ref const Dependency v; m_info.dependencies) - s ~= "\n " ~ p ~ ", version '" ~ v.toString() ~ "'"; - return s; - } + /** Determines if the package has a dependency to a certain package. - bool hasDependency(string depname, string config) + Params: + dependency_name = The name of the package to search for + config = Name of the configuration to use when searching + for dependencies + + See_Also: `getDependencies` + */ + bool hasDependency(string dependency_name, string config) const { - if (depname in m_info.buildSettings.dependencies) return true; + if (dependency_name in m_info.buildSettings.dependencies) return true; foreach (ref c; m_info.configurations) - if ((config.empty || c.name == config) && depname in c.buildSettings.dependencies) + if ((config.empty || c.name == config) && dependency_name in c.buildSettings.dependencies) return true; return false; } - void describe(ref Json dst, BuildPlatform platform, string config) - { - dst.path = m_path.toNativeString(); - dst.name = this.name; - dst["version"] = this.vers; - dst.description = m_info.description; - dst.homepage = m_info.homepage; - dst.authors = m_info.authors.serializeToJson(); - dst.copyright = m_info.copyright; - dst.license = m_info.license; - dst.dependencies = m_info.dependencies.keys.serializeToJson(); + /** Retrieves all dependencies for a particular configuration. + + This includes dependencies that are declared at the root level of the + package recipe, as well as those declared within the specified + configuration. If no configuration with the given name exists, only + dependencies declared at the root level will be retunred. 
+ + See_Also: `hasDependency` + */ + const(Dependency[string]) getDependencies(string config) + const { + Dependency[string] ret; + foreach (k, v; m_info.buildSettings.dependencies) + ret[k] = v; + foreach (ref conf; m_info.configurations) + if (conf.name == config) { + foreach (k, v; conf.buildSettings.dependencies) + ret[k] = v; + break; + } + return ret; + } + + /** Returns a list of all possible dependencies of the package. + + This list includes all dependencies of all configurations. The same + package may occur multiple times with possibly different `Dependency` + values. + */ + PackageDependency[] getAllDependencies() + const { + auto ret = appender!(PackageDependency[]); + foreach (n, d; this.recipe.buildSettings.dependencies) + ret ~= PackageDependency(n, d); + foreach (ref c; this.recipe.configurations) + foreach (n, d; c.buildSettings.dependencies) + ret ~= PackageDependency(n, d); + return ret.data; + } + + + /** Returns a description of the package for use in IDEs or build tools. 
+ */ + PackageDescription describe(BuildPlatform platform, string config) + const { + return describe(platform, getCompiler(platform.compilerBinary), config); + } + /// ditto + PackageDescription describe(BuildPlatform platform, Compiler compiler, string config) + const { + PackageDescription ret; + ret.configuration = config; + ret.path = m_path.toNativeString(); + ret.name = this.name; + ret.version_ = this.version_; + ret.description = m_info.description; + ret.homepage = m_info.homepage; + ret.authors = m_info.authors.dup; + ret.copyright = m_info.copyright; + ret.license = m_info.license; + ret.dependencies = getDependencies(config).keys; // save build settings BuildSettings bs = getBuildSettings(platform, config); BuildSettings allbs = getCombinedBuildSettings(); - foreach (string k, v; bs.serializeToJson()) dst[k] = v; - dst.remove("requirements"); - dst.remove("sourceFiles"); - dst.remove("importFiles"); - dst.remove("stringImportFiles"); - dst.targetType = bs.targetType.to!string(); - if (dst.targetType != TargetType.none) - dst.targetFileName = getTargetFileName(bs, platform); + ret.targetType = bs.targetType; + ret.targetPath = bs.targetPath; + ret.targetName = bs.targetName; + if (ret.targetType != TargetType.none && compiler) + ret.targetFileName = compiler.getTargetFileName(bs, platform); + ret.workingDirectory = bs.workingDirectory; + ret.mainSourceFile = bs.mainSourceFile; + ret.dflags = bs.dflags; + ret.lflags = bs.lflags; + ret.libs = bs.libs; + ret.copyFiles = bs.copyFiles; + ret.versions = bs.versions; + ret.debugVersions = bs.debugVersions; + ret.importPaths = bs.importPaths; + ret.stringImportPaths = bs.stringImportPaths; + ret.preGenerateCommands = bs.preGenerateCommands; + ret.postGenerateCommands = bs.postGenerateCommands; + ret.preBuildCommands = bs.preBuildCommands; + ret.postBuildCommands = bs.postBuildCommands; // prettify build requirements output - Json[] breqs; - for (int i = 1; i <= BuildRequirements.max; i <<= 1) - if 
(bs.requirements & i) - breqs ~= Json(to!string(cast(BuildRequirements)i)); - dst.buildRequirements = breqs; + for (int i = 1; i <= BuildRequirement.max; i <<= 1) + if (bs.requirements & cast(BuildRequirement)i) + ret.buildRequirements ~= cast(BuildRequirement)i; // prettify options output - Json[] bopts; - for (int i = 1; i <= BuildOptions.max; i <<= 1) - if (bs.options & i) - bopts ~= Json(to!string(cast(BuildOptions)i)); - dst.options = bopts; + for (int i = 1; i <= BuildOption.max; i <<= 1) + if (bs.options & cast(BuildOption)i) + ret.options ~= cast(BuildOption)i; // collect all possible source files and determine their types - string[string] sourceFileTypes; - foreach (f; allbs.stringImportFiles) sourceFileTypes[f] = "unusedStringImport"; - foreach (f; allbs.importFiles) sourceFileTypes[f] = "unusedImport"; - foreach (f; allbs.sourceFiles) sourceFileTypes[f] = "unusedSource"; - foreach (f; bs.stringImportFiles) sourceFileTypes[f] = "stringImport"; - foreach (f; bs.importFiles) sourceFileTypes[f] = "import"; - foreach (f; bs.sourceFiles) sourceFileTypes[f] = "source"; - Json[] files; + SourceFileRole[string] sourceFileTypes; + foreach (f; allbs.stringImportFiles) sourceFileTypes[f] = SourceFileRole.unusedStringImport; + foreach (f; allbs.importFiles) sourceFileTypes[f] = SourceFileRole.unusedImport; + foreach (f; allbs.sourceFiles) sourceFileTypes[f] = SourceFileRole.unusedSource; + foreach (f; bs.stringImportFiles) sourceFileTypes[f] = SourceFileRole.stringImport; + foreach (f; bs.importFiles) sourceFileTypes[f] = SourceFileRole.import_; + foreach (f; bs.sourceFiles) sourceFileTypes[f] = SourceFileRole.source; foreach (f; sourceFileTypes.byKey.array.sort()) { - auto jf = Json.emptyObject; - jf["path"] = f; - jf["type"] = sourceFileTypes[f]; - files ~= jf; + SourceFileDescription sf; + sf.path = f; + sf.role = sourceFileTypes[f]; + ret.files ~= sf; } - dst.files = Json(files); + + return ret; } private void fillWithDefaults() @@ -474,79 +674,75 @@ } } - 
private void simpleLint() const { + private void simpleLint() + const { if (m_parentPackage) { if (m_parentPackage.path != path) { - if (info.license.length && info.license != m_parentPackage.info.license) - logWarn("License in subpackage %s is different than it's parent package, this is discouraged.", name); + if (this.recipe.license.length && this.recipe.license != m_parentPackage.recipe.license) + logWarn("Warning: License in subpackage %s is different than it's parent package, this is discouraged.", name); } } - if (name.empty) logWarn("The package in %s has no name.", path); - } - - private static RawPackage rawPackageFromFile(PathAndFormat file, bool silent_fail = false) { - if( silent_fail && !existsFile(file.path) ) return null; - - string text; - - { - auto f = openFile(file.path.toNativeString(), FileMode.Read); - scope(exit) f.close(); - text = stripUTF8Bom(cast(string)f.readAll()); - } - - final switch(file.format) { - case PackageFormat.json: - return new JsonPackage(parseJsonString(text, file.path.toNativeString())); - case PackageFormat.sdl: - if(silent_fail) return null; throw new Exception("SDL not implemented"); - } - } - - static abstract class RawPackage - { - string package_name; // Should already be lower case - string version_; - abstract void parseInto(ref PackageRecipe package_, string parent_name); - } - private static class JsonPackage : RawPackage - { - Json json; - this(Json json) { - this.json = json; - - string nameLower; - if(json.type == Json.Type.string) { - nameLower = json.get!string.toLower(); - this.json = nameLower; - } else { - nameLower = json.name.get!string.toLower(); - this.json.name = nameLower; - this.package_name = nameLower; - - Json versionJson = json["version"]; - this.version_ = (versionJson.type == Json.Type.undefined) ? 
null : versionJson.get!string; - } - - this.package_name = nameLower; - } - override void parseInto(ref PackageRecipe recipe, string parent_name) - { - recipe.parseJson(json, parent_name); - } - } - private static class SdlPackage : RawPackage - { - override void parseInto(ref PackageRecipe package_, string parent_name) - { - throw new Exception("SDL packages not implemented yet"); + if (name.empty) logWarn("Warning: The package in %s has no name.", path); + bool[string] cnames; + foreach (ref c; this.recipe.configurations) { + if (c.name in cnames) + logWarn("Warning: Multiple configurations with the name \"%s\" are defined in package \"%s\". This will most likely cause configuration resolution issues.", + c.name, this.name); + cnames[c.name] = true; } } } - private string determineVersionFromSCM(Path path) { + // On Windows, which is slow at running external processes, + // cache the version numbers that are determined using + // GIT to speed up the initialization phase. + version (Windows) { + import std.file : exists, readText; + + // quickly determine head commit without invoking GIT + string head_commit; + auto hpath = (path ~ ".git/HEAD").toNativeString(); + if (exists(hpath)) { + auto head_ref = readText(hpath).strip(); + if (head_ref.startsWith("ref: ")) { + auto rpath = (path ~ (".git/"~head_ref[5 .. 
$])).toNativeString(); + if (exists(rpath)) + head_commit = readText(rpath).strip(); + } + } + + // return the last determined version for that commit + // not that this is not always correct, most notably when + // a tag gets added/removed/changed and changes the outcome + // of the full version detection computation + auto vcachepath = path ~ ".dub/version.json"; + if (existsFile(vcachepath)) { + auto ver = jsonFromFile(vcachepath); + if (head_commit == ver["commit"].opt!string) + return ver["version"].get!string; + } + } + + // if no cache file or the HEAD commit changed, perform full detection + auto ret = determineVersionWithGIT(path); + + version (Windows) { + // update version cache file + if (head_commit.length) { + if (!existsFile(path ~".dub")) createDirectory(path ~ ".dub"); + atomicWriteJsonFile(vcachepath, Json(["commit": Json(head_commit), "version": Json(ret)])); + } + } + + return ret; +} + +// determines the version of a package that is stored in a GIT working copy +// by invoking the "git" executable +private string determineVersionWithGIT(Path path) +{ import std.process; import dub.semver; @@ -561,7 +757,8 @@ return null; } - if (auto tag = exec("git", git_dir_param, "describe", "--long", "--tags")) { + auto tag = exec("git", git_dir_param, "describe", "--long", "--tags"); + if (tag !is null) { auto parts = tag.split("-"); auto commit = parts[$-1]; auto num = parts[$-2].to!int; @@ -573,7 +770,8 @@ } } - if (auto branch = exec("git", git_dir_param, "rev-parse", "--abbrev-ref", "HEAD")) { + auto branch = exec("git", git_dir_param, "rev-parse", "--abbrev-ref", "HEAD"); + if (branch !is null) { if (branch != "HEAD") return "~" ~ branch; } diff --git a/source/dub/packagemanager.d b/source/dub/packagemanager.d index ff03744..4ff4e6f 100644 --- a/source/dub/packagemanager.d +++ b/source/dub/packagemanager.d @@ -1,7 +1,7 @@ /** Management of packages on the local computer. - Copyright: © 2012-2013 rejectedsoftware e.K. 
+ Copyright: © 2012-2016 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig, Matthias Dondorff */ @@ -44,14 +44,19 @@ if (refresh_packages) refresh(true); } + /** Gets/sets the list of paths to search for local packages. + */ @property void searchPath(Path[] paths) { if (paths == m_searchPath) return; m_searchPath = paths.dup; refresh(false); } + /// ditto @property const(Path)[] searchPath() const { return m_searchPath; } + /** Disables searching DUB's predefined search paths. + */ @property void disableDefaultSearchPaths(bool val) { if (val == m_disableDefaultSearchPaths) return; @@ -59,15 +64,17 @@ refresh(true); } + /** Returns the effective list of search paths, including default ones. + */ @property const(Path)[] completeSearchPath() const { auto ret = appender!(Path[])(); - ret.put(m_searchPath); + ret.put(cast(Path[])m_searchPath); // work around Phobos 17251 if (!m_disableDefaultSearchPaths) { - ret.put(m_repositories[LocalPackageType.user].searchPath); - ret.put(m_repositories[LocalPackageType.user].packagePath); - ret.put(m_repositories[LocalPackageType.system].searchPath); - ret.put(m_repositories[LocalPackageType.system].packagePath); + ret.put(cast(Path[])m_repositories[LocalPackageType.user].searchPath); + ret.put(cast(Path)m_repositories[LocalPackageType.user].packagePath); + ret.put(cast(Path[])m_repositories[LocalPackageType.system].searchPath); + ret.put(cast(Path)m_repositories[LocalPackageType.system].packagePath); } return ret.data; } @@ -97,7 +104,7 @@ foreach (ovr; m_repositories[tp].overrides) if (ovr.package_ == name && ovr.version_.matches(ver)) { Package pack; - if (!ovr.targetPath.empty) pack = getPackage(name, ovr.targetPath); + if (!ovr.targetPath.empty) pack = getOrLoadPackage(ovr.targetPath); else pack = getPackage(name, ovr.targetVersion, false); if (pack) return pack; @@ -107,7 +114,7 @@ } foreach (p; getPackageIterator(name)) - if (p.ver == 
ver) + if (p.version_ == ver) return p; return null; @@ -123,7 +130,7 @@ Package getPackage(string name, Version ver, Path path) { auto ret = getPackage(name, path); - if (!ret || ret.ver != ver) return null; + if (!ret || ret.version_ != ver) return null; return ret; } @@ -152,13 +159,26 @@ return null; } - Package getOrLoadPackage(Path path, PathAndFormat infoFile = PathAndFormat()) + /** For a given package path, returns the corresponding package. + + If the package is already loaded, a reference is returned. Otherwise + the package gets loaded and cached for the next call to this function. + + Params: + path = Path to the root directory of the package + recipe_path = Optional path to the recipe file of the package + allow_sub_packages = Also return a sub package if it resides in the given folder + + Returns: The packages loaded from the given path + Throws: Throws an exception if no package can be loaded + */ + Package getOrLoadPackage(Path path, Path recipe_path = Path.init, bool allow_sub_packages = false) { path.endsWithSlash = true; foreach (p; getPackageIterator()) - if (!p.parentPackage && p.path == path) + if (p.path == path && (!p.parentPackage || (allow_sub_packages && p.parentPackage.path != p.path))) return p; - auto pack = new Package(path, infoFile); + auto pack = Package.load(path, recipe_path); addPackages(m_temporaryPackages, pack); return pack; } @@ -170,11 +190,11 @@ { Package ret; foreach (p; getPackageIterator(name)) - if (version_spec.matches(p.ver) && (!ret || p.ver > ret.ver)) + if (version_spec.matches(p.version_) && (!ret || p.version_ > ret.version_)) ret = p; if (enable_overrides && ret) { - if (auto ovr = getPackage(name, ret.ver)) + if (auto ovr = getPackage(name, ret.version_)) return ovr; } return ret; @@ -186,12 +206,25 @@ return getBestPackage(name, Dependency(version_spec)); } + /** Gets the a specific sub package. + + In contrast to `Package.getSubPackage`, this function supports path + based sub packages. 
+ + Params: + base_package = The package from which to get a sub package + sub_name = Name of the sub package (not prefixed with the base + package name) + silent_fail = If set to true, the function will return `null` if no + package is found. Otherwise will throw an exception. + + */ Package getSubPackage(Package base_package, string sub_name, bool silent_fail) { foreach (p; getPackageIterator(base_package.name~":"~sub_name)) if (p.parentPackage is base_package) return p; - enforce(silent_fail, "Sub package "~base_package.name~":"~sub_name~" doesn't exist."); + enforce(silent_fail, "Sub package \""~base_package.name~":"~sub_name~"\" doesn't exist."); return null; } @@ -203,14 +236,28 @@ bool isManagedPackage(Package pack) const { auto ppath = pack.basePackage.path; + return isManagedPath(ppath); + } + + /** Determines if a specific path is within a DUB managed package folder. + + By default, managed folders are "~/.dub/packages" and + "/var/lib/dub/packages". + */ + bool isManagedPath(Path path) + const { foreach (rep; m_repositories) { - auto rpath = rep.packagePath; - if (ppath.startsWith(rpath)) + Path rpath = rep.packagePath; + if (path.startsWith(rpath)) return true; } return false; } + /** Enables iteration over all known local packages. + + Returns: A delegate suitable for use with `foreach` is returned. + */ int delegate(int delegate(ref Package)) getPackageIterator() { int iterator(int delegate(ref Package) del) @@ -233,6 +280,10 @@ return &iterator; } + /** Enables iteration over all known local packages with a certain name. + + Returns: A delegate suitable for use with `foreach` is returned. + */ int delegate(int delegate(ref Package)) getPackageIterator(string name) { int iterator(int delegate(ref Package) del) @@ -287,11 +338,11 @@ /// destination and sets a version field in the package description. 
Package storeFetchedPackage(Path zip_file_path, Json package_info, Path destination) { - auto package_name = package_info.name.get!string; + auto package_name = package_info["name"].get!string; auto package_version = package_info["version"].get!string; auto clean_package_version = package_version[package_version.startsWith("~") ? 1 : 0 .. $]; - logDiagnostic("Placing package '%s' version '%s' to location '%s' from file '%s'", + logDebug("Placing package '%s' version '%s' to location '%s' from file '%s'", package_name, package_version, destination.toNativeString(), zip_file_path.toNativeString()); if( existsFile(destination) ){ @@ -302,7 +353,7 @@ ZipArchive archive; { logDebug("Opening file %s", zip_file_path); - auto f = openFile(zip_file_path, FileMode.Read); + auto f = openFile(zip_file_path, FileMode.read); scope(exit) f.close(); archive = new ZipArchive(f.readAll()); } @@ -330,7 +381,7 @@ // extract & place mkdirRecurse(destination.toNativeString()); - logDiagnostic("Copying all files..."); + logDebug("Copying all files..."); int countFiles = 0; foreach(ArchiveMember a; archive.directory) { auto cleanedPath = getCleanedPath(a.name); @@ -344,29 +395,29 @@ } else { if( !existsDirectory(dst_path.parentPath) ) mkdirRecurse(dst_path.parentPath.toNativeString()); - auto dstFile = openFile(dst_path, FileMode.CreateTrunc); + auto dstFile = openFile(dst_path, FileMode.createTrunc); scope(exit) dstFile.close(); dstFile.put(archive.expand(a)); ++countFiles; } } - logDiagnostic("%s file(s) copied.", to!string(countFiles)); + logDebug("%s file(s) copied.", to!string(countFiles)); // overwrite dub.json (this one includes a version field) - auto pack = new Package(destination, PathAndFormat(), null, package_info["version"].get!string); + auto pack = Package.load(destination, Path.init, null, package_info["version"].get!string); - if (pack.packageInfoFilename.head != defaultPackageFilename) + if (pack.recipePath.head != defaultPackageFilename) // Storeinfo saved a default 
file, this could be different to the file from the zip. - removeFile(pack.packageInfoFilename); + removeFile(pack.recipePath); pack.storeInfo(); addPackages(m_packages, pack); return pack; } /// Removes the given the package. - void remove(in Package pack, bool force_remove) + void remove(in Package pack) { - logDebug("Remove %s, version %s, path '%s'", pack.name, pack.vers, pack.path); + logDebug("Remove %s, version %s, path '%s'", pack.name, pack.version_, pack.path); enforce(!pack.path.empty, "Cannot remove package "~pack.name~" without a path."); // remove package from repositories' list @@ -374,7 +425,7 @@ bool removeFrom(Package[] packs, in Package pack) { auto packPos = countUntil!("a.path == b.path")(packs, pack); if(packPos != -1) { - packs = std.algorithm.remove(packs, packPos); + packs = .remove(packs, packPos); return true; } return false; @@ -394,20 +445,26 @@ logInfo("Removed package: '"~pack.name~"'"); } + /// Compatibility overload. Use the version without a `force_remove` argument instead. 
+ void remove(in Package pack, bool force_remove) + { + remove(pack); + } + Package addLocalPackage(Path path, string verName, LocalPackageType type) { path.endsWithSlash = true; - auto pack = new Package(path); + auto pack = Package.load(path); enforce(pack.name.length, "The package has no name, defined in: " ~ path.toString()); if (verName.length) - pack.ver = Version(verName); + pack.version_ = Version(verName); // don't double-add packages Package[]* packs = &m_repositories[type].localPackages; foreach (p; *packs) { if (p.path == path) { - enforce(p.ver == pack.ver, "Adding the same local package twice with differing versions is not allowed."); - logInfo("Package is already registered: %s (version: %s)", p.name, p.ver); + enforce(p.version_ == pack.version_, "Adding the same local package twice with differing versions is not allowed."); + logInfo("Package is already registered: %s (version: %s)", p.name, p.version_); return p; } } @@ -416,7 +473,7 @@ writeLocalPackageList(type); - logInfo("Registered package: %s (version: %s)", pack.name, pack.ver); + logInfo("Registered package: %s (version: %s)", pack.name, pack.version_); return pack; } @@ -433,14 +490,14 @@ string[Version] removed; foreach_reverse( i; to_remove ) { - removed[(*packs)[i].ver] = (*packs)[i].name; + removed[(*packs)[i].version_] = (*packs)[i].name; *packs = (*packs)[0 .. i] ~ (*packs)[i+1 .. $]; } writeLocalPackageList(type); foreach(ver, name; removed) - logInfo("Unregistered package: %s (version: %s)", name, ver); + logInfo("Deregistered package: %s (version: %s)", name, ver); } /// For the given type add another path where packages will be looked up. 
@@ -476,8 +533,8 @@ enforce(packlist.type == Json.Type.array, LocalPackagesFilename~" must contain an array."); foreach( pentry; packlist ){ try { - auto name = pentry.name.get!string; - auto path = Path(pentry.path.get!string); + auto name = pentry["name"].get!string; + auto path = Path(pentry["path"].get!string); if (name == "*") { paths ~= path; } else { @@ -494,19 +551,19 @@ if (!pp) { auto infoFile = Package.findPackageFile(path); - if (!infoFile.empty) pp = new Package(path, infoFile); + if (!infoFile.empty) pp = Package.load(path, infoFile); else { - logWarn("Locally registered package %s %s was not found. Please run \"dub remove-local %s\".", + logWarn("Locally registered package %s %s was not found. Please run 'dub remove-local \"%s\"'.", name, ver, path.toNativeString()); auto info = Json.emptyObject; - info.name = name; + info["name"] = name; pp = new Package(info, path); } } if (pp.name != name) logWarn("Local package at %s has different name than %s (%s)", path.toNativeString(), name, pp.name); - pp.ver = ver; + pp.version_ = ver; addPackages(packs, pp); } @@ -532,9 +589,23 @@ logDebug("iterating dir %s", path.toNativeString()); try foreach( pdir; iterateDirectory(path) ){ logDebug("iterating dir %s entry %s", path.toNativeString(), pdir.name); - if( !pdir.isDirectory ) continue; + if (!pdir.isDirectory) continue; + auto pack_path = path ~ (pdir.name ~ "/"); + auto packageFile = Package.findPackageFile(pack_path); + + if (isManagedPath(path) && packageFile.empty) { + // Search for a single directory within this directory which happen to be a prefix of pdir + // This is to support new folder structure installed over the ancient one. 
+ foreach (subdir; iterateDirectory(path ~ (pdir.name ~ "/"))) + if (subdir.isDirectory && pdir.name.startsWith(subdir.name)) {// eg: package vibe-d will be in "vibe-d-x.y.z/vibe-d" + pack_path ~= subdir.name ~ "/"; + packageFile = Package.findPackageFile(pack_path); + break; + } + } + if (packageFile.empty) continue; Package p; try { @@ -544,7 +615,7 @@ p = pp; break; } - if (!p) p = new Package(pack_path, packageFile); + if (!p) p = Package.load(pack_path, packageFile); addPackages(m_packages, p); } catch( Exception e ){ logError("Failed to load package in %s: %s", pack_path, e.msg); @@ -566,7 +637,7 @@ if (existsFile(ovrfilepath)) { foreach (entry; jsonFromFile(ovrfilepath)) { PackageOverride ovr; - ovr.package_ = entry.name.get!string; + ovr.package_ = entry["name"].get!string; ovr.version_ = Dependency(entry["version"].get!string); if (auto pv = "targetVersion" in entry) ovr.targetVersion = Version(pv.get!string); if (auto pv = "targetPath" in entry) ovr.targetPath = Path(pv.get!string); @@ -613,8 +684,8 @@ Json[] newlist; foreach (p; m_repositories[type].searchPath) { auto entry = Json.emptyObject; - entry.name = "*"; - entry.path = p.toNativeString(); + entry["name"] = "*"; + entry["path"] = p.toNativeString(); newlist ~= entry; } @@ -622,7 +693,7 @@ if (p.parentPackage) continue; // do not store sub packages auto entry = Json.emptyObject; entry["name"] = p.name; - entry["version"] = p.ver.toString(); + entry["version"] = p.version_.toString(); entry["path"] = p.path.toNativeString(); newlist ~= entry; } @@ -637,10 +708,10 @@ Json[] newlist; foreach (ovr; m_repositories[type].overrides) { auto jovr = Json.emptyObject; - jovr.name = ovr.package_; - jovr["version"] = ovr.version_.versionString; - if (!ovr.targetPath.empty) jovr.targetPath = ovr.targetPath.toNativeString(); - else jovr.targetVersion = ovr.targetVersion.toString(); + jovr["name"] = ovr.package_; + jovr["version"] = ovr.version_.versionSpec; + if (!ovr.targetPath.empty) jovr["targetPath"] = 
ovr.targetPath.toNativeString(); + else jovr["targetVersion"] = ovr.targetVersion.toString(); newlist ~= jovr; } auto path = m_repositories[type].packagePath; @@ -669,7 +740,7 @@ logError("Package %s declared a sub-package, definition file is missing: %s", pack.name, path.toNativeString()); continue; } - sp = new Package(path, PathAndFormat(), pack); + sp = Package.load(path, Path.init, pack); } else sp = new Package(spr.recipe, pack.path, pack); // Add the subpackage. @@ -710,8 +781,8 @@ system } -enum LocalPackagesFilename = "local-packages.json"; -enum LocalOverridesFilename = "local-overrides.json"; +private enum LocalPackagesFilename = "local-packages.json"; +private enum LocalOverridesFilename = "local-overrides.json"; private struct Repository { diff --git a/source/dub/packagesupplier.d b/source/dub/packagesupplier.d index 12e9b04..cf12f3c 100644 --- a/source/dub/packagesupplier.d +++ b/source/dub/packagesupplier.d @@ -1,7 +1,7 @@ /** - A package supplier, able to get some packages to the local FS. + Contains (remote) package supplier interface and implementations. - Copyright: © 2012-2013 Matthias Dondorff + Copyright: © 2012-2013 Matthias Dondorff, 2012-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff */ @@ -23,33 +23,68 @@ import std.string : format; import std.zip; -// TODO: drop the "best package" behavior and let retrievePackage/getPackageDescription take a Version instead of Dependency +// TODO: Could drop the "best package" behavior and let retrievePackage/ +// getPackageDescription take a Version instead of Dependency. But note +// this means that two requests to the registry are necessary to retrieve +// a package recipe instead of one (first get version list, then the +// package recipe) -/// Supplies packages, this is done by supplying the latest possible version -/// which is available. +/** + Base interface for remote package suppliers. 
+ + Provides functionality necessary to query package versions, recipes and + contents. +*/ interface PackageSupplier { - /// Returns a hunman readable representation of the supplier + /// Represents a single package search result. + static struct SearchResult { string name, description, version_; } + + /// Returns a human-readable representation of the package supplier. @property string description(); + /** Retrieves a list of all available versions(/branches) of a package. + + Throws: Throws an exception if the package name is not known, or if + an error occurred while retrieving the version list. + */ Version[] getVersions(string package_id); - /// path: absolute path to store the package (usually in a zip format) - void retrievePackage(Path path, string packageId, Dependency dep, bool pre_release); + /** Downloads a package and stores it as a ZIP file. - /// returns the metadata for the package - Json getPackageDescription(string packageId, Dependency dep, bool pre_release); + Params: + path = Absolute path of the target ZIP file + package_id = Name of the package to retrieve + dep = Version constraint to match against + pre_release = If true, matches the latest pre-release version. + Otherwise prefers stable versions. + */ + void fetchPackage(Path path, string package_id, Dependency dep, bool pre_release); - /// perform cache operation - void cacheOp(Path cacheDir, CacheOp op); + /** Retrieves only the recipe of a particular package. + + Params: + package_id = Name of the package of which to retrieve the recipe + dep = Version constraint to match against + pre_release = If true, matches the latest pre-release version. + Otherwise prefers stable versions. + */ + Json fetchPackageRecipe(string package_id, Dependency dep, bool pre_release); + + /** Searches for packages matching the given search query term. + + Search queries are currently a simple list of words separated by + white space. Results will get ordered from best match to worst. 
+ */ + SearchResult[] searchPackages(string query); } -/// operations on package supplier cache -enum CacheOp { - load, - store, - clean, -} +/** + File system based package supplier. + + This package supplier searches a certain directory for files with names of + the form "[package name]-[version].zip". +*/ class FileSystemPackageSupplier : PackageSupplier { private { Path m_path; @@ -74,7 +109,7 @@ return ret; } - void retrievePackage(Path path, string packageId, Dependency dep, bool pre_release) + void fetchPackage(Path path, string packageId, Dependency dep, bool pre_release) { enforce(path.absolute); logInfo("Storing package '"~packageId~"', version requirements: %s", dep); @@ -83,13 +118,16 @@ copyFile(filename, path); } - Json getPackageDescription(string packageId, Dependency dep, bool pre_release) + Json fetchPackageRecipe(string packageId, Dependency dep, bool pre_release) { auto filename = bestPackageFile(packageId, dep, pre_release); return jsonFromZip(filename, "dub.json"); } - void cacheOp(Path cacheDir, CacheOp op) { + SearchResult[] searchPackages(string query) + { + // TODO! + return null; } private Path bestPackageFile(string packageId, Dependency dep, bool pre_release) @@ -108,17 +146,21 @@ } -/// Client PackageSupplier using the registry available via registerVpmRegistry +/** + Online registry based package supplier. + + This package supplier connects to an online registry (e.g. + $(LINK https://code.dlang.org/)) to search for available packages. 
+*/ class RegistryPackageSupplier : PackageSupplier { private { URL m_registryUrl; struct CacheEntry { Json data; SysTime cacheTime; } CacheEntry[string] m_metadataCache; Duration m_maxCacheTime; - bool m_metadataCacheDirty; } - this(URL registry) + this(URL registry) { m_registryUrl = registry; m_maxCacheTime = 24.hours(); @@ -128,8 +170,10 @@ Version[] getVersions(string package_id) { + auto md = getMetadata(package_id); + if (md.type == Json.Type.null_) + return null; Version[] ret; - Json md = getMetadata(package_id); foreach (json; md["versions"]) { auto cur = Version(cast(string)json["version"]); ret ~= cur; @@ -138,54 +182,23 @@ return ret; } - void retrievePackage(Path path, string packageId, Dependency dep, bool pre_release) + void fetchPackage(Path path, string packageId, Dependency dep, bool pre_release) { import std.array : replace; Json best = getBestPackage(packageId, dep, pre_release); + if (best.type == Json.Type.null_) + return; auto vers = best["version"].get!string; auto url = m_registryUrl ~ Path(PackagesPath~"/"~packageId~"/"~vers~".zip"); - logDiagnostic("Found download URL: '%s'", url); + logDiagnostic("Downloading from '%s'", url); download(url, path); } - Json getPackageDescription(string packageId, Dependency dep, bool pre_release) + Json fetchPackageRecipe(string packageId, Dependency dep, bool pre_release) { return getBestPackage(packageId, dep, pre_release); } - void cacheOp(Path cacheDir, CacheOp op) - { - auto path = cacheDir ~ cacheFileName; - final switch (op) - { - case CacheOp.store: - if (!m_metadataCacheDirty) return; - if (!cacheDir.existsFile()) - mkdirRecurse(cacheDir.toNativeString()); - // TODO: method is slow due to Json escaping - writeJsonFile(path, m_metadataCache.serializeToJson()); - break; - - case CacheOp.load: - if (!path.existsFile()) return; - deserializeJson(m_metadataCache, jsonFromFile(path)); - break; - - case CacheOp.clean: - if (path.existsFile()) removeFile(path); - m_metadataCache.destroy(); - break; - } 
- m_metadataCacheDirty = false; - } - - private @property string cacheFileName() - { - import std.digest.md; - auto hash = m_registryUrl.toString.md5Of(); - return m_registryUrl.host ~ hash[0 .. $/2].toHexString().idup ~ ".json"; - } - private Json getMetadata(string packageId) { auto now = Clock.currTime(UTC()); @@ -193,7 +206,6 @@ if (pentry.cacheTime + m_maxCacheTime > now) return pentry.data; m_metadataCache.remove(packageId); - m_metadataCacheDirty = true; } auto url = m_registryUrl ~ Path(PackagesPath ~ "/" ~ packageId ~ ".json"); @@ -201,19 +213,41 @@ logDebug("Downloading metadata for %s", packageId); logDebug("Getting from %s", url); - auto jsonData = cast(string)download(url); + string jsonData; + try + jsonData = cast(string)download(url); + catch (HTTPStatusException e) + { + if (e.status != 404) + throw e; + logDebug("Package %s not found in %s: %s", packageId, description, e.msg); + return Json(null); + } Json json = parseJsonString(jsonData, url.toString()); // strip readme data (to save size and time) foreach (ref v; json["versions"]) v.remove("readme"); m_metadataCache[packageId] = CacheEntry(json, now); - m_metadataCacheDirty = true; return json; } + SearchResult[] searchPackages(string query) { + import std.uri : encodeComponent; + auto url = m_registryUrl; + url.localURI = "/api/packages/search?q="~encodeComponent(query); + string data; + data = cast(string)download(url); + import std.algorithm : map; + return data.parseJson.opt!(Json[]) + .map!(j => SearchResult(j["name"].opt!string, j["description"].opt!string, j["version"].opt!string)) + .array; + } + private Json getBestPackage(string packageId, Dependency dep, bool pre_release) { Json md = getMetadata(packageId); + if (md.type == Json.Type.null_) + return md; Json best = null; Version bestver; foreach (json; md["versions"]) { diff --git a/source/dub/platform.d b/source/dub/platform.d index acc9baf..830c1d4 100644 --- a/source/dub/platform.d +++ b/source/dub/platform.d @@ -1,7 +1,14 @@ /** - 
Determines the strings to identify the current build platform. + Build platform identification and specification matching. - Copyright: © 2012 rejectedsoftware e.K. + This module is useful for determining the build platform for a certain + machine and compiler invocation. Example applications include classifying + CI slave machines. + + It also contains means to match build platforms against a platform + specification string as used in package recipes. + + Copyright: © 2012-2016 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ @@ -9,6 +16,33 @@ import std.array; + +/** Determines the full build platform used for the current build. + + Note that the `BuildPlatform.compilerBinary` field will be left empty. + + See_Also: `determinePlatform`, `determineArchitecture`, `determineCompiler` +*/ +BuildPlatform determineBuildPlatform() +{ + BuildPlatform ret; + ret.platform = determinePlatform(); + ret.architecture = determineArchitecture(); + ret.compiler = determineCompiler(); + ret.frontendVersion = __VERSION__; + return ret; +} + + +/** Returns a list of platform identifiers that apply to the current + build. + + Example results are `["windows"]` or `["posix", "osx"]`. The identifiers + correspond to the compiler defined version constants built into the + language, except that they are converted to lower case. + + See_Also: `determineBuildPlatform` +*/ string[] determinePlatform() { auto ret = appender!(string[])(); @@ -34,6 +68,15 @@ return ret.data; } +/** Returns a list of architecture identifiers that apply to the current + build. + + Example results are `["x86_64"]` or `["arm", "arm_softfloat"]`. The + identifiers correspond to the compiler defined version constants built into + the language, except that they are converted to lower case.
+ + See_Also: `determineBuildPlatform` +*/ string[] determineArchitecture() { auto ret = appender!(string[])(); @@ -77,6 +120,13 @@ return ret.data; } +/** Determines the canonical compiler name used for the current build. + + The possible values currently are "dmd", "gdc", "ldc2" or "sdc". If an + unknown compiler is used, this function will return an empty string. + + See_Also: `determineBuildPlatform` +*/ string determineCompiler() { version(DigitalMars) return "dmd"; @@ -85,3 +135,85 @@ else version(SDC) return "sdc"; else return null; } + +/** Matches a platform specification string against a build platform. + + Specifications are built upon the following scheme, where each component + is optional (indicated by []), but the order is obligatory: + "[-platform][-architecture][-compiler]" + + So the following strings are valid specifications: `"-windows-x86-dmd"`, + `"-dmd"`, `"-arm"`, `"-arm-dmd"`, `"-windows-dmd"` + + Params: + platform = The build platform to match against the platform specification + specification = The specification being matched. It must either be an + empty string or start with a dash. + + Returns: + `true` if the given specification matches the build platform, `false` + otherwise. Using an empty string as the platform specification will + always result in a match. +*/ +bool matchesSpecification(in BuildPlatform platform, const(char)[] specification) +{ + import std.string : format; + import std.algorithm : canFind, splitter; + import std.exception : enforce; + + if (specification.empty) return true; + if (platform == BuildPlatform.any) return true; + + auto splitted = specification.splitter('-'); + assert(!splitted.empty, "No valid platform specification! The leading hyphen is required!"); + splitted.popFront(); // Drop leading empty match. 
+ enforce(!splitted.empty, format("Platform specification, if present, must not be empty: \"%s\"", specification)); + + if (platform.platform.canFind(splitted.front)) { + splitted.popFront(); + if (splitted.empty) + return true; + } + if (platform.architecture.canFind(splitted.front)) { + splitted.popFront(); + if (splitted.empty) + return true; + } + if (platform.compiler == splitted.front) { + splitted.popFront(); + enforce(splitted.empty, "No valid specification! The compiler has to be the last element: " ~ specification); + return true; + } + return false; +} + +/// +unittest { + auto platform=BuildPlatform(["posix", "linux"], ["x86_64"], "dmd"); + assert(platform.matchesSpecification("")); + assert(platform.matchesSpecification("-posix")); + assert(platform.matchesSpecification("-linux")); + assert(platform.matchesSpecification("-linux-dmd")); + assert(platform.matchesSpecification("-linux-x86_64-dmd")); + assert(platform.matchesSpecification("-x86_64")); + assert(!platform.matchesSpecification("-windows")); + assert(!platform.matchesSpecification("-ldc")); + assert(!platform.matchesSpecification("-windows-dmd")); +} + +/// Represents a platform a package can be build upon. +struct BuildPlatform { + /// Special constant used to denote matching any build platform. + enum any = BuildPlatform(null, null, null, null, -1); + + /// Platform identifiers, e.g. ["posix", "windows"] + string[] platform; + /// CPU architecture identifiers, e.g. ["x86", "x86_64"] + string[] architecture; + /// Canonical compiler name e.g. "dmd" + string compiler; + /// Compiler binary name e.g. "ldmd2" + string compilerBinary; + /// Compiled frontend version (e.g. `2067` for frontend versions 2.067.x) + int frontendVersion; +} diff --git a/source/dub/project.d b/source/dub/project.d index f71da6e..b8f7813 100644 --- a/source/dub/project.d +++ b/source/dub/project.d @@ -1,7 +1,7 @@ /** Representing a full project, with a root Package and several dependencies. 
- Copyright: © 2012-2013 Matthias Dondorff + Copyright: © 2012-2013 Matthias Dondorff, 2012-2016 Sönke Ludwig License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Matthias Dondorff, Sönke Ludwig */ @@ -9,6 +9,7 @@ import dub.compilers.compiler; import dub.dependency; +import dub.description; import dub.internal.utils; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.core.log; @@ -33,7 +34,14 @@ import std.zip; import std.encoding : sanitize; -/// Representing a full project, with a root Package and several dependencies. +/** + Represents a full project, a root package with its dependencies and package + selection. + + All dependencies must be available locally so that the package dependency + graph can be built. Use `Project.reinit` if necessary for reloading + dependencies after more packages are available. +*/ class Project { private { PackageManager m_packageManager; @@ -42,15 +50,25 @@ Package[] m_dependencies; Package[][Package] m_dependees; SelectedVersions m_selections; + bool m_hasAllDependencies; + string[string] m_overriddenConfigs; } + /** Loads a project. 
+ + Params: + package_manager = Package manager instance to use for loading + dependencies + project_path = Path of the root package to load + pack = An existing `Package` instance to use as the root package + */ this(PackageManager package_manager, Path project_path) { Package pack; auto packageFile = Package.findPackageFile(project_path); if (packageFile.empty) { logWarn("There was no package description found for the application in '%s'.", project_path.toNativeString()); - pack = new Package(null, project_path); + pack = new Package(PackageRecipe.init, project_path); } else { pack = package_manager.getOrLoadPackage(project_path, packageFile); } @@ -58,6 +76,7 @@ this(package_manager, pack); } + /// ditto this(PackageManager package_manager, Package pack) { m_packageManager = package_manager; @@ -71,7 +90,8 @@ if (existsFile(selverfile)) { try m_selections = new SelectedVersions(selverfile); catch(Exception e) { - logDiagnostic("A " ~ SelectedVersions.defaultFile ~ " file was not found or failed to load:\n%s", e.msg); + logWarn("Failed to load %s: %s", SelectedVersions.defaultFile, e.msg); + logDiagnostic("Full error: %s", e.toString().sanitize); m_selections = new SelectedVersions; } } else m_selections = new SelectedVersions; @@ -79,31 +99,14 @@ reinit(); } - /// Gathers information - @property string info() - const { - if(!m_rootPackage) - return "-Unrecognized application in '"~m_rootPackage.path.toNativeString()~"' (probably no dub.json in this directory)"; - string s = "-Application identifier: " ~ m_rootPackage.name; - s ~= "\n" ~ m_rootPackage.generateInfoString(); - s ~= "\n-Retrieved dependencies:"; - foreach(p; m_dependencies) - s ~= "\n" ~ p.generateInfoString(); - return s; - } + /** List of all resolved dependencies. 
- /// Gets all retrieved packages as a "packageId" = "version" associative array - @property string[string] cachedPackagesIDs() const { - string[string] pkgs; - foreach(p; m_dependencies) - pkgs[p.name] = p.vers; - return pkgs; - } - - /// List of retrieved dependency Packages + This includes all direct and indirect dependencies of all configurations + combined. Optional dependencies that were not chosen are not included. + */ @property const(Package[]) dependencies() const { return m_dependencies; } - /// Main package. + /// The root package of the project. @property inout(Package) rootPackage() inout { return m_rootPackage; } /// The versions to use for all dependencies. Call reinit() after changing these. @@ -112,8 +115,22 @@ /// Package manager instance used by the project. @property inout(PackageManager) packageManager() inout { return m_packageManager; } + /** Determines if all dependencies necessary to build have been collected. + + If this function returns `false`, it may be necessary to add more entries + to `selections`, or to use `Dub.upgrade` to automatically select all + missing dependencies. + */ + bool hasAllDependencies() const { return m_hasAllDependencies; } + /** Allows iteration of the dependency tree in topological order */ + int delegate(int delegate(ref Package)) getTopologicalPackageList(bool children_first = false, Package root_package = null, string[string] configs = null) + { + // ugly way to avoid code duplication since inout isn't compatible with foreach type inference + return cast(int delegate(int delegate(ref Package)))(cast(const)this).getTopologicalPackageList(children_first, root_package, configs); + } + /// ditto int delegate(int delegate(ref const Package)) getTopologicalPackageList(bool children_first = false, in Package root_package = null, string[string] configs = null) const { const(Package) rootpack = root_package ? 
root_package : m_rootPackage; @@ -133,10 +150,18 @@ auto cfg = configs.get(p.name, null); - foreach (dn, dv; p.dependencies) { - // filter out dependencies not in the current configuration set - if (!p.hasDependency(dn, cfg)) continue; - auto dependency = getDependency(dn, dv.optional); + PackageDependency[] deps; + if (!cfg.length) deps = p.getAllDependencies(); + else { + auto depmap = p.getDependencies(cfg); + deps = depmap.byKey.map!(k => PackageDependency(k, depmap[k])).array; + } + deps.sort!((a, b) => a.name < b.name); + + foreach (d; deps) { + auto dependency = getDependency(d.name, true); + assert(dependency || d.spec.optional, + format("Non-optional dependency %s of %s not found in dependency tree!?.", d.name, p.name)); if(dependency) perform_rec(dependency); if( ret ) return; } @@ -153,135 +178,212 @@ return &iterator; } - inout(Package) getDependency(string name, bool isOptional) + /** Retrieves a particular dependency by name. + + Params: + name = (Qualified) package name of the dependency + is_optional = If set to true, will return `null` for unsatisfiable + dependencies instead of throwing an exception. + */ + inout(Package) getDependency(string name, bool is_optional) inout { foreach(dp; m_dependencies) if( dp.name == name ) return dp; - if(!isOptional) throw new Exception("Unknown dependency: "~name); + if (!is_optional) throw new Exception("Unknown dependency: "~name); else return null; } + /** Returns the name of the default build configuration for the specified + target platform. + + Params: + platform = The target build platform + allow_non_library_configs = If set to true, will use the first + possible configuration instead of the first "executable" + configuration. 
+ */ string getDefaultConfiguration(BuildPlatform platform, bool allow_non_library_configs = true) const { auto cfgs = getPackageConfigs(platform, null, allow_non_library_configs); return cfgs[m_rootPackage.name]; } + /** Overrides the configuration chosen for a particular package in the + dependency graph. + + Setting a certain configuration here is equivalent to removing all + but one configuration from the package. + + Params: + package_ = The package for which to force selecting a certain + dependency + config = Name of the configuration to force + */ + void overrideConfiguration(string package_, string config) + { + auto p = getDependency(package_, true); + enforce(p !is null, + format("Package '%s', marked for configuration override, is not present in dependency graph.", package_)); + enforce(p.configurations.canFind(config), + format("Package '%s' does not have a configuration named '%s'.", package_, config)); + m_overriddenConfigs[package_] = config; + } + + /** Performs basic validation of various aspects of the package. + + This will emit warnings to `stderr` if any discouraged names or + dependency patterns are found. + */ void validate() { // some basic package lint m_rootPackage.warnOnSpecialCompilerFlags(); - if (m_rootPackage.name != m_rootPackage.name.toLower()) { - logWarn(`WARNING: DUB package names should always be lower case, please change ` - ~ `to {"name": "%s"}. 
You can use {"targetName": "%s"} to keep the current ` - ~ `executable name.`, - m_rootPackage.name.toLower(), m_rootPackage.name); + string nameSuggestion() { + string ret; + ret ~= `Please modify the "name" field in %s accordingly.`.format(m_rootPackage.recipePath.toNativeString()); + if (!m_rootPackage.recipe.buildSettings.targetName.length) { + if (m_rootPackage.recipePath.head.toString().endsWith(".sdl")) { + ret ~= ` You can then add 'targetName "%s"' to keep the current executable name.`.format(m_rootPackage.name); + } else { + ret ~= ` You can then add '"targetName": "%s"' to keep the current executable name.`.format(m_rootPackage.name); + } + } + return ret; } + if (m_rootPackage.name != m_rootPackage.name.toLower()) { + logWarn(`WARNING: DUB package names should always be lower case. %s`, nameSuggestion()); + } else if (!m_rootPackage.recipe.name.all!(ch => ch >= 'a' && ch <= 'z' || ch >= '0' && ch <= '9' || ch == '-' || ch == '_')) { + logWarn(`WARNING: DUB package names may only contain alphanumeric characters, ` + ~ `as well as '-' and '_'. %s`, nameSuggestion()); + } + enforce(!m_rootPackage.name.canFind(' '), "Aborting due to the package name containing spaces."); - foreach (dn, ds; m_rootPackage.dependencies) - if (ds.isExactVersion && ds.version_.isBranch) { + foreach (d; m_rootPackage.getAllDependencies()) + if (d.spec.isExactVersion && d.spec.version_.isBranch) { logWarn("WARNING: A deprecated branch based version specification is used " ~ "for the dependency %s. Please use numbered versions instead. 
Also " ~ "note that you can still use the %s file to override a certain " ~ "dependency to use a branch instead.", - dn, SelectedVersions.defaultFile); + d.name, SelectedVersions.defaultFile); } - bool[string] visited; + bool[Package] visited; void validateDependenciesRec(Package pack) { - foreach (name, vspec_; pack.dependencies) { - if (name in visited) continue; - visited[name] = true; - - auto basename = getBasePackageName(name); + foreach (d; pack.getAllDependencies()) { + auto basename = getBasePackageName(d.name); if (m_selections.hasSelectedVersion(basename)) { auto selver = m_selections.getSelectedVersion(basename); - if (vspec_.merge(selver) == Dependency.invalid) { - logWarn("Selected package %s %s does not match the dependency specification in package %s (%s). Need to \"dub upgrade\"?", - basename, selver, pack.name, vspec_); + if (d.spec.merge(selver) == Dependency.invalid) { + logWarn("Selected package %s %s does not match the dependency specification %s in package %s. Need to \"dub upgrade\"?", + basename, selver, d.spec, pack.name); } } auto deppack = getDependency(name, true); + if (deppack in visited) continue; + visited[deppack] = true; if (deppack) validateDependenciesRec(deppack); } } validateDependenciesRec(m_rootPackage); } - /// Rereads the applications state. + /// Reloads dependencies. 
void reinit() { m_dependencies = null; + m_hasAllDependencies = true; m_packageManager.refresh(false); - void collectDependenciesRec(Package pack) + void collectDependenciesRec(Package pack, int depth = 0) { - logDebug("Collecting dependencies for %s", pack.name); - foreach (name, vspec_; pack.dependencies) { - Dependency vspec = vspec_; - Package p; - if (!vspec.path.empty) { - Path path = vspec.path; - if (!path.absolute) path = pack.path ~ path; - logDiagnostic("Adding local %s", path); - p = m_packageManager.getOrLoadPackage(path); - if (name.canFind(':')) p = m_packageManager.getSubPackage(p, getSubPackageName(name), false); - enforce(p.name == name, - format("Path based dependency %s is referenced with a wrong name: %s vs. %s", - path.toNativeString(), name, p.name)); - } + auto indent = replicate(" ", depth); + logDebug("%sCollecting dependencies for %s", indent, pack.name); + indent ~= " "; - if (!p) { - auto basename = getBasePackageName(name); - if (name == m_rootPackage.basePackage.name) { - vspec = Dependency(m_rootPackage.ver); - p = m_rootPackage.basePackage; - } else if (basename == m_rootPackage.basePackage.name) { - vspec = Dependency(m_rootPackage.ver); - try p = m_packageManager.getSubPackage(m_rootPackage.basePackage, getSubPackageName(name), false); - catch (Exception e) { - logDiagnostic("Error getting sub package %s: %s", name, e.msg); - continue; - } - } else if (m_selections.hasSelectedVersion(basename)) { - vspec = m_selections.getSelectedVersion(basename); - p = m_packageManager.getBestPackage(name, vspec); - } else if (m_dependencies.canFind!(d => getBasePackageName(d.name) == basename)) { - auto idx = m_dependencies.countUntil!(d => getBasePackageName(d.name) == basename); - auto bp = m_dependencies[idx].basePackage; - vspec = Dependency(bp.path); - p = m_packageManager.getSubPackage(bp, getSubPackageName(name), false); - } else { - logDiagnostic("Version selection for dependency %s (%s) of %s is missing.", - basename, name, pack.name); 
+ foreach (dep; pack.getAllDependencies()) { + Dependency vspec = dep.spec; + Package p; + + auto basename = getBasePackageName(dep.name); + auto subname = getSubPackageName(dep.name); + + // non-optional and optional-default dependencies (if no selections file exists) + // need to be satisfied + bool is_desired = !vspec.optional || m_selections.hasSelectedVersion(basename) || (vspec.default_ && m_selections.bare); + + if (dep.name == m_rootPackage.basePackage.name) { + vspec = Dependency(m_rootPackage.version_); + p = m_rootPackage.basePackage; + } else if (basename == m_rootPackage.basePackage.name) { + vspec = Dependency(m_rootPackage.version_); + try p = m_packageManager.getSubPackage(m_rootPackage.basePackage, subname, false); + catch (Exception e) { + logDiagnostic("%sError getting sub package %s: %s", indent, dep.name, e.msg); + if (is_desired) m_hasAllDependencies = false; continue; } + } else if (m_selections.hasSelectedVersion(basename)) { + vspec = m_selections.getSelectedVersion(basename); + if (vspec.path.empty) p = m_packageManager.getBestPackage(dep.name, vspec); + else { + auto path = vspec.path; + if (!path.absolute) path = m_rootPackage.path ~ path; + p = m_packageManager.getOrLoadPackage(path, Path.init, true); + if (subname.length) p = m_packageManager.getSubPackage(p, subname, true); + } + } else if (m_dependencies.canFind!(d => getBasePackageName(d.name) == basename)) { + auto idx = m_dependencies.countUntil!(d => getBasePackageName(d.name) == basename); + auto bp = m_dependencies[idx].basePackage; + vspec = Dependency(bp.path); + if (subname.length) p = m_packageManager.getSubPackage(bp, subname, false); + else p = bp; + } else { + logDiagnostic("%sVersion selection for dependency %s (%s) of %s is missing.", + indent, basename, dep.name, pack.name); + } + + if (!p && !vspec.path.empty) { + Path path = vspec.path; + if (!path.absolute) path = pack.path ~ path; + logDiagnostic("%sAdding local %s in %s", indent, dep.name, path); + p = 
m_packageManager.getOrLoadPackage(path, Path.init, true); + if (p.parentPackage !is null) { + logWarn("%sSub package %s must be referenced using the path to it's parent package.", indent, dep.name); + p = p.parentPackage; + } + if (subname.length) p = m_packageManager.getSubPackage(p, subname, false); + enforce(p.name == dep.name, + format("Path based dependency %s is referenced with a wrong name: %s vs. %s", + path.toNativeString(), dep.name, p.name)); } if (!p) { - logDiagnostic("Missing dependency %s %s of %s", name, vspec, pack.name); + logDiagnostic("%sMissing dependency %s %s of %s", indent, dep.name, vspec, pack.name); + if (is_desired) m_hasAllDependencies = false; continue; } if (!m_dependencies.canFind(p)) { - logDiagnostic("Found dependency %s %s", name, vspec.toString()); + logDiagnostic("%sFound dependency %s %s", indent, dep.name, vspec.toString()); m_dependencies ~= p; - p.warnOnSpecialCompilerFlags(); - collectDependenciesRec(p); + if (basename == m_rootPackage.basePackage.name) + p.warnOnSpecialCompilerFlags(); + collectDependenciesRec(p, depth+1); } m_dependees[p] ~= pack; - //enforce(p !is null, "Failed to resolve dependency "~name~" "~vspec.toString()); + //enforce(p !is null, "Failed to resolve dependency "~dep.name~" "~vspec.toString()); } } collectDependenciesRec(m_rootPackage); } - /// Returns the applications name. + /// Returns the name of the root package. @property string name() const { return m_rootPackage ? m_rootPackage.name : "app"; } + /// Returns the names of all configurations of the root package. @property string[] configurations() const { return m_rootPackage.configurations; } /// Returns a map with the configuration for all packages in the dependency tree. 
@@ -295,14 +397,14 @@ string[][string] parents; parents[m_rootPackage.name] = null; foreach (p; getTopologicalPackageList()) - foreach (d; p.dependencies.byKey) - parents[d] ~= p.name; - + foreach (d; p.getAllDependencies()) + parents[d.name] ~= p.name; size_t createConfig(string pack, string config) { foreach (i, v; configs) if (v.pack == pack && v.config == config) return i; + assert(pack !in m_overriddenConfigs || config == m_overriddenConfigs[pack]); logDebug("Add config %s %s", pack, config); configs ~= Vertex(pack, config); return configs.length-1; @@ -322,12 +424,26 @@ void removeConfig(size_t i) { logDebug("Eliminating config %s for %s", configs[i].config, configs[i].pack); - configs = configs.remove(i); - edges = edges.filter!(e => e.from != i && e.to != i).array(); - foreach (ref e; edges) { - if (e.from > i) e.from--; - if (e.to > i) e.to--; - } + auto had_dep_to_pack = new bool[configs.length]; + auto still_has_dep_to_pack = new bool[configs.length]; + + edges = edges.filter!((e) { + if (e.to == i) { + had_dep_to_pack[e.from] = true; + return false; + } else if (configs[e.to].pack == configs[i].pack) { + still_has_dep_to_pack[e.from] = true; + } + if (e.from == i) return false; + return true; + }).array; + + configs[i] = Vertex.init; // mark config as removed + + // also remove any configs that cannot be satisfied anymore + foreach (j; 0 .. 
configs.length) + if (j != i && had_dep_to_pack[j] && !still_has_dep_to_pack[j]) + removeConfig(j); } bool isReachable(string pack, string conf) { @@ -351,6 +467,39 @@ } string[] allconfigs_path; + + void determineDependencyConfigs(in Package p, string c) + { + string[][string] depconfigs; + foreach (d; p.getAllDependencies()) { + auto dp = getDependency(d.name, true); + if (!dp) continue; + + string[] cfgs; + if (auto pc = dp.name in m_overriddenConfigs) cfgs = [*pc]; + else { + auto subconf = p.getSubConfiguration(c, dp, platform); + if (!subconf.empty) cfgs = [subconf]; + else cfgs = dp.getPlatformConfigurations(platform); + } + cfgs = cfgs.filter!(c => haveConfig(d.name, c)).array; + + // if no valid configuration was found for a dependency, don't include the + // current configuration + if (!cfgs.length) { + logDebug("Skip %s %s (missing configuration for %s)", p.name, c, dp.name); + return; + } + depconfigs[d.name] = cfgs; + } + + // add this configuration to the graph + size_t cidx = createConfig(p.name, c); + foreach (d; p.getAllDependencies()) + foreach (sc; depconfigs.get(d.name, null)) + createEdge(cidx, createConfig(d.name, sc)); + } + // create a graph of all possible package configurations (package, config) -> (subpackage, subconfig) void determineAllConfigs(in Package p) { @@ -360,40 +509,18 @@ scope (exit) allconfigs_path.length--; // first, add all dependency configurations - foreach (dn; p.dependencies.byKey) { - auto dp = getDependency(dn, true); + foreach (d; p.getAllDependencies) { + auto dp = getDependency(d.name, true); if (!dp) continue; determineAllConfigs(dp); } // for each configuration, determine the configurations usable for the dependencies - outer: foreach (c; p.getPlatformConfigurations(platform, p is m_rootPackage && allow_non_library)) { - string[][string] depconfigs; - foreach (dn; p.dependencies.byKey) { - auto dp = getDependency(dn, true); - if (!dp) continue; - - string[] cfgs; - auto subconf = p.getSubConfiguration(c, dp, 
platform); - if (!subconf.empty) cfgs = [subconf]; - else cfgs = dp.getPlatformConfigurations(platform); - cfgs = cfgs.filter!(c => haveConfig(dn, c)).array; - - // if no valid configuration was found for a dependency, don't include the - // current configuration - if (!cfgs.length) { - logDebug("Skip %s %s (missing configuration for %s)", p.name, c, dp.name); - continue outer; - } - depconfigs[dn] = cfgs; - } - - // add this configuration to the graph - size_t cidx = createConfig(p.name, c); - foreach (dn; p.dependencies.byKey) - foreach (sc; depconfigs.get(dn, null)) - createEdge(cidx, createConfig(dn, sc)); - } + if (auto pc = p.name in m_overriddenConfigs) + determineDependencyConfigs(p, *pc); + else + foreach (c; p.getPlatformConfigurations(platform, p is m_rootPackage && allow_non_library)) + determineDependencyConfigs(p, c); } if (config.length) createConfig(m_rootPackage.name, config); determineAllConfigs(m_rootPackage); @@ -403,26 +530,24 @@ do { // remove all configs that are not reachable by all parent packages changed = false; - for (size_t i = 0; i < configs.length; ) { + foreach (i, ref c; configs) { + if (c == Vertex.init) continue; // ignore deleted configurations if (!isReachableByAllParentPacks(i)) { - logDebug("NOT REACHABLE by (%s):", parents[configs[i].pack]); + logDebug("%s %s NOT REACHABLE by all of (%s):", c.pack, c.config, parents[c.pack]); removeConfig(i); changed = true; - } else i++; + } } // when all edges are cleaned up, pick one package and remove all but one config if (!changed) { foreach (p; getTopologicalPackageList()) { size_t cnt = 0; - for (size_t i = 0; i < configs.length; ) { - if (configs[i].pack == p.name) { - if (++cnt > 1) { - logDebug("NON-PRIMARY:"); - removeConfig(i); - } else i++; - } else i++; - } + foreach (i, ref c; configs) + if (c.pack == p.name && ++cnt > 1) { + logDebug("NON-PRIMARY: %s %s", c.pack, c.config); + removeConfig(i); + } if (cnt > 1) { changed = true; break; @@ -437,6 +562,7 @@ // return the 
resulting configuration set as an AA string[string] ret; foreach (c; configs) { + if (c == Vertex.init) continue; // ignore deleted configurations assert(ret.get(c.pack, c.config) == c.config, format("Conflicting configurations for %s found: %s vs. %s", c.pack, c.config, ret[c.pack])); logDebug("Using configuration '%s' for %s", c.config, c.pack); ret[c.pack] = c.config; @@ -457,9 +583,9 @@ } /** - * Fills dst with values from this project. + * Fills `dst` with values from this project. * - * dst gets initialized according to the given platform and config. + * `dst` gets initialized according to the given platform and config. * * Params: * dst = The BuildSettings struct to fill with data. @@ -470,6 +596,8 @@ */ void addBuildSettings(ref BuildSettings dst, in BuildPlatform platform, string config, in Package root_package = null, bool shallow = false) const { + import dub.internal.utils : stripDlangSpecialChars; + auto configs = getPackageConfigs(platform, config); foreach (pkg; this.getTopologicalPackageList(false, root_package, configs)) { @@ -511,68 +639,398 @@ } } - void addBuildTypeSettings(ref BuildSettings dst, in BuildPlatform platform, string build_type) + /** Fills `dst` with build settings specific to the given build type. + + Params: + dst = The `BuildSettings` instance to add the build settings to + platform = Target build platform + build_type = Name of the build type + for_root_package = Selects if the build settings are for the root + package or for one of the dependencies. Unittest flags will + only be added to the root package. 
+ */ + void addBuildTypeSettings(ref BuildSettings dst, in BuildPlatform platform, string build_type, bool for_root_package = true) { - bool usedefflags = !(dst.requirements & BuildRequirements.noDefaultFlags); + bool usedefflags = !(dst.requirements & BuildRequirement.noDefaultFlags); if (usedefflags) { BuildSettings btsettings; m_rootPackage.addBuildTypeSettings(btsettings, platform, build_type); + + if (!for_root_package) { + // don't propagate unittest switch to dependencies, as dependent + // unit tests aren't run anyway and the additional code may + // cause linking to fail on Windows (issue #640) + btsettings.removeOptions(BuildOption.unittests); + } + processVars(dst, this, m_rootPackage, btsettings); } } - /// Determines if the given dependency is already indirectly referenced by other dependencies of pack. - bool isRedundantDependency(in Package pack, in Package dependency) - const { - foreach (dep; pack.dependencies.byKey) { - auto dp = getDependency(dep, true); - if (!dp) continue; - if (dp is dependency) continue; - foreach (ddp; getTopologicalPackageList(false, dp)) - if (ddp is dependency) return true; - } - return false; - } - - /*bool iterateDependencies(bool delegate(Package pack, string dep_name, Dependency dep_spec) del) + /// Outputs a build description of the project, including its dependencies. 
+ ProjectDescription describe(GeneratorSettings settings) { - bool all_found = true; + import dub.generators.targetdescription; - bool[string] visited; - void iterate(Package pack) - { - if (pack.name in visited) return; - visited[pack.name] = true; + // store basic build parameters + ProjectDescription ret; + ret.rootPackage = m_rootPackage.name; + ret.configuration = settings.config; + ret.buildType = settings.buildType; + ret.compiler = settings.platform.compiler; + ret.architecture = settings.platform.architecture; + ret.platform = settings.platform.platform; - foreach (dn, ds; pack.dependencies) { - auto dep = del(pack, dn, ds); - if (dep) iterateDependencies(dep); - else all_found = false; + // collect high level information about projects (useful for IDE display) + auto configs = getPackageConfigs(settings.platform, settings.config); + ret.packages ~= m_rootPackage.describe(settings.platform, settings.config); + foreach (dep; m_dependencies) + ret.packages ~= dep.describe(settings.platform, configs[dep.name]); + + foreach (p; getTopologicalPackageList(false, null, configs)) + ret.packages[ret.packages.countUntil!(pp => pp.name == p.name)].active = true; + + if (settings.buildType.length) { + // collect build target information (useful for build tools) + auto gen = new TargetDescriptionGenerator(this); + try { + gen.generate(settings); + ret.targets = gen.targetDescriptions; + ret.targetLookup = gen.targetDescriptionLookup; + } catch (Exception e) { + logDiagnostic("Skipping targets description: %s", e.msg); + logDebug("Full error: %s", e.toString().sanitize); } } - return all_found; - }*/ + return ret; + } - /// Outputs a JSON description of the project, including its deoendencies. 
- void describe(ref Json dst, BuildPlatform platform, string config) + private string[] listBuildSetting(string attributeName)(BuildPlatform platform, + string config, ProjectDescription projectDescription, Compiler compiler, bool disableEscaping) { - dst.mainPackage = m_rootPackage.name; // deprecated - dst.rootPackage = m_rootPackage.name; + return listBuildSetting!attributeName(platform, getPackageConfigs(platform, config), + projectDescription, compiler, disableEscaping); + } - auto configs = getPackageConfigs(platform, config); + private string[] listBuildSetting(string attributeName)(BuildPlatform platform, + string[string] configs, ProjectDescription projectDescription, Compiler compiler, bool disableEscaping) + { + if (compiler) + return formatBuildSettingCompiler!attributeName(platform, configs, projectDescription, compiler, disableEscaping); + else + return formatBuildSettingPlain!attributeName(platform, configs, projectDescription); + } - auto mp = Json.emptyObject; - m_rootPackage.describe(mp, platform, config); - dst.packages = Json([mp]); + // Output a build setting formatted for a compiler + private string[] formatBuildSettingCompiler(string attributeName)(BuildPlatform platform, + string[string] configs, ProjectDescription projectDescription, Compiler compiler, bool disableEscaping) + { + import std.process : escapeShellFileName; + import std.path : dirSeparator; - foreach (dep; m_dependencies) { - auto dp = Json.emptyObject; - dep.describe(dp, platform, configs[dep.name]); - dst.packages = dst.packages.get!(Json[]) ~ dp; + assert(compiler); + + auto targetDescription = projectDescription.lookupTarget(projectDescription.rootPackage); + auto buildSettings = targetDescription.buildSettings; + + string[] values; + switch (attributeName) + { + case "dflags": + case "linkerFiles": + case "mainSourceFile": + case "importFiles": + values = formatBuildSettingPlain!attributeName(platform, configs, projectDescription); + break; + + case "lflags": + case 
"sourceFiles": + case "versions": + case "debugVersions": + case "importPaths": + case "stringImportPaths": + case "options": + auto bs = buildSettings.dup; + bs.dflags = null; + + // Ensure trailing slash on directory paths + auto ensureTrailingSlash = (string path) => path.endsWith(dirSeparator) ? path : path ~ dirSeparator; + static if (attributeName == "importPaths") + bs.importPaths = bs.importPaths.map!(ensureTrailingSlash).array(); + else static if (attributeName == "stringImportPaths") + bs.stringImportPaths = bs.stringImportPaths.map!(ensureTrailingSlash).array(); + + compiler.prepareBuildSettings(bs, BuildSetting.all & ~to!BuildSetting(attributeName)); + values = bs.dflags; + break; + + case "libs": + auto bs = buildSettings.dup; + bs.dflags = null; + bs.lflags = null; + bs.sourceFiles = null; + bs.targetType = TargetType.none; // Force Compiler to NOT omit dependency libs when package is a library. + + compiler.prepareBuildSettings(bs, BuildSetting.all & ~to!BuildSetting(attributeName)); + + if (bs.lflags) + values = compiler.lflagsToDFlags( bs.lflags ); + else if (bs.sourceFiles) + values = compiler.lflagsToDFlags( bs.sourceFiles ); + else + values = bs.dflags; + + break; + + default: assert(0); + } + + // Escape filenames and paths + if(!disableEscaping) + { + switch (attributeName) + { + case "mainSourceFile": + case "linkerFiles": + case "copyFiles": + case "importFiles": + case "stringImportFiles": + case "sourceFiles": + case "importPaths": + case "stringImportPaths": + return values.map!(escapeShellFileName).array(); + + default: + return values; + } + } + + return values; + } + + // Output a build setting without formatting for any particular compiler + private string[] formatBuildSettingPlain(string attributeName)(BuildPlatform platform, string[string] configs, ProjectDescription projectDescription) + { + import std.path : buildNormalizedPath, dirSeparator; + import std.range : only; + + string[] list; + + enforce(attributeName == "targetType" 
|| projectDescription.lookupRootPackage().targetType != TargetType.none, + "Target type is 'none'. Cannot list build settings."); + + static if (attributeName == "targetType") + if (projectDescription.rootPackage !in projectDescription.targetLookup) + return ["none"]; + + auto targetDescription = projectDescription.lookupTarget(projectDescription.rootPackage); + auto buildSettings = targetDescription.buildSettings; + + // Return any BuildSetting member attributeName as a range of strings. Don't attempt to fixup values. + // allowEmptyString: When the value is a string (as opposed to string[]), + // is empty string an actual permitted value instead of + // a missing value? + auto getRawBuildSetting(Package pack, bool allowEmptyString) { + auto value = __traits(getMember, buildSettings, attributeName); + + static if( is(typeof(value) == string[]) ) + return value; + else static if( is(typeof(value) == string) ) + { + auto ret = only(value); + + // only() has a different return type from only(value), so we + // have to empty the range rather than just returning only(). + if(value.empty && !allowEmptyString) { + ret.popFront(); + assert(ret.empty); + } + + return ret; + } + else static if( is(typeof(value) == enum) ) + return only(value); + else static if( is(typeof(value) == BuildRequirements) ) + return only(cast(BuildRequirement) cast(int) value.values); + else static if( is(typeof(value) == BuildOptions) ) + return only(cast(BuildOption) cast(int) value.values); + else + static assert(false, "Type of BuildSettings."~attributeName~" is unsupported."); + } + + // Adjust BuildSetting member attributeName as needed. + // Returns a range of strings. + auto getFixedBuildSetting(Package pack) { + // Is relative path(s) to a directory? + enum isRelativeDirectory = + attributeName == "importPaths" || attributeName == "stringImportPaths" || + attributeName == "targetPath" || attributeName == "workingDirectory"; + + // Is relative path(s) to a file? 
+ enum isRelativeFile = + attributeName == "sourceFiles" || attributeName == "linkerFiles" || + attributeName == "importFiles" || attributeName == "stringImportFiles" || + attributeName == "copyFiles" || attributeName == "mainSourceFile"; + + // For these, empty string means "main project directory", not "missing value" + enum allowEmptyString = + attributeName == "targetPath" || attributeName == "workingDirectory"; + + enum isEnumBitfield = + attributeName == "requirements" || attributeName == "options"; + + enum isEnum = attributeName == "targetType"; + + auto values = getRawBuildSetting(pack, allowEmptyString); + string fixRelativePath(string importPath) { return buildNormalizedPath(pack.path.toString(), importPath); } + static string ensureTrailingSlash(string path) { return path.endsWith(dirSeparator) ? path : path ~ dirSeparator; } + + static if(isRelativeDirectory) { + // Return full paths for the paths, making sure a + // directory separator is on the end of each path. + return values.map!(fixRelativePath).map!(ensureTrailingSlash); + } + else static if(isRelativeFile) { + // Return full paths. + return values.map!(fixRelativePath); + } + else static if(isEnumBitfield) + return bitFieldNames(values.front); + else static if (isEnum) + return [values.front.to!string]; + else + return values; + } + + foreach(value; getFixedBuildSetting(m_rootPackage)) { + list ~= value; + } + + return list; + } + + // The "compiler" arg is for choosing which compiler the output should be formatted for, + // or null to imply "list" format. 
+ private string[] listBuildSetting(BuildPlatform platform, string[string] configs, + ProjectDescription projectDescription, string requestedData, Compiler compiler, bool disableEscaping) + { + // Certain data cannot be formatted for a compiler + if (compiler) + { + switch (requestedData) + { + case "target-type": + case "target-path": + case "target-name": + case "working-directory": + case "string-import-files": + case "copy-files": + case "pre-generate-commands": + case "post-generate-commands": + case "pre-build-commands": + case "post-build-commands": + enforce(false, "--data="~requestedData~" can only be used with --data-list or --data-0."); + break; + + case "requirements": + enforce(false, "--data=requirements can only be used with --data-list or --data-0. Use --data=options instead."); + break; + + default: break; + } + } + + import std.typetuple : TypeTuple; + auto args = TypeTuple!(platform, configs, projectDescription, compiler, disableEscaping); + switch (requestedData) + { + case "target-type": return listBuildSetting!"targetType"(args); + case "target-path": return listBuildSetting!"targetPath"(args); + case "target-name": return listBuildSetting!"targetName"(args); + case "working-directory": return listBuildSetting!"workingDirectory"(args); + case "main-source-file": return listBuildSetting!"mainSourceFile"(args); + case "dflags": return listBuildSetting!"dflags"(args); + case "lflags": return listBuildSetting!"lflags"(args); + case "libs": return listBuildSetting!"libs"(args); + case "linker-files": return listBuildSetting!"linkerFiles"(args); + case "source-files": return listBuildSetting!"sourceFiles"(args); + case "copy-files": return listBuildSetting!"copyFiles"(args); + case "versions": return listBuildSetting!"versions"(args); + case "debug-versions": return listBuildSetting!"debugVersions"(args); + case "import-paths": return listBuildSetting!"importPaths"(args); + case "string-import-paths": return 
listBuildSetting!"stringImportPaths"(args); + case "import-files": return listBuildSetting!"importFiles"(args); + case "string-import-files": return listBuildSetting!"stringImportFiles"(args); + case "pre-generate-commands": return listBuildSetting!"preGenerateCommands"(args); + case "post-generate-commands": return listBuildSetting!"postGenerateCommands"(args); + case "pre-build-commands": return listBuildSetting!"preBuildCommands"(args); + case "post-build-commands": return listBuildSetting!"postBuildCommands"(args); + case "requirements": return listBuildSetting!"requirements"(args); + case "options": return listBuildSetting!"options"(args); + + default: + enforce(false, "--data="~requestedData~ + " is not a valid option. See 'dub describe --help' for accepted --data= values."); + } + + assert(0); + } + + /// Outputs requested data for the project, optionally including its dependencies. + string[] listBuildSettings(GeneratorSettings settings, string[] requestedData, ListBuildSettingsFormat list_type) + { + import dub.compilers.utils : isLinkerFile; + + auto projectDescription = describe(settings); + auto configs = getPackageConfigs(settings.platform, settings.config); + PackageDescription packageDescription; + foreach (pack; projectDescription.packages) { + if (pack.name == projectDescription.rootPackage) + packageDescription = pack; + } + + if (projectDescription.rootPackage in projectDescription.targetLookup) { + // Copy linker files from sourceFiles to linkerFiles + auto target = projectDescription.lookupTarget(projectDescription.rootPackage); + foreach (file; target.buildSettings.sourceFiles.filter!(isLinkerFile)) + target.buildSettings.addLinkerFiles(file); + + // Remove linker files from sourceFiles + target.buildSettings.sourceFiles = + target.buildSettings.sourceFiles + .filter!(a => !isLinkerFile(a)) + .array(); + projectDescription.lookupTarget(projectDescription.rootPackage) = target; + } + + Compiler compiler; + bool no_escape; + final switch 
(list_type) with (ListBuildSettingsFormat) { + case list: break; + case listNul: no_escape = true; break; + case commandLine: compiler = settings.compiler; break; + case commandLineNul: compiler = settings.compiler; no_escape = true; break; + + } + + auto result = requestedData + .map!(dataName => listBuildSetting(settings.platform, configs, projectDescription, dataName, compiler, no_escape)); + + final switch (list_type) with (ListBuildSettingsFormat) { + case list: return result.map!(l => l.join("\n")).array(); + case listNul: return result.map!(l => l.join("\0")).array; + case commandLine: return result.map!(l => l.join(" ")).array; + case commandLineNul: return result.map!(l => l.join("\0")).array; } } + /** Saves the currently selected dependency versions to disk. + + The selections will be written to a file named + `SelectedVersions.defaultFile` ("dub.selections.json") within the + directory of the root package. Any existing file will get overwritten. + */ void saveSelections() { assert(m_selections !is null, "Cannot save selections for non-disk based project (has no selections)."); @@ -584,6 +1042,11 @@ m_selections.save(path); } + /** Checks if the cached upgrade information is still considered up to date. + + The cache will be considered out of date after 24 hours after the last + online check. + */ bool isUpgradeCacheUpToDate() { try { @@ -598,6 +1061,11 @@ } } + /** Returns the currently cached upgrade information. + + The returned dictionary maps from dependency package name to the latest + available version that matches the dependency specifications. + */ Dependency[string] getUpgradeCache() { try { @@ -611,11 +1079,13 @@ } } + /** Sets a new set of versions for the upgrade cache. 
+ */ void setUpgradeCache(Dependency[string] versions) { logDebug("markUpToDate"); Json create(ref Json json, string object) { - if( object !in json ) json[object] = Json.emptyObject; + if (json[object].type == Json.Type.undefined) json[object] = Json.emptyObject; return json[object]; } create(m_packageSettings, "dub"); @@ -636,7 +1106,7 @@ logDebug("writeDubJson"); auto dubpath = m_rootPackage.path~".dub"; if( !exists(dubpath.toNativeString()) ) mkdir(dubpath.toNativeString()); - auto dstFile = openFile((dubpath~"dub.json").toString(), FileMode.CreateTrunc); + auto dstFile = openFile((dubpath~"dub.json").toString(), FileMode.createTrunc); scope(exit) dstFile.close(); dstFile.writePrettyJsonString(m_packageSettings); } catch( Exception e ){ @@ -645,73 +1115,19 @@ } } -/// Actions to be performed by the dub -struct Action { - enum Type { - fetch, - remove, - conflict, - failure - } - immutable { - Type type; - string packageId; - PlacementLocation location; - Dependency vers; - Version existingVersion; - } - const Package pack; - const Dependency[string] issuer; - - static Action get(string pkg, PlacementLocation location, in Dependency dep, Dependency[string] context, Version old_version = Version.UNKNOWN) - { - return Action(Type.fetch, pkg, location, dep, context, old_version); - } - - static Action remove(Package pkg, Dependency[string] context) - { - return Action(Type.remove, pkg, context); - } - - static Action conflict(string pkg, in Dependency dep, Dependency[string] context) - { - return Action(Type.conflict, pkg, PlacementLocation.user, dep, context); - } - - static Action failure(string pkg, in Dependency dep, Dependency[string] context) - { - return Action(Type.failure, pkg, PlacementLocation.user, dep, context); - } - - private this(Type id, string pkg, PlacementLocation location, in Dependency d, Dependency[string] issue, Version existing_version = Version.UNKNOWN) - { - this.type = id; - this.packageId = pkg; - this.location = location; - this.vers = 
d; - this.issuer = issue; - this.existingVersion = existing_version; - } - - private this(Type id, Package pkg, Dependency[string] issue) - { - pack = pkg; - type = id; - packageId = pkg.name; - vers = cast(immutable)Dependency(pkg.ver); - issuer = issue; - } - - string toString() const { - return to!string(type) ~ ": " ~ packageId ~ ", " ~ to!string(vers); - } +/// Determines the output format used for `Project.listBuildSettings`. +enum ListBuildSettingsFormat { + list, /// Newline separated list entries + listNul, /// NUL character separated list entries (unescaped) + commandLine, /// Formatted for compiler command line (one data list per line) + commandLineNul, /// NUL character separated list entries (unescaped, data lists separated by two NUL characters) } /// Indicates where a package has been or should be placed to. enum PlacementLocation { - /// Packages retrived with 'local' will be placed in the current folder + /// Packages retrieved with 'local' will be placed in the current folder /// using the package name as destination. local, /// Packages with 'userWide' will be placed in a folder accessible by @@ -722,11 +1138,8 @@ system } -/// The default placement location of fetched packages. Can be changed by --local or --system. 
-auto defaultPlacementLocation = PlacementLocation.user; - -void processVars(ref BuildSettings dst, in Project project, in Package pack, BuildSettings settings, bool include_target_settings = false) - +void processVars(ref BuildSettings dst, in Project project, in Package pack, + BuildSettings settings, bool include_target_settings = false) { dst.addDFlags(processVars(project, pack, settings.dflags)); dst.addLFlags(processVars(project, pack, settings.lflags)); @@ -798,7 +1211,6 @@ if (is_path) { auto p = Path(var); if (!p.absolute) { - logDebug("Fixing relative path: %s ~ %s", pack.path.toNativeString(), p.toNativeString()); return (pack.path ~ p).toNativeString(); } else return p.toNativeString(); } else return var; @@ -816,26 +1228,18 @@ return prj.path.toNativeString(); } - if (auto envvar = environment.get(name)) return envvar; + auto envvar = environment.get(name); + if (envvar !is null) return envvar; throw new Exception("Invalid variable: "~name); } -private bool isIdentChar(dchar ch) -{ - return ch >= 'A' && ch <= 'Z' || ch >= 'a' && ch <= 'z' || ch >= '0' && ch <= '9' || ch == '_'; -} -string stripDlangSpecialChars(string s) -{ - import std.array; - import std.uni; - auto ret = appender!string(); - foreach(ch; s) - ret.put(isIdentChar(ch) ? ch : '_'); - return ret.data; -} +/** Holds and stores a set of version selections for package dependencies. + This is the runtime representation of the information contained in + "dub.selections.json" within a package's directory. +*/ final class SelectedVersions { private struct Selected { Dependency dep; @@ -845,41 +1249,60 @@ enum FileVersion = 1; Selected[string] m_selections; bool m_dirty = false; // has changes since last save + bool m_bare = true; } + /// Default file name to use for storing selections. enum defaultFile = "dub.selections.json"; + /// Constructs a new empty version selection. this() {} + /** Constructs a new version selection from JSON data. 
+ + The structure of the JSON document must match the contents of the + "dub.selections.json" file. + */ this(Json data) { deserialize(data); m_dirty = false; } + /** Constructs a new version selections from an existing JSON file. + */ this(Path path) { auto json = jsonFromFile(path); deserialize(json); m_dirty = false; + m_bare = false; } + /// Returns a list of names for all packages that have a version selection. @property string[] selectedPackages() const { return m_selections.keys; } + /// Determines if any changes have been made after loading the selections from a file. @property bool dirty() const { return m_dirty; } + /// Determine if this set of selections is still empty (but not `clear`ed). + @property bool bare() const { return m_bare && !m_dirty; } + + /// Removes all selections. void clear() { m_selections = null; m_dirty = true; } + /// Duplicates the set of selected versions from another instance. void set(SelectedVersions versions) { m_selections = versions.m_selections.dup; m_dirty = true; } + /// Selects a certain version for a specific package. void selectVersion(string package_id, Version version_) { if (auto ps = package_id in m_selections) { @@ -890,6 +1313,7 @@ m_dirty = true; } + /// Selects a certain path for a specific package. void selectVersion(string package_id, Path path) { if (auto ps = package_id in m_selections) { @@ -900,31 +1324,64 @@ m_dirty = true; } + /// Removes the selection for a particular package. void deselectVersion(string package_id) { m_selections.remove(package_id); m_dirty = true; } + /// Determines if a particular package has a selection set. bool hasSelectedVersion(string packageId) const { return (packageId in m_selections) !is null; } + /** Returns the selection for a particular package. 
+ + Note that the returned `Dependency` can either have the + `Dependency.path` property set to a non-empty value, in which case this + is a path based selection, or its `Dependency.version_` property is + valid and it is a version selection. + */ Dependency getSelectedVersion(string packageId) const { enforce(hasSelectedVersion(packageId)); return m_selections[packageId].dep; } + /** Stores the selections to disk. + + The target file will be written in JSON format. Usually, `defaultFile` + should be used as the file name and the directory should be the root + directory of the project's root package. + */ void save(Path path) { Json json = serialize(); - auto file = openFile(path, FileMode.CreateTrunc); + auto file = openFile(path, FileMode.createTrunc); scope(exit) file.close(); - file.writePrettyJsonString(json); - file.put('\n'); + + assert(json.type == Json.Type.object); + assert(json.length == 2); + assert(json["versions"].type != Json.Type.undefined); + + file.write("{\n\t\"fileVersion\": "); + file.writeJsonString(json["fileVersion"]); + file.write(",\n\t\"versions\": {"); + auto vers = json["versions"].get!(Json[string]); + bool first = true; + foreach (k; vers.byKey.array.sort()) { + if (!first) file.write(","); + else first = false; + file.write("\n\t\t"); + file.writeJsonString(Json(k)); + file.write(": "); + file.writeJsonString(vers[k]); + } + file.write("\n\t}\n}\n"); m_dirty = false; + m_bare = false; } static Json dependencyToJson(Dependency d) @@ -938,7 +1395,7 @@ if (j.type == Json.Type.string) return Dependency(Version(j.get!string)); else if (j.type == Json.Type.object) - return Dependency(Path(j.path.get!string)); + return Dependency(Path(j["path"].get!string)); else throw new Exception(format("Unexpected type for dependency: %s", j.type)); } @@ -946,10 +1403,10 @@ const { Json json = serializeToJson(m_selections); Json serialized = Json.emptyObject; - serialized.fileVersion = FileVersion; - serialized.versions = Json.emptyObject; + 
serialized["fileVersion"] = FileVersion; + serialized["versions"] = Json.emptyObject; foreach (p, v; m_selections) - serialized.versions[p] = dependencyToJson(v.dep); + serialized["versions"][p] = dependencyToJson(v.dep); return serialized; } @@ -958,7 +1415,8 @@ enforce(cast(int)json["fileVersion"] == FileVersion, "Mismatched dub.select.json version: " ~ to!string(cast(int)json["fileVersion"]) ~ "vs. " ~to!string(FileVersion)); clear(); scope(failure) clear(); - foreach (string p, v; json.versions) + foreach (string p, v; json["versions"]) m_selections[p] = Selected(dependencyFromJson(v)); } } + diff --git a/source/dub/recipe/io.d b/source/dub/recipe/io.d new file mode 100644 index 0000000..52dbefe --- /dev/null +++ b/source/dub/recipe/io.d @@ -0,0 +1,163 @@ +/** + Package recipe reading/writing facilities. + + Copyright: © 2015-2016, Sönke Ludwig + License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. + Authors: Sönke Ludwig +*/ +module dub.recipe.io; + +import dub.recipe.packagerecipe; +import dub.internal.vibecompat.inet.path; + + +/** Reads a package recipe from a file. + + The file format (JSON/SDLang) will be determined from the file extension. 
+ + Params: + filename = Path of the package recipe file + parent_name = Optional name of the parent package (if this is a sub package) + + Returns: Returns the package recipe contents + Throws: Throws an exception if an I/O or syntax error occurs +*/ +PackageRecipe readPackageRecipe(string filename, string parent_name = null) +{ + return readPackageRecipe(Path(filename), parent_name); +} +/// ditto +PackageRecipe readPackageRecipe(Path filename, string parent_name = null) +{ + import dub.internal.utils : stripUTF8Bom; + import dub.internal.vibecompat.core.file : openFile, FileMode; + + string text; + + { + auto f = openFile(filename.toNativeString(), FileMode.read); + scope(exit) f.close(); + text = stripUTF8Bom(cast(string)f.readAll()); + } + + return parsePackageRecipe(text, filename.toNativeString(), parent_name); +} + +/** Parses an in-memory package recipe. + + The file format (JSON/SDLang) will be determined from the file extension. + + Params: + contents = The contents of the recipe file + filename = Name associated with the package recipe - this is only used + to determine the file format from the file extension + parent_name = Optional name of the parent package (if this is a sub + package) + default_package_name = Optional default package name (if no package name + is found in the recipe this value will be used) + + Returns: Returns the package recipe contents + Throws: Throws an exception if an I/O or syntax error occurs +*/ +PackageRecipe parsePackageRecipe(string contents, string filename, string parent_name = null, + string default_package_name = null) +{ + import std.algorithm : endsWith; + import dub.internal.vibecompat.data.json; + import dub.recipe.json : parseJson; + import dub.recipe.sdl : parseSDL; + + PackageRecipe ret; + + ret.name = default_package_name; + + if (filename.endsWith(".json")) parseJson(ret, parseJsonString(contents, filename), parent_name); + else if (filename.endsWith(".sdl")) parseSDL(ret, contents, parent_name, filename); + 
else assert(false, "readPackageRecipe called with filename with unknown extension: "~filename); + return ret; +} + + +unittest { // issue #711 - configuration default target type not correct for SDL + import dub.compilers.buildsettings : TargetType; + auto inputs = [ + "dub.sdl": "name \"test\"\nconfiguration \"a\" {\n}", + "dub.json": "{\"name\": \"test\", \"configurations\": [{\"name\": \"a\"}]}" + ]; + foreach (file, content; inputs) { + auto pr = parsePackageRecipe(content, file); + assert(pr.name == "test"); + assert(pr.configurations.length == 1); + assert(pr.configurations[0].name == "a"); + assert(pr.configurations[0].buildSettings.targetType == TargetType.library); + } +} + +unittest { // issue #711 - configuration default target type not correct for SDL + import dub.compilers.buildsettings : TargetType; + auto inputs = [ + "dub.sdl": "name \"test\"\ntargetType \"autodetect\"\nconfiguration \"a\" {\n}", + "dub.json": "{\"name\": \"test\", \"targetType\": \"autodetect\", \"configurations\": [{\"name\": \"a\"}]}" + ]; + foreach (file, content; inputs) { + auto pr = parsePackageRecipe(content, file); + assert(pr.name == "test"); + assert(pr.configurations.length == 1); + assert(pr.configurations[0].name == "a"); + assert(pr.configurations[0].buildSettings.targetType == TargetType.library); + } +} + +unittest { // issue #711 - configuration default target type not correct for SDL + import dub.compilers.buildsettings : TargetType; + auto inputs = [ + "dub.sdl": "name \"test\"\ntargetType \"executable\"\nconfiguration \"a\" {\n}", + "dub.json": "{\"name\": \"test\", \"targetType\": \"executable\", \"configurations\": [{\"name\": \"a\"}]}" + ]; + foreach (file, content; inputs) { + auto pr = parsePackageRecipe(content, file); + assert(pr.name == "test"); + assert(pr.configurations.length == 1); + assert(pr.configurations[0].name == "a"); + assert(pr.configurations[0].buildSettings.targetType == TargetType.executable); + } +} + + +/** Writes the textual 
representation of a package recipe to a file. + + Note that the file extension must be either "json" or "sdl". +*/ +void writePackageRecipe(string filename, in ref PackageRecipe recipe) +{ + import dub.internal.vibecompat.core.file : openFile, FileMode; + auto f = openFile(filename, FileMode.createTrunc); + scope(exit) f.close(); + serializePackageRecipe(f, recipe, filename); +} + +/// ditto +void writePackageRecipe(Path filename, in ref PackageRecipe recipe) +{ + writePackageRecipe(filename.toNativeString, recipe); +} + +/** Converts a package recipe to its textual representation. + + The extension of the supplied `filename` must be either "json" or "sdl". + The output format is chosen accordingly. +*/ +void serializePackageRecipe(R)(ref R dst, in ref PackageRecipe recipe, string filename) +{ + import std.algorithm : endsWith; + import dub.internal.vibecompat.data.json : writeJsonString; + import dub.recipe.json : toJson; + import dub.recipe.sdl : toSDL; + + if (filename.endsWith(".json")) + dst.writeJsonString!(R, true)(toJson(recipe)); + else if (filename.endsWith(".sdl")) + toSDL(recipe).toSDLDocument(dst); + else assert(false, "writePackageRecipe called with filename with unknown extension: "~filename); +} + diff --git a/source/dub/recipe/json.d b/source/dub/recipe/json.d index ccf2a59..a254e89 100644 --- a/source/dub/recipe/json.d +++ b/source/dub/recipe/json.d @@ -17,7 +17,7 @@ import std.conv : to; import std.exception : enforce; import std.range; -import std.string : format; +import std.string : format, indexOf; import std.traits : EnumMembers; @@ -42,6 +42,7 @@ } break; case "-ddoxFilterArgs": recipe.ddoxFilterArgs = deserializeJson!(string[])(value); break; + case "-ddoxTool": recipe.ddoxTool = value.get!string; break; } } @@ -72,13 +73,13 @@ Json toJson(in ref PackageRecipe recipe) { auto ret = recipe.buildSettings.toJson(); - ret.name = recipe.name; + ret["name"] = recipe.name; if (!recipe.version_.empty) ret["version"] = recipe.version_; - if 
(!recipe.description.empty) ret.description = recipe.description; - if (!recipe.homepage.empty) ret.homepage = recipe.homepage; - if (!recipe.authors.empty) ret.authors = serializeToJson(recipe.authors); - if (!recipe.copyright.empty) ret.copyright = recipe.copyright; - if (!recipe.license.empty) ret.license = recipe.license; + if (!recipe.description.empty) ret["description"] = recipe.description; + if (!recipe.homepage.empty) ret["homepage"] = recipe.homepage; + if (!recipe.authors.empty) ret["authors"] = serializeToJson(recipe.authors); + if (!recipe.copyright.empty) ret["copyright"] = recipe.copyright; + if (!recipe.license.empty) ret["license"] = recipe.license; if (!recipe.subPackages.empty) { Json[] jsonSubPackages = new Json[recipe.subPackages.length]; foreach (i, subPackage; recipe.subPackages) { @@ -88,21 +89,22 @@ jsonSubPackages[i] = subPackage.recipe.toJson(); } } - ret.subPackages = jsonSubPackages; + ret["subPackages"] = jsonSubPackages; } - if (recipe.configurations) { + if (recipe.configurations.length) { Json[] configs; foreach(config; recipe.configurations) configs ~= config.toJson(); - ret.configurations = configs; + ret["configurations"] = configs; } if (recipe.buildTypes.length) { Json[string] types; foreach (name, settings; recipe.buildTypes) types[name] = settings.toJson(); - ret.buildTypes = types; + ret["buildTypes"] = types; } if (!recipe.ddoxFilterArgs.empty) ret["-ddoxFilterArgs"] = recipe.ddoxFilterArgs.serializeToJson(); + if (!recipe.ddoxTool.empty) ret["-ddoxTool"] = recipe.ddoxTool; return ret; } @@ -149,8 +151,8 @@ private Json toJson(in ref ConfigurationInfo config) { auto ret = config.buildSettings.toJson(); - ret.name = config.name; - if (config.platforms.length) ret.platforms = serializeToJson(config.platforms); + ret["name"] = config.name; + if (config.platforms.length) ret["platforms"] = serializeToJson(config.platforms); return ret; } @@ -158,7 +160,7 @@ { foreach(string name, value; json) { - auto idx = 
std.string.indexOf(name, "-"); + auto idx = indexOf(name, "-"); string basename, suffix; if( idx >= 0 ) { basename = name[0 .. idx]; suffix = name[idx .. $]; } else basename = name; @@ -221,34 +223,35 @@ case "buildRequirements": BuildRequirements reqs; foreach (req; deserializeJson!(string[])(value)) - reqs |= to!BuildRequirements(req); + reqs |= to!BuildRequirement(req); bs.buildRequirements[suffix] = reqs; break; case "buildOptions": BuildOptions options; foreach (opt; deserializeJson!(string[])(value)) - options |= to!BuildOptions(opt); + options |= to!BuildOption(opt); bs.buildOptions[suffix] = options; break; } } } -Json toJson(in ref BuildSettingsTemplate bs) +private Json toJson(in ref BuildSettingsTemplate bs) { auto ret = Json.emptyObject; if( bs.dependencies !is null ){ auto deps = Json.emptyObject; foreach( pack, d; bs.dependencies ) deps[pack] = serializeToJson(d); - ret.dependencies = deps; + ret["dependencies"] = deps; } - if (bs.systemDependencies !is null) ret.systemDependencies = bs.systemDependencies; + if (bs.systemDependencies !is null) ret["systemDependencies"] = bs.systemDependencies; if (bs.targetType != TargetType.autodetect) ret["targetType"] = bs.targetType.to!string(); if (!bs.targetPath.empty) ret["targetPath"] = bs.targetPath; if (!bs.targetName.empty) ret["targetName"] = bs.targetName; if (!bs.workingDirectory.empty) ret["workingDirectory"] = bs.workingDirectory; if (!bs.mainSourceFile.empty) ret["mainSourceFile"] = bs.mainSourceFile; + if (bs.subConfigurations.length > 0) ret["subConfigurations"] = serializeToJson(bs.subConfigurations); foreach (suffix, arr; bs.dflags) ret["dflags"~suffix] = serializeToJson(arr); foreach (suffix, arr; bs.lflags) ret["lflags"~suffix] = serializeToJson(arr); foreach (suffix, arr; bs.libs) ret["libs"~suffix] = serializeToJson(arr); @@ -266,13 +269,13 @@ foreach (suffix, arr; bs.postBuildCommands) ret["postBuildCommands"~suffix] = serializeToJson(arr); foreach (suffix, arr; bs.buildRequirements) { 
string[] val; - foreach (i; [EnumMembers!BuildRequirements]) + foreach (i; [EnumMembers!BuildRequirement]) if (arr & i) val ~= to!string(i); ret["buildRequirements"~suffix] = serializeToJson(val); } foreach (suffix, arr; bs.buildOptions) { string[] val; - foreach (i; [EnumMembers!BuildOptions]) + foreach (i; [EnumMembers!BuildOption]) if (arr & i) val ~= to!string(i); ret["buildOptions"~suffix] = serializeToJson(val); } diff --git a/source/dub/recipe/packagerecipe.d b/source/dub/recipe/packagerecipe.d index dad1220..e76d487 100644 --- a/source/dub/recipe/packagerecipe.d +++ b/source/dub/recipe/packagerecipe.d @@ -8,13 +8,14 @@ module dub.recipe.packagerecipe; import dub.compilers.compiler; +import dub.compilers.utils : warnOnSpecialCompilerFlags; import dub.dependency; import dub.internal.vibecompat.core.file; import dub.internal.vibecompat.core.log; import dub.internal.vibecompat.inet.url; -import std.algorithm : sort; +import std.algorithm : findSplit, sort; import std.array : join, split; import std.exception : enforce; import std.file; @@ -26,7 +27,7 @@ Sub qualified package names are lists of package names separated by ":". For example, "packa:packb:packc" references a package named "packc" that is a - sub package of "packb", wich in turn is a sub package of "packa". + sub package of "packb", which in turn is a sub package of "packa". */ string[] getSubPackagePath(string package_name) { @@ -40,7 +41,7 @@ */ string getBasePackageName(string package_name) { - return package_name.getSubPackagePath()[0]; + return package_name.findSplit(":")[0]; } /** @@ -51,10 +52,18 @@ */ string getSubPackageName(string package_name) { - return getSubPackagePath(package_name)[1 .. 
$].join(":"); + return package_name.findSplit(":")[2]; } - +unittest +{ + assert(getSubPackagePath("packa:packb:packc") == ["packa", "packb", "packc"]); + assert(getSubPackagePath("pack") == ["pack"]); + assert(getBasePackageName("packa:packb:packc") == "packa"); + assert(getBasePackageName("pack") == "pack"); + assert(getSubPackageName("packa:packb:packc") == "packb:packc"); + assert(getSubPackageName("pack") == ""); +} /** Represents the contents of a package recipe file (dub.json/dub.sdl) in an abstract way. @@ -71,23 +80,13 @@ string copyright; string license; string[] ddoxFilterArgs; + string ddoxTool; BuildSettingsTemplate buildSettings; ConfigurationInfo[] configurations; BuildSettingsTemplate[string] buildTypes; SubPackage[] subPackages; - @property const(Dependency)[string] dependencies() - const { - Dependency[string] ret; - foreach (n, d; this.buildSettings.dependencies) - ret[n] = d; - foreach (ref c; configurations) - foreach (n, d; c.buildSettings.dependencies) - ret[n] = d; - return ret; - } - inout(ConfigurationInfo) getConfiguration(string name) inout { foreach (c; configurations) @@ -95,6 +94,10 @@ return c; throw new Exception("Unknown configuration: "~name); } + + /** Clones the package recipe recursively. + */ + PackageRecipe clone() const { return .clone(this); } } struct SubPackage @@ -170,8 +173,10 @@ dst.addSourceFiles(this.mainSourceFile); } - void collectFiles(string method)(in string[][string] paths_map, string pattern) + string[] collectFiles(in string[][string] paths_map, string pattern) { + auto files = appender!(string[]); + foreach (suffix, paths; paths_map) { if (!platform.matchesSpecification(suffix)) continue; @@ -186,22 +191,30 @@ } foreach (d; dirEntries(path.toNativeString(), pattern, SpanMode.depth)) { - if (isDir(d.name)) continue; + import std.path : baseName; + if (baseName(d.name)[0] == '.' 
|| d.isDir) continue; auto src = Path(d.name).relativeTo(base_path); - __traits(getMember, dst, method)(src.toNativeString()); + files ~= src.toNativeString(); } } } + + return files.data; } - // collect files from all source/import folders - collectFiles!"addSourceFiles"(sourcePaths, "*.d"); - collectFiles!"addImportFiles"(importPaths, "*.{d,di}"); - dst.removeImportFiles(dst.sourceFiles); - collectFiles!"addStringImportFiles"(stringImportPaths, "*"); + // collect source files + dst.addSourceFiles(collectFiles(sourcePaths, "*.d")); + auto sourceFiles = dst.sourceFiles.sort(); - // ensure a deterministic order of files as passed to the compiler - dst.sourceFiles.sort(); + // collect import files and remove sources + import std.algorithm : copy, setDifference; + + auto importFiles = collectFiles(importPaths, "*.{d,di}").sort(); + immutable nremoved = importFiles.setDifference(sourceFiles).copy(importFiles.release).length; + importFiles = importFiles[0 .. $ - nremoved]; + dst.addImportFiles(importFiles.release); + + dst.addStringImportFiles(collectFiles(stringImportPaths, "*")); getPlatformSetting!("dflags", "addDFlags")(dst, platform); getPlatformSetting!("lflags", "addLFlags")(dst, platform); @@ -234,8 +247,8 @@ auto nodef = false; auto noprop = false; foreach (req; this.buildRequirements) { - if (req & BuildRequirements.noDefaultFlags) nodef = true; - if (req & BuildRequirements.relaxProperties) noprop = true; + if (req & BuildRequirement.noDefaultFlags) nodef = true; + if (req & BuildRequirement.relaxProperties) noprop = true; } if (noprop) { @@ -255,3 +268,32 @@ } } } + +private T clone(T)(ref const(T) val) +{ + import std.traits : isSomeString, isDynamicArray, isAssociativeArray, isBasicType, ValueType; + + static if (is(T == immutable)) return val; + else static if (isBasicType!T) return val; + else static if (isDynamicArray!T) { + alias V = typeof(T.init[0]); + static if (is(V == immutable)) return val; + else { + T ret = new V[val.length]; + foreach (i, ref 
f; val) + ret[i] = clone!V(f); + return ret; + } + } else static if (isAssociativeArray!T) { + alias V = ValueType!T; + T ret; + foreach (k, ref f; val) + ret[k] = clone!V(f); + return ret; + } else static if (is(T == struct)) { + T ret; + foreach (i, M; typeof(T.tupleof)) + ret.tupleof[i] = clone!M(val.tupleof[i]); + return ret; + } else static assert(false, "Unsupported type: "~T.stringof); +} diff --git a/source/dub/recipe/sdl.d b/source/dub/recipe/sdl.d index 3987567..0e2bab6 100644 --- a/source/dub/recipe/sdl.d +++ b/source/dub/recipe/sdl.d @@ -1,17 +1,534 @@ /** SDL format support for PackageRecipe - Copyright: © 2014 rejectedsoftware e.K. + Copyright: © 2014-2015 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ module dub.recipe.sdl; +import dub.compilers.compiler; +import dub.dependency; +import dub.internal.sdlang; +import dub.internal.vibecompat.core.log; +import dub.internal.vibecompat.inet.path; import dub.recipe.packagerecipe; -alias SDLNode = void*; // TODO +import std.algorithm : map; +import std.array : array; +import std.conv; +import std.string : startsWith; -void parseSDL(ref PackageRecipe recipe, SDLNode json, string parent_name) + +void parseSDL(ref PackageRecipe recipe, string sdl, string parent_name, string filename) { - assert(false); + parseSDL(recipe, parseSource(sdl, filename), parent_name); +} + +void parseSDL(ref PackageRecipe recipe, Tag sdl, string parent_name) +{ + Tag[] subpacks; + Tag[] configs; + + // parse top-level fields + foreach (n; sdl.all.tags) { + enforceSDL(n.name.length > 0, "Anonymous tags are not allowed at the root level.", n); + switch (n.fullName) { + default: break; + case "name": recipe.name = n.stringTagValue; break; + case "version": recipe.version_ = n.stringTagValue; break; + case "description": recipe.description = n.stringTagValue; break; + case "homepage": recipe.homepage = n.stringTagValue; break; + case "authors": 
recipe.authors ~= n.stringArrayTagValue; break; + case "copyright": recipe.copyright = n.stringTagValue; break; + case "license": recipe.license = n.stringTagValue; break; + case "subPackage": subpacks ~= n; break; + case "configuration": configs ~= n; break; + case "buildType": + auto name = n.stringTagValue(true); + BuildSettingsTemplate bt; + parseBuildSettings(n, bt, parent_name); + recipe.buildTypes[name] = bt; + break; + case "x:ddoxFilterArgs": recipe.ddoxFilterArgs ~= n.stringArrayTagValue; break; + case "x:ddoxTool": recipe.ddoxTool = n.stringTagValue; break; + } + } + + enforceSDL(recipe.name.length > 0, "The package \"name\" field is missing or empty.", sdl); + string full_name = parent_name.length ? parent_name ~ ":" ~ recipe.name : recipe.name; + + // parse general build settings + parseBuildSettings(sdl, recipe.buildSettings, full_name); + + // determine default target type for configurations + auto defttype = recipe.buildSettings.targetType; + if (defttype == TargetType.autodetect) + defttype = TargetType.library; + + // parse configurations + recipe.configurations.length = configs.length; + foreach (i, n; configs) { + recipe.configurations[i].buildSettings.targetType = defttype; + parseConfiguration(n, recipe.configurations[i], full_name); + } + + // finally parse all sub packages + recipe.subPackages.length = subpacks.length; + foreach (i, n; subpacks) { + if (n.values.length) { + recipe.subPackages[i].path = n.stringTagValue; + } else { + enforceSDL(n.attributes.length == 0, "No attributes allowed for inline sub package definitions.", n); + parseSDL(recipe.subPackages[i].recipe, n, full_name); + } + } +} + +Tag toSDL(in ref PackageRecipe recipe) +{ + Tag ret = new Tag; + void add(T)(string field, T value) { ret.add(new Tag(null, field, [Value(value)])); } + add("name", recipe.name); + if (recipe.version_.length) add("version", recipe.version_); + if (recipe.description.length) add("description", recipe.description); + if (recipe.homepage.length) 
add("homepage", recipe.homepage); + if (recipe.authors.length) ret.add(new Tag(null, "authors", recipe.authors.map!(a => Value(a)).array)); + if (recipe.copyright.length) add("copyright", recipe.copyright); + if (recipe.license.length) add("license", recipe.license); + foreach (name, settings; recipe.buildTypes) { + auto t = new Tag(null, "buildType", [Value(name)]); + t.add(settings.toSDL()); + ret.add(t); + } + if (recipe.ddoxFilterArgs.length) + ret.add(new Tag("x", "ddoxFilterArgs", recipe.ddoxFilterArgs.map!(a => Value(a)).array)); + if (recipe.ddoxTool.length) ret.add(new Tag("x", "ddoxTool", [Value(recipe.ddoxTool)])); + ret.add(recipe.buildSettings.toSDL()); + foreach(config; recipe.configurations) + ret.add(config.toSDL()); + foreach (i, subPackage; recipe.subPackages) { + if (subPackage.path !is null) { + add("subPackage", subPackage.path); + } else { + auto t = subPackage.recipe.toSDL(); + t.name = "subPackage"; + ret.add(t); + } + } + return ret; +} + +private void parseBuildSettings(Tag settings, ref BuildSettingsTemplate bs, string package_name) +{ + foreach (setting; settings.tags) + parseBuildSetting(setting, bs, package_name); +} + +private void parseBuildSetting(Tag setting, ref BuildSettingsTemplate bs, string package_name) +{ + switch (setting.fullName) { + default: break; + case "dependency": parseDependency(setting, bs, package_name); break; + case "systemDependencies": bs.systemDependencies = setting.stringTagValue; break; + case "targetType": bs.targetType = setting.stringTagValue.to!TargetType; break; + case "targetName": bs.targetName = setting.stringTagValue; break; + case "targetPath": bs.targetPath = setting.stringTagValue; break; + case "workingDirectory": bs.workingDirectory = setting.stringTagValue; break; + case "subConfiguration": + auto args = setting.stringArrayTagValue; + enforceSDL(args.length == 2, "Expecting package and configuration names as arguments.", setting); + bs.subConfigurations[expandPackageName(args[0], 
package_name, setting)] = args[1]; + break; + case "dflags": setting.parsePlatformStringArray(bs.dflags); break; + case "lflags": setting.parsePlatformStringArray(bs.lflags); break; + case "libs": setting.parsePlatformStringArray(bs.libs); break; + case "sourceFiles": setting.parsePlatformStringArray(bs.sourceFiles); break; + case "sourcePaths": setting.parsePlatformStringArray(bs.sourcePaths); break; + case "excludedSourceFiles": setting.parsePlatformStringArray(bs.excludedSourceFiles); break; + case "mainSourceFile": bs.mainSourceFile = setting.stringTagValue; break; + case "copyFiles": setting.parsePlatformStringArray(bs.copyFiles); break; + case "versions": setting.parsePlatformStringArray(bs.versions); break; + case "debugVersions": setting.parsePlatformStringArray(bs.debugVersions); break; + case "importPaths": setting.parsePlatformStringArray(bs.importPaths); break; + case "stringImportPaths": setting.parsePlatformStringArray(bs.stringImportPaths); break; + case "preGenerateCommands": setting.parsePlatformStringArray(bs.preGenerateCommands); break; + case "postGenerateCommands": setting.parsePlatformStringArray(bs.postGenerateCommands); break; + case "preBuildCommands": setting.parsePlatformStringArray(bs.preBuildCommands); break; + case "postBuildCommands": setting.parsePlatformStringArray(bs.postBuildCommands); break; + case "buildRequirements": setting.parsePlatformEnumArray!BuildRequirement(bs.buildRequirements); break; + case "buildOptions": setting.parsePlatformEnumArray!BuildOption(bs.buildOptions); break; + } +} + +private void parseDependency(Tag t, ref BuildSettingsTemplate bs, string package_name) +{ + enforceSDL(t.values.length != 0, "Missing dependency name.", t); + enforceSDL(t.values.length == 1, "Multiple dependency names.", t); + auto pkg = expandPackageName(t.values[0].get!string, package_name, t); + enforceSDL(pkg !in bs.dependencies, "The dependency '"~pkg~"' is specified more than once.", t); + + Dependency dep = Dependency.any; + auto 
attrs = t.attributes; + + auto pv = "version" in attrs; + + if ("path" in attrs) { + if ("version" in attrs) + logDiagnostic("Ignoring version specification (%s) for path based dependency %s", attrs["version"][0].value.get!string, attrs["path"][0].value.get!string); + dep.versionSpec = "*"; + dep.path = Path(attrs["path"][0].value.get!string); + } else { + enforceSDL("version" in attrs, "Missing version specification.", t); + dep.versionSpec = attrs["version"][0].value.get!string; + } + + if ("optional" in attrs) + dep.optional = attrs["optional"][0].value.get!bool; + + if ("default" in attrs) + dep.default_ = attrs["default"][0].value.get!bool; + + bs.dependencies[pkg] = dep; +} + +private void parseConfiguration(Tag t, ref ConfigurationInfo ret, string package_name) +{ + ret.name = t.stringTagValue(true); + foreach (f; t.tags) { + switch (f.fullName) { + default: parseBuildSetting(f, ret.buildSettings, package_name); break; + case "platforms": ret.platforms ~= f.stringArrayTagValue; break; + } + } +} + +private Tag toSDL(in ref ConfigurationInfo config) +{ + auto ret = new Tag(null, "configuration", [Value(config.name)]); + if (config.platforms.length) ret.add(new Tag(null, "platforms", config.platforms[].map!(p => Value(p)).array)); + ret.add(config.buildSettings.toSDL()); + return ret; +} + +private Tag[] toSDL(in ref BuildSettingsTemplate bs) +{ + Tag[] ret; + void add(string name, string value) { ret ~= new Tag(null, name, [Value(value)]); } + void adda(string name, string suffix, in string[] values) { + ret ~= new Tag(null, name, values[].map!(v => Value(v)).array, + suffix.length ? [new Attribute(null, "platform", Value(suffix[1 .. 
$]))] : null); + } + + string[] toNameArray(T, U)(U bits) if(is(T == enum)) { + string[] ret; + foreach (m; __traits(allMembers, T)) + if (bits & __traits(getMember, T, m)) + ret ~= m; + return ret; + } + + foreach (pack, d; bs.dependencies) { + Attribute[] attribs; + if (d.path.length) attribs ~= new Attribute(null, "path", Value(d.path.toString())); + else attribs ~= new Attribute(null, "version", Value(d.versionSpec)); + if (d.optional) attribs ~= new Attribute(null, "optional", Value(true)); + ret ~= new Tag(null, "dependency", [Value(pack)], attribs); + } + if (bs.systemDependencies !is null) add("systemDependencies", bs.systemDependencies); + if (bs.targetType != TargetType.autodetect) add("targetType", bs.targetType.to!string()); + if (bs.targetPath.length) add("targetPath", bs.targetPath); + if (bs.targetName.length) add("targetName", bs.targetName); + if (bs.workingDirectory.length) add("workingDirectory", bs.workingDirectory); + if (bs.mainSourceFile.length) add("mainSourceFile", bs.mainSourceFile); + foreach (pack, conf; bs.subConfigurations) ret ~= new Tag(null, "subConfiguration", [Value(pack), Value(conf)]); + foreach (suffix, arr; bs.dflags) adda("dflags", suffix, arr); + foreach (suffix, arr; bs.lflags) adda("lflags", suffix, arr); + foreach (suffix, arr; bs.libs) adda("libs", suffix, arr); + foreach (suffix, arr; bs.sourceFiles) adda("sourceFiles", suffix, arr); + foreach (suffix, arr; bs.sourcePaths) adda("sourcePaths", suffix, arr); + foreach (suffix, arr; bs.excludedSourceFiles) adda("excludedSourceFiles", suffix, arr); + foreach (suffix, arr; bs.copyFiles) adda("copyFiles", suffix, arr); + foreach (suffix, arr; bs.versions) adda("versions", suffix, arr); + foreach (suffix, arr; bs.debugVersions) adda("debugVersions", suffix, arr); + foreach (suffix, arr; bs.importPaths) adda("importPaths", suffix, arr); + foreach (suffix, arr; bs.stringImportPaths) adda("stringImportPaths", suffix, arr); + foreach (suffix, arr; bs.preGenerateCommands) 
adda("preGenerateCommands", suffix, arr); + foreach (suffix, arr; bs.postGenerateCommands) adda("postGenerateCommands", suffix, arr); + foreach (suffix, arr; bs.preBuildCommands) adda("preBuildCommands", suffix, arr); + foreach (suffix, arr; bs.postBuildCommands) adda("postBuildCommands", suffix, arr); + foreach (suffix, bits; bs.buildRequirements) adda("buildRequirements", suffix, toNameArray!BuildRequirement(bits)); + foreach (suffix, bits; bs.buildOptions) adda("buildOptions", suffix, toNameArray!BuildOption(bits)); + return ret; +} + +private string expandPackageName(string name, string parent_name, Tag tag) +{ + import std.algorithm : canFind; + import std.string : format; + if (name.startsWith(":")) { + enforceSDL(!parent_name.canFind(':'), format("Short-hand packages syntax not allowed within sub packages: %s -> %s", parent_name, name), tag); + return parent_name ~ name; + } else return name; +} + +private string stringTagValue(Tag t, bool allow_child_tags = false) +{ + import std.string : format; + enforceSDL(t.values.length > 0, format("Missing string value for '%s'.", t.fullName), t); + enforceSDL(t.values.length == 1, format("Expected only one value for '%s'.", t.fullName), t); + enforceSDL(t.values[0].peek!string !is null, format("Expected value of type string for '%s'.", t.fullName), t); + enforceSDL(allow_child_tags || t.tags.length == 0, format("No child tags allowed for '%s'.", t.fullName), t); + // Q: should attributes be disallowed, or just ignored for forward compatibility reasons? + //enforceSDL(t.attributes.length == 0, format("No attributes allowed for '%s'.", t.fullName), t); + return t.values[0].get!string; +} + +private string[] stringArrayTagValue(Tag t, bool allow_child_tags = false) +{ + import std.string : format; + enforceSDL(allow_child_tags || t.tags.length == 0, format("No child tags allowed for '%s'.", t.fullName), t); + // Q: should attributes be disallowed, or just ignored for forward compatibility reasons? 
+ //enforceSDL(t.attributes.length == 0, format("No attributes allowed for '%s'.", t.fullName), t); + + string[] ret; + foreach (v; t.values) { + enforceSDL(t.values[0].peek!string !is null, format("Values for '%s' must be strings.", t.fullName), t); + ret ~= v.get!string; + } + return ret; +} + +private void parsePlatformStringArray(Tag t, ref string[][string] dst) +{ + string platform; + if ("platform" in t.attributes) + platform = "-" ~ t.attributes["platform"][0].value.get!string; + dst[platform] ~= t.values.map!(v => v.get!string).array; +} + +private void parsePlatformEnumArray(E, Es)(Tag t, ref Es[string] dst) +{ + string platform; + if ("platform" in t.attributes) + platform = "-" ~ t.attributes["platform"][0].value.get!string; + foreach (v; t.values) { + if (platform !in dst) dst[platform] = Es.init; + dst[platform] |= v.get!string.to!E; + } +} + +private void enforceSDL(bool condition, lazy string message, Tag tag, string file = __FILE__, int line = __LINE__) +{ + import std.string : format; + if (!condition) { + throw new Exception(format("%s(%s): Error: %s", tag.location.file, tag.location.line, message), file, line); + } +} + + +unittest { // test all possible fields + auto sdl = +`name "projectname"; +description "project description"; +homepage "http://example.com" +authors "author 1" "author 2" +authors "author 3" +copyright "copyright string" +license "license string" +version "1.0.0" +subPackage { + name "subpackage1" +} +subPackage { + name "subpackage2" + dependency "projectname:subpackage1" version="*" +} +subPackage "pathsp3" +configuration "config1" { + platforms "windows" "linux" + targetType "library" +} +configuration "config2" { + platforms "windows-x86" + targetType "executable" +} +buildType "debug" { + dflags "-g" "-debug" +} +buildType "release" { + dflags "-release" "-O" +} +x:ddoxFilterArgs "-arg1" "-arg2" +x:ddoxFilterArgs "-arg3" +x:ddoxTool "ddoxtool" + +dependency ":subpackage1" optional=false path="." 
+dependency "somedep" version="1.0.0" optional=true +systemDependencies "system dependencies" +targetType "executable" +targetName "target name" +targetPath "target path" +workingDirectory "working directory" +subConfiguration ":subpackage2" "library" +buildRequirements "allowWarnings" "silenceDeprecations" +buildOptions "verbose" "ignoreUnknownPragmas" +libs "lib1" "lib2" +libs "lib3" +sourceFiles "source1" "source2" +sourceFiles "source3" +sourcePaths "sourcepath1" "sourcepath2" +sourcePaths "sourcepath3" +excludedSourceFiles "excluded1" "excluded2" +excludedSourceFiles "excluded3" +mainSourceFile "main source" +copyFiles "copy1" "copy2" +copyFiles "copy3" +versions "version1" "version2" +versions "version3" +debugVersions "debug1" "debug2" +debugVersions "debug3" +importPaths "import1" "import2" +importPaths "import3" +stringImportPaths "string1" "string2" +stringImportPaths "string3" +preGenerateCommands "preg1" "preg2" +preGenerateCommands "preg3" +postGenerateCommands "postg1" "postg2" +postGenerateCommands "postg3" +preBuildCommands "preb1" "preb2" +preBuildCommands "preb3" +postBuildCommands "postb1" "postb2" +postBuildCommands "postb3" +dflags "df1" "df2" +dflags "df3" +lflags "lf1" "lf2" +lflags "lf3" +`; + PackageRecipe rec1; + parseSDL(rec1, sdl, null, "testfile"); + PackageRecipe rec; + parseSDL(rec, rec1.toSDL(), null); // verify that all fields are serialized properly + + assert(rec.name == "projectname"); + assert(rec.description == "project description"); + assert(rec.homepage == "http://example.com"); + assert(rec.authors == ["author 1", "author 2", "author 3"]); + assert(rec.copyright == "copyright string"); + assert(rec.license == "license string"); + assert(rec.version_ == "1.0.0"); + assert(rec.subPackages.length == 3); + assert(rec.subPackages[0].path == ""); + assert(rec.subPackages[0].recipe.name == "subpackage1"); + assert(rec.subPackages[1].path == ""); + assert(rec.subPackages[1].recipe.name == "subpackage2"); + 
assert(rec.subPackages[1].recipe.buildSettings.dependencies.length == 1); + assert("projectname:subpackage1" in rec.subPackages[1].recipe.buildSettings.dependencies); + assert(rec.subPackages[2].path == "pathsp3"); + assert(rec.configurations.length == 2); + assert(rec.configurations[0].name == "config1"); + assert(rec.configurations[0].platforms == ["windows", "linux"]); + assert(rec.configurations[0].buildSettings.targetType == TargetType.library); + assert(rec.configurations[1].name == "config2"); + assert(rec.configurations[1].platforms == ["windows-x86"]); + assert(rec.configurations[1].buildSettings.targetType == TargetType.executable); + assert(rec.buildTypes.length == 2); + assert(rec.buildTypes["debug"].dflags == ["": ["-g", "-debug"]]); + assert(rec.buildTypes["release"].dflags == ["": ["-release", "-O"]]); + assert(rec.ddoxFilterArgs == ["-arg1", "-arg2", "-arg3"], rec.ddoxFilterArgs.to!string); + assert(rec.ddoxTool == "ddoxtool"); + assert(rec.buildSettings.dependencies.length == 2); + assert(rec.buildSettings.dependencies["projectname:subpackage1"].optional == false); + assert(rec.buildSettings.dependencies["projectname:subpackage1"].path == Path(".")); + assert(rec.buildSettings.dependencies["somedep"].versionSpec == "1.0.0"); + assert(rec.buildSettings.dependencies["somedep"].optional == true); + assert(rec.buildSettings.dependencies["somedep"].path.empty); + assert(rec.buildSettings.systemDependencies == "system dependencies"); + assert(rec.buildSettings.targetType == TargetType.executable); + assert(rec.buildSettings.targetName == "target name"); + assert(rec.buildSettings.targetPath == "target path"); + assert(rec.buildSettings.workingDirectory == "working directory"); + assert(rec.buildSettings.subConfigurations.length == 1); + assert(rec.buildSettings.subConfigurations["projectname:subpackage2"] == "library"); + assert(rec.buildSettings.buildRequirements == ["": cast(BuildRequirements)(BuildRequirement.allowWarnings | 
BuildRequirement.silenceDeprecations)]); + assert(rec.buildSettings.buildOptions == ["": cast(BuildOptions)(BuildOption.verbose | BuildOption.ignoreUnknownPragmas)]); + assert(rec.buildSettings.libs == ["": ["lib1", "lib2", "lib3"]]); + assert(rec.buildSettings.sourceFiles == ["": ["source1", "source2", "source3"]]); + assert(rec.buildSettings.sourcePaths == ["": ["sourcepath1", "sourcepath2", "sourcepath3"]]); + assert(rec.buildSettings.excludedSourceFiles == ["": ["excluded1", "excluded2", "excluded3"]]); + assert(rec.buildSettings.mainSourceFile == "main source"); + assert(rec.buildSettings.copyFiles == ["": ["copy1", "copy2", "copy3"]]); + assert(rec.buildSettings.versions == ["": ["version1", "version2", "version3"]]); + assert(rec.buildSettings.debugVersions == ["": ["debug1", "debug2", "debug3"]]); + assert(rec.buildSettings.importPaths == ["": ["import1", "import2", "import3"]]); + assert(rec.buildSettings.stringImportPaths == ["": ["string1", "string2", "string3"]]); + assert(rec.buildSettings.preGenerateCommands == ["": ["preg1", "preg2", "preg3"]]); + assert(rec.buildSettings.postGenerateCommands == ["": ["postg1", "postg2", "postg3"]]); + assert(rec.buildSettings.preBuildCommands == ["": ["preb1", "preb2", "preb3"]]); + assert(rec.buildSettings.postBuildCommands == ["": ["postb1", "postb2", "postb3"]]); + assert(rec.buildSettings.dflags == ["": ["df1", "df2", "df3"]]); + assert(rec.buildSettings.lflags == ["": ["lf1", "lf2", "lf3"]]); +} + +unittest { // test platform identifiers + auto sdl = +`name "testproject" +dflags "-a" "-b" platform="windows-x86" +dflags "-c" platform="windows-x86" +dflags "-e" "-f" +dflags "-g" +dflags "-h" "-i" platform="linux" +dflags "-j" platform="linux" +`; + PackageRecipe rec; + parseSDL(rec, sdl, null, "testfile"); + assert(rec.buildSettings.dflags.length == 3); + assert(rec.buildSettings.dflags["-windows-x86"] == ["-a", "-b", "-c"]); + assert(rec.buildSettings.dflags[""] == ["-e", "-f", "-g"]); + 
assert(rec.buildSettings.dflags["-linux"] == ["-h", "-i", "-j"]); +} + +unittest { // test for missing name field + import std.exception; + auto sdl = `description "missing name"`; + PackageRecipe rec; + assertThrown(parseSDL(rec, sdl, null, "testfile")); +} + +unittest { // test single value fields + import std.exception; + PackageRecipe rec; + assertThrown!Exception(parseSDL(rec, `name "hello" "world"`, null, "testfile")); + assertThrown!Exception(parseSDL(rec, `name`, null, "testfile")); + assertThrown!Exception(parseSDL(rec, `name 10`, null, "testfile")); + assertThrown!Exception(parseSDL(rec, + `name "hello" { + world + }`, null, "testfile")); + assertThrown!Exception(parseSDL(rec, + `name "" + versions "hello" 10` + , null, "testfile")); +} + +unittest { // test basic serialization + PackageRecipe p; + p.name = "test"; + p.authors = ["foo", "bar"]; + p.buildSettings.dflags["-windows"] = ["-a"]; + p.buildSettings.lflags[""] = ["-b", "-c"]; + auto sdl = toSDL(p).toSDLDocument(); + assert(sdl == +`name "test" +authors "foo" "bar" +dflags "-a" platform="windows" +lflags "-b" "-c" +`); +} + +unittest { + auto sdl = "name \"test\"\nsourcePaths"; + PackageRecipe rec; + parseSDL(rec, sdl, null, "testfile"); + assert("" in rec.buildSettings.sourcePaths); } diff --git a/source/dub/semver.d b/source/dub/semver.d index 48924ec..ac8f6e7 100644 --- a/source/dub/semver.d +++ b/source/dub/semver.d @@ -1,7 +1,15 @@ /** - Implementes version validation and comparison according to the semantic versioning specification. + Implementes version validation and comparison according to the semantic + versioning specification. - Copyright: © 2013 rejectedsoftware e.K. + The general format of a semantiv version is: a.b.c[-x.y...][+x.y...] + a/b/c must be integer numbers with no leading zeros, and x/y/... must be + either numbers or identifiers containing only ASCII alphabetic characters + or hyphens. Identifiers may not start with a digit. 
+ + See_Also: http://semver.org/ + + Copyright: © 2013-2016 rejectedsoftware e.K. License: Subject to the terms of the MIT license, as written in the included LICENSE.txt file. Authors: Sönke Ludwig */ @@ -12,11 +20,7 @@ import std.algorithm : max; import std.conv; -/* - General format of SemVer: a.b.c[-x.y...][+x.y...] - a/b/c must be integer numbers with no leading zeros - x/y/... must be either numbers or identifiers containing only ASCII alphabetic characters or hyphens -*/ +@safe: /** Validates a version string according to the SemVer specification. @@ -64,6 +68,7 @@ return true; } +/// unittest { assert(isValidVersion("1.9.0")); assert(isValidVersion("0.10.0")); @@ -93,6 +98,10 @@ assert(!isValidVersion("1.0-1.0")); } + +/** + Determines if a given valid SemVer version has a pre-release suffix. +*/ bool isPreReleaseVersion(string ver) in { assert(isValidVersion(ver)); } body { @@ -106,11 +115,25 @@ return isValidNumber(ver[0 .. di]); } +/// +unittest { + assert(isPreReleaseVersion("1.0.0-alpha")); + assert(isPreReleaseVersion("1.0.0-alpha+b1")); + assert(isPreReleaseVersion("0.9.0-beta.1")); + assert(!isPreReleaseVersion("0.9.0")); + assert(!isPreReleaseVersion("0.9.0+b1")); +} + /** Compares the precedence of two SemVer version strings. - The version strings must be validated using isValidVersion() before being - passed to this function. + The version strings must be validated using `isValidVersion` before being + passed to this function. Note that the build meta data suffix (if any) is + being ignored when comparing version numbers. + + Returns: + Returns a negative number if `a` is a lower version than `b`, `0` if they are + equal, and a positive number otherwise. 
*/ int compareVersions(string a, string b) { @@ -145,6 +168,15 @@ return bempty - aempty; } +/// +unittest { + assert(compareVersions("1.0.0", "1.0.0") == 0); + assert(compareVersions("1.0.0+b1", "1.0.0+b2") == 0); + assert(compareVersions("1.0.0", "2.0.0") < 0); + assert(compareVersions("1.0.0-beta", "1.0.0") < 0); + assert(compareVersions("1.0.1", "1.0.0") > 0); +} + unittest { void assertLess(string a, string b) { assert(compareVersions(a, b) < 0, "Failed for "~a~" < "~b); @@ -177,26 +209,28 @@ /** - Given version string, increments the next to last version number. - Prerelease and build metadata information is ignored. - @param ver Does not need to be a valid semver version. - @return Valid semver version + Increments a given (partial) version number to the next higher version. + + Prerelease and build metadata information is ignored. The given version + can skip the minor and patch digits. If no digits are skipped, the next + minor version will be selected. If the patch or minor versions are skipped, + the next major version will be selected. + + This function corresponds to the semantivs of the "~>" comparison operator's + upper bound. The semantics of this are the same as for the "approximate" version specifier from rubygems. (https://github.com/rubygems/rubygems/tree/81d806d818baeb5dcb6398ca631d772a003d078e/lib/rubygems/version.rb) - Examples: - 1.5 -> 2.0 - 1.5.67 -> 1.6.0 - 1.5.67-a -> 1.6.0 + See_Also: `expandVersion` */ string bumpVersion(string ver) { // Cut off metadata and prerelease information. auto mi = ver.indexOfAny("+-"); if (mi > 0) ver = ver[0..mi]; // Increment next to last version from a[.b[.c]]. - auto splitted = split(ver, "."); + auto splitted = () @trusted { return split(ver, "."); } (); // DMD 2.065.0 assert(splitted.length > 0 && splitted.length <= 3, "Version corrupt: " ~ ver); auto to_inc = splitted.length == 3? 1 : 0; splitted = splitted[0 .. 
to_inc+1]; @@ -205,7 +239,7 @@ while (splitted.length < 3) splitted ~= "0"; return splitted.join("."); } - +/// unittest { assert("1.0.0" == bumpVersion("0")); assert("1.0.0" == bumpVersion("0.0")); @@ -217,8 +251,12 @@ } /** - Takes a abbreviated version and expands it to a valid SemVer version. - E.g. "1.0" -> "1.0.0" + Takes a partial version and expands it to a valid SemVer version. + + This function corresponds to the semantivs of the "~>" comparison operator's + lower bound. + + See_Also: `bumpVersion` */ string expandVersion(string ver) { auto mi = ver.indexOfAny("+-"); @@ -227,12 +265,12 @@ sub = ver[mi..$]; ver = ver[0..mi]; } - auto splitted = split(ver, "."); + auto splitted = () @trusted { return split(ver, "."); } (); // DMD 2.065.0 assert(splitted.length > 0 && splitted.length <= 3, "Version corrupt: " ~ ver); while (splitted.length < 3) splitted ~= "0"; return splitted.join(".") ~ sub; } - +/// unittest { assert("1.0.0" == expandVersion("1")); assert("1.0.0" == expandVersion("1.0")); diff --git a/source/dub/version_.d b/source/dub/version_.d index 8947c5e..59ff65f 100644 --- a/source/dub/version_.d +++ b/source/dub/version_.d @@ -1,3 +1,2 @@ -module dub.version_; -enum dubVersion = "v0.9.22"; -enum initialCompilerBinary = "dmd"; +module dub.version_; +enum dubVersion = "v1.4.0-beta.1"; diff --git a/test/0-init-fail-json.sh b/test/0-init-fail-json.sh new file mode 100755 index 0000000..069014e --- /dev/null +++ b/test/0-init-fail-json.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +packname="0-init-fail-pack" +deps="logger PACKAGE_DONT_EXIST" # would be very unlucky if it does exist... 
+ +if $$DUB init -n $packname $deps -f json 2>/dev/null; then + die $LINENO 'Init with unknown non-existing dependency expected to fail' +fi + + +function cleanup { + rm -rf $packname +} + +if [ -e $packname/dub.json ]; then # package is there, it should have failed + cleanup + die $LINENO "$packname/dub.json was not created" +fi diff --git a/test/0-init-fail.sh b/test/0-init-fail.sh new file mode 100755 index 0000000..c440a57 --- /dev/null +++ b/test/0-init-fail.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +packname="0-init-fail-pack" +deps="logger PACKAGE_DONT_EXIST" # would be very unlucky if it does exist... + +if $DUB init -n $packname $deps 2>/dev/null; then + die $LINENO 'Init with unknown non-existing dependency expected to fail' +fi + +function cleanup { + rm -rf $packname +} + +if [ -e $packname/dub.sdl ]; then # package is there, it should have failed + cleanup + die $LINENO "$packname/dub.sdl was not created" +fi diff --git a/test/0-init-fail/.gitignore b/test/0-init-fail/.gitignore deleted file mode 100644 index 433d266..0000000 --- a/test/0-init-fail/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -.dub -docs.json -__dummy.html -*.o -*.obj diff --git a/test/0-init-fail/0-init-fail.sh b/test/0-init-fail/0-init-fail.sh deleted file mode 100755 index 11fadf4..0000000 --- a/test/0-init-fail/0-init-fail.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -packname="0-init-fail-pack" -deps="logger PACKAGE_DONT_EXIST" # would be very unlucky if it does exist... 
- -$DUB init $packname $deps - -function cleanup { - rm -rf $packname -} - -if [ -e $packname/dub.json ]; then # package is there, it should have failed - cleanup - exit 1 -fi -exit 0 diff --git a/test/0-init-fail/dub.json b/test/0-init-fail/dub.json deleted file mode 100644 index 6e6605b..0000000 --- a/test/0-init-fail/dub.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "name": "0-init-fail", - "description": "A minimal D application.", - "copyright": "Copyright © 2014, colin", - "authors": ["colin"], - "dependencies": { - } -} diff --git a/test/0-init-fail/source/app.d b/test/0-init-fail/source/app.d deleted file mode 100644 index 0569360..0000000 --- a/test/0-init-fail/source/app.d +++ /dev/null @@ -1,10 +0,0 @@ -import std.stdio; - -import std.process : execute; -int main(string[] args) -{ - writefln("Executing init test - fail"); - auto script = args[0] ~ ".sh"; - auto dubInit = execute(script); - return dubInit.status; -} diff --git a/test/0-init-interactive.dub.sdl b/test/0-init-interactive.dub.sdl new file mode 100644 index 0000000..3eaf63c --- /dev/null +++ b/test/0-init-interactive.dub.sdl @@ -0,0 +1,5 @@ +name "test" +description "desc" +authors "author" +copyright "copy" +license "gpl" diff --git a/test/0-init-interactive.sh b/test/0-init-interactive.sh new file mode 100755 index 0000000..945838b --- /dev/null +++ b/test/0-init-interactive.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +packname="0-init-interactive" + +echo -e "sdl\ntest\ndesc\nauthor\ngpl\ncopy\n\n" | $DUB init $packname + +function cleanup { + rm -rf $packname +} + +if [ ! -e $packname/dub.sdl ]; then # it failed + cleanup + die $LINENO 'No dub.sdl file has been generated.' +fi + +if ! diff $packname/dub.sdl "$CURR_DIR"/0-init-interactive.dub.sdl; then + cleanup + die $LINENO 'Contents of generated dub.sdl not as expected.' 
+fi + +cleanup diff --git a/test/0-init-multi-json.sh b/test/0-init-multi-json.sh new file mode 100755 index 0000000..239c419 --- /dev/null +++ b/test/0-init-multi-json.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +packname="0-init-multi-pack" +deps="openssl logger" +type="vibe.d" + +$DUB init -n $packname $deps --type=$type -f json + +function cleanup { + rm -rf $packname +} + +if [ ! -e $packname/dub.json ]; then + die $LINENO '$packname/dub.json not created' +else # check if resulting dub.json has all dependencies in tow + deps="$deps vibe-d"; + IFS=" " read -a arr <<< "$deps" + for ele in "${arr[@]}" + do + if [ `grep -c "$ele" $packname/dub.json` -ne 1 ]; then #something went wrong + cleanup + die $LINENO "$ele not in $packname/dub.json" + fi + done + cleanup +fi diff --git a/test/0-init-multi.sh b/test/0-init-multi.sh new file mode 100755 index 0000000..8432b96 --- /dev/null +++ b/test/0-init-multi.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +packname="0-init-multi-pack" +deps="openssl logger" +type="vibe.d" + +$DUB init -n $packname $deps --type=$type --format sdl + +function cleanup { + rm -rf $packname +} + +if [ ! -e $packname/dub.sdl ]; then + cleanup + die $LINENO 'No dub.sdl file has been generated.' 
+else # check if resulting dub.sdl has all dependencies in tow + deps="$deps vibe-d"; + IFS=" " read -a arr <<< "$deps" + for ele in "${arr[@]}" + do + if [ `grep -c "$ele" $packname/dub.sdl` -ne 1 ]; then #something went wrong + cleanup + die $LINENO "$ele not in $packname/dub.sdl" + fi + done + cleanup +fi diff --git a/test/0-init-multi/.gitignore b/test/0-init-multi/.gitignore deleted file mode 100644 index 433d266..0000000 --- a/test/0-init-multi/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -.dub -docs.json -__dummy.html -*.o -*.obj diff --git a/test/0-init-multi/0-init-multi.sh b/test/0-init-multi/0-init-multi.sh deleted file mode 100755 index 54b151e..0000000 --- a/test/0-init-multi/0-init-multi.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash - -packname="0-init-multi-pack" -deps="openssl logger" -type="vibe.d" - -$DUB init $packname $deps --type=$type - -function cleanup { - rm -rf $packname -} - -if [ ! -e $packname/dub.json ]; then # it failed, exit 1 - exit 1 -else # check if resulting dub.json has all dependancies in tow - deps="$deps vibe-d"; - IFS=" " read -a arr <<< "$deps" - for ele in "${arr[@]}" - do - if [ `grep -c "$ele" $packname/dub.json` -ne 1 ]; then #something went wrong - echo "$ele not in $packname/dub.json" - cleanup - exit 1 - fi - done - cleanup - exit 0 - -fi diff --git a/test/0-init-multi/dub.json b/test/0-init-multi/dub.json deleted file mode 100644 index 752ff18..0000000 --- a/test/0-init-multi/dub.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "name": "0-init-multi", - "description": "A minimal D application.", - "copyright": "Copyright © 2014, colin", - "authors": ["colin"], - "dependencies": { - } -} diff --git a/test/0-init-multi/source/app.d b/test/0-init-multi/source/app.d deleted file mode 100644 index acd9893..0000000 --- a/test/0-init-multi/source/app.d +++ /dev/null @@ -1,10 +0,0 @@ -import std.stdio; - -import std.process : execute; -int main(string[] args) -{ - writefln("Executing init test - multi"); - auto script = args[0] ~ 
".sh"; - auto dubInit = execute(script); - return dubInit.status; -} diff --git a/test/0-init-simple-json.sh b/test/0-init-simple-json.sh new file mode 100755 index 0000000..2a7ec8a --- /dev/null +++ b/test/0-init-simple-json.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +packname="0-init-simple-pack" + +$DUB init -n $packname -f json + +function cleanup { + rm -rf $packname +} + +if [ ! -e $packname/dub.json ]; then + cleanup + die $LINENO 'No dub.json file has been generated.' +fi +cleanup diff --git a/test/0-init-simple.sh b/test/0-init-simple.sh new file mode 100755 index 0000000..f4fee2e --- /dev/null +++ b/test/0-init-simple.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +packname="0-init-simple-pack" + +$DUB init -n $packname --format sdl + +function cleanup { + rm -rf $packname +} + +if [ ! -e $packname/dub.sdl ]; then # it failed + cleanup + die $LINENO 'No dub.sdl file has been generated.' +fi +cleanup diff --git a/test/0-init-simple/.gitignore b/test/0-init-simple/.gitignore deleted file mode 100644 index 433d266..0000000 --- a/test/0-init-simple/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -.dub -docs.json -__dummy.html -*.o -*.obj diff --git a/test/0-init-simple/0-init-simple.sh b/test/0-init-simple/0-init-simple.sh deleted file mode 100755 index b5f9227..0000000 --- a/test/0-init-simple/0-init-simple.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -packname="0-init-simple-pack" - -$DUB init $packname - -function cleanup { - rm -rf $packname -} - -if [ ! 
-e $packname/dub.json ]; then # it failed - cleanup - exit 1 -fi -cleanup -exit 0 diff --git a/test/0-init-simple/dub.json b/test/0-init-simple/dub.json deleted file mode 100644 index 276804f..0000000 --- a/test/0-init-simple/dub.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "name": "0-init-simple", - "description": "A minimal D application.", - "copyright": "Copyright © 2014, colin", - "authors": ["colin"], - "dependencies": { - } -} diff --git a/test/0-init-simple/source/app.d b/test/0-init-simple/source/app.d deleted file mode 100644 index 2bd9e78..0000000 --- a/test/0-init-simple/source/app.d +++ /dev/null @@ -1,10 +0,0 @@ -import std.stdio; - -import std.process : execute; -int main(string[] args) -{ - writefln("Executing init test - simple"); - auto script = args[0] ~ ".sh"; - auto dubInit = execute(script); - return dubInit.status; -} diff --git a/test/1-dynLib-simple/.no_build b/test/1-dynLib-simple/.no_build deleted file mode 100644 index 72679d2..0000000 --- a/test/1-dynLib-simple/.no_build +++ /dev/null @@ -1 +0,0 @@ -Remove me when bug with dynamic libs get fixed. 
diff --git a/test/1-dynLib-simple/.no_build_gdc b/test/1-dynLib-simple/.no_build_gdc new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/1-dynLib-simple/.no_build_gdc diff --git a/test/1-dynLib-simple/.no_build_ldc2 b/test/1-dynLib-simple/.no_build_ldc2 new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/1-dynLib-simple/.no_build_ldc2 diff --git a/test/1-dynLib-simple/.no_run b/test/1-dynLib-simple/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/1-dynLib-simple/.no_run diff --git a/test/1-exec-simple-package-json/package.json b/test/1-exec-simple-package-json/package.json new file mode 100644 index 0000000..016c4ea --- /dev/null +++ b/test/1-exec-simple-package-json/package.json @@ -0,0 +1,4 @@ +{ + "name": "exec-simple", + "targetType": "executable" +} diff --git a/test/1-exec-simple-package-json/source/app.d b/test/1-exec-simple-package-json/source/app.d new file mode 100644 index 0000000..dbab869 --- /dev/null +++ b/test/1-exec-simple-package-json/source/app.d @@ -0,0 +1,6 @@ +import std.stdio; + +void main() +{ + writeln(__FUNCTION__); +} diff --git a/test/1-sourceLib-simple/dub.json b/test/1-sourceLib-simple/dub.json index 0bc0c7b..528128e 100644 --- a/test/1-sourceLib-simple/dub.json +++ b/test/1-sourceLib-simple/dub.json @@ -1,4 +1,4 @@ { - "name": "sourceLib-simple", + "name": "sourcelib-simple", "targetType": "sourceLibrary" } diff --git a/test/1-staticLib-simple/.no_run b/test/1-staticLib-simple/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/1-staticLib-simple/.no_run diff --git a/test/2-dynLib-with-staticLib-dep/.no_build_gdc b/test/2-dynLib-with-staticLib-dep/.no_build_gdc new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/2-dynLib-with-staticLib-dep/.no_build_gdc diff --git a/test/2-dynLib-with-staticLib-dep/.no_build_ldc2 b/test/2-dynLib-with-staticLib-dep/.no_build_ldc2 new file mode 100644 index 0000000..e69de29 --- /dev/null +++ 
b/test/2-dynLib-with-staticLib-dep/.no_build_ldc2 diff --git a/test/2-dynLib-with-staticLib-dep/.no_run b/test/2-dynLib-with-staticLib-dep/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/2-dynLib-with-staticLib-dep/.no_run diff --git a/test/2-dynLib-with-staticLib-dep/dub.json b/test/2-dynLib-with-staticLib-dep/dub.json new file mode 100644 index 0000000..f0b6043 --- /dev/null +++ b/test/2-dynLib-with-staticLib-dep/dub.json @@ -0,0 +1,7 @@ +{ + "name": "dynlib-with-staticlib-dep", + "targetType": "dynamicLibrary", + "dependencies": { + "staticlib-simple": { "path": "../1-staticLib-simple/" } + } +} diff --git a/test/2-dynLib-with-staticLib-dep/source/dynlib/app.d b/test/2-dynLib-with-staticLib-dep/source/dynlib/app.d new file mode 100644 index 0000000..9741cae --- /dev/null +++ b/test/2-dynLib-with-staticLib-dep/source/dynlib/app.d @@ -0,0 +1,8 @@ +module dynlib.app; +import std.stdio; +import staticlib.app; + +void foo() +{ + entry(); +} diff --git a/test/4-describe-data-1-list.sh b/test/4-describe-data-1-list.sh new file mode 100755 index 0000000..3cfe5dc --- /dev/null +++ b/test/4-describe-data-1-list.sh @@ -0,0 +1,139 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +cd "$CURR_DIR"/describe-project + +temp_file=$(mktemp $(basename $0).XXXXXX) + +function cleanup { + rm $temp_file +} + +trap cleanup EXIT + +if ! 
$DUB describe --compiler=$DC --data-list \ + '--data= target-type , target-path , target-name ' \ + '--data= working-directory ' \ + --data=main-source-file \ + '--data=dflags,lflags' \ + '--data=libs, linker-files' \ + '--data=source-files, copy-files' \ + '--data=versions, debug-versions' \ + --data=import-paths \ + --data=string-import-paths \ + --data=import-files \ + --data=string-import-files \ + --data=pre-generate-commands \ + --data=post-generate-commands \ + --data=pre-build-commands \ + --data=post-build-commands \ + '--data=requirements, options' \ + > "$temp_file"; then + die $LINENO 'Printing project data failed!' +fi + +# Create the expected output path file to compare against. +expected_file="$CURR_DIR/expected-describe-data-1-list-output" +# --data=target-type +echo "executable" > "$expected_file" +echo >> "$expected_file" +# --data=target-path +echo "$CURR_DIR/describe-project/" >> "$expected_file" +echo >> "$expected_file" +# --data=target-name +echo "describe-project" >> "$expected_file" +echo >> "$expected_file" +# --data=working-directory +echo "$CURR_DIR/describe-project/" >> "$expected_file" +echo >> "$expected_file" +# --data=main-source-file +echo "$CURR_DIR/describe-project/src/dummy.d" >> "$expected_file" +echo >> "$expected_file" +# --data=dflags +echo "--some-dflag" >> "$expected_file" +echo "--another-dflag" >> "$expected_file" +echo >> "$expected_file" +# --data=lflags +echo "--some-lflag" >> "$expected_file" +echo "--another-lflag" >> "$expected_file" +echo >> "$expected_file" +# --data=libs +echo "somelib" >> "$expected_file" +echo "anotherlib" >> "$expected_file" +echo >> "$expected_file" +# --data=linker-files +echo "$CURR_DIR/describe-dependency-3/libdescribe-dependency-3.a" >> "$expected_file" +echo "$CURR_DIR/describe-project/some.a" >> "$expected_file" +echo "$CURR_DIR/describe-dependency-1/dep.a" >> "$expected_file" +echo >> "$expected_file" +# --data=source-files +echo "$CURR_DIR/describe-project/src/dummy.d" >> 
"$expected_file" +echo "$CURR_DIR/describe-dependency-1/source/dummy.d" >> "$expected_file" +echo >> "$expected_file" +# --data=copy-files +echo "$CURR_DIR/describe-project/data/dummy.dat" >> "$expected_file" +echo "$CURR_DIR/describe-dependency-1/data/*" >> "$expected_file" +echo >> "$expected_file" +# --data=versions +echo "someVerIdent" >> "$expected_file" +echo "anotherVerIdent" >> "$expected_file" +echo "Have_describe_project" >> "$expected_file" +echo "Have_describe_dependency_1" >> "$expected_file" +echo "Have_describe_dependency_2" >> "$expected_file" +echo "Have_describe_dependency_3" >> "$expected_file" +echo >> "$expected_file" +# --data=debug-versions +echo "someDebugVerIdent" >> "$expected_file" +echo "anotherDebugVerIdent" >> "$expected_file" +echo >> "$expected_file" +# --data=import-paths +echo "$CURR_DIR/describe-project/src/" >> "$expected_file" +echo "$CURR_DIR/describe-dependency-1/source/" >> "$expected_file" +echo "$CURR_DIR/describe-dependency-2/some-path/" >> "$expected_file" +echo "$CURR_DIR/describe-dependency-3/dep3-source/" >> "$expected_file" +echo >> "$expected_file" +# --data=string-import-paths +echo "$CURR_DIR/describe-project/views/" >> "$expected_file" +echo "$CURR_DIR/describe-dependency-2/some-extra-string-import-path/" >> "$expected_file" +echo "$CURR_DIR/describe-dependency-3/dep3-string-import-path/" >> "$expected_file" +echo >> "$expected_file" +# --data=import-files +echo "$CURR_DIR/describe-dependency-2/some-path/dummy.d" >> "$expected_file" +echo >> "$expected_file" +# --data=string-import-files +echo "$CURR_DIR/describe-project/views/dummy.d" >> "$expected_file" +#echo "$CURR_DIR/describe-dependency-2/some-extra-string-import-path/dummy.d" >> "$expected_file" # This is missing from result, is that a bug? 
+echo >> "$expected_file" +# --data=pre-generate-commands +echo "./do-preGenerateCommands.sh" >> "$expected_file" +echo "../describe-dependency-1/dependency-preGenerateCommands.sh" >> "$expected_file" +echo >> "$expected_file" +# --data=post-generate-commands +echo "./do-postGenerateCommands.sh" >> "$expected_file" +echo "../describe-dependency-1/dependency-postGenerateCommands.sh" >> "$expected_file" +echo >> "$expected_file" +# --data=pre-build-commands +echo "./do-preBuildCommands.sh" >> "$expected_file" +echo "../describe-dependency-1/dependency-preBuildCommands.sh" >> "$expected_file" +echo >> "$expected_file" +# --data=post-build-commands +echo "./do-postBuildCommands.sh" >> "$expected_file" +echo "../describe-dependency-1/dependency-postBuildCommands.sh" >> "$expected_file" +echo >> "$expected_file" +# --data=requirements +echo "allowWarnings" >> "$expected_file" +echo "disallowInlining" >> "$expected_file" +#echo "requireContracts" >> "$expected_file" # Not sure if this (from a sourceLib dependency) should be missing from the result +echo >> "$expected_file" +# --data=options +echo "debugMode" >> "$expected_file" +echo "releaseMode" >> "$expected_file" +echo "debugInfo" >> "$expected_file" +echo "warnings" >> "$expected_file" +#echo "stackStomping" >> "$expected_file" # Not sure if this (from a sourceLib dependency) should be missing from the result + +if ! diff "$expected_file" "$temp_file"; then + die $LINENO 'The project data did not match the expected output!' +fi + diff --git a/test/4-describe-data-2-dmd.sh b/test/4-describe-data-2-dmd.sh new file mode 100755 index 0000000..d323bc5 --- /dev/null +++ b/test/4-describe-data-2-dmd.sh @@ -0,0 +1,87 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +if [ "${DC}" != "dmd" ]; then + echo Skipping DMD-centric test on configuration that lacks DMD. 
+ exit +fi + +cd "$CURR_DIR"/describe-project + +temp_file=$(mktemp $(basename $0).XXXXXX) + +function cleanup { + rm $temp_file +} + +trap cleanup EXIT + +if ! $DUB describe --compiler=${DC} \ + --data=main-source-file \ + --data=dflags,lflags \ + --data=libs,linker-files \ + --data=source-files \ + --data=versions \ + --data=debug-versions \ + --data=import-paths \ + --data=string-import-paths \ + --data=import-files \ + --data=options \ + > "$temp_file"; then + die 'Printing project data failed!' +fi + +# Create the expected output path file to compare against. +expected_file="$CURR_DIR/expected-describe-data-2-dmd-output" +# --data=main-source-file +echo -n "'$CURR_DIR/describe-project/src/dummy.d' " > "$expected_file" +# --data=dflags +echo -n "--some-dflag " >> "$expected_file" +echo -n "--another-dflag " >> "$expected_file" +# --data=lflags +echo -n "-L--some-lflag " >> "$expected_file" +echo -n "-L--another-lflag " >> "$expected_file" +# --data=libs +echo -n "-L-lsomelib " >> "$expected_file" +echo -n "-L-lanotherlib " >> "$expected_file" +# --data=linker-files +echo -n "'$CURR_DIR/describe-dependency-3/libdescribe-dependency-3.a' " >> "$expected_file" +echo -n "'$CURR_DIR/describe-project/some.a' " >> "$expected_file" +echo -n "'$CURR_DIR/describe-dependency-1/dep.a' " >> "$expected_file" +# --data=source-files +echo -n "'$CURR_DIR/describe-project/src/dummy.d' " >> "$expected_file" +echo -n "'$CURR_DIR/describe-dependency-1/source/dummy.d' " >> "$expected_file" +# --data=versions +echo -n "-version=someVerIdent " >> "$expected_file" +echo -n "-version=anotherVerIdent " >> "$expected_file" +echo -n "-version=Have_describe_project " >> "$expected_file" +echo -n "-version=Have_describe_dependency_1 " >> "$expected_file" +echo -n "-version=Have_describe_dependency_2 " >> "$expected_file" +echo -n "-version=Have_describe_dependency_3 " >> "$expected_file" +# --data=debug-versions +echo -n "-debug=someDebugVerIdent " >> "$expected_file" +echo -n 
"-debug=anotherDebugVerIdent " >> "$expected_file" +# --data=import-paths +echo -n "'-I$CURR_DIR/describe-project/src/' " >> "$expected_file" +echo -n "'-I$CURR_DIR/describe-dependency-1/source/' " >> "$expected_file" +echo -n "'-I$CURR_DIR/describe-dependency-2/some-path/' " >> "$expected_file" +echo -n "'-I$CURR_DIR/describe-dependency-3/dep3-source/' " >> "$expected_file" +# --data=string-import-paths +echo -n "'-J$CURR_DIR/describe-project/views/' " >> "$expected_file" +echo -n "'-J$CURR_DIR/describe-dependency-2/some-extra-string-import-path/' " >> "$expected_file" +echo -n "'-J$CURR_DIR/describe-dependency-3/dep3-string-import-path/' " >> "$expected_file" +# --data=import-files +echo -n "'$CURR_DIR/describe-dependency-2/some-path/dummy.d' " >> "$expected_file" +# --data=options +echo -n "-debug " >> "$expected_file" +echo -n "-release " >> "$expected_file" +echo -n "-g " >> "$expected_file" +echo -n "-wi" >> "$expected_file" +#echo -n "-gx " >> "$expected_file" # Not sure if this (from a sourceLib dependency) should be missing from the result +echo "" >> "$expected_file" + +if ! diff "$expected_file" "$temp_file"; then + die 'The project data did not match the expected output!' +fi + diff --git a/test/4-describe-data-3-zero-delim.sh b/test/4-describe-data-3-zero-delim.sh new file mode 100755 index 0000000..aee4d08 --- /dev/null +++ b/test/4-describe-data-3-zero-delim.sh @@ -0,0 +1,127 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +cd "$CURR_DIR"/describe-project + +temp_file_normal=$(mktemp $(basename $0).XXXXXX) +temp_file_zero_delim=$(mktemp $(basename $0).XXXXXX) + +function cleanup { + rm $temp_file_normal + rm $temp_file_zero_delim +} + +trap cleanup EXIT + +# Test list-style project data +if ! 
$DUB describe --compiler=$DC --data-list \ + --data=target-type \ + --data=target-path \ + --data=target-name \ + --data=working-directory \ + --data=main-source-file \ + --data=dflags \ + --data=lflags \ + --data=libs \ + --data=linker-files \ + --data=source-files \ + --data=copy-files \ + --data=versions \ + --data=debug-versions \ + --data=import-paths \ + --data=string-import-paths \ + --data=import-files \ + --data=string-import-files \ + --data=pre-generate-commands \ + --data=post-generate-commands \ + --data=pre-build-commands \ + --data=post-build-commands \ + --data=requirements \ + --data=options \ + > "$temp_file_normal"; then + die 'Printing list-style project data failed!' +fi + +if ! $DUB describe --compiler=$DC --data-0 --data-list \ + --data=target-type \ + --data=target-path \ + --data=target-name \ + --data=working-directory \ + --data=main-source-file \ + --data=dflags \ + --data=lflags \ + --data=libs \ + --data=linker-files \ + --data=source-files \ + --data=copy-files \ + --data=versions \ + --data=debug-versions \ + --data=import-paths \ + --data=string-import-paths \ + --data=import-files \ + --data=string-import-files \ + --data=pre-generate-commands \ + --data=post-generate-commands \ + --data=pre-build-commands \ + --data=post-build-commands \ + --data=requirements \ + --data=options \ + | xargs -0 printf "%s\n" > "$temp_file_zero_delim"; then + die 'Printing null-delimited list-style project data failed!' +fi + +if ! diff -Z "$temp_file_normal" "$temp_file_zero_delim"; then + die 'The null-delimited list-style project data did not match the expected output!' +fi + +# Test --import-paths +if ! $DUB describe --compiler=$DC --import-paths \ + > "$temp_file_normal"; then + die 'Printing --import-paths failed!' +fi + +if ! $DUB describe --compiler=$DC --data-0 --import-paths \ + | xargs -0 printf "%s\n" > "$temp_file_zero_delim"; then + die 'Printing null-delimited --import-paths failed!' +fi + +if ! 
diff -Z -B "$temp_file_normal" "$temp_file_zero_delim"; then + die 'The null-delimited --import-paths data did not match the expected output!' +fi + +# DMD-only beyond this point +if [ "${DC}" != "dmd" ]; then + echo Skipping DMD-centric tests on configuration that lacks DMD. + exit +fi + +# Test dmd-style --data=versions +if ! $DUB describe --compiler=$DC --data=versions \ + > "$temp_file_normal"; then + die 'Printing dmd-style --data=versions failed!' +fi + +if ! $DUB describe --compiler=$DC --data-0 --data=versions \ + | xargs -0 printf "%s " > "$temp_file_zero_delim"; then + die 'Printing null-delimited dmd-style --data=versions failed!' +fi + +if ! diff -Z "$temp_file_normal" "$temp_file_zero_delim"; then + die 'The null-delimited dmd-style --data=versions did not match the expected output!' +fi + +# Test dmd-style --data=source-files +if ! $DUB describe --compiler=$DC --data=source-files \ + > "$temp_file_normal"; then + die 'Printing dmd-style --data=source-files failed!' +fi + +if ! $DUB describe --compiler=$DC --data-0 --data=source-files \ + | xargs -0 printf "'%s' " > "$temp_file_zero_delim"; then + die 'Printing null-delimited dmd-style --data=source-files failed!' +fi + +if ! diff -Z "$temp_file_normal" "$temp_file_zero_delim"; then + die 'The null-delimited dmd-style --data=source-files did not match the expected output!' +fi diff --git a/test/4-describe-import-paths.sh b/test/4-describe-import-paths.sh new file mode 100755 index 0000000..375bc40 --- /dev/null +++ b/test/4-describe-import-paths.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +cd "$CURR_DIR"/describe-project + +temp_file=$(mktemp $(basename $0).XXXXXX) + +function cleanup { + rm $temp_file +} + +trap cleanup EXIT + +if ! $DUB describe --compiler=$DC --import-paths > "$temp_file"; then + die 'Printing import paths failed!' +fi + +# Create the expected output path file to compare against. 
+echo "$CURR_DIR/describe-project/src/" > "$CURR_DIR/expected-import-path-output" +echo "$CURR_DIR/describe-dependency-1/source/" >> "$CURR_DIR/expected-import-path-output" +echo "$CURR_DIR/describe-dependency-2/some-path/" >> "$CURR_DIR/expected-import-path-output" +echo "$CURR_DIR/describe-dependency-3/dep3-source/" >> "$CURR_DIR/expected-import-path-output" + +if ! diff "$CURR_DIR"/expected-import-path-output "$temp_file"; then + die 'The import paths did not match the expected output!' +fi + diff --git a/test/4-describe-json.sh b/test/4-describe-json.sh new file mode 100755 index 0000000..21f5105 --- /dev/null +++ b/test/4-describe-json.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +cd "$CURR_DIR"/describe-project + +temp_file=$(mktemp $(basename $0).XXXXXX) + +function cleanup { + rm $temp_file +} + +trap cleanup EXIT + +if ! $DUB describe --compiler=$DC > "$temp_file"; then + die 'Printing describe JSON failed!' +fi + diff --git a/test/4-describe-string-import-paths.sh b/test/4-describe-string-import-paths.sh new file mode 100755 index 0000000..c1106a6 --- /dev/null +++ b/test/4-describe-string-import-paths.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +cd "$CURR_DIR"/describe-project + +temp_file=$(mktemp $(basename $0).XXXXXX) + +function cleanup { + rm $temp_file +} + +trap cleanup EXIT + +if ! $DUB describe --compiler=$DC --string-import-paths > "$temp_file"; then + die 'Printing string import paths failed!' +fi + +# Create the expected output path file to compare against. +echo "$CURR_DIR/describe-project/views/" > "$CURR_DIR/expected-string-import-path-output" +echo "$CURR_DIR/describe-dependency-2/some-extra-string-import-path/" >> "$CURR_DIR/expected-string-import-path-output" +echo "$CURR_DIR/describe-dependency-3/dep3-string-import-path/" >> "$CURR_DIR/expected-string-import-path-output" + +if ! 
diff "$CURR_DIR"/expected-string-import-path-output "$temp_file"; then + die 'The string import paths did not match the expected output!' +fi + diff --git a/test/5-convert-stdout.sh b/test/5-convert-stdout.sh new file mode 100755 index 0000000..ae7b491 --- /dev/null +++ b/test/5-convert-stdout.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +cd ${CURR_DIR}/1-exec-simple + +EXPECTED="name \"exec-simple\" +targetType \"executable\"" + +RESULT=`${DUB} convert -s -f sdl` + +if [ ! -f dub.json ]; then + die $LINENO 'Package recipe got modified!' +fi + +if [ -f dub.sdl ]; then + die $LINENO 'An SDL recipe got written.' +fi + +if [ "$RESULT" != "$EXPECTED" ]; then + die $LINENO 'Unexpected SDLang output.' +fi diff --git a/test/5-convert.sh b/test/5-convert.sh new file mode 100755 index 0000000..5cdbaa9 --- /dev/null +++ b/test/5-convert.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +cd "$CURR_DIR"/5-convert + +temp_file=$(mktemp $(basename $0).XXXXXX) + +function cleanup { + rm $temp_file +} +trap cleanup EXIT + +cp dub.sdl dub.sdl.ref + +$DUB convert -f json + +if [ -f "dub.sdl" ]; then die $LINENO 'Old recipe file not removed.'; fi +if [ ! -f "dub.json" ]; then die $LINENO 'New recipe file not created.'; fi + +$DUB convert -f sdl + +if [ -f "dub.json" ]; then die $LINENO 'Old recipe file not removed.'; fi +if [ ! -f "dub.sdl" ]; then die $LINENO 'New recipe file not created.'; fi + +if ! diff "dub.sdl" "dub.sdl.ref"; then + die $LINENO 'The project data did not match the expected output!' 
+fi + +rm dub.sdl.ref + diff --git a/test/5-convert/.no_build b/test/5-convert/.no_build new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/5-convert/.no_build @@ -0,0 +1 @@ + diff --git a/test/5-convert/dub.sdl b/test/5-convert/dub.sdl new file mode 100644 index 0000000..cd3a039 --- /dev/null +++ b/test/5-convert/dub.sdl @@ -0,0 +1,36 @@ +name "describe-dependency-1" +version "~master" +description "A test describe project" +homepage "fake.com" +authors "nobody" +copyright "Copyright © 2015, nobody" +license "BSD 2-clause" +x:ddoxFilterArgs "dfa1" "dfa2" +x:ddoxTool "ddoxtool" +dependency "describe-dependency-1:sub1" version=">=0.0.0" +targetType "sourceLibrary" +subConfiguration "describe-dependency-1:sub1" "library" +dflags "--another-dflag" +lflags "--another-lflag" +libs "anotherlib" +sourceFiles "dep.lib" platform="windows" +sourcePaths "source/" +copyFiles "data/*" +versions "anotherVerIdent" +debugVersions "anotherDebugVerIdent" +importPaths "source/" +preGenerateCommands "../describe-dependency-1/dependency-preGenerateCommands.sh" platform="posix" +postGenerateCommands "../describe-dependency-1/dependency-postGenerateCommands.sh" platform="posix" +preBuildCommands "../describe-dependency-1/dependency-preBuildCommands.sh" platform="posix" +postBuildCommands "../describe-dependency-1/dependency-postBuildCommands.sh" platform="posix" +buildRequirements "requireContracts" +buildOptions "stackStomping" +configuration "my-dependency-1-config" { + targetType "sourceLibrary" +} +subPackage { + name "sub1" +} +subPackage { + name "sub2" +} diff --git a/test/common.sh b/test/common.sh new file mode 100644 index 0000000..b43c656 --- /dev/null +++ b/test/common.sh @@ -0,0 +1,13 @@ +SOURCE_FILE=$_ + +set -ueEo pipefail + +# lineno[, msg] +function die() { + local line=$1 + local msg=${2:-command failed} + local rc=${3:-1} + >&2 echo "[ERROR] $SOURCE_FILE:$1 $msg" + exit $rc +} +trap 'die $LINENO' ERR diff --git a/test/ddox.sh b/test/ddox.sh new file 
mode 100755 index 0000000..44f836e --- /dev/null +++ b/test/ddox.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +(cd $CURR_DIR/ddox/default && $DUB build -b ddox) +grep -qF ddox_project $CURR_DIR/ddox/default/docs/index.html + +$DUB add-local $CURR_DIR/ddox/custom-tool +(cd $CURR_DIR/ddox/custom && $DUB build -b ddox) +grep -qF custom-tool $CURR_DIR/ddox/custom/docs/custom_tool_output +diff $CURR_DIR/ddox/custom-tool/public/copied $CURR_DIR/ddox/custom/docs/copied +$DUB remove-local $CURR_DIR/ddox/custom-tool diff --git a/test/ddox.sh.min_frontend b/test/ddox.sh.min_frontend new file mode 100644 index 0000000..340aa8d --- /dev/null +++ b/test/ddox.sh.min_frontend @@ -0,0 +1 @@ +2.069 diff --git a/test/ddox/.no_build b/test/ddox/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/ddox/.no_build diff --git a/test/ddox/custom-tool/.gitignore b/test/ddox/custom-tool/.gitignore new file mode 100644 index 0000000..3dc7018 --- /dev/null +++ b/test/ddox/custom-tool/.gitignore @@ -0,0 +1,6 @@ +.dub +docs.json +__dummy.html +*.o +*.obj +custom-tool diff --git a/test/ddox/custom-tool/dub.sdl b/test/ddox/custom-tool/dub.sdl new file mode 100644 index 0000000..558a7ee --- /dev/null +++ b/test/ddox/custom-tool/dub.sdl @@ -0,0 +1,4 @@ +name "custom-tool" +description "A minimal D application." 
+copyright "Copyright © 2015, dawg" +authors "dawg" diff --git a/test/ddox/custom-tool/public/copied b/test/ddox/custom-tool/public/copied new file mode 100644 index 0000000..d95f3ad --- /dev/null +++ b/test/ddox/custom-tool/public/copied @@ -0,0 +1 @@ +content diff --git a/test/ddox/custom-tool/source/app.d b/test/ddox/custom-tool/source/app.d new file mode 100644 index 0000000..e3b5f4b --- /dev/null +++ b/test/ddox/custom-tool/source/app.d @@ -0,0 +1,9 @@ +import std.file, std.stdio, std.string; + +void main(string[] args) +{ + if (args[1] != "generate-html") + return; + mkdirRecurse(args[$-1]); + File(args[$-1]~"/custom_tool_output", "w").writeln(args.join(" ")); +} diff --git a/test/ddox/custom/.gitignore b/test/ddox/custom/.gitignore new file mode 100644 index 0000000..7bd19a7 --- /dev/null +++ b/test/ddox/custom/.gitignore @@ -0,0 +1,6 @@ +.dub +docs +docs.json +__dummy.html +*.o +*.obj diff --git a/test/ddox/custom/dub.sdl b/test/ddox/custom/dub.sdl new file mode 100644 index 0000000..5a17c87 --- /dev/null +++ b/test/ddox/custom/dub.sdl @@ -0,0 +1,2 @@ +name "ddox-project" +x:ddoxTool "custom-tool" diff --git a/test/ddox/custom/source/ddox_project.d b/test/ddox/custom/source/ddox_project.d new file mode 100644 index 0000000..dc5478b --- /dev/null +++ b/test/ddox/custom/source/ddox_project.d @@ -0,0 +1,5 @@ +/// +module ddox_project; + +/// docstring +int foo; diff --git a/test/ddox/default/.gitignore b/test/ddox/default/.gitignore new file mode 100644 index 0000000..7bd19a7 --- /dev/null +++ b/test/ddox/default/.gitignore @@ -0,0 +1,6 @@ +.dub +docs +docs.json +__dummy.html +*.o +*.obj diff --git a/test/ddox/default/dub.sdl b/test/ddox/default/dub.sdl new file mode 100644 index 0000000..6dea9aa --- /dev/null +++ b/test/ddox/default/dub.sdl @@ -0,0 +1 @@ +name "ddox-project" diff --git a/test/ddox/default/source/ddox_project.d b/test/ddox/default/source/ddox_project.d new file mode 100644 index 0000000..dc5478b --- /dev/null +++ 
b/test/ddox/default/source/ddox_project.d @@ -0,0 +1,5 @@ +/// +module ddox_project; + +/// docstring +int foo; diff --git a/test/describe-dependency-1/.no_build b/test/describe-dependency-1/.no_build new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/describe-dependency-1/.no_build @@ -0,0 +1 @@ + diff --git a/test/describe-dependency-1/data/dummy-dep1.dat b/test/describe-dependency-1/data/dummy-dep1.dat new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/describe-dependency-1/data/dummy-dep1.dat @@ -0,0 +1 @@ + diff --git a/test/describe-dependency-1/dependency-postGenerateCommands.sh b/test/describe-dependency-1/dependency-postGenerateCommands.sh new file mode 100755 index 0000000..1a24852 --- /dev/null +++ b/test/describe-dependency-1/dependency-postGenerateCommands.sh @@ -0,0 +1 @@ +#!/bin/sh diff --git a/test/describe-dependency-1/dependency-preGenerateCommands.sh b/test/describe-dependency-1/dependency-preGenerateCommands.sh new file mode 100755 index 0000000..1a24852 --- /dev/null +++ b/test/describe-dependency-1/dependency-preGenerateCommands.sh @@ -0,0 +1 @@ +#!/bin/sh diff --git a/test/describe-dependency-1/dub.json b/test/describe-dependency-1/dub.json new file mode 100644 index 0000000..89e12ca --- /dev/null +++ b/test/describe-dependency-1/dub.json @@ -0,0 +1,28 @@ +{ + "name": "describe-dependency-1", + "targetType": "sourceLibrary", + "description": "A test describe project", + "authors": ["nobody"], + "homepage": "fake.com", + "license": "BSD 2-clause", + "copyright": "Copyright © 2015, nobody", + "sourceFiles-posix": ["dep.a"], + "sourceFiles-windows": ["dep.lib"], + "dflags": ["--another-dflag"], + "lflags": ["--another-lflag"], + "libs": ["anotherlib"], + "copyFiles": ["data/*"], + "versions": ["anotherVerIdent"], + "debugVersions": ["anotherDebugVerIdent"], + "preGenerateCommands-posix": ["../describe-dependency-1/dependency-preGenerateCommands.sh"], + "postGenerateCommands-posix": 
["../describe-dependency-1/dependency-postGenerateCommands.sh"], + "preBuildCommands-posix": ["../describe-dependency-1/dependency-preBuildCommands.sh"], + "postBuildCommands-posix": ["../describe-dependency-1/dependency-postBuildCommands.sh"], + "buildRequirements": ["requireContracts"], + "buildOptions": ["stackStomping"], + "configurations": [ + { + "name": "my-dependency-1-config" + } + ], +} diff --git a/test/describe-dependency-1/otherdir/dummy.d b/test/describe-dependency-1/otherdir/dummy.d new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/test/describe-dependency-1/otherdir/dummy.d @@ -0,0 +1 @@ + diff --git a/test/describe-dependency-1/source/dummy.d b/test/describe-dependency-1/source/dummy.d new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/test/describe-dependency-1/source/dummy.d @@ -0,0 +1 @@ + diff --git a/test/describe-dependency-2/.no_build b/test/describe-dependency-2/.no_build new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/describe-dependency-2/.no_build @@ -0,0 +1 @@ + diff --git a/test/describe-dependency-2/dub.json b/test/describe-dependency-2/dub.json new file mode 100644 index 0000000..0b62a92 --- /dev/null +++ b/test/describe-dependency-2/dub.json @@ -0,0 +1,11 @@ +{ + "name": "describe-dependency-2", + "targetType": "sourceLibrary", + "description": "A test describe project", + "authors": ["nobody"], + "homepage": "fake.com", + "license": "BSD 2-clause", + "copyright": "Copyright © 2015, nobody", + "importPaths": ["some-path"], + "stringImportPaths": ["some-extra-string-import-path"], +} diff --git a/test/describe-dependency-2/some-extra-string-import-path/dummy.d b/test/describe-dependency-2/some-extra-string-import-path/dummy.d new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/test/describe-dependency-2/some-extra-string-import-path/dummy.d @@ -0,0 +1 @@ + diff --git a/test/describe-dependency-2/some-path/dummy.d b/test/describe-dependency-2/some-path/dummy.d new file mode 
100644 index 0000000..8b13789 --- /dev/null +++ b/test/describe-dependency-2/some-path/dummy.d @@ -0,0 +1 @@ + diff --git a/test/describe-dependency-3/.no_build b/test/describe-dependency-3/.no_build new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/describe-dependency-3/.no_build @@ -0,0 +1 @@ + diff --git a/test/describe-dependency-3/dep3-source/dummy.d b/test/describe-dependency-3/dep3-source/dummy.d new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/test/describe-dependency-3/dep3-source/dummy.d @@ -0,0 +1 @@ + diff --git a/test/describe-dependency-3/dep3-string-import-path/dummy.d b/test/describe-dependency-3/dep3-string-import-path/dummy.d new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/test/describe-dependency-3/dep3-string-import-path/dummy.d @@ -0,0 +1 @@ + diff --git a/test/describe-dependency-3/dub.json b/test/describe-dependency-3/dub.json new file mode 100644 index 0000000..40f6991 --- /dev/null +++ b/test/describe-dependency-3/dub.json @@ -0,0 +1,13 @@ +{ + "name": "describe-dependency-3", + "targetType": "staticLibrary", + "description": "A test describe project", + "authors": ["nobody"], + "homepage": "fake.com", + "license": "BSD 2-clause", + "copyright": "Copyright © 2015, nobody", + "importPaths": ["dep3-source"], + "sourcePaths": ["dep3-source"], + "stringImportPaths": ["dep3-string-import-path"], + "buildOptions": ["profile"] +} diff --git a/test/describe-project/.no_build b/test/describe-project/.no_build new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/describe-project/.no_build @@ -0,0 +1 @@ + diff --git a/test/describe-project/data/dummy.dat b/test/describe-project/data/dummy.dat new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/describe-project/data/dummy.dat @@ -0,0 +1 @@ + diff --git a/test/describe-project/do-postGenerateCommands.sh b/test/describe-project/do-postGenerateCommands.sh new file mode 100755 index 0000000..1a24852 --- /dev/null +++ 
b/test/describe-project/do-postGenerateCommands.sh @@ -0,0 +1 @@ +#!/bin/sh diff --git a/test/describe-project/do-preGenerateCommands.sh b/test/describe-project/do-preGenerateCommands.sh new file mode 100755 index 0000000..1a24852 --- /dev/null +++ b/test/describe-project/do-preGenerateCommands.sh @@ -0,0 +1 @@ +#!/bin/sh diff --git a/test/describe-project/dub.json b/test/describe-project/dub.json new file mode 100644 index 0000000..c52d085 --- /dev/null +++ b/test/describe-project/dub.json @@ -0,0 +1,46 @@ +{ + "name": "describe-project", + "targetType": "executable", + "description": "A test describe project", + "authors": ["nobody"], + "homepage": "fake.com", + "license": "BSD 2-clause", + "copyright": "Copyright © 2015, nobody", + "mainSourceFile": "src/dummy.d", + "sourceFiles-posix": ["./some.a"], + "sourceFiles-windows": ["./some.lib"], + "dflags": ["--some-dflag"], + "lflags": ["--some-lflag"], + "libs": ["somelib"], + "copyFiles": ["data/dummy.dat"], + "versions": ["someVerIdent"], + "debugVersions": ["someDebugVerIdent"], + "preGenerateCommands-posix": ["./do-preGenerateCommands.sh"], + "postGenerateCommands-posix": ["./do-postGenerateCommands.sh"], + "preBuildCommands-posix": ["./do-preBuildCommands.sh"], + "postBuildCommands-posix": ["./do-postBuildCommands.sh"], + "buildRequirements": ["allowWarnings", "disallowInlining"], + "buildOptions": ["releaseMode", "debugInfo"], + "dependencies": { + "describe-dependency-1": { + "version": "1.0", + "path": "../describe-dependency-1" + }, + "describe-dependency-2": { + "version": "1.0", + "path": "../describe-dependency-2" + }, + "describe-dependency-3": { + "version": "1.0", + "path": "../describe-dependency-3" + } + }, + "configurations": [ + { + "name": "my-project-config" + } + ], + "subConfigurations": { + "describe-dependency-1": "my-dependency-1-config" + }, +} diff --git a/test/describe-project/src/dummy.d b/test/describe-project/src/dummy.d new file mode 100644 index 0000000..8b13789 --- /dev/null +++ 
b/test/describe-project/src/dummy.d @@ -0,0 +1 @@ + diff --git a/test/describe-project/views/dummy.d b/test/describe-project/views/dummy.d new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/test/describe-project/views/dummy.d @@ -0,0 +1 @@ + diff --git a/test/feat663-search.sh b/test/feat663-search.sh new file mode 100755 index 0000000..4778a51 --- /dev/null +++ b/test/feat663-search.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +if ${DUB} search 2>/dev/null; then + die $LINENO '`dub search` succeeded' +fi +if ${DUB} search nonexistent123456789package 2>/dev/null; then + die $LINENO '`dub search nonexistent123456789package` succeeded' +fi +if ! ${DUB} search dub | grep -q '^dub'; then + die $LINENO '`dub search dub` failed' +fi diff --git a/test/ignore-hidden-1/.gitignore b/test/ignore-hidden-1/.gitignore new file mode 100644 index 0000000..433d266 --- /dev/null +++ b/test/ignore-hidden-1/.gitignore @@ -0,0 +1,5 @@ +.dub +docs.json +__dummy.html +*.o +*.obj diff --git a/test/ignore-hidden-1/.no_run b/test/ignore-hidden-1/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/ignore-hidden-1/.no_run diff --git a/test/ignore-hidden-1/.no_test b/test/ignore-hidden-1/.no_test new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/ignore-hidden-1/.no_test diff --git a/test/ignore-hidden-1/dub.json b/test/ignore-hidden-1/dub.json new file mode 100644 index 0000000..477805a --- /dev/null +++ b/test/ignore-hidden-1/dub.json @@ -0,0 +1,3 @@ +{ + "name": "ignore-hidden-1", +} diff --git a/test/ignore-hidden-1/source/.hidden.d b/test/ignore-hidden-1/source/.hidden.d new file mode 100644 index 0000000..da932b0 --- /dev/null +++ b/test/ignore-hidden-1/source/.hidden.d @@ -0,0 +1,3 @@ +// need module declarations as '.' 
is not allowed in module names +module hidden; +static assert(0, "Dub should not compile "~__FILE__~"."); diff --git a/test/ignore-hidden-1/source/app.d b/test/ignore-hidden-1/source/app.d new file mode 100644 index 0000000..9198103 --- /dev/null +++ b/test/ignore-hidden-1/source/app.d @@ -0,0 +1,3 @@ +void main() +{ +} diff --git a/test/ignore-hidden-2/.gitignore b/test/ignore-hidden-2/.gitignore new file mode 100644 index 0000000..433d266 --- /dev/null +++ b/test/ignore-hidden-2/.gitignore @@ -0,0 +1,5 @@ +.dub +docs.json +__dummy.html +*.o +*.obj diff --git a/test/ignore-hidden-2/.no_run b/test/ignore-hidden-2/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/ignore-hidden-2/.no_run diff --git a/test/ignore-hidden-2/.no_test b/test/ignore-hidden-2/.no_test new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/ignore-hidden-2/.no_test diff --git a/test/ignore-hidden-2/dub.json b/test/ignore-hidden-2/dub.json new file mode 100644 index 0000000..4f232d3 --- /dev/null +++ b/test/ignore-hidden-2/dub.json @@ -0,0 +1,4 @@ +{ + "name": "ignore-hidden-2", + "sourceFiles": ["source/.hidden.d"], +} diff --git a/test/ignore-hidden-2/source/.hidden.d b/test/ignore-hidden-2/source/.hidden.d new file mode 100644 index 0000000..0c4d2cc --- /dev/null +++ b/test/ignore-hidden-2/source/.hidden.d @@ -0,0 +1 @@ +module hidden; diff --git a/test/ignore-hidden-2/source/app.d b/test/ignore-hidden-2/source/app.d new file mode 100644 index 0000000..024fdcd --- /dev/null +++ b/test/ignore-hidden-2/source/app.d @@ -0,0 +1,5 @@ +import hidden; + +void main() +{ +} diff --git a/test/interactive-remove.sh b/test/interactive-remove.sh new file mode 100755 index 0000000..f689fdd --- /dev/null +++ b/test/interactive-remove.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash + +. 
$(dirname "${BASH_SOURCE[0]}")/common.sh + +$DUB fetch dub --version=0.9.20 && [ -d $HOME/.dub/packages/dub-0.9.20/dub ] +$DUB fetch dub --version=0.9.21 && [ -d $HOME/.dub/packages/dub-0.9.21/dub ] +if $DUB remove dub --non-interactive 2>/dev/null; then + die $LINENO 'Non-interactive remove should fail' +fi +echo 1 | $DUB remove dub | tr --delete '\n' | grep --ignore-case 'select.*0\.9\.20.*0\.9\.21.*' +if [ -d $HOME/.dub/packages/dub-0.9.20/dub ]; then + die $LINENO 'Failed to remove dub-0.9.20' +fi +$DUB fetch dub --version=0.9.20 && [ -d $HOME/.dub/packages/dub-0.9.20/dub ] +# EOF aborts remove +echo -xn '' | $DUB remove dub +if [ ! -d $HOME/.dub/packages/dub-0.9.20/dub ] || [ ! -d $HOME/.dub/packages/dub-0.9.21/dub ]; then + die $LINENO 'Aborted dub still removed a package' +fi +# validates input +echo -e 'abc\n4\n-1\n3' | $DUB remove dub +if [ -d $HOME/.dub/packages/dub-0.9.20/dub ] || [ -d $HOME/.dub/packages/dub-0.9.21/dub ]; then + die $LINENO 'Failed to remove all version of dub' +fi +$DUB fetch dub --version=0.9.20 && [ -d $HOME/.dub/packages/dub-0.9.20/dub ] +$DUB fetch dub --version=0.9.21 && [ -d $HOME/.dub/packages/dub-0.9.21/dub ] +# is non-interactive with --version= +$DUB remove dub --version=\* +if [ -d $HOME/.dub/packages/dub-0.9.20/dub ] || [ -d $HOME/.dub/packages/dub-0.9.21/dub ]; then + die $LINENO 'Failed to non-interactively remove specified versions' +fi diff --git a/test/issue1004-override-config.sh b/test/issue1004-override-config.sh new file mode 100755 index 0000000..96080f1 --- /dev/null +++ b/test/issue1004-override-config.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +. 
$(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue1004-override-config +${DUB} build --bare main --override-config a/success diff --git a/test/issue1004-override-config/.no_build b/test/issue1004-override-config/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue1004-override-config/.no_build diff --git a/test/issue1004-override-config/a/a.d b/test/issue1004-override-config/a/a.d new file mode 100644 index 0000000..af17f99 --- /dev/null +++ b/test/issue1004-override-config/a/a.d @@ -0,0 +1,5 @@ +module a; + +void test() +{ +} diff --git a/test/issue1004-override-config/a/dub.sdl b/test/issue1004-override-config/a/dub.sdl new file mode 100644 index 0000000..9c01efd --- /dev/null +++ b/test/issue1004-override-config/a/dub.sdl @@ -0,0 +1,9 @@ +name "a" + +configuration "fail" { +} + +configuration "success" { + sourceFiles "a.d" + importPaths "." +} diff --git a/test/issue1004-override-config/main/dub.sdl b/test/issue1004-override-config/main/dub.sdl new file mode 100644 index 0000000..b2287f0 --- /dev/null +++ b/test/issue1004-override-config/main/dub.sdl @@ -0,0 +1,2 @@ +name "main" +dependency "a" version="*" diff --git a/test/issue1004-override-config/main/source/main.d b/test/issue1004-override-config/main/source/main.d new file mode 100644 index 0000000..b248b89 --- /dev/null +++ b/test/issue1004-override-config/main/source/main.d @@ -0,0 +1,6 @@ +import a; + +void main() +{ + test(); +} diff --git a/test/issue1005-configuration-resolution.sh b/test/issue1005-configuration-resolution.sh new file mode 100755 index 0000000..1233e76 --- /dev/null +++ b/test/issue1005-configuration-resolution.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +. 
$(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue1005-configuration-resolution +${DUB} build --bare main diff --git a/test/issue1005-configuration-resolution/.no_build b/test/issue1005-configuration-resolution/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue1005-configuration-resolution/.no_build diff --git a/test/issue1005-configuration-resolution/a/dub.sdl b/test/issue1005-configuration-resolution/a/dub.sdl new file mode 100644 index 0000000..d19952b --- /dev/null +++ b/test/issue1005-configuration-resolution/a/dub.sdl @@ -0,0 +1,10 @@ +name "a" +dependency "b" version="*" + +configuration "x" { + subConfiguration "b" "x" +} + +configuration "y" { + subConfiguration "b" "y" +} diff --git a/test/issue1005-configuration-resolution/b/dub.sdl b/test/issue1005-configuration-resolution/b/dub.sdl new file mode 100644 index 0000000..3cfa48b --- /dev/null +++ b/test/issue1005-configuration-resolution/b/dub.sdl @@ -0,0 +1,7 @@ +name "b" + +configuration "x" { +} + +configuration "y" { +} \ No newline at end of file diff --git a/test/issue1005-configuration-resolution/b/source/b.d b/test/issue1005-configuration-resolution/b/source/b.d new file mode 100644 index 0000000..2a9bb41 --- /dev/null +++ b/test/issue1005-configuration-resolution/b/source/b.d @@ -0,0 +1,3 @@ +module b; + +void foo() {} diff --git a/test/issue1005-configuration-resolution/c/dub.sdl b/test/issue1005-configuration-resolution/c/dub.sdl new file mode 100644 index 0000000..e46b148 --- /dev/null +++ b/test/issue1005-configuration-resolution/c/dub.sdl @@ -0,0 +1,2 @@ +name "c" +dependency "a" version="*" diff --git a/test/issue1005-configuration-resolution/main/dub.sdl b/test/issue1005-configuration-resolution/main/dub.sdl new file mode 100644 index 0000000..d492491 --- /dev/null +++ b/test/issue1005-configuration-resolution/main/dub.sdl @@ -0,0 +1,6 @@ +name "main" + +dependency "b" version="*" +dependency "c" version="*" + +subConfiguration "b" "y" diff 
--git a/test/issue1005-configuration-resolution/main/source/app.d b/test/issue1005-configuration-resolution/main/source/app.d new file mode 100644 index 0000000..0ec7361 --- /dev/null +++ b/test/issue1005-configuration-resolution/main/source/app.d @@ -0,0 +1,6 @@ +import b; + +void main() +{ + foo(); +} diff --git a/test/issue1024-selective-upgrade.sh b/test/issue1024-selective-upgrade.sh new file mode 100755 index 0000000..dc7c009 --- /dev/null +++ b/test/issue1024-selective-upgrade.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue1024-selective-upgrade +echo "{\"fileVersion\": 1,\"versions\": {\"a\": \"1.0.0\", \"b\": \"1.0.0\"}}" > main/dub.selections.json +$DUB upgrade --bare --root=main a + +if ! grep -c -e "\"a\": \"1.0.1\"" main/dub.selections.json; then + die $LINENO "Specified dependency was not upgraded." +fi + +if grep -c -e "\"b\": \"1.0.1\"" main/dub.selections.json; then + die $LINENO "Non-specified dependency got upgraded." 
+fi diff --git a/test/issue1024-selective-upgrade/.no_build b/test/issue1024-selective-upgrade/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue1024-selective-upgrade/.no_build diff --git a/test/issue1024-selective-upgrade/a-1.0.0/dub.sdl b/test/issue1024-selective-upgrade/a-1.0.0/dub.sdl new file mode 100644 index 0000000..7ff9fa1 --- /dev/null +++ b/test/issue1024-selective-upgrade/a-1.0.0/dub.sdl @@ -0,0 +1,2 @@ +name "a" +version "1.0.0" diff --git a/test/issue1024-selective-upgrade/a-1.0.1/dub.sdl b/test/issue1024-selective-upgrade/a-1.0.1/dub.sdl new file mode 100644 index 0000000..5c8a407 --- /dev/null +++ b/test/issue1024-selective-upgrade/a-1.0.1/dub.sdl @@ -0,0 +1,2 @@ +name "a" +version "1.0.1" diff --git a/test/issue1024-selective-upgrade/b-1.0.0/dub.sdl b/test/issue1024-selective-upgrade/b-1.0.0/dub.sdl new file mode 100644 index 0000000..5597559 --- /dev/null +++ b/test/issue1024-selective-upgrade/b-1.0.0/dub.sdl @@ -0,0 +1,2 @@ +name "b" +version "1.0.0" diff --git a/test/issue1024-selective-upgrade/b-1.0.1/dub.sdl b/test/issue1024-selective-upgrade/b-1.0.1/dub.sdl new file mode 100644 index 0000000..5e0c01a --- /dev/null +++ b/test/issue1024-selective-upgrade/b-1.0.1/dub.sdl @@ -0,0 +1,2 @@ +name "b" +version "1.0.1" diff --git a/test/issue1024-selective-upgrade/main/dub.sdl b/test/issue1024-selective-upgrade/main/dub.sdl new file mode 100644 index 0000000..a9da177 --- /dev/null +++ b/test/issue1024-selective-upgrade/main/dub.sdl @@ -0,0 +1,3 @@ +name "test" +dependency "a" version="~>1.0.0" +dependency "b" version="~>1.0.0" diff --git a/test/issue103-single-file-package-json.d b/test/issue103-single-file-package-json.d new file mode 100644 index 0000000..f2479a9 --- /dev/null +++ b/test/issue103-single-file-package-json.d @@ -0,0 +1,10 @@ +/+ dub.json: { + "name": "single-file-test" +} +/ +module hello; + +void main() +{ + import std.stdio : writeln; + writeln("Hello, World!"); +} diff --git 
a/test/issue103-single-file-package-w-dep.d b/test/issue103-single-file-package-w-dep.d new file mode 100644 index 0000000..5f66241 --- /dev/null +++ b/test/issue103-single-file-package-w-dep.d @@ -0,0 +1,12 @@ +/+ dub.sdl: +name "single-file-test" +dependency "sourcelib-simple" path="1-sourceLib-simple" ++/ +module hello; + +import sourcelib.app; + +void main() +{ + entry(); +} diff --git a/test/issue103-single-file-package.d b/test/issue103-single-file-package.d new file mode 100755 index 0000000..8c76638 --- /dev/null +++ b/test/issue103-single-file-package.d @@ -0,0 +1,12 @@ +#!../bin/dub +/+ dub.sdl: + name "single-file-test" ++/ +module hello; + +void main(string[] args) +{ + import std.stdio : writeln; + assert(args.length == 4 && args[1 .. 4] == ["foo", "--", "bar"]); + writeln("Hello, World!"); +} diff --git a/test/issue103-single-file-package.sh b/test/issue103-single-file-package.sh new file mode 100755 index 0000000..0c23eb2 --- /dev/null +++ b/test/issue103-single-file-package.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR} +rm -f single-file-test + +${DUB} run --single issue103-single-file-package-json.d --compiler=${DC} +if [ ! -f single-file-test ]; then + die $LINENO 'Normal invocation did not produce a binary in the current directory' +fi +rm single-file-test + +./issue103-single-file-package.d foo -- bar + +${DUB} issue103-single-file-package-w-dep.d + +if [ -f single-file-test ]; then + die $LINENO 'Shebang invocation produced binary in current directory' +fi + +if ${DUB} "issue103-single-file-package-error.d" 2> /dev/null; then + echo "Invalid package comment syntax did not trigger an error." + exit 1 +fi diff --git a/test/issue1091-bogus-rebuild.sh b/test/issue1091-bogus-rebuild.sh new file mode 100755 index 0000000..ae440eb --- /dev/null +++ b/test/issue1091-bogus-rebuild.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +. 
$(dirname "${BASH_SOURCE[0]}")/common.sh + +cd ${CURR_DIR}/1-exec-simple +rm -f dub.selections.json +${DUB} build --compiler=${DC} 2>&1 | grep -e 'building configuration' -c +${DUB} build --compiler=${DC} 2>&1 | { ! grep -e 'building configuration' -c; } diff --git "a/test/issue130-unicode-\320\241\320\235\320\220\320\257\320\220\320\241\320\242\320\225\320\257\320\205/dub.sdl" "b/test/issue130-unicode-\320\241\320\235\320\220\320\257\320\220\320\241\320\242\320\225\320\257\320\205/dub.sdl" new file mode 100644 index 0000000..6bc471c --- /dev/null +++ "b/test/issue130-unicode-\320\241\320\235\320\220\320\257\320\220\320\241\320\242\320\225\320\257\320\205/dub.sdl" @@ -0,0 +1 @@ +name "tests" diff --git "a/test/issue130-unicode-\320\241\320\235\320\220\320\257\320\220\320\241\320\242\320\225\320\257\320\205/source/app.d" "b/test/issue130-unicode-\320\241\320\235\320\220\320\257\320\220\320\241\320\242\320\225\320\257\320\205/source/app.d" new file mode 100644 index 0000000..b9b93f9 --- /dev/null +++ "b/test/issue130-unicode-\320\241\320\235\320\220\320\257\320\220\320\241\320\242\320\225\320\257\320\205/source/app.d" @@ -0,0 +1,6 @@ +import std.stdio; + +void main() +{ + writeln("Success."); +} diff --git a/test/issue346-redundant-flags.sh b/test/issue346-redundant-flags.sh new file mode 100755 index 0000000..c6b27f2 --- /dev/null +++ b/test/issue346-redundant-flags.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue346-redundant-flags +${DUB} build --bare --force --compiler=${DC} -a x86_64 -v main 2>&1 | { ! 
grep -e '-m64 -m64' -c; } diff --git a/test/issue346-redundant-flags/.no_build b/test/issue346-redundant-flags/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue346-redundant-flags/.no_build diff --git a/test/issue346-redundant-flags/.no_run b/test/issue346-redundant-flags/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue346-redundant-flags/.no_run diff --git a/test/issue346-redundant-flags/.no_test b/test/issue346-redundant-flags/.no_test new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue346-redundant-flags/.no_test diff --git a/test/issue346-redundant-flags/a/dub.json b/test/issue346-redundant-flags/a/dub.json new file mode 100644 index 0000000..5b91181 --- /dev/null +++ b/test/issue346-redundant-flags/a/dub.json @@ -0,0 +1,3 @@ +{ + "name": "a" +} \ No newline at end of file diff --git a/test/issue346-redundant-flags/a/source/a.d b/test/issue346-redundant-flags/a/source/a.d new file mode 100644 index 0000000..1581659 --- /dev/null +++ b/test/issue346-redundant-flags/a/source/a.d @@ -0,0 +1,5 @@ +module a; + +void afun() +{ +} diff --git a/test/issue346-redundant-flags/b/dub.json b/test/issue346-redundant-flags/b/dub.json new file mode 100644 index 0000000..2824d72 --- /dev/null +++ b/test/issue346-redundant-flags/b/dub.json @@ -0,0 +1,3 @@ +{ + "name": "b" +} \ No newline at end of file diff --git a/test/issue346-redundant-flags/b/source/b.d b/test/issue346-redundant-flags/b/source/b.d new file mode 100644 index 0000000..003a74e --- /dev/null +++ b/test/issue346-redundant-flags/b/source/b.d @@ -0,0 +1,5 @@ +module b; + +void bfun() +{ +} diff --git a/test/issue346-redundant-flags/main/dub.json b/test/issue346-redundant-flags/main/dub.json new file mode 100644 index 0000000..0160e79 --- /dev/null +++ b/test/issue346-redundant-flags/main/dub.json @@ -0,0 +1,7 @@ +{ + "name": "main", + "dependencies": { + "a": {"path": "../a"}, + "b": {"path": "../b"} + } +} \ No newline at 
end of file diff --git a/test/issue346-redundant-flags/main/source/main.d b/test/issue346-redundant-flags/main/source/main.d new file mode 100644 index 0000000..fba3ebb --- /dev/null +++ b/test/issue346-redundant-flags/main/source/main.d @@ -0,0 +1,8 @@ +import a; +import b; + +void main() +{ + afun(); + bfun(); +} \ No newline at end of file diff --git a/test/issue361-optional-deps.sh b/test/issue361-optional-deps.sh new file mode 100755 index 0000000..db87794 --- /dev/null +++ b/test/issue361-optional-deps.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue361-optional-deps +rm -rf a/.dub +rm -rf a/b/.dub +rm -rf main1/.dub +rm -rf main2/.dub +rm -f main1/dub.selections.json + +${DUB} build --bare --compiler=${DC} main1 +echo "{" > cmp.tmp +echo " \"fileVersion\": 1," >> cmp.tmp +echo " \"versions\": {" >> cmp.tmp +echo " \"b\": \"~master\"" >> cmp.tmp +echo " }" >> cmp.tmp +echo "}" >> cmp.tmp +diff cmp.tmp main1/dub.selections.json + +${DUB} build --bare --compiler=${DC} main2 +echo "{" > cmp.tmp +echo " \"fileVersion\": 1," >> cmp.tmp +echo " \"versions\": {" >> cmp.tmp +echo " \"a\": \"~master\"" >> cmp.tmp +echo " }" >> cmp.tmp +echo "}" >> cmp.tmp +diff cmp.tmp main2/dub.selections.json diff --git a/test/issue361-optional-deps/.no_build b/test/issue361-optional-deps/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue361-optional-deps/.no_build diff --git a/test/issue361-optional-deps/a/dub.sdl b/test/issue361-optional-deps/a/dub.sdl new file mode 100644 index 0000000..5730427 --- /dev/null +++ b/test/issue361-optional-deps/a/dub.sdl @@ -0,0 +1 @@ +name "a" \ No newline at end of file diff --git a/test/issue361-optional-deps/a/src/a.d b/test/issue361-optional-deps/a/src/a.d new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue361-optional-deps/a/src/a.d diff --git a/test/issue361-optional-deps/b/dub.sdl b/test/issue361-optional-deps/b/dub.sdl new 
file mode 100644 index 0000000..c37c6fc --- /dev/null +++ b/test/issue361-optional-deps/b/dub.sdl @@ -0,0 +1 @@ +name "b" \ No newline at end of file diff --git a/test/issue361-optional-deps/b/src/b.d b/test/issue361-optional-deps/b/src/b.d new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue361-optional-deps/b/src/b.d diff --git a/test/issue361-optional-deps/main1/dub.sdl b/test/issue361-optional-deps/main1/dub.sdl new file mode 100644 index 0000000..49c30d7 --- /dev/null +++ b/test/issue361-optional-deps/main1/dub.sdl @@ -0,0 +1,3 @@ +name "main1" +dependency "a" version="*" optional=true +dependency "b" version="*" optional=true default=true \ No newline at end of file diff --git a/test/issue361-optional-deps/main1/src/main1.d b/test/issue361-optional-deps/main1/src/main1.d new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue361-optional-deps/main1/src/main1.d diff --git a/test/issue361-optional-deps/main2/dub.sdl b/test/issue361-optional-deps/main2/dub.sdl new file mode 100644 index 0000000..d098466 --- /dev/null +++ b/test/issue361-optional-deps/main2/dub.sdl @@ -0,0 +1,3 @@ +name "main2" +dependency "a" version="*" optional=true +dependency "b" version="*" optional=true default=true \ No newline at end of file diff --git a/test/issue361-optional-deps/main2/dub.selections.json b/test/issue361-optional-deps/main2/dub.selections.json new file mode 100644 index 0000000..633ce9c --- /dev/null +++ b/test/issue361-optional-deps/main2/dub.selections.json @@ -0,0 +1,6 @@ +{ + "fileVersion": 1, + "versions": { + "a": "~master" + } +} diff --git a/test/issue361-optional-deps/main2/src/main2.d b/test/issue361-optional-deps/main2/src/main2.d new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue361-optional-deps/main2/src/main2.d diff --git a/test/issue502-root-import/dub.json b/test/issue502-root-import/dub.json new file mode 100644 index 0000000..cdd4b0b --- /dev/null +++ 
b/test/issue502-root-import/dub.json @@ -0,0 +1,7 @@ +{ + "name": "issue502-root-import", + "dependencies": + { + "gitcompatibledubpackage": "~>1.0" + } +} \ No newline at end of file diff --git a/test/issue502-root-import/source/app.d b/test/issue502-root-import/source/app.d new file mode 100644 index 0000000..9241919 --- /dev/null +++ b/test/issue502-root-import/source/app.d @@ -0,0 +1,10 @@ +import gitcompatibledubpackage.subdir.file; + +void main(string[] args) +{ +} + +unittest +{ + assert(!hasTheWorldExploded()); +} \ No newline at end of file diff --git a/test/issue564-invalid-upgrade-dependency.sh b/test/issue564-invalid-upgrade-dependency.sh new file mode 100755 index 0000000..19258ce --- /dev/null +++ b/test/issue564-invalid-upgrade-dependency.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue564-invalid-upgrade-dependency +rm -rf a-1.0.0/.dub +rm -rf a-1.1.0/.dub +rm -rf main/.dub +${DUB} build --bare --compiler=${DC} main diff --git a/test/issue564-invalid-upgrade-dependency/.no_build b/test/issue564-invalid-upgrade-dependency/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue564-invalid-upgrade-dependency/.no_build diff --git a/test/issue564-invalid-upgrade-dependency/.no_run b/test/issue564-invalid-upgrade-dependency/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue564-invalid-upgrade-dependency/.no_run diff --git a/test/issue564-invalid-upgrade-dependency/.no_test b/test/issue564-invalid-upgrade-dependency/.no_test new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue564-invalid-upgrade-dependency/.no_test diff --git a/test/issue564-invalid-upgrade-dependency/a-1.0.0/dub.json b/test/issue564-invalid-upgrade-dependency/a-1.0.0/dub.json new file mode 100644 index 0000000..cc36ecb --- /dev/null +++ b/test/issue564-invalid-upgrade-dependency/a-1.0.0/dub.json @@ -0,0 +1,4 @@ +{ + "name": "a", + "version": 
"1.0.0", +} diff --git a/test/issue564-invalid-upgrade-dependency/a-1.0.0/source/a.d b/test/issue564-invalid-upgrade-dependency/a-1.0.0/source/a.d new file mode 100644 index 0000000..b430cbc --- /dev/null +++ b/test/issue564-invalid-upgrade-dependency/a-1.0.0/source/a.d @@ -0,0 +1,3 @@ +void test() +{ +} \ No newline at end of file diff --git a/test/issue564-invalid-upgrade-dependency/a-1.1.0/dub.json b/test/issue564-invalid-upgrade-dependency/a-1.1.0/dub.json new file mode 100644 index 0000000..4103fe5 --- /dev/null +++ b/test/issue564-invalid-upgrade-dependency/a-1.1.0/dub.json @@ -0,0 +1,7 @@ +{ + "name": "a", + "version": "1.1.0", + "dependencies": { + "invalid": {"path": "invalid"} + } +} diff --git a/test/issue564-invalid-upgrade-dependency/a-1.1.0/source/a.d b/test/issue564-invalid-upgrade-dependency/a-1.1.0/source/a.d new file mode 100644 index 0000000..b430cbc --- /dev/null +++ b/test/issue564-invalid-upgrade-dependency/a-1.1.0/source/a.d @@ -0,0 +1,3 @@ +void test() +{ +} \ No newline at end of file diff --git a/test/issue564-invalid-upgrade-dependency/main/dub.json b/test/issue564-invalid-upgrade-dependency/main/dub.json new file mode 100644 index 0000000..7d27d9d --- /dev/null +++ b/test/issue564-invalid-upgrade-dependency/main/dub.json @@ -0,0 +1,6 @@ +{ + "name": "main", + "dependencies": { + "a": "~>1.0" + } +} diff --git a/test/issue564-invalid-upgrade-dependency/main/dub.selections.json b/test/issue564-invalid-upgrade-dependency/main/dub.selections.json new file mode 100644 index 0000000..e24adfe --- /dev/null +++ b/test/issue564-invalid-upgrade-dependency/main/dub.selections.json @@ -0,0 +1,6 @@ +{ + "fileVersion": 1, + "versions": { + "a": "1.0.0" + } +} diff --git a/test/issue564-invalid-upgrade-dependency/main/source/app.d b/test/issue564-invalid-upgrade-dependency/main/source/app.d new file mode 100644 index 0000000..c0b76f0 --- /dev/null +++ b/test/issue564-invalid-upgrade-dependency/main/source/app.d @@ -0,0 +1,6 @@ +import a; + +void main() 
+{ + test(); +} diff --git a/test/issue586-subpack-dep.sh b/test/issue586-subpack-dep.sh new file mode 100755 index 0000000..306bca7 --- /dev/null +++ b/test/issue586-subpack-dep.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue586-subpack-dep +rm -rf a/.dub +rm -rf a/b/.dub +rm -rf main/.dub +${DUB} build --bare --compiler=${DC} main +${DUB} run --bare --compiler=${DC} main diff --git a/test/issue586-subpack-dep/.no_build b/test/issue586-subpack-dep/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue586-subpack-dep/.no_build diff --git a/test/issue586-subpack-dep/.no_run b/test/issue586-subpack-dep/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue586-subpack-dep/.no_run diff --git a/test/issue586-subpack-dep/.no_test b/test/issue586-subpack-dep/.no_test new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue586-subpack-dep/.no_test diff --git a/test/issue586-subpack-dep/a/b/dub.sdl b/test/issue586-subpack-dep/a/b/dub.sdl new file mode 100644 index 0000000..ad65ed5 --- /dev/null +++ b/test/issue586-subpack-dep/a/b/dub.sdl @@ -0,0 +1 @@ +name "b" diff --git a/test/issue586-subpack-dep/a/b/source/b.d b/test/issue586-subpack-dep/a/b/source/b.d new file mode 100644 index 0000000..a63d610 --- /dev/null +++ b/test/issue586-subpack-dep/a/b/source/b.d @@ -0,0 +1,3 @@ +module b; + +int bfun() { return 2; } \ No newline at end of file diff --git a/test/issue586-subpack-dep/a/dub.sdl b/test/issue586-subpack-dep/a/dub.sdl new file mode 100644 index 0000000..27b8e00 --- /dev/null +++ b/test/issue586-subpack-dep/a/dub.sdl @@ -0,0 +1,3 @@ +name "a" +dependency ":b" version="*" +subPackage "b/" diff --git a/test/issue586-subpack-dep/a/source/a.d b/test/issue586-subpack-dep/a/source/a.d new file mode 100644 index 0000000..e981113 --- /dev/null +++ b/test/issue586-subpack-dep/a/source/a.d @@ -0,0 +1,8 @@ +module a; + +import b; + +int afun() 
+{ + return 1 + bfun(); +} diff --git a/test/issue586-subpack-dep/main/dub.sdl b/test/issue586-subpack-dep/main/dub.sdl new file mode 100644 index 0000000..b5c43c1 --- /dev/null +++ b/test/issue586-subpack-dep/main/dub.sdl @@ -0,0 +1,3 @@ +name "main" +dependency "a" version="*" +targetType "executable" \ No newline at end of file diff --git a/test/issue586-subpack-dep/main/dub.selections.json b/test/issue586-subpack-dep/main/dub.selections.json new file mode 100644 index 0000000..633ce9c --- /dev/null +++ b/test/issue586-subpack-dep/main/dub.selections.json @@ -0,0 +1,6 @@ +{ + "fileVersion": 1, + "versions": { + "a": "~master" + } +} diff --git a/test/issue586-subpack-dep/main/source/c.d b/test/issue586-subpack-dep/main/source/c.d new file mode 100644 index 0000000..14a3abe --- /dev/null +++ b/test/issue586-subpack-dep/main/source/c.d @@ -0,0 +1,8 @@ +module c; + +import a; + +void main() +{ + assert(afun() == 3); +} diff --git a/test/issue613-dynlib-pic.sh b/test/issue613-dynlib-pic.sh new file mode 100755 index 0000000..b8fc5e7 --- /dev/null +++ b/test/issue613-dynlib-pic.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue613-dynlib-pic +rm -rf .dub +if [ "${DC}" = "dmd" ]; then + ${DUB} build --compiler=${DC} +else + echo "Skipping shared library test for ${DC}..." 
+fi diff --git a/test/issue613-dynlib-pic/.gitignore b/test/issue613-dynlib-pic/.gitignore new file mode 100644 index 0000000..433d266 --- /dev/null +++ b/test/issue613-dynlib-pic/.gitignore @@ -0,0 +1,5 @@ +.dub +docs.json +__dummy.html +*.o +*.obj diff --git a/test/issue613-dynlib-pic/.no_build b/test/issue613-dynlib-pic/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue613-dynlib-pic/.no_build diff --git a/test/issue613-dynlib-pic/.no_run b/test/issue613-dynlib-pic/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue613-dynlib-pic/.no_run diff --git a/test/issue613-dynlib-pic/.no_test b/test/issue613-dynlib-pic/.no_test new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue613-dynlib-pic/.no_test diff --git a/test/issue613-dynlib-pic/dub.sdl b/test/issue613-dynlib-pic/dub.sdl new file mode 100644 index 0000000..4e79125 --- /dev/null +++ b/test/issue613-dynlib-pic/dub.sdl @@ -0,0 +1,4 @@ +name "issue613-dynlib-pic" +targetType "dynamicLibrary" + +// TODO: instead of just testing build success, test if -shared, -fPIC and -defaultlib have all been specified correctly \ No newline at end of file diff --git a/test/issue613-dynlib-pic/source/app.d b/test/issue613-dynlib-pic/source/app.d new file mode 100644 index 0000000..8b92d48 --- /dev/null +++ b/test/issue613-dynlib-pic/source/app.d @@ -0,0 +1,4 @@ +void test() +{ + +} \ No newline at end of file diff --git a/test/issue616-describe-vs-generate-commands.sh b/test/issue616-describe-vs-generate-commands.sh new file mode 100755 index 0000000..698ef63 --- /dev/null +++ b/test/issue616-describe-vs-generate-commands.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +cd "$CURR_DIR"/issue616-describe-vs-generate-commands + +temp_file=$(mktemp $(basename $0).XXXXXX) + +function cleanup { + rm $temp_file +} + +trap cleanup EXIT + +if ! 
$DUB describe --compiler=$DC --data-list --data=target-name \ + > "$temp_file" 2>&1; then + die 'Printing project data failed!' +fi + +# Create the expected output file to compare stdout against. +expected_file="$CURR_DIR/expected-issue616-output" +echo "preGenerateCommands: DUB_PACKAGES_USED=issue616-describe-vs-generate-commands,issue616-subpack,issue616-subsubpack" > "$expected_file" +echo "$CURR_DIR/issue616-describe-vs-generate-commands/src/" >> "$expected_file" +echo "$CURR_DIR/issue616-subpack/src/" >> "$expected_file" +echo "$CURR_DIR/issue616-subsubpack/src/" >> "$expected_file" +echo "issue616-describe-vs-generate-commands" >> "$expected_file" + +if ! diff "$expected_file" "$temp_file"; then + die 'The stdout output did not match the expected output!' +fi diff --git a/test/issue616-describe-vs-generate-commands/.no_build b/test/issue616-describe-vs-generate-commands/.no_build new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/issue616-describe-vs-generate-commands/.no_build @@ -0,0 +1 @@ + diff --git a/test/issue616-describe-vs-generate-commands/do-preGenerateCommands.sh b/test/issue616-describe-vs-generate-commands/do-preGenerateCommands.sh new file mode 100755 index 0000000..84468cd --- /dev/null +++ b/test/issue616-describe-vs-generate-commands/do-preGenerateCommands.sh @@ -0,0 +1,10 @@ +#!/bin/sh +if [ -n "${dub_issue616}" ]; then + echo 'Fail! preGenerateCommands recursion detected!' >&2 + exit 0 # Don't return a non-zero error code here. This way the test gives a better diagnostic. 
+fi + +echo preGenerateCommands: DUB_PACKAGES_USED=$DUB_PACKAGES_USED >&2 + +export dub_issue616=true +$DUB describe --compiler=$DC --data-list --data=import-paths >&2 diff --git a/test/issue616-describe-vs-generate-commands/dub.json b/test/issue616-describe-vs-generate-commands/dub.json new file mode 100644 index 0000000..8aec1ed --- /dev/null +++ b/test/issue616-describe-vs-generate-commands/dub.json @@ -0,0 +1,11 @@ +{ + "name": "issue616-describe-vs-generate-commands", + "targetType": "executable", + "preGenerateCommands-posix": ["cd $PACKAGE_DIR && ./do-preGenerateCommands.sh"], + "dependencies": { + "issue616-subpack": { + "version": "1.0", + "path": "../issue616-subpack" + } + } +} diff --git a/test/issue616-describe-vs-generate-commands/src/dummy.d b/test/issue616-describe-vs-generate-commands/src/dummy.d new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/test/issue616-describe-vs-generate-commands/src/dummy.d @@ -0,0 +1 @@ + diff --git a/test/issue616-subpack/.no_build b/test/issue616-subpack/.no_build new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/issue616-subpack/.no_build @@ -0,0 +1 @@ + diff --git a/test/issue616-subpack/dub.json b/test/issue616-subpack/dub.json new file mode 100644 index 0000000..552ddcd --- /dev/null +++ b/test/issue616-subpack/dub.json @@ -0,0 +1,10 @@ +{ + "name": "issue616-subpack", + "targetType": "executable", + "dependencies": { + "issue616-subsubpack": { + "version": "1.0", + "path": "../issue616-subsubpack" + } + } +} diff --git a/test/issue616-subpack/src/dummy.d b/test/issue616-subpack/src/dummy.d new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/test/issue616-subpack/src/dummy.d @@ -0,0 +1 @@ + diff --git a/test/issue616-subsubpack/.no_build b/test/issue616-subsubpack/.no_build new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/issue616-subsubpack/.no_build @@ -0,0 +1 @@ + diff --git a/test/issue616-subsubpack/dub.json b/test/issue616-subsubpack/dub.json 
new file mode 100644 index 0000000..e4e4b5b --- /dev/null +++ b/test/issue616-subsubpack/dub.json @@ -0,0 +1,4 @@ +{ + "name": "issue616-subsubpack", + "targetType": "executable" +} diff --git a/test/issue616-subsubpack/src/dummy.d b/test/issue616-subsubpack/src/dummy.d new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/test/issue616-subsubpack/src/dummy.d @@ -0,0 +1 @@ + diff --git a/test/issue672-upgrade-optional.sh b/test/issue672-upgrade-optional.sh new file mode 100755 index 0000000..15e07d2 --- /dev/null +++ b/test/issue672-upgrade-optional.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue672-upgrade-optional +rm -rf b/.dub +echo "{\"fileVersion\": 1,\"versions\": {\"dub\": \"1.0.0\"}}" > dub.selections.json +${DUB} upgrade + +if ! grep -c -e "\"dub\": \"1.1.0\"" dub.selections.json; then + die $LINENO 'Dependency not upgraded.' +fi diff --git a/test/issue672-upgrade-optional/.no_build b/test/issue672-upgrade-optional/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue672-upgrade-optional/.no_build diff --git a/test/issue672-upgrade-optional/dub.sdl b/test/issue672-upgrade-optional/dub.sdl new file mode 100644 index 0000000..9b142c3 --- /dev/null +++ b/test/issue672-upgrade-optional/dub.sdl @@ -0,0 +1,2 @@ +name "b" +dependency "dub" version=">=1.0.0 <=1.1.0" optional=true diff --git a/test/issue672-upgrade-optional/dub.selections.json b/test/issue672-upgrade-optional/dub.selections.json new file mode 100644 index 0000000..712a9f6 --- /dev/null +++ b/test/issue672-upgrade-optional/dub.selections.json @@ -0,0 +1,6 @@ +{ + "fileVersion": 1, + "versions": { + "dub": "1.1.0" + } +} diff --git a/test/issue674-concurrent-dub.sh b/test/issue674-concurrent-dub.sh new file mode 100755 index 0000000..d49bdd3 --- /dev/null +++ b/test/issue674-concurrent-dub.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +. 
$(dirname "${BASH_SOURCE[0]}")/common.sh + +TMPDIR=$(mktemp -d $(basename $0).XXXXXX) + +function cleanup { + rm -rf ${TMPDIR} +} +trap cleanup EXIT + +cd ${TMPDIR} && $DUB fetch --cache=local bloom & +pid1=$! +sleep 0.5 +cd ${TMPDIR} && $DUB fetch --cache=local bloom & +pid2=$! +wait $pid1 +wait $pid2 +[ -d ${TMPDIR}/bloom* ] diff --git a/test/issue686-multiple-march.sh b/test/issue686-multiple-march.sh new file mode 100755 index 0000000..24b84b7 --- /dev/null +++ b/test/issue686-multiple-march.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue686-multiple-march +${DUB} build --bare --force --compiler=${DC} -a x86_64 -v main 2>&1 | { ! grep -e '-m64 -m64' -c; } diff --git a/test/issue686-multiple-march/.no_build b/test/issue686-multiple-march/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue686-multiple-march/.no_build diff --git a/test/issue686-multiple-march/.no_run b/test/issue686-multiple-march/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue686-multiple-march/.no_run diff --git a/test/issue686-multiple-march/.no_test b/test/issue686-multiple-march/.no_test new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue686-multiple-march/.no_test diff --git a/test/issue686-multiple-march/a/dub.json b/test/issue686-multiple-march/a/dub.json new file mode 100644 index 0000000..341d126 --- /dev/null +++ b/test/issue686-multiple-march/a/dub.json @@ -0,0 +1,6 @@ +{ + "name": "a", + "dependencies": { + "b": {"path": "../b"} + } +} \ No newline at end of file diff --git a/test/issue686-multiple-march/a/source/a.d b/test/issue686-multiple-march/a/source/a.d new file mode 100644 index 0000000..1581659 --- /dev/null +++ b/test/issue686-multiple-march/a/source/a.d @@ -0,0 +1,5 @@ +module a; + +void afun() +{ +} diff --git a/test/issue686-multiple-march/b/dub.json b/test/issue686-multiple-march/b/dub.json new file mode 100644 index 
0000000..2824d72 --- /dev/null +++ b/test/issue686-multiple-march/b/dub.json @@ -0,0 +1,3 @@ +{ + "name": "b" +} \ No newline at end of file diff --git a/test/issue686-multiple-march/b/source/b.d b/test/issue686-multiple-march/b/source/b.d new file mode 100644 index 0000000..003a74e --- /dev/null +++ b/test/issue686-multiple-march/b/source/b.d @@ -0,0 +1,5 @@ +module b; + +void bfun() +{ +} diff --git a/test/issue686-multiple-march/main/dub.json b/test/issue686-multiple-march/main/dub.json new file mode 100644 index 0000000..0160e79 --- /dev/null +++ b/test/issue686-multiple-march/main/dub.json @@ -0,0 +1,7 @@ +{ + "name": "main", + "dependencies": { + "a": {"path": "../a"}, + "b": {"path": "../b"} + } +} \ No newline at end of file diff --git a/test/issue686-multiple-march/main/source/main.d b/test/issue686-multiple-march/main/source/main.d new file mode 100644 index 0000000..fba3ebb --- /dev/null +++ b/test/issue686-multiple-march/main/source/main.d @@ -0,0 +1,8 @@ +import a; +import b; + +void main() +{ + afun(); + bfun(); +} \ No newline at end of file diff --git a/test/issue754-path-selection-fail/a-1.0/dub.sdl b/test/issue754-path-selection-fail/a-1.0/dub.sdl new file mode 100644 index 0000000..7ff9fa1 --- /dev/null +++ b/test/issue754-path-selection-fail/a-1.0/dub.sdl @@ -0,0 +1,2 @@ +name "a" +version "1.0.0" diff --git a/test/issue754-path-selection-fail/a-1.0/source/a.d b/test/issue754-path-selection-fail/a-1.0/source/a.d new file mode 100644 index 0000000..0714d18 --- /dev/null +++ b/test/issue754-path-selection-fail/a-1.0/source/a.d @@ -0,0 +1,3 @@ +module a; + +void test() {} \ No newline at end of file diff --git a/test/issue754-path-selection-fail/a-2.0/dub.sdl b/test/issue754-path-selection-fail/a-2.0/dub.sdl new file mode 100644 index 0000000..1cb3694 --- /dev/null +++ b/test/issue754-path-selection-fail/a-2.0/dub.sdl @@ -0,0 +1,3 @@ +name "a" +version "2.0.0" + diff --git a/test/issue754-path-selection-fail/dub.sdl 
b/test/issue754-path-selection-fail/dub.sdl new file mode 100644 index 0000000..b36515c --- /dev/null +++ b/test/issue754-path-selection-fail/dub.sdl @@ -0,0 +1,2 @@ +name "test" +dependency "a" path="a-2.0" diff --git a/test/issue754-path-selection-fail/dub.selections.json b/test/issue754-path-selection-fail/dub.selections.json new file mode 100644 index 0000000..61e9539 --- /dev/null +++ b/test/issue754-path-selection-fail/dub.selections.json @@ -0,0 +1,6 @@ +{ + "fileVersion": 1, + "versions": { + "a": {"path": "a-1.0"} + } +} diff --git a/test/issue754-path-selection-fail/source/app.d b/test/issue754-path-selection-fail/source/app.d new file mode 100644 index 0000000..b248b89 --- /dev/null +++ b/test/issue754-path-selection-fail/source/app.d @@ -0,0 +1,6 @@ +import a; + +void main() +{ + test(); +} diff --git a/test/issue777-bogus-path-dependency/b/a.d b/test/issue777-bogus-path-dependency/b/a.d new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue777-bogus-path-dependency/b/a.d diff --git a/test/issue777-bogus-path-dependency/b/dub.sdl b/test/issue777-bogus-path-dependency/b/dub.sdl new file mode 100644 index 0000000..7aff103 --- /dev/null +++ b/test/issue777-bogus-path-dependency/b/dub.sdl @@ -0,0 +1,10 @@ +name "b" +targetType "none" + +configuration "a" { + dependency "c" version="*" +} + +configuration "b" { + dependency "c" path="../c-err" +} diff --git a/test/issue777-bogus-path-dependency/c-err/dub.sdl b/test/issue777-bogus-path-dependency/c-err/dub.sdl new file mode 100644 index 0000000..1ed791a --- /dev/null +++ b/test/issue777-bogus-path-dependency/c-err/dub.sdl @@ -0,0 +1 @@ +name "c" diff --git a/test/issue777-bogus-path-dependency/c-err/source/lib.d b/test/issue777-bogus-path-dependency/c-err/source/lib.d new file mode 100644 index 0000000..065f38b --- /dev/null +++ b/test/issue777-bogus-path-dependency/c-err/source/lib.d @@ -0,0 +1,6 @@ +module lib; + +void c() +{ + error +} diff --git 
a/test/issue777-bogus-path-dependency/c/dub.sdl b/test/issue777-bogus-path-dependency/c/dub.sdl new file mode 100644 index 0000000..1ed791a --- /dev/null +++ b/test/issue777-bogus-path-dependency/c/dub.sdl @@ -0,0 +1 @@ +name "c" diff --git a/test/issue777-bogus-path-dependency/c/source/lib.d b/test/issue777-bogus-path-dependency/c/source/lib.d new file mode 100644 index 0000000..6729fad --- /dev/null +++ b/test/issue777-bogus-path-dependency/c/source/lib.d @@ -0,0 +1,5 @@ +module lib; + +void c() +{ +} diff --git a/test/issue777-bogus-path-dependency/dub.sdl b/test/issue777-bogus-path-dependency/dub.sdl new file mode 100644 index 0000000..441587c --- /dev/null +++ b/test/issue777-bogus-path-dependency/dub.sdl @@ -0,0 +1,2 @@ +name "test" +dependency "b" path="b" diff --git a/test/issue777-bogus-path-dependency/dub.selections.json b/test/issue777-bogus-path-dependency/dub.selections.json new file mode 100644 index 0000000..a446d1f --- /dev/null +++ b/test/issue777-bogus-path-dependency/dub.selections.json @@ -0,0 +1,7 @@ +{ + "fileVersion": 1, + "versions": { + "b": {"path":"b"}, + "c": {"path":"c"} + } +} diff --git a/test/issue777-bogus-path-dependency/source/app.d b/test/issue777-bogus-path-dependency/source/app.d new file mode 100644 index 0000000..806dcdc --- /dev/null +++ b/test/issue777-bogus-path-dependency/source/app.d @@ -0,0 +1,6 @@ +import lib; + +void main() +{ + c(); +} diff --git a/test/issue782-gtkd-pkg-config.sh b/test/issue782-gtkd-pkg-config.sh new file mode 100755 index 0000000..a91d738 --- /dev/null +++ b/test/issue782-gtkd-pkg-config.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +if [ "${DC}" != "dmd" ]; then + echo "Skipping issue782-dtkd-pkg-config test for ${DC}..." 
+else + echo ${CURR_DIR-$(pwd)} + # the ${CURR_DIR-$(pwd)} allows running issue782-gtkd-pkg-config.sh stand-alone from the test directory + cd ${CURR_DIR-$(pwd)}/issue782-gtkd-pkg-config + rm -rf fake-gtkd/.dub + rm -f fake-gtkd/libfake-gtkd.so + rm -rf main/.dub + rm -f main/fake-gtkd-test + echo ${DUB} + cd fake-gtkd && ${DUB} build --compiler=${DC} + cd ../main + + # `run` needs to find the fake-gtkd shared library, so set LD_LIBRARY_PATH to where it is + export LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-}${LD_LIBRARY_PATH:+:}$PWD/../fake-gtkd + # pkg-config needs to find our .pc file which is in $PWD/../fake-gtkd/pkgconfig, so set PKG_CONFIG_PATH accordingly + export PKG_CONFIG_PATH=$PWD/../fake-gtkd/pkgconfig + ${DUB} run --force --compiler=${DC} + cd .. + rm -rf fake-gtkd/.dub + rm fake-gtkd/libfake-gtkd.so + rm -rf main/.dub + rm main/fake-gtkd-test +fi diff --git a/test/issue782-gtkd-pkg-config.sh.min_frontend b/test/issue782-gtkd-pkg-config.sh.min_frontend new file mode 100644 index 0000000..a7ad183 --- /dev/null +++ b/test/issue782-gtkd-pkg-config.sh.min_frontend @@ -0,0 +1 @@ +2.068 \ No newline at end of file diff --git a/test/issue782-gtkd-pkg-config/.no_build b/test/issue782-gtkd-pkg-config/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue782-gtkd-pkg-config/.no_build diff --git a/test/issue782-gtkd-pkg-config/.no_run b/test/issue782-gtkd-pkg-config/.no_run new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue782-gtkd-pkg-config/.no_run diff --git a/test/issue782-gtkd-pkg-config/.no_test b/test/issue782-gtkd-pkg-config/.no_test new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue782-gtkd-pkg-config/.no_test diff --git a/test/issue782-gtkd-pkg-config/fake-gtkd/dub.json b/test/issue782-gtkd-pkg-config/fake-gtkd/dub.json new file mode 100644 index 0000000..d2c4bcb --- /dev/null +++ b/test/issue782-gtkd-pkg-config/fake-gtkd/dub.json @@ -0,0 +1,5 @@ +{ + "name" : "fake-gtkd", + 
"targetType": "dynamicLibrary", + "description" : "Fake GtkD shared library for testing", +} diff --git a/test/issue782-gtkd-pkg-config/fake-gtkd/pkgconfig/fake-gtkd.pc b/test/issue782-gtkd-pkg-config/fake-gtkd/pkgconfig/fake-gtkd.pc new file mode 100644 index 0000000..654ac32 --- /dev/null +++ b/test/issue782-gtkd-pkg-config/fake-gtkd/pkgconfig/fake-gtkd.pc @@ -0,0 +1,12 @@ +prefix=../fake-gtkd +libdir=${prefix} +includedir=${prefix}/src + +Name: fake-gtkd +Description: Fake GtkD shared library for testing +Version: 1.0.0 +#Requires: phobos2 +# The "-L-defaultlib=libphobos2.so" and "-defaultlib=libphobos2.so" should both end up on the compiler (at link stage) invocation as "-defaultlib=libphobos2.so" +# For this test, it doesn't hurt that they appear twice on the cmd line... +Libs: -L-L${libdir} -L-l:libfake-gtkd.so -L-l:libdl.so.2 -pthread -L-defaultlib=libphobos2.so -defaultlib=libphobos2.so +Cflags: -I${includedir} diff --git a/test/issue782-gtkd-pkg-config/fake-gtkd/src/fakegtkd.d b/test/issue782-gtkd-pkg-config/fake-gtkd/src/fakegtkd.d new file mode 100644 index 0000000..0990da6 --- /dev/null +++ b/test/issue782-gtkd-pkg-config/fake-gtkd/src/fakegtkd.d @@ -0,0 +1,3 @@ +extern (C) string test_function() { + return "Fake GtkD shared library"; +} \ No newline at end of file diff --git a/test/issue782-gtkd-pkg-config/fake-gtkd/src/lib.d b/test/issue782-gtkd-pkg-config/fake-gtkd/src/lib.d new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue782-gtkd-pkg-config/fake-gtkd/src/lib.d diff --git a/test/issue782-gtkd-pkg-config/main/dub.json b/test/issue782-gtkd-pkg-config/main/dub.json new file mode 100644 index 0000000..e45cbf1 --- /dev/null +++ b/test/issue782-gtkd-pkg-config/main/dub.json @@ -0,0 +1,6 @@ +{ + "name" : "fake-gtkd-test", + "description" : "Small test executable calling a Fake GtkD shared library function for testing", + "importPaths" : ["../fake-gtkd/src"], + "libs" : ["fake-gtkd"] +} diff --git 
a/test/issue782-gtkd-pkg-config/main/src/app.d b/test/issue782-gtkd-pkg-config/main/src/app.d new file mode 100644 index 0000000..08adda8 --- /dev/null +++ b/test/issue782-gtkd-pkg-config/main/src/app.d @@ -0,0 +1,7 @@ +import std.stdio; +import fakegtkd; + +int main() { + writeln(test_function()); + return 0; +} \ No newline at end of file diff --git a/test/issue813-fixed-dependency.sh b/test/issue813-fixed-dependency.sh new file mode 100755 index 0000000..bddf078 --- /dev/null +++ b/test/issue813-fixed-dependency.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue813-fixed-dependency +rm -rf main/.dub +rm -rf sub/.dub +rm -rf sub/sub/.dub +${DUB} build --bare --compiler=${DC} main diff --git a/test/issue813-fixed-dependency/.no_build b/test/issue813-fixed-dependency/.no_build new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/issue813-fixed-dependency/.no_build @@ -0,0 +1 @@ + diff --git a/test/issue813-fixed-dependency/main/dub.sdl b/test/issue813-fixed-dependency/main/dub.sdl new file mode 100644 index 0000000..2faef96 --- /dev/null +++ b/test/issue813-fixed-dependency/main/dub.sdl @@ -0,0 +1,3 @@ +name "main" +targetType "executable" +dependency "sub" version="*" diff --git a/test/issue813-fixed-dependency/main/dub.selections.json b/test/issue813-fixed-dependency/main/dub.selections.json new file mode 100644 index 0000000..8443eed --- /dev/null +++ b/test/issue813-fixed-dependency/main/dub.selections.json @@ -0,0 +1,6 @@ +{ + "fileVersion": 1, + "versions": { + "sub": {"path": "../sub"} + } +} diff --git a/test/issue813-fixed-dependency/main/src/app.d b/test/issue813-fixed-dependency/main/src/app.d new file mode 100644 index 0000000..0b416f0 --- /dev/null +++ b/test/issue813-fixed-dependency/main/src/app.d @@ -0,0 +1,6 @@ +import sub.test; + +void main() +{ + foo(); +} diff --git a/test/issue813-fixed-dependency/sub/dub.sdl b/test/issue813-fixed-dependency/sub/dub.sdl new file mode 
100644 index 0000000..f8bdac6 --- /dev/null +++ b/test/issue813-fixed-dependency/sub/dub.sdl @@ -0,0 +1,3 @@ +name "sub" +subPackage "sub/" +dependency ":sub" version="*" diff --git a/test/issue813-fixed-dependency/sub/sub/dub.sdl b/test/issue813-fixed-dependency/sub/sub/dub.sdl new file mode 100644 index 0000000..a932e26 --- /dev/null +++ b/test/issue813-fixed-dependency/sub/sub/dub.sdl @@ -0,0 +1 @@ +name "sub" diff --git a/test/issue813-fixed-dependency/sub/sub/src/sub/test.d b/test/issue813-fixed-dependency/sub/sub/src/sub/test.d new file mode 100644 index 0000000..fe5bb2c --- /dev/null +++ b/test/issue813-fixed-dependency/sub/sub/src/sub/test.d @@ -0,0 +1,6 @@ +module sub.test; + +void foo() +{ + +} \ No newline at end of file diff --git a/test/issue813-pure-sub-dependency.sh b/test/issue813-pure-sub-dependency.sh new file mode 100755 index 0000000..ec2291e --- /dev/null +++ b/test/issue813-pure-sub-dependency.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue813-pure-sub-dependency +rm -rf main/.dub +rm -rf sub/.dub +rm -rf sub/sub/.dub +rm -f main/dub.selections.json +${DUB} build --bare --compiler=${DC} main diff --git a/test/issue813-pure-sub-dependency/.no_build b/test/issue813-pure-sub-dependency/.no_build new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/issue813-pure-sub-dependency/.no_build @@ -0,0 +1 @@ + diff --git a/test/issue813-pure-sub-dependency/main/dub.sdl b/test/issue813-pure-sub-dependency/main/dub.sdl new file mode 100644 index 0000000..79f7d71 --- /dev/null +++ b/test/issue813-pure-sub-dependency/main/dub.sdl @@ -0,0 +1,3 @@ +name "main" +targetType "executable" +dependency "sub:sub" version="*" diff --git a/test/issue813-pure-sub-dependency/main/src/app.d b/test/issue813-pure-sub-dependency/main/src/app.d new file mode 100644 index 0000000..0b416f0 --- /dev/null +++ b/test/issue813-pure-sub-dependency/main/src/app.d @@ -0,0 +1,6 @@ +import sub.test; + +void 
main() +{ + foo(); +} diff --git a/test/issue813-pure-sub-dependency/sub/dub.sdl b/test/issue813-pure-sub-dependency/sub/dub.sdl new file mode 100644 index 0000000..f8bdac6 --- /dev/null +++ b/test/issue813-pure-sub-dependency/sub/dub.sdl @@ -0,0 +1,3 @@ +name "sub" +subPackage "sub/" +dependency ":sub" version="*" diff --git a/test/issue813-pure-sub-dependency/sub/sub/dub.sdl b/test/issue813-pure-sub-dependency/sub/sub/dub.sdl new file mode 100644 index 0000000..a932e26 --- /dev/null +++ b/test/issue813-pure-sub-dependency/sub/sub/dub.sdl @@ -0,0 +1 @@ +name "sub" diff --git a/test/issue813-pure-sub-dependency/sub/sub/src/sub/test.d b/test/issue813-pure-sub-dependency/sub/sub/src/sub/test.d new file mode 100644 index 0000000..fe5bb2c --- /dev/null +++ b/test/issue813-pure-sub-dependency/sub/sub/src/sub/test.d @@ -0,0 +1,6 @@ +module sub.test; + +void foo() +{ + +} \ No newline at end of file diff --git a/test/issue820-extra-fields-after-convert.sh b/test/issue820-extra-fields-after-convert.sh new file mode 100755 index 0000000..5e81e35 --- /dev/null +++ b/test/issue820-extra-fields-after-convert.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +cd ${CURR_DIR}/1-exec-simple + +cp dub.json dub.json.bak +${DUB} convert -f sdl + +if grep -qe "version\|sourcePaths\|importPaths\|configuration" dub.sdl > /dev/null; then + mv dub.json.bak dub.json + rm dub.sdl + die $LINENO 'Conversion added extra fields.' +fi + +mv dub.json.bak dub.json +rm dub.sdl diff --git a/test/issue884-init-defer-file-creation.sh b/test/issue884-init-defer-file-creation.sh new file mode 100755 index 0000000..b71b268 --- /dev/null +++ b/test/issue884-init-defer-file-creation.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +TMPDIR=${CURR_DIR}tmppack +echo $TMPDIR + +mkdir ${TMPDIR} +cd ${TMPDIR} + +# kill dub init during interactive mode +${DUB} init < /dev/stdin & +sleep 1 +kill $! 
+ +# ensure that no files are left behind +NFILES_PLUS_ONE=`ls -la | wc -l` + +cd ${CURR_DIR} +rm -r ${TMPDIR} + +# ignore sum + "." + ".." +if [ ${NFILES_PLUS_ONE} -gt 3 ]; then + die $LINENO 'Aborted dub init left spurious files around.' +fi diff --git a/test/issue895-local-configuration.sh b/test/issue895-local-configuration.sh new file mode 100755 index 0000000..d00bb62 --- /dev/null +++ b/test/issue895-local-configuration.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash +. $(dirname "${BASH_SOURCE[0]}")/common.sh + +cd ${CURR_DIR} +mkdir ../etc +mkdir ../etc/dub +echo "{\"defaultCompiler\": \"foo\"}" > ../etc/dub/settings.json + +if [ -e /var/lib/dub/settings.json ]; then + die $LINENO 'Found existing system wide DUB configuration. Aborting.' +fi + +if [ -e ~/.dub/settings.json ]; then + die $LINENO 'Found existing user wide DUB configuration. Aborting.' +fi + +if ! { ${DUB} describe --single issue103-single-file-package.d 2>&1 || true; } | grep -cF 'Unknown compiler: foo'; then + rm -r ../etc + die $LINENO 'DUB did not find the local configuration' +fi + +rm -r ../etc diff --git a/test/issue923-subpackage-deps.sh b/test/issue923-subpackage-deps.sh new file mode 100755 index 0000000..f3be79c --- /dev/null +++ b/test/issue923-subpackage-deps.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue923-subpackage-deps +rm -rf main/.dub +rm -rf a/.dub +rm -rf b/.dub +rm -f main/dub.selections.json +${DUB} build --bare --compiler=${DC} main + + +if ! grep -c -e \"b\" main/dub.selections.json; then + die $LINENO 'Dependency b not resolved.' 
+fi diff --git a/test/issue923-subpackage-deps/.no_build b/test/issue923-subpackage-deps/.no_build new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/issue923-subpackage-deps/.no_build @@ -0,0 +1 @@ + diff --git a/test/issue923-subpackage-deps/a/dub.sdl b/test/issue923-subpackage-deps/a/dub.sdl new file mode 100644 index 0000000..259eecf --- /dev/null +++ b/test/issue923-subpackage-deps/a/dub.sdl @@ -0,0 +1,13 @@ +name "a" + +dependency ":foo" version="*" + +subPackage { + name "foo" + dependency "b" version="*" +} + +subPackage { + name "bar" + dependency "a" version="*" +} \ No newline at end of file diff --git a/test/issue923-subpackage-deps/b/dub.sdl b/test/issue923-subpackage-deps/b/dub.sdl new file mode 100644 index 0000000..c37c6fc --- /dev/null +++ b/test/issue923-subpackage-deps/b/dub.sdl @@ -0,0 +1 @@ +name "b" \ No newline at end of file diff --git a/test/issue923-subpackage-deps/b/source/b.d b/test/issue923-subpackage-deps/b/source/b.d new file mode 100644 index 0000000..5b09673 --- /dev/null +++ b/test/issue923-subpackage-deps/b/source/b.d @@ -0,0 +1,5 @@ +module b; + +void test() +{ +} diff --git a/test/issue923-subpackage-deps/main/dub.sdl b/test/issue923-subpackage-deps/main/dub.sdl new file mode 100644 index 0000000..42865df --- /dev/null +++ b/test/issue923-subpackage-deps/main/dub.sdl @@ -0,0 +1,2 @@ +name "main" +dependency "a:bar" version="*" diff --git a/test/issue923-subpackage-deps/main/source/app.d b/test/issue923-subpackage-deps/main/source/app.d new file mode 100644 index 0000000..786e416 --- /dev/null +++ b/test/issue923-subpackage-deps/main/source/app.d @@ -0,0 +1,6 @@ +import b; + +void main() +{ + test(); +} diff --git a/test/issue934-path-dep.sh b/test/issue934-path-dep.sh new file mode 100755 index 0000000..387521b --- /dev/null +++ b/test/issue934-path-dep.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +. 
$(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue934-path-dep +rm -rf main/.dub +rm -rf a/.dub +rm -rf b/.dub +rm -f main/dub.selections.json +cd main +${DUB} build --compiler=${DC} diff --git a/test/issue934-path-dep/.no_build b/test/issue934-path-dep/.no_build new file mode 100644 index 0000000..8d1c8b6 --- /dev/null +++ b/test/issue934-path-dep/.no_build @@ -0,0 +1 @@ + diff --git a/test/issue934-path-dep/a/dub.sdl b/test/issue934-path-dep/a/dub.sdl new file mode 100644 index 0000000..278a88a --- /dev/null +++ b/test/issue934-path-dep/a/dub.sdl @@ -0,0 +1 @@ +name "a" diff --git a/test/issue934-path-dep/b/dub.sdl b/test/issue934-path-dep/b/dub.sdl new file mode 100644 index 0000000..58a1812 --- /dev/null +++ b/test/issue934-path-dep/b/dub.sdl @@ -0,0 +1,2 @@ +name "b" +dependency "a" path="../a" \ No newline at end of file diff --git a/test/issue934-path-dep/b/source/b.d b/test/issue934-path-dep/b/source/b.d new file mode 100644 index 0000000..5b09673 --- /dev/null +++ b/test/issue934-path-dep/b/source/b.d @@ -0,0 +1,5 @@ +module b; + +void test() +{ +} diff --git a/test/issue934-path-dep/main/dub.sdl b/test/issue934-path-dep/main/dub.sdl new file mode 100644 index 0000000..e2d83f8 --- /dev/null +++ b/test/issue934-path-dep/main/dub.sdl @@ -0,0 +1,3 @@ +name "main" +dependency "a" path="../a" +dependency "b" path="../b" diff --git a/test/issue934-path-dep/main/source/app.d b/test/issue934-path-dep/main/source/app.d new file mode 100644 index 0000000..786e416 --- /dev/null +++ b/test/issue934-path-dep/main/source/app.d @@ -0,0 +1,6 @@ +import b; + +void main() +{ + test(); +} diff --git a/test/issue959-path-based-subpack-dep/dub.sdl b/test/issue959-path-based-subpack-dep/dub.sdl new file mode 100644 index 0000000..727af60 --- /dev/null +++ b/test/issue959-path-based-subpack-dep/dub.sdl @@ -0,0 +1,6 @@ +name "bar" +mainSourceFile "main.d" +targetType "executable" + +dependency "foo" path="foo" +dependency "foo:baz" path="foo" diff --git 
a/test/issue959-path-based-subpack-dep/foo/dub.sdl b/test/issue959-path-based-subpack-dep/foo/dub.sdl new file mode 100644 index 0000000..8266250 --- /dev/null +++ b/test/issue959-path-based-subpack-dep/foo/dub.sdl @@ -0,0 +1,8 @@ +name "foo" +targetType "sourceLibrary" + +subPackage { + name "baz" + targetType "sourceLibrary" + dependency "foo" path="." +} diff --git a/test/issue959-path-based-subpack-dep/main.d b/test/issue959-path-based-subpack-dep/main.d new file mode 100644 index 0000000..05a37e9 --- /dev/null +++ b/test/issue959-path-based-subpack-dep/main.d @@ -0,0 +1 @@ +void main() {} \ No newline at end of file diff --git a/test/issue990-download-optional-selected.sh b/test/issue990-download-optional-selected.sh new file mode 100755 index 0000000..8b548a3 --- /dev/null +++ b/test/issue990-download-optional-selected.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +. $(dirname "${BASH_SOURCE[0]}")/common.sh +cd ${CURR_DIR}/issue990-download-optional-selected +rm -rf b/.dub +${DUB} remove gitcompatibledubpackage -n --version=* 2>/dev/null || true +${DUB} run diff --git a/test/issue990-download-optional-selected/.no_build b/test/issue990-download-optional-selected/.no_build new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test/issue990-download-optional-selected/.no_build diff --git a/test/issue990-download-optional-selected/dub.sdl b/test/issue990-download-optional-selected/dub.sdl new file mode 100644 index 0000000..79e0e11 --- /dev/null +++ b/test/issue990-download-optional-selected/dub.sdl @@ -0,0 +1,2 @@ +name "b" +dependency "gitcompatibledubpackage" version="1.0.2" optional=true diff --git a/test/issue990-download-optional-selected/dub.selections.json b/test/issue990-download-optional-selected/dub.selections.json new file mode 100644 index 0000000..7c04270 --- /dev/null +++ b/test/issue990-download-optional-selected/dub.selections.json @@ -0,0 +1,6 @@ +{ + "fileVersion": 1, + "versions": { + "gitcompatibledubpackage": "1.0.2" + } +} diff --git 
a/test/issue990-download-optional-selected/source/app.d b/test/issue990-download-optional-selected/source/app.d new file mode 100644 index 0000000..49eeacb --- /dev/null +++ b/test/issue990-download-optional-selected/source/app.d @@ -0,0 +1,6 @@ +import gitcompatibledubpackage.subdir.file; + +void main() +{ + assert(!hasTheWorldExploded); +} diff --git a/test/run-unittest.sh b/test/run-unittest.sh index a308ef4..312c9ea 100755 --- a/test/run-unittest.sh +++ b/test/run-unittest.sh @@ -1,53 +1,71 @@ -#!/bin/bash +#!/usr/bin/env bash -function die() { - echo -e 1>&2 "\033[0;31m"$@"\033[0m" - exit 1 -} +. $(dirname "${BASH_SOURCE[0]}")/common.sh function log() { echo -e "\033[0;33m[INFO] "$@"\033[0m" } -if [ -z ${DUB} ]; then - die 'Error: Variable $DUB must be defined to run the tests.' +function logError() { + echo -e 1>&2 "\033[0;31m[ERROR] "$@"\033[0m" + any_errors=1 +} + +function die() { + logError "$@" + exit 1 +} + +export -f log +export -f die + +if [ -z ${DUB:-} ]; then + die 'Variable $DUB must be defined to run the tests.' fi -if [ -z ${COMPILER} ]; then - log '$COMPILER not defined, assuming dmd...' - COMPILER=dmd +if [ -z ${DC:-} ]; then + log '$DC not defined, assuming dmd...' + DC=dmd fi +DC_BIN=$(basename "$DC") CURR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd) for script in $(ls $CURR_DIR/*.sh); do - if [ "$script" = "$(readlink -f ${BASH_SOURCE[0]})" ]; then continue; fi + if [ "$script" = "$(readlink -f ${BASH_SOURCE[0]})" ] || [ "$(basename $script)" = "common.sh" ]; then continue; fi + if [ -e $script.min_frontend ] && [ ! -z ${FRONTEND:-} -a ${FRONTEND:-} \< $(cat $script.min_frontend) ]; then continue; fi log "Running $script..." - $script || die "Script failure." + DUB=$DUB DC=$DC CURR_DIR="$CURR_DIR" $script || logError "Script failure." done for pack in $(ls -d $CURR_DIR/*/); do + if [ -e $pack/.min_frontend ] && [ ! -z "$FRONTEND" -a "$FRONTEND" \< $(cat $pack/.min_frontend) ]; then continue; fi + # First we build the packages - if [ ! 
-e $pack/.no_build ]; then # For sourceLibrary + if [ ! -e $pack/.no_build ] && [ ! -e $pack/.no_build_$DC_BIN ]; then # For sourceLibrary + build=1 if [ -e $pack/.fail_build ]; then log "Building $pack, expected failure..." - $DUB build --force --root=$pack --compiler=$COMPILER 2>/dev/null && die "Error: Failure expected, but build passed." + $DUB build --force --root=$pack --compiler=$DC 2>/dev/null && logError "Error: Failure expected, but build passed." else log "Building $pack..." - $DUB build --force --root=$pack --compiler=$COMPILER || die "Build failure." + $DUB build --force --root=$pack --compiler=$DC || logError "Build failure." fi + else + build=0 fi - # We run the ones that are supposed to be runned - if [ ! -e $pack/.no_build ] && [ ! -e $pack/.no_run ]; then + # We run the ones that are supposed to be run + if [ $build -eq 1 ] && [ ! -e $pack/.no_run ] && [ ! -e $pack/.no_run_$DC_BIN ]; then log "Running $pack..." - $DUB run --force --root=$pack --compiler=$COMPILER || die "Run failure." + $DUB run --force --root=$pack --compiler=$DC || logError "Run failure." fi # Finally, the unittest part - if [ ! -e $pack/.no_build ] && [ ! -e $pack/.no_test ]; then + if [ $build -eq 1 ] && [ ! -e $pack/.no_test ] && [ ! -e $pack/.no_test_$DC_BIN ]; then log "Testing $pack..." - $DUB test --force --root=$pack --compiler=$COMPILER || die "Test failure." + $DUB test --force --root=$pack --compiler=$DC || logError "Test failure." 
fi - done + +exit ${any_errors:-0} diff --git a/test/sdl-package-simple/dub.sdl b/test/sdl-package-simple/dub.sdl new file mode 100644 index 0000000..78d50b6 --- /dev/null +++ b/test/sdl-package-simple/dub.sdl @@ -0,0 +1,2 @@ +name "exec-simple"; +targetType "executable"; diff --git a/test/sdl-package-simple/source/app.d b/test/sdl-package-simple/source/app.d new file mode 100644 index 0000000..dbab869 --- /dev/null +++ b/test/sdl-package-simple/source/app.d @@ -0,0 +1,6 @@ +import std.stdio; + +void main() +{ + writeln(__FUNCTION__); +} diff --git a/test/single-file-sdl-default-name.d b/test/single-file-sdl-default-name.d new file mode 100644 index 0000000..e7e7bef --- /dev/null +++ b/test/single-file-sdl-default-name.d @@ -0,0 +1,10 @@ +/++dub.sdl: +dependency "sourcelib-simple" path="1-sourceLib-simple" ++/ +module single; + +void main(string[] args) +{ + import sourcelib.app; + entry(); +} diff --git a/test/single-file-sdl-default-name.sh b/test/single-file-sdl-default-name.sh new file mode 100755 index 0000000..1c61540 --- /dev/null +++ b/test/single-file-sdl-default-name.sh @@ -0,0 +1,11 @@ +#!/bin/sh +set -e +cd ${CURR_DIR} +rm -f single-file-sdl-default-name + +${DUB} run --single single-file-sdl-default-name.d --compiler=${DC} +if [ ! -f single-file-sdl-default-name ]; then + echo "Normal invocation did not produce a binary in the current directory" + exit 1 +fi +rm single-file-sdl-default-name diff --git a/test/test-version-opt.sh b/test/test-version-opt.sh index 2931868..3abf31b 100755 --- a/test/test-version-opt.sh +++ b/test/test-version-opt.sh @@ -1,3 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash +. 
$(dirname "${BASH_SOURCE[0]}")/common.sh $DUB --version | grep -qF 'DUB version' diff --git a/travis-ci.sh b/travis-ci.sh new file mode 100755 index 0000000..7005767 --- /dev/null +++ b/travis-ci.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +set -v -e -o pipefail + +if [ -z "$FRONTEND" -o "$FRONTEND" \> 2.067.z ]; then + vibe_ver=$(jq -r '.versions | .["vibe-d"]' < dub.selections.json) + dub fetch vibe-d --version=$vibe_ver # get optional dependency + dub test --compiler=${DC} -c library-nonet +fi + +if [ "$COVERAGE" = true ]; then + # library-nonet fails to build with coverage (Issue 13742) + dub test --compiler=${DC} -b unittest-cov + ./build.sh -cov +else + ./build.sh +fi +DUB=`pwd`/bin/dub DC=${DC} test/run-unittest.sh + +if [ "$COVERAGE" = true ]; then + dub fetch doveralls + dub run doveralls --compiler=${DC} +fi + +# check for trailing whitespace (needs to be done only once per build) +if [ "$COVERAGE" = true ]; then + find . -type f -name '*.d' -exec grep -Hn "[[:blank:]]$" {} \; +fi