aboutsummaryrefslogtreecommitdiff
path: root/libbpkg/manifest.cxx
diff options
context:
space:
mode:
Diffstat (limited to 'libbpkg/manifest.cxx')
-rw-r--r--libbpkg/manifest.cxx4436
1 files changed, 3918 insertions, 518 deletions
diff --git a/libbpkg/manifest.cxx b/libbpkg/manifest.cxx
index 7de5f7e..bd69b85 100644
--- a/libbpkg/manifest.cxx
+++ b/libbpkg/manifest.cxx
@@ -4,24 +4,32 @@
#include <libbpkg/manifest.hxx>
#include <string>
+#include <limits>
#include <ostream>
#include <sstream>
#include <cassert>
-#include <cstring> // strncmp(), strcmp()
-#include <utility> // move()
-#include <cstdint> // uint*_t, UINT16_MAX
-#include <algorithm> // find(), find_if_not(), find_first_of(), replace()
-#include <stdexcept> // invalid_argument
-
-#include <libbutl/url.mxx>
-#include <libbutl/path.mxx>
-#include <libbutl/base64.mxx>
-#include <libbutl/utility.mxx> // icasecmp(), lcase(), alnum(),
+#include <cstdlib> // strtoull()
+#include <cstring> // strncmp(), strcmp(), strchr(), strcspn()
+#include <utility> // move()
+#include <cstdint> // uint*_t
+#include <algorithm> // find(), find_if(), find_first_of(), replace()
+#include <stdexcept> // invalid_argument
+#include <type_traits> // remove_reference
+
+#include <libbutl/url.hxx>
+#include <libbutl/path.hxx>
+#include <libbutl/utf8.hxx>
+#include <libbutl/base64.hxx>
+#include <libbutl/utility.hxx> // icasecmp(), lcase(), alnum(),
// digit(), xdigit(), next_word()
-#include <libbutl/small-vector.mxx>
-#include <libbutl/manifest-parser.mxx>
-#include <libbutl/manifest-serializer.mxx>
-#include <libbutl/standard-version.mxx>
+#include <libbutl/filesystem.hxx> // dir_exist()
+#include <libbutl/small-vector.hxx>
+#include <libbutl/char-scanner.hxx>
+#include <libbutl/manifest-parser.hxx>
+#include <libbutl/manifest-serializer.hxx>
+#include <libbutl/standard-version.hxx>
+
+#include <libbpkg/buildfile-scanner.hxx>
using namespace std;
using namespace butl;
@@ -171,12 +179,12 @@ namespace bpkg
canonical_upstream (
data_type (upstream.c_str (),
data_type::parse::upstream,
- false /* fold_zero_revision */).
+ none).
canonical_upstream),
canonical_release (
data_type (release ? release->c_str () : nullptr,
data_type::parse::release,
- false /* fold_zero_revision */).
+ none).
canonical_release)
{
// Check members constrains.
@@ -253,9 +261,12 @@ namespace bpkg
}
version::data_type::
- data_type (const char* v, parse pr, bool fold_zero_rev)
+ data_type (const char* v, parse pr, version::flags fl)
{
- if (fold_zero_rev)
+ if ((fl & version::fold_zero_revision) != 0)
+ assert (pr == parse::full);
+
+ if ((fl & version::allow_iteration) != 0)
assert (pr == parse::full);
// Otherwise compiler gets confused with string() member.
@@ -270,32 +281,75 @@ namespace bpkg
return;
}
- assert (v != nullptr);
-
- optional<uint16_t> ep;
-
auto bad_arg = [](const string& d) {throw invalid_argument (d);};
- auto uint16 = [&bad_arg](const string& s, const char* what) -> uint16_t
+ auto parse_uint = [&bad_arg](const string& s, auto& r, const char* what)
{
- try
- {
- uint64_t v (stoull (s));
+ using type = typename remove_reference<decltype (r)>::type;
- if (v <= UINT16_MAX) // From <cstdint>.
- return static_cast<uint16_t> (v);
- }
- catch (const std::exception&)
+ if (!s.empty () && s[0] != '-' && s[0] != '+') // strtoull() allows these.
{
- // Fall through.
+ const char* b (s.c_str ());
+ char* e (nullptr);
+ errno = 0; // We must clear it according to POSIX.
+ uint64_t v (strtoull (b, &e, 10)); // Can't throw.
+
+ if (errno != ERANGE &&
+ e == b + s.size () &&
+ v <= numeric_limits<type>::max ())
+ {
+ r = static_cast<type> (v);
+ return;
+ }
}
- bad_arg (string (what) + " should be 2-byte unsigned integer");
+ bad_arg (string (what) + " should be " +
+ std::to_string (sizeof (type)) + "-byte unsigned integer");
+ };
- assert (false); // Can't be here.
- return 0;
+ auto parse_uint16 = [&parse_uint](const string& s, const char* what)
+ {
+ uint16_t r;
+ parse_uint (s, r, what);
+ return r;
+ };
+
+ auto parse_uint32 = [&parse_uint](const string& s, const char* what)
+ {
+ uint32_t r;
+ parse_uint (s, r, what);
+ return r;
};
+ assert (v != nullptr);
+
+ // Parse the iteration, if allowed.
+ //
+ // Note that allowing iteration is not very common, so let's handle it in
+ // an ad hoc way not to complicate the subsequent parsing.
+ //
+ string storage;
+ if (pr == parse::full)
+ {
+ iteration = 0;
+
+ // Note that if not allowed but the iteration is present, then the below
+ // version parsing code will fail with appropriate diagnostics.
+ //
+ if ((fl & version::allow_iteration) != 0)
+ {
+ if (const char* p = strchr (v, '#'))
+ {
+ iteration = parse_uint32 (p + 1, "iteration");
+
+ storage.assign (v, p - v);
+ v = storage.c_str ();
+ }
+ }
+ }
+
+ optional<uint16_t> ep;
+
enum class mode {epoch, upstream, release, revision};
mode m (pr == parse::full
? (v[0] == '+'
@@ -350,7 +404,7 @@ namespace bpkg
if (lnn >= cb) // Contains non-digits.
bad_arg ("epoch should be 2-byte unsigned integer");
- ep = uint16 (string (cb, p), "epoch");
+ ep = parse_uint16 (string (cb, p), "epoch");
}
else
canon_part->add (cb, p, lnn < cb);
@@ -423,9 +477,9 @@ namespace bpkg
if (lnn >= cb) // Contains non-digits.
bad_arg ("revision should be 2-byte unsigned integer");
- std::uint16_t rev (uint16 (cb, "revision"));
+ uint16_t rev (parse_uint16 (cb, "revision"));
- if (rev != 0 || !fold_zero_rev)
+ if (rev != 0 || (fl & fold_zero_revision) == 0)
revision = rev;
}
else if (cb != p)
@@ -512,7 +566,7 @@ namespace bpkg
}
version& version::
- operator= (version&& v)
+ operator= (version&& v) noexcept
{
if (this != &v)
{
@@ -570,7 +624,8 @@ namespace bpkg
}
text_file::
- text_file (text_file&& f): file (f.file), comment (move (f.comment))
+ text_file (text_file&& f) noexcept
+ : file (f.file), comment (move (f.comment))
{
if (file)
new (&path) path_type (move (f.path));
@@ -588,12 +643,12 @@ namespace bpkg
}
text_file& text_file::
- operator= (text_file&& f)
+ operator= (text_file&& f) noexcept
{
if (this != &f)
{
this->~text_file ();
- new (this) text_file (move (f)); // Assume noexcept move-construction.
+ new (this) text_file (move (f)); // Rely on noexcept move-construction.
}
return *this;
}
@@ -606,10 +661,138 @@ namespace bpkg
return *this;
}
- // url
+ // text_type
+ //
+ string
+ to_string (text_type t)
+ {
+ switch (t)
+ {
+ case text_type::plain: return "text/plain";
+ case text_type::github_mark: return "text/markdown;variant=GFM";
+ case text_type::common_mark: return "text/markdown;variant=CommonMark";
+ }
+
+ assert (false); // Can't be here.
+ return string ();
+ }
+
+ optional<text_type>
+ to_text_type (const string& t)
+ {
+ auto bad_type = [] (const string& d) {throw invalid_argument (d);};
+
+ // Parse the media type representation (see RFC2045 for details) into the
+ // type/subtype value and the parameter list. Note: we don't support
+ // parameter quoting and comments for simplicity.
+ //
+ size_t p (t.find (';'));
+ const string& tp (p != string::npos ? trim (string (t, 0, p)) : t);
+
+ small_vector<pair<string, string>, 1> ps;
+
+ while (p != string::npos)
+ {
+ // Extract parameter name.
+ //
+ size_t b (p + 1);
+ p = t.find ('=', b);
+
+ if (p == string::npos)
+ bad_type ("missing '='");
+
+ string n (trim (string (t, b, p - b)));
+
+ // Extract parameter value.
+ //
+ b = p + 1;
+ p = t.find (';', b);
+
+ string v (trim (string (t,
+ b,
+ p != string::npos ? p - b : string::npos)));
+
+ ps.emplace_back (move (n), move (v));
+ }
+
+ // Calculate the resulting text type, failing on unrecognized media type,
+ // unexpected parameter name or value.
+ //
+ // Note that type, subtype, and parameter names are matched
+ // case-insensitively.
+ //
+ optional<text_type> r;
+
+ // Currently only the plain and markdown text types are allowed. Later we
+ // can potentially introduce some other text types.
+ //
+ if (icasecmp (tp, "text/plain") == 0)
+ {
+ // Currently, we don't expect parameters for plain text. Later we can
+ // potentially introduce some plain text variants.
+ //
+ if (ps.empty ())
+ r = text_type::plain;
+ }
+ else if (icasecmp (tp, "text/markdown") == 0)
+ {
+ // Currently, a single optional variant parameter with the two possible
+ // values is allowed for markdown. Later we can potentially introduce
+ // some other markdown variants.
+ //
+ if (ps.empty () ||
+ (ps.size () == 1 && icasecmp (ps[0].first, "variant") == 0))
+ {
+ // Note that markdown variants are matched case-insensitively (see
+ // RFC7763 for details).
+ //
+ string v;
+ if (ps.empty () || icasecmp (v = move (ps[0].second), "GFM") == 0)
+ r = text_type::github_mark;
+ else if (icasecmp (v, "CommonMark") == 0)
+ r = text_type::common_mark;
+ }
+ }
+ else if (icasecmp (tp, "text/", 5) != 0)
+ bad_type ("text type expected");
+
+ return r;
+ }
+
+ // typed_text_file
+ //
+ optional<text_type> typed_text_file::
+ effective_type (bool iu) const
+ {
+ optional<text_type> r;
+
+ if (type)
+ {
+ r = to_text_type (*type);
+ }
+ else if (file)
+ {
+ string ext (path.extension ());
+ if (ext.empty () || icasecmp (ext, "txt") == 0)
+ r = text_type::plain;
+ else if (icasecmp (ext, "md") == 0 || icasecmp (ext, "markdown") == 0)
+ r = text_type::github_mark;
+ }
+ else
+ r = text_type::plain;
+
+ if (!r && !iu)
+ throw invalid_argument ("unknown text type");
+
+ return r;
+ }
+
+ // manifest_url
//
- url::
- url (const std::string& u, std::string c): butl::url (u), comment (move (c))
+ manifest_url::
+ manifest_url (const std::string& u, std::string c)
+ : url (u),
+ comment (move (c))
{
if (rootless)
throw invalid_argument ("rootless URL");
@@ -655,7 +838,7 @@ namespace bpkg
if (mnv != "$")
try
{
- min_version = version (mnv, false /* fold_zero_revision */);
+ min_version = version (mnv, version::none);
}
catch (const invalid_argument& e)
{
@@ -682,7 +865,7 @@ namespace bpkg
if (mxv != "$")
try
{
- max_version = version (mxv, false /* fold_zero_revision */);
+ max_version = version (mxv, version::none);
}
catch (const invalid_argument& e)
{
@@ -786,7 +969,7 @@ namespace bpkg
// version.
//
if (vs != "$")
- v = version (vs, false /* fold_zero_revision */);
+ v = version (vs, version::none);
switch (operation)
{
@@ -1010,15 +1193,146 @@ namespace bpkg
return r;
}
- std::string dependency::
+ // dependency
+ //
+ dependency::
+ dependency (std::string d)
+ {
+ using std::string;
+ using iterator = string::const_iterator;
+
+ iterator b (d.begin ());
+ iterator i (b);
+ iterator ne (b); // End of name.
+ iterator e (d.end ());
+
+ // Find end of name (ne).
+ //
+ // Grep for '=<>([~^' in the bpkg source code and update, if changed.
+ //
+ const string cb ("=<>([~^");
+ for (char c; i != e && cb.find (c = *i) == string::npos; ++i)
+ {
+ if (!space (c))
+ ne = i + 1;
+ }
+
+ try
+ {
+ name = package_name (i == e ? move (d) : string (b, ne));
+ }
+ catch (const invalid_argument& e)
+ {
+ throw invalid_argument (string ("invalid package name: ") + e.what ());
+ }
+
+ if (i != e)
+ try
+ {
+ constraint = version_constraint (string (i, e));
+ }
+ catch (const invalid_argument& e)
+ {
+ throw invalid_argument (string ("invalid package constraint: ") +
+ e.what ());
+ }
+ }
+
+ // dependency_alternative
+ //
+ string dependency_alternative::
string () const
{
- std::string r (name.string ());
+ std::string r (size () > 1 ? "{" : "");
- if (constraint)
+ bool first (true);
+ for (const dependency& d: *this)
{
- r += ' ';
- r += constraint->string ();
+ if (!first)
+ r += ' ';
+ else
+ first = false;
+
+ r += d.string ();
+ }
+
+ if (size () > 1)
+ r += '}';
+
+ if (single_line ())
+ {
+ if (enable)
+ {
+ r += " ? (";
+ r += *enable;
+ r += ')';
+ }
+
+ if (reflect)
+ {
+ r += ' ';
+ r += *reflect;
+ }
+ }
+ else
+ {
+ // Add an extra newline between the clauses.
+ //
+ bool first (true);
+
+ r += "\n{";
+
+ if (enable)
+ {
+ first = false;
+
+ r += "\n enable (";
+ r += *enable;
+ r += ')';
+ }
+
+ if (prefer)
+ {
+ if (!first)
+ r += '\n';
+ else
+ first = false;
+
+ r += "\n prefer\n {\n";
+ r += *prefer;
+ r += " }";
+
+ assert (accept);
+
+ r += "\n\n accept (";
+ r += *accept;
+ r += ')';
+ }
+ else if (require)
+ {
+ if (!first)
+ r += '\n';
+ else
+ first = false;
+
+ r += "\n require\n {\n";
+ r += *require;
+ r += " }";
+ }
+
+ if (reflect)
+ {
+ if (!first)
+ r += '\n';
+ else
+ first = false;
+
+ r += "\n reflect\n {\n";
+ r += *reflect;
+ r += " }";
+ }
+
+ r += "\n}";
}
return r;
@@ -1026,26 +1340,1370 @@ namespace bpkg
// dependency_alternatives
//
- ostream&
- operator<< (ostream& o, const dependency_alternatives& as)
+ class dependency_alternatives_lexer: public char_scanner<utf8_validator>
+ {
+ public:
+ enum class token_type
+ {
+ eos,
+ newline,
+ word,
+ buildfile,
+
+ question, // ?
+
+ lcbrace, // {
+ rcbrace, // }
+
+ lparen, // (
+ rparen, // )
+
+ lsbrace, // [
+ rsbrace, // ]
+
+ equal, // ==
+ less, // <
+ greater, // >
+ less_equal, // <=
+ greater_equal, // >=
+
+ tilde, // ~
+ caret, // ^
+
+ bit_or // |
+ };
+
+ struct token
+ {
+ token_type type;
+ std::string value;
+
+ uint64_t line;
+ uint64_t column;
+
+ std::string
+ string (bool diag = true) const;
+ };
+
+ // If true, then comments are allowed and are treated as whitespace
+ // characters.
+ //
+ bool comments = false;
+
+ public:
+ // Note that name is stored by shallow reference.
+ //
+ dependency_alternatives_lexer (istream& is,
+ const string& name,
+ uint64_t line,
+ uint64_t column)
+ : char_scanner (is,
+ utf8_validator (codepoint_types::graphic, U"\n\r\t"),
+ true /* crlf */,
+ line,
+ column),
+ name_ (name),
+ buildfile_scan_ (*this, name_) {}
+
+ // The following functions throw manifest_parsing on invalid UTF-8
+ // sequence.
+ //
+
+ // Peek the next non-whitespace character.
+ //
+ xchar
+ peek_char ();
+
+ // Extract next token (other than of the buildfile type) from the stream.
+ //
+ // Note that it is ok to call next() again after getting eos.
+ //
+ token
+ next ();
+
+ // The next_*() functions extract the buildfile token from the stream.
+ // Throw manifest_parsing on error (invalid buildfile fragment, etc).
+ //
+ // Note that they are just thin wrappers around the scan_*() functions
+ // (see buildfile-scanner.hxx for details).
+ //
+ token
+ next_eval ();
+
+ token
+ next_line (char stop);
+
+ token
+ next_block ();
+
+ private:
+ using base = char_scanner<utf8_validator>;
+
+ xchar
+ get ()
+ {
+ xchar c (base::get (ebuf_));
+
+ if (invalid (c))
+ throw parsing (name_, c.line, c.column, ebuf_);
+
+ return c;
+ }
+
+ void
+ get (const xchar& peeked)
+ {
+ base::get (peeked);
+ }
+
+ xchar
+ peek ()
+ {
+ xchar c (base::peek (ebuf_));
+
+ if (invalid (c))
+ throw parsing (name_, c.line, c.column, ebuf_);
+
+ return c;
+ }
+
+ void
+ skip_spaces ();
+
+ private:
+ const string& name_;
+
+ // Buffer for a get()/peek() potential error.
+ //
+ string ebuf_;
+
+ buildfile_scanner<utf8_validator, 1> buildfile_scan_;
+ };
+
+ dependency_alternatives_lexer::token dependency_alternatives_lexer::
+ next ()
+ {
+ using type = token_type;
+
+ skip_spaces ();
+
+ uint64_t ln (line);
+ uint64_t cl (column);
+
+ xchar c (get ());
+
+ auto make_token = [ln, cl] (type t, string v = string ())
+ {
+ return token {t, move (v), ln, cl};
+ };
+
+ if (eos (c))
+ return make_token (type::eos);
+
+ // NOTE: don't forget to also update the below separators list if changing
+ // anything here.
+ //
+ switch (c)
+ {
+ case '\n': return make_token (type::newline);
+ case '?': return make_token (type::question);
+ case '(': return make_token (type::lparen);
+ case ')': return make_token (type::rparen);
+ case '{': return make_token (type::lcbrace);
+ case '}': return make_token (type::rcbrace);
+ case '[': return make_token (type::lsbrace);
+ case ']': return make_token (type::rsbrace);
+
+ case '=':
+ {
+ if (peek () == '=')
+ {
+ get ();
+ return make_token (type::equal);
+ }
+ break;
+ }
+
+ case '<':
+ {
+ if ((c = peek ()) == '=')
+ {
+ get (c);
+ return make_token (type::less_equal);
+ }
+ else
+ return make_token (type::less);
+ }
+
+ case '>':
+ {
+ if ((c = peek ()) == '=')
+ {
+ get (c);
+ return make_token (type::greater_equal);
+ }
+ else
+ return make_token (type::greater);
+ }
+
+ case '~': return make_token (type::tilde);
+ case '^': return make_token (type::caret);
+
+ case '|': return make_token (type::bit_or);
+ }
+
+ // Otherwise it is a word.
+ //
+ // Starts with a non-whitespace character which has not been recognized as
+ // a part of some other token.
+ //
+ string r (1, c);
+
+ // Add subsequent characters until eos or separator is encountered.
+ //
+ const char* s (" \n\t?(){}[]=<>~^|");
+ for (c = peek (); !eos (c) && strchr (s, c) == nullptr; c = peek ())
+ {
+ r += c;
+ get (c);
+ }
+
+ return make_token (type::word, move (r));
+ }
+
+ dependency_alternatives_lexer::token dependency_alternatives_lexer::
+ next_eval ()
+ {
+ skip_spaces ();
+
+ uint64_t ln (line);
+ uint64_t cl (column);
+
+ try
+ {
+ // Strip the trailing whitespaces.
+ //
+ return token {token_type::buildfile,
+ trim (buildfile_scan_.scan_eval ()),
+ ln,
+ cl};
+ }
+ catch (const buildfile_scanning& e)
+ {
+ throw parsing (e.name, e.line, e.column, e.description);
+ }
+ }
+
+ dependency_alternatives_lexer::token dependency_alternatives_lexer::
+ next_line (char stop)
+ {
+ skip_spaces ();
+
+ uint64_t ln (line);
+ uint64_t cl (column);
+
+ try
+ {
+ // Strip the trailing whitespaces.
+ //
+ return token {token_type::buildfile,
+ trim (buildfile_scan_.scan_line (stop)),
+ ln,
+ cl};
+ }
+ catch (const buildfile_scanning& e)
+ {
+ throw parsing (e.name, e.line, e.column, e.description);
+ }
+ }
+
+ dependency_alternatives_lexer::token dependency_alternatives_lexer::
+ next_block ()
+ {
+ uint64_t ln (line);
+ uint64_t cl (column);
+
+ try
+ {
+ // Don't trim the token value not to strip the potential block indenting
+ // on the first line.
+ //
+ return token {token_type::buildfile,
+ buildfile_scan_.scan_block (),
+ ln,
+ cl};
+ }
+ catch (const buildfile_scanning& e)
+ {
+ throw parsing (e.name, e.line, e.column, e.description);
+ }
+ }
+
+ dependency_alternatives_lexer::xchar dependency_alternatives_lexer::
+ peek_char ()
+ {
+ skip_spaces ();
+ return peek ();
+ }
+
+ void dependency_alternatives_lexer::
+ skip_spaces ()
+ {
+ xchar c (peek ());
+ bool start (c.column == 1);
+
+ for (; !eos (c); c = peek ())
+ {
+ switch (c)
+ {
+ case ' ':
+ case '\t': break;
+
+ case '#':
+ {
+ if (!comments)
+ return;
+
+ get (c);
+
+ // See if this is a multi-line comment in the form:
+ //
+ /*
+ #\
+ ...
+ #\
+ */
+ auto ml = [&c, this] () -> bool
+ {
+ if ((c = peek ()) == '\\')
+ {
+ get (c);
+ if ((c = peek ()) == '\n' || eos (c))
+ return true;
+ }
+
+ return false;
+ };
+
+ if (ml ())
+ {
+ // Scan until we see the closing one.
+ //
+ for (;;)
+ {
+ if (c == '#' && ml ())
+ break;
+
+ if (eos (c = peek ()))
+ throw parsing (name_,
+ c.line, c.column,
+ "unterminated multi-line comment");
+
+ get (c);
+ }
+ }
+ else
+ {
+ // Read until newline or eos.
+ //
+ for (; !eos (c) && c != '\n'; c = peek ())
+ get (c);
+ }
+
+ continue;
+ }
+
+ case '\n':
+ {
+ // Skip empty lines.
+ //
+ if (start)
+ break;
+ }
+ // Fall through.
+ default: return;
+ }
+
+ get (c);
+ }
+ }
+
+ std::string dependency_alternatives_lexer::token::
+ string (bool diag) const
+ {
+ std::string q (diag ? "'" : "");
+
+ switch (type)
+ {
+ case token_type::eos: return diag ? "<end of stream>" : "";
+ case token_type::newline: return diag ? "<newline>" : "\n";
+ case token_type::word: return q + value + q;
+ case token_type::buildfile: return (diag
+ ? "<buildfile fragment>"
+ : value);
+ case token_type::question: return q + '?' + q;
+ case token_type::lparen: return q + '(' + q;
+ case token_type::rparen: return q + ')' + q;
+ case token_type::lcbrace: return q + '{' + q;
+ case token_type::rcbrace: return q + '}' + q;
+ case token_type::lsbrace: return q + '[' + q;
+ case token_type::rsbrace: return q + ']' + q;
+ case token_type::equal: return q + "==" + q;
+ case token_type::less: return q + '<' + q;
+ case token_type::greater: return q + '>' + q;
+ case token_type::less_equal: return q + "<=" + q;
+ case token_type::greater_equal: return q + ">=" + q;
+ case token_type::tilde: return q + '~' + q;
+ case token_type::caret: return q + '^' + q;
+ case token_type::bit_or: return q + '|' + q;
+ }
+
+ assert (false); // Can't be here.
+ return "";
+ }
+
+ class dependency_alternatives_parser
+ {
+ public:
+
+ // If the requirements flavor is specified, then only enable and reflect
+ // clauses are allowed in the multi-line representation.
+ //
+ explicit
+ dependency_alternatives_parser (bool requirements = false)
+ : requirements_ (requirements) {}
+
+ // Throw manifest_parsing if representation is invalid.
+ //
+ void
+ parse (const package_name& dependent,
+ istream&,
+ const string& name,
+ uint64_t line,
+ uint64_t column,
+ dependency_alternatives&);
+
+ private:
+ using lexer = dependency_alternatives_lexer;
+ using token = lexer::token;
+ using token_type = lexer::token_type;
+
+ token_type
+ next (token&, token_type&);
+
+ token_type
+ next_eval (token&, token_type&);
+
+ token_type
+ next_line (token&, token_type&);
+
+ token_type
+ next_block (token&, token_type&);
+
+ // Receive the token/type from which it should start consuming and in
+ // return the token/type contains the first token that has not been
+ // consumed (normally eos, newline, or '|').
+ //
+ dependency_alternative
+ parse_alternative (token&, token_type&, bool first);
+
+ // Helpers.
+ //
+ // Throw manifest_parsing with the `<what> expected instead of <token>`
+ // description.
+ //
+ [[noreturn]] void
+ unexpected_token (const token&, string&& what);
+
+ bool requirements_;
+
+ const package_name* dependent_;
+ const string* name_;
+ lexer* lexer_;
+ dependency_alternatives* result_;
+ };
+
+ [[noreturn]] void dependency_alternatives_parser::
+ unexpected_token (const token& t, string&& w)
+ {
+ w += " expected";
+
+ // Don't add the `instead of...` part, if the unexpected token is eos or
+ // an empty word/buildfile.
+ //
+ if (t.type != token_type::eos &&
+ ((t.type != token_type::word && t.type != token_type::buildfile) ||
+ !t.value.empty ()))
+ {
+ w += " instead of ";
+ w += t.string ();
+ }
+
+ throw parsing (*name_, t.line, t.column, w);
+ }
+
+ void dependency_alternatives_parser::
+ parse (const package_name& dependent,
+ istream& is,
+ const string& name,
+ uint64_t line,
+ uint64_t column,
+ dependency_alternatives& result)
+ {
+ lexer lexer (is, name, line, column);
+
+ dependent_ = &dependent;
+ name_ = &name;
+ lexer_ = &lexer;
+ result_ = &result;
+
+ string what (requirements_ ? "requirement" : "dependency");
+
+ token t;
+ token_type tt;
+ next (t, tt);
+
+ // Make sure the representation is not empty, unless we are in the
+ // requirements mode. In the latter case fallback to creating a simple
+ // unconditional requirement. Note that it's the caller's responsibility
+ // to verify that a non-empty comment is specified in this case.
+ //
+ if (tt == token_type::eos)
+ {
+ if (!requirements_)
+ unexpected_token (t, what + " alternatives");
+
+ dependency_alternative da;
+ da.push_back (dependency ());
+
+ result_->push_back (move (da));
+ return;
+ }
+
+ for (bool first (true); tt != token_type::eos; )
+ {
+ dependency_alternative da (parse_alternative (t, tt, first));
+
+ // Skip newline after the dependency alternative, if present.
+ //
+ if (tt == token_type::newline)
+ next (t, tt);
+
+ // Make sure that the simple requirement has the only alternative in the
+ // representation.
+ //
+ if (requirements_ &&
+ da.size () == 1 &&
+ (da[0].name.empty () || (da.enable && da.enable->empty ())))
+ {
+ assert (first);
+
+ if (tt != token_type::eos)
+ throw parsing (*name_,
+ t.line,
+ t.column,
+ "end of simple requirement expected");
+ }
+ else
+ {
+ if (tt != token_type::eos && tt != token_type::bit_or)
+ unexpected_token (t, "end of " + what + " alternatives or '|'");
+ }
+
+ if (tt == token_type::bit_or)
+ {
+ next (t, tt);
+
+ // Skip newline after '|', if present.
+ //
+ if (tt == token_type::newline)
+ next (t, tt);
+
+ // Make sure '|' is not followed by eos.
+ //
+ if (tt == token_type::eos)
+ unexpected_token (t, move (what));
+ }
+
+ result_->push_back (move (da));
+
+ first = false;
+ }
+ }
+
+ dependency_alternative dependency_alternatives_parser::
+ parse_alternative (token& t, token_type& tt, bool first)
+ {
+ using type = token_type;
+
+ dependency_alternative r;
+
+ string what (requirements_ ? "requirement" : "dependency");
+ string config ("config." + dependent_->variable () + '.');
+
+ auto bad_token = [&t, this] (string&& what)
+ {
+ unexpected_token (t, move (what));
+ };
+
+ // Check that the current token type matches the expected one. Throw
+ // manifest_parsing if that's not the case. Use the expected token type
+ // name for the error description or the custom name, if specified. For
+ // the word and buildfile token types the custom name must be specified.
+ //
+ // Only move from the custom name argument if throwing exception.
+ //
+ auto expect_token = [&tt, &bad_token] (type et,
+ string&& what = string ())
+ {
+ assert ((et != type::word && et != type::buildfile) || !what.empty ());
+
+ if (tt != et)
+ {
+ if (what.empty ())
+ {
+ token e {et, "", 0, 0};
+ bad_token (e.string ());
+ }
+ else
+ bad_token (move (what));
+ }
+ };
+
+ // Parse dependencies.
+ //
+ // If the current token starts the version constraint, then read its
+ // tokens, rejoin them, and return the constraint string representation.
+ // Otherwise return nullopt.
+ //
+ // Note that normally the caller reads the dependency package name, reads
+ // the version constraint and, if present, appends it to the dependency,
+ // and then creates the dependency object with a single constructor call.
+ //
+ // Note: doesn't read token that follows the constraint.
+ //
+ auto try_scan_version_constraint =
+ [&t, &tt, &bad_token, &expect_token, this] () -> optional<string>
+ {
+ switch (t.type)
+ {
+ case type::lparen:
+ case type::lsbrace:
+ {
+ string r (t.string (false /* diag */));
+
+ next (t, tt);
+
+ expect_token (type::word, "version");
+
+ r += t.string (false /* diag */);
+ r += ' ';
+
+ next (t, tt);
+
+ expect_token (type::word, "version");
+
+ r += t.string (false /* diag */);
+
+ next (t, tt);
+
+ if (tt != type::rparen && tt != type::rsbrace)
+ bad_token ("')' or ']'");
+
+ r += t.string (false /* diag */);
+
+ return optional<string> (move (r));
+ }
+
+ case type::equal:
+ case type::less:
+ case type::greater:
+ case type::less_equal:
+ case type::greater_equal:
+ case type::tilde:
+ case type::caret:
+ {
+ string r (t.string (false /* diag */));
+
+ next (t, tt);
+
+ expect_token (type::word, "version");
+
+ r += t.string (false /* diag */);
+
+ return optional<string> (move (r));
+ }
+
+ default: return nullopt;
+ }
+ };
+
+ // Parse the evaluation context including the left and right parenthesis
+ // and return the enclosed buildfile fragment.
+ //
+ // Note: no token is read after terminating ')'.
+ //
+ auto parse_eval = [&t, &tt, &expect_token, &bad_token, this] ()
+ {
+ next (t, tt);
+ expect_token (type::lparen);
+
+ next_eval (t, tt);
+
+ if (t.value.empty ())
+ bad_token ("condition");
+
+ string r (move (t.value));
+
+ next (t, tt);
+ expect_token (type::rparen);
+
+ return r;
+ };
+
+ const char* vccs ("([<>=!~^");
+
+ bool group (tt == type::lcbrace); // Dependency group.
+
+ if (group)
+ {
+ next (t, tt);
+
+ if (tt == type::rcbrace)
+ bad_token (move (what));
+
+ while (tt != type::rcbrace)
+ {
+ expect_token (type::word, what + " or '}'");
+
+ string d (move (t.value));
+ uint64_t dl (t.line);
+ uint64_t dc (t.column);
+
+ next (t, tt);
+
+ optional<string> vc (try_scan_version_constraint ());
+
+ if (vc)
+ {
+ d += *vc;
+
+ next (t, tt);
+ }
+
+ try
+ {
+ r.emplace_back (d);
+ }
+ catch (const invalid_argument& e)
+ {
+ throw parsing (*name_, dl, dc, e.what ());
+ }
+ }
+
+ // See if a common version constraint follows the dependency group and
+ // parse it if that's the case.
+ //
+ // Note that we need to be accurate not to consume what may end up to be
+ // a part of the reflect config.
+ //
+ lexer::xchar c (lexer_->peek_char ());
+
+ if (!lexer::eos (c) && strchr (vccs, c) != nullptr)
+ {
+ next (t, tt);
+
+ uint64_t vcl (t.line);
+ uint64_t vcc (t.column);
+
+ optional<string> vc (try_scan_version_constraint ());
+
+ if (!vc)
+ bad_token ("version constraint");
+
+ try
+ {
+ version_constraint c (*vc);
+
+ for (dependency& d: r)
+ {
+ if (!d.constraint)
+ d.constraint = c;
+ }
+ }
+ catch (const invalid_argument& e)
+ {
+ throw parsing (*name_,
+ vcl,
+ vcc,
+ string ("invalid version constraint: ") + e.what ());
+ }
+ }
+ }
+ else // Single dependency.
+ {
+ // If we see the question mark instead of a word in the requirements
+ // mode, then this is a simple requirement. In this case parse the
+ // evaluation context, if present, and bail out.
+ //
+ if (requirements_ && first && tt == type::question)
+ {
+ r.emplace_back (dependency ());
+
+ bool eval (lexer_->peek_char () == '(');
+ r.enable = eval ? parse_eval () : string ();
+
+ next (t, tt);
+
+ // @@ TMP Treat requirements similar to `? cli` as `cli ?` until
+ // toolchain 0.15.0 and libodb-mssql 2.5.0-b.22 are both released.
+ //
+ // NOTE: don't forget to drop the temporary test in
+ // tests/manifest/testscript when dropping this workaround.
+ //
+ if (!eval && tt == type::word)
+ try
+ {
+ r.back ().name = package_name (move (t.value));
+ next (t, tt);
+ }
+ catch (const invalid_argument&) {}
+
+ return r;
+ }
+
+ expect_token (type::word, move (what));
+
+ string d (move (t.value));
+ uint64_t dl (t.line);
+ uint64_t dc (t.column);
+
+ // See if a version constraint follows the dependency package name and
+ // parse it if that's the case.
+ //
+ lexer::xchar c (lexer_->peek_char ());
+
+ if (!lexer::eos (c) && strchr (vccs, c) != nullptr)
+ {
+ next (t, tt);
+
+ optional<string> vc (try_scan_version_constraint ());
+
+ if (!vc)
+ bad_token ("version constraint");
+
+ d += *vc;
+ }
+
+ try
+ {
+ r.emplace_back (d);
+ }
+ catch (const invalid_argument& e)
+ {
+ throw parsing (*name_, dl, dc, e.what ());
+ }
+ }
+
+ // See if there is an enable condition and parse it if that's the case.
+ //
+ {
+ lexer::xchar c (lexer_->peek_char ());
+
+ if (c == '?')
+ {
+ next (t, tt);
+ expect_token (type::question);
+
+ // If we don't see the opening parenthesis in the requirements mode,
+ // then this is a simple requirement. In this case set the enable
+ // condition to an empty string and bail out.
+ //
+ c = lexer_->peek_char ();
+
+ if (requirements_ && first && !group && c != '(')
+ {
+ r.enable = "";
+
+ next (t, tt);
+ return r;
+ }
+
+ r.enable = parse_eval ();
+ }
+ }
+
+ // See if there is a reflect config and parse it if that's the case.
+ //
+ {
+ lexer::xchar c (lexer_->peek_char ());
+
+ if (!lexer::eos (c) && strchr ("|\n", c) == nullptr)
+ {
+ next_line (t, tt);
+
+ string& l (t.value);
+ if (l.compare (0, config.size (), config) != 0)
+ bad_token (config + "* variable assignment");
+
+ r.reflect = move (l);
+ }
+ }
+
+ // If the dependencies are terminated with the newline, then check if the
+ // next token is '{'. If that's the case, then this is a multi-line
+ // representation.
+ //
+ next (t, tt);
+
+ if (tt == type::newline)
+ {
+ next (t, tt);
+
+ if (tt == type::lcbrace)
+ {
+ if (r.enable)
+ throw parsing (
+ *name_,
+ t.line,
+ t.column,
+ "multi-line " + what + " form with inline enable clause");
+
+ if (r.reflect)
+ throw parsing (
+ *name_,
+ t.line,
+ t.column,
+ "multi-line " + what + " form with inline reflect clause");
+
+ // Allow comments.
+ //
+ lexer_->comments = true;
+
+ next (t, tt);
+ expect_token (type::newline);
+
+ // Parse the clauses.
+ //
+ for (next (t, tt); tt == type::word; next (t, tt))
+ {
+ auto fail_dup = [&t, this] ()
+ {
+ throw parsing (*name_, t.line, t.column, "duplicate clause");
+ };
+
+ auto fail_precede = [&t, this] (const char* what)
+ {
+ throw parsing (
+ *name_,
+ t.line,
+ t.column,
+ t.value + " clause should precede " + what + " clause");
+ };
+
+ auto fail_conflict = [&t, this] (const char* what)
+ {
+ throw parsing (
+ *name_,
+ t.line,
+ t.column,
+ t.value + " and " + what + " clauses are mutually exclusive");
+ };
+
+ auto fail_requirements = [&t, this] ()
+ {
+ throw parsing (
+ *name_,
+ t.line,
+ t.column,
+ t.value + " clause is not permitted for requirements");
+ };
+
+ // Parse the buildfile fragment block including the left and right
+ // curly braces (expected to be on the separate lines) and return
+ // the enclosed fragment.
+ //
+ // Note that an empty buildfile fragment is allowed.
+ //
+ auto parse_block = [&t, &tt, &expect_token, this] ()
+ {
+ next (t, tt);
+ expect_token (type::newline);
+
+ next (t, tt);
+ expect_token (type::lcbrace);
+
+ next (t, tt);
+ expect_token (type::newline);
+
+ next_block (t, tt);
+
+ return move (t.value);
+ };
+
+ const string& v (t.value);
+
+ if (v == "enable")
+ {
+ if (r.enable)
+ fail_dup ();
+
+ if (r.prefer)
+ fail_precede ("prefer");
+
+ if (r.require)
+ fail_precede ("require");
+
+ if (r.reflect)
+ fail_precede ("reflect");
+
+ r.enable = parse_eval ();
+
+ next (t, tt);
+ expect_token (type::newline);
+ }
+ else if (v == "prefer")
+ {
+ if (requirements_)
+ fail_requirements ();
+
+ if (r.prefer)
+ fail_dup ();
+
+ if (r.require)
+ fail_conflict ("require");
+
+ if (r.reflect)
+ fail_precede ("reflect");
+
+ r.prefer = parse_block ();
+
+ // The accept clause must follow, so parse it.
+ //
+ next (t, tt);
+
+ if (tt != type::word || t.value != "accept")
+ bad_token ("accept clause");
+
+ r.accept = parse_eval ();
+
+ next (t, tt);
+ expect_token (type::newline);
+ }
+ else if (v == "require")
+ {
+ if (requirements_)
+ fail_requirements ();
+
+ if (r.require)
+ fail_dup ();
+
+ if (r.prefer)
+ fail_conflict ("prefer");
+
+ if (r.reflect)
+ fail_precede ("reflect");
+
+ r.require = parse_block ();
+ }
+ else if (v == "reflect")
+ {
+ if (r.reflect)
+ fail_dup ();
+
+ r.reflect = parse_block ();
+ }
+ else if (v == "accept")
+ {
+ if (requirements_)
+ fail_requirements ();
+
+ throw parsing (*name_,
+ t.line,
+ t.column,
+ "accept clause should follow prefer clause");
+ }
+ else
+ bad_token (what + " alternative clause");
+ }
+
+ expect_token (type::rcbrace);
+
+ // Disallow comments.
+ //
+ lexer_->comments = false;
+
+ next (t, tt);
+ }
+ }
+
+ return r;
+ }
+
+ dependency_alternatives_parser::token_type dependency_alternatives_parser::
+ next (token& t, token_type& tt)
+ {
+ t = lexer_->next ();
+ tt = t.type;
+ return tt;
+ }
+
+ dependency_alternatives_parser::token_type dependency_alternatives_parser::
+ next_eval (token& t, token_type& tt)
+ {
+ t = lexer_->next_eval ();
+ tt = t.type;
+ return tt;
+ }
+
+ dependency_alternatives_parser::token_type dependency_alternatives_parser::
+ next_line (token& t, token_type& tt)
+ {
+ t = lexer_->next_line ('|');
+ tt = t.type;
+ return tt;
+ }
+
+ dependency_alternatives_parser::token_type dependency_alternatives_parser::
+ next_block (token& t, token_type& tt)
+ {
+ t = lexer_->next_block ();
+ tt = t.type;
+ return tt;
+ }
+
+ dependency_alternatives::
+ dependency_alternatives (const std::string& s,
+ const package_name& dependent,
+ const std::string& name,
+ uint64_t line,
+ uint64_t column)
+ {
+ using std::string;
+
+ auto vc (parser::split_comment (s));
+
+ comment = move (vc.second);
+
+ const string& v (vc.first);
+ buildtime = (v[0] == '*');
+
+ string::const_iterator b (v.begin ());
+ string::const_iterator e (v.end ());
+
+ if (buildtime)
+ {
+ string::size_type p (v.find_first_not_of (spaces, 1));
+ b = p == string::npos ? e : b + p;
+ }
+
+ dependency_alternatives_parser p;
+ istringstream is (b == v.begin () ? v : string (b, e));
+ p.parse (dependent, is, name, line, column, *this);
+ }
+
+ string dependency_alternatives::
+ string () const
+ {
+ std::string r (buildtime ? "* " : "");
+
+ const dependency_alternative* prev (nullptr);
+ for (const dependency_alternative& da: *this)
+ {
+ if (prev != nullptr)
+ {
+ r += prev->single_line () ? " |" : "\n|";
+ r += !da.single_line () || !prev->single_line () ? '\n' : ' ';
+ }
+
+ r += da.string ();
+ prev = &da;
+ }
+
+ return serializer::merge_comment (r, comment);
+ }
+
+ // requirement_alternative
+ //
+ string requirement_alternative::
+ string () const
+ {
+ using std::string;
+
+ string r (size () > 1 ? "{" : "");
+
+ bool first (true);
+ for (const string& rq: *this)
+ {
+ if (!first)
+ r += ' ';
+ else
+ first = false;
+
+ r += rq;
+ }
+
+ if (size () > 1)
+ r += '}';
+
+ if (single_line ())
+ {
+ if (enable)
+ {
+ if (!simple ())
+ {
+ r += " ? (";
+ r += *enable;
+ r += ')';
+ }
+ else
+ {
+ // Note that the (single) requirement id may or may not be empty.
+ //
+ if (!r.empty ())
+ r += ' ';
+
+ r += '?';
+
+ if (!enable->empty ())
+ {
+ r += " (";
+ r += *enable;
+ r += ')';
+ }
+ }
+ }
+
+ if (reflect)
+ {
+ r += ' ';
+ r += *reflect;
+ }
+ }
+ else
+ {
+ r += "\n{";
+
+ if (enable)
+ {
+ r += "\n enable (";
+ r += *enable;
+ r += ')';
+ }
+
+ if (reflect)
+ {
+ if (enable)
+ r += '\n';
+
+ r += "\n reflect\n {\n";
+ r += *reflect;
+ r += " }";
+ }
+
+ r += "\n}";
+ }
+
+ return r;
+ }
+
+ // requirement_alternatives
+ //
+ requirement_alternatives::
+ requirement_alternatives (const std::string& s,
+ const package_name& dependent,
+ const std::string& name,
+ uint64_t line,
+ uint64_t column)
+ {
+ using std::string;
+
+ auto vc (parser::split_comment (s));
+
+ comment = move (vc.second);
+
+ const string& v (vc.first);
+ buildtime = (v[0] == '*');
+
+ string::const_iterator b (v.begin ());
+ string::const_iterator e (v.end ());
+
+ if (buildtime)
+ {
+ string::size_type p (v.find_first_not_of (spaces, 1));
+ b = p == string::npos ? e : b + p;
+ }
+
+ // We will use the dependency alternatives parser to parse the
+ // representation into a temporary dependency alternatives in the
+ // requirements mode. Then we will move the dependency alternatives into
+ // the requirement alternatives using the string representation of the
+ // dependencies.
+ //
+ dependency_alternatives_parser p (true /* requirements */);
+ istringstream is (b == v.begin () ? v : string (b, e));
+
+ dependency_alternatives das;
+ p.parse (dependent, is, name, line, column, das);
+
+ for (dependency_alternative& da: das)
+ {
+ requirement_alternative ra (move (da.enable), move (da.reflect));
+
+ // Also handle the simple requirement.
+ //
+ for (dependency& d: da)
+ ra.push_back (!d.name.empty () ? d.string () : string ());
+
+ push_back (move (ra));
+ }
+
+ // Make sure that the simple requirement is accompanied with a non-empty
+ // comment.
+ //
+ if (simple () && comment.empty ())
+ {
+ // Let's describe the following error cases differently:
+ //
+ // requires: ?
+ // requires:
+ //
+ throw parsing (name,
+ line,
+ column,
+ (back ().enable
+ ? "no comment specified for simple requirement"
+ : "requirement or comment expected"));
+ }
+ }
+
+ std::string requirement_alternatives::
+ string () const
{
- if (as.conditional)
- o << '?';
+ using std::string;
- if (as.buildtime)
- o << '*';
+ string r (buildtime ? "* " : "");
- if (as.conditional || as.buildtime)
- o << ' ';
+ const requirement_alternative* prev (nullptr);
+ for (const requirement_alternative& ra: *this)
+ {
+ if (prev != nullptr)
+ {
+ r += prev->single_line () ? " |" : "\n|";
+ r += !ra.single_line () || !prev->single_line () ? '\n' : ' ';
+ }
- bool f (true);
- for (const dependency& a: as)
- o << (f ? (f = false, "") : " | ") << a;
+ r += ra.string ();
+ prev = &ra;
+ }
- if (!as.comment.empty ())
- o << "; " << as.comment;
+ // For better readability separate the comment from the question mark for
+ // the simple requirement with an empty condition.
+ //
+ if (simple () && conditional () && back ().enable->empty ())
+ r += ' ';
- return o;
+ return serializer::merge_comment (r, comment);
}
// build_class_term
@@ -1060,7 +2718,7 @@ namespace bpkg
}
build_class_term::
- build_class_term (build_class_term&& t)
+ build_class_term (build_class_term&& t) noexcept
: operation (t.operation),
inverted (t.inverted),
simple (t.simple)
@@ -1084,13 +2742,13 @@ namespace bpkg
}
build_class_term& build_class_term::
- operator= (build_class_term&& t)
+ operator= (build_class_term&& t) noexcept
{
if (this != &t)
{
this->~build_class_term ();
- // Assume noexcept move-construction.
+ // Rely on noexcept move-construction.
//
new (this) build_class_term (move (t));
}
@@ -1116,13 +2774,13 @@ namespace bpkg
if (!(alnum (c) || c == '_'))
throw invalid_argument (
- "class name '" + s + "' starts with '" + c + "'");
+ "class name '" + s + "' starts with '" + c + '\'');
for (; i != s.size (); ++i)
{
if (!(alnum (c = s[i]) || c == '+' || c == '-' || c == '_' || c == '.'))
throw invalid_argument (
- "class name '" + s + "' contains '" + c + "'");
+ "class name '" + s + "' contains '" + c + '\'');
}
return s[0] == '_';
@@ -1411,100 +3069,193 @@ namespace bpkg
match_classes (cs, im, expr, r);
}
- // text_type
+ // build_auxiliary
+ //
+ optional<pair<string, string>> build_auxiliary::
+ parse_value_name (const string& n)
+ {
+ // Check if the value name matches exactly.
+ //
+ if (n == "build-auxiliary")
+ return make_pair (string (), string ());
+
+ // Check if this is a *-build-auxiliary name.
+ //
+ if (n.size () > 16 &&
+ n.compare (n.size () - 16, 16, "-build-auxiliary") == 0)
+ {
+ return make_pair (string (n, 0, n.size () - 16), string ());
+ }
+
+ // Check if this is a build-auxiliary-* name.
+ //
+ if (n.size () > 16 && n.compare (0, 16, "build-auxiliary-") == 0)
+ return make_pair (string (), string (n, 16));
+
+ // Check if this is a *-build-auxiliary-* name.
+ //
+ size_t p (n.find ("-build-auxiliary-"));
+
+ if (p != string::npos &&
+ p != 0 && // Not '-build-auxiliary-*'?
+ p + 17 != n.size () && // Not '*-build-auxiliary-'?
+ n.find ("-build-auxiliary-", p + 17) == string::npos) // Unambiguous?
+ {
+ return make_pair (string (n, 0, p), string (n, p + 17));
+ }
+
+ return nullopt;
+ }
+
+ // test_dependency_type
//
string
- to_string (text_type t)
+ to_string (test_dependency_type t)
{
switch (t)
{
- case text_type::plain: return "text/plain";
- case text_type::github_mark: return "text/markdown;variant=GFM";
- case text_type::common_mark: return "text/markdown;variant=CommonMark";
+ case test_dependency_type::tests: return "tests";
+ case test_dependency_type::examples: return "examples";
+ case test_dependency_type::benchmarks: return "benchmarks";
}
assert (false); // Can't be here.
return string ();
}
- optional<text_type>
- to_text_type (const string& t)
+ test_dependency_type
+ to_test_dependency_type (const string& t)
{
- auto bad_type = [] (const string& d) {throw invalid_argument (d);};
+ if (t == "tests") return test_dependency_type::tests;
+ else if (t == "examples") return test_dependency_type::examples;
+ else if (t == "benchmarks") return test_dependency_type::benchmarks;
+ else throw invalid_argument ("invalid test dependency type '" + t + '\'');
+ }
- // Parse the media type representation (see RFC2045 for details) into the
- // type/subtype value and the parameter list. Note: we don't support
- // parameter quoting and comments for simplicity.
+
+ // test_dependency
+ //
+ test_dependency::
+ test_dependency (std::string v, test_dependency_type t)
+ : type (t)
+ {
+ using std::string;
+
+ // We will use the dependency alternatives parser to parse the
+ // `<name> [<version-constraint>] ['?' <enable-condition>] [<reflect-config>]`
+ // representation into a temporary dependency alternatives object. Then we
+ // will verify that the result has no multiple alternatives/dependency
+ // packages and unexpected clauses and will move the required information
+ // (dependency, reflection, etc) into the being created test dependency
+ // object.
+
+ // Verify that there is no newline characters to forbid the multi-line
+ // dependency alternatives representation.
//
- size_t p (t.find (';'));
- const string& tp (p != string::npos ? trim (string (t, 0, p)) : t);
+ if (v.find ('\n') != string::npos)
+ throw invalid_argument ("unexpected <newline>");
- small_vector<pair<string, string>, 1> ps;
+ buildtime = (v[0] == '*');
- while (p != string::npos)
- {
- // Extract parameter name.
- //
- size_t b (p + 1);
- p = t.find ('=', b);
+ size_t p (v.find_first_not_of (spaces, buildtime ? 1 : 0));
- if (p == string::npos)
- bad_type ("missing '='");
+ if (p == string::npos)
+ throw invalid_argument ("no package name specified");
- string n (trim (string (t, b, p - b)));
+ string::const_iterator b (v.begin () + p);
+ string::const_iterator e (v.end ());
- // Extract parameter value.
- //
- b = p + 1;
- p = t.find (';', b);
+ // Extract the dependency package name in advance, to pass it to the
+ // parser which will use it to verify the reflection variable name.
+ //
+ // Note that multiple packages can only be specified in {} to be accepted
+ // by the parser. In our case such '{' would be interpreted as a part of
+ // the package name and so would fail complaining about an invalid
+ // character. Let's handle this case manually to avoid the potentially
+ // confusing error description.
+ //
+ assert (b != e); // We would fail earlier otherwise.
- string v (trim (string (t,
- b,
- p != string::npos ? p - b : string::npos)));
+ if (*b == '{')
+ throw invalid_argument ("only single package allowed");
- ps.emplace_back (move (n), move (v));
+ package_name dn;
+
+ try
+ {
+ p = v.find_first_of (" \t=<>[(~^", p); // End of the package name.
+ dn = package_name (string (b, p == string::npos ? e : v.begin () + p));
+ }
+ catch (const invalid_argument& e)
+ {
+ throw invalid_argument (string ("invalid package name: ") + e.what ());
}
- // Calculate the resulting text type, failing on unrecognized media type,
- // unexpected parameter name or value.
+ // Parse the value into the temporary dependency alternatives object.
//
- // Note that type, subtype, and parameter names are matched
- // case-insensitively.
+ dependency_alternatives das;
+
+ try
+ {
+ dependency_alternatives_parser p;
+ istringstream is (b == v.begin () ? v : string (b, e));
+ p.parse (dn, is, "" /* name */, 1, 1, das);
+ }
+ catch (const manifest_parsing& e)
+ {
+ throw invalid_argument (e.description);
+ }
+
+ // Verify that there are no multiple dependency alternatives.
//
- optional<text_type> r;
+ assert (!das.empty ()); // Enforced by the parser.
- // Currently only the plain and markdown text types are allowed. Later we
- // can potentially introduce some other text types.
+ if (das.size () != 1)
+ throw invalid_argument ("unexpected '|'");
+
+ dependency_alternative& da (das[0]);
+
+ // Verify that there are no multiple dependencies in the alternative.
//
- if (icasecmp (tp, "text/plain") == 0)
+ // The parser can never end up with no dependencies in an alternative and
+ // we already verified that there can't be multiple of them (see above).
+ //
+ assert (da.size () == 1);
+
+ // Verify that there are no unexpected clauses.
+ //
+ // Note that the require, prefer, and accept clauses can only be present
+ // in the multi-line representation and we have already verified that this
+ // is not the case. So there is nothing to verify here.
+
+ // Move the dependency and the enable and reflect clauses into the being
+ // created test dependency object.
+ //
+ static_cast<dependency&> (*this) = move (da[0]);
+
+ enable = move (da.enable);
+ reflect = move (da.reflect);
+ }
+
+ string test_dependency::
+ string () const
+ {
+ std::string r (buildtime
+ ? "* " + dependency::string ()
+ : dependency::string ());
+
+ if (enable)
{
- // Currently, we don't expect parameters for plain text. Later we can
- // potentially introduce some plain text variants.
- //
- if (ps.empty ())
- r = text_type::plain;
+ r += " ? (";
+ r += *enable;
+ r += ')';
}
- else if (icasecmp (tp, "text/markdown") == 0)
+
+ if (reflect)
{
- // Currently, a single optional variant parameter with the two possible
- // values is allowed for markdown. Later we can potentially introduce
- // some other markdown variants.
- //
- if (ps.empty () ||
- (ps.size () == 1 && icasecmp (ps[0].first, "variant") == 0))
- {
- // Note that markdown variants are matched case-insensitively (see
- // RFC7763 for details).
- //
- string v;
- if (ps.empty () || icasecmp (v = move (ps[0].second), "GFM") == 0)
- r = text_type::github_mark;
- else if (icasecmp (v, "CommonMark") == 0)
- r = text_type::common_mark;
- }
+ r += ' ';
+ r += *reflect;
}
- else if (icasecmp (tp, "text/", 5) != 0)
- bad_type ("text type expected");
return r;
}
@@ -1563,7 +3314,7 @@ namespace bpkg
{
throw !source_name.empty ()
? parsing (source_name, nv.value_line, nv.value_column, d)
- : parsing (d + " in '" + v + "'");
+ : parsing (d + " in '" + v + '\'');
};
size_t p (v.find ('/'));
@@ -1605,56 +3356,130 @@ namespace bpkg
return email (move (v), move (c));
}
+ // Parse the [*-]build-auxiliary[-*] manifest value.
+ //
+ // Note that the environment name is expected to already be retrieved using
+ // build_auxiliary::parse_value_name().
+ //
+ static build_auxiliary
+ parse_build_auxiliary (const name_value& nv,
+ string&& env_name,
+ const string& source_name)
+ {
+ auto bad_value = [&nv, &source_name] (const string& d)
+ {
+ throw !source_name.empty ()
+ ? parsing (source_name, nv.value_line, nv.value_column, d)
+ : parsing (d);
+ };
+
+ pair<string, string> vc (parser::split_comment (nv.value));
+ string& v (vc.first);
+ string& c (vc.second);
+
+ if (v.empty ())
+ bad_value ("empty build auxiliary configuration name pattern");
+
+ return build_auxiliary (move (env_name), move (v), move (c));
+ }
+
+ // Parse the [*-]build-bot manifest value and append it to the specified
+ // custom bot public keys list. Make sure the specified key is not empty and
+ // is not a duplicate and throw parsing if that's not the case.
+ //
+ // Note: value name is not used by this function (and so can be moved out,
+ // etc before the call).
+ //
+ static void
+ parse_build_bot (const name_value& nv, const string& source_name, strings& r)
+ {
+ const string& v (nv.value);
+
+ auto bad_value = [&nv, &source_name, &v] (const string& d,
+ bool add_key = true)
+ {
+ throw !source_name.empty ()
+ ? parsing (source_name, nv.value_line, nv.value_column, d)
+ : parsing (!add_key ? d : (d + ":\n" + v));
+ };
+
+ if (v.empty ())
+ bad_value ("empty custom build bot public key", false /* add_key */);
+
+ if (find (r.begin (), r.end (), v) != r.end ())
+ bad_value ("duplicate custom build bot public key");
+
+ r.push_back (v);
+ }
+
const version stub_version (0, "0", nullopt, nullopt, 0);
+ // Parse until next() returns end-of-manifest value.
+ //
static void
parse_package_manifest (
- parser& p,
- name_value nv,
- const function<package_manifest::translate_function>& tf,
+ const string& name,
+ const function<name_value ()>& next,
+ const function<package_manifest::translate_function>& translate,
bool iu,
- bool cd,
+ bool cv,
package_manifest_flags fl,
package_manifest& m)
{
- auto bad_name ([&p, &nv](const string& d) {
- throw parsing (p.name (), nv.name_line, nv.name_column, d);});
+ name_value nv;
- auto bad_value ([&p, &nv](const string& d) {
- throw parsing (p.name (), nv.value_line, nv.value_column, d);});
-
- // Make sure this is the start and we support the version.
- //
- if (!nv.name.empty ())
- bad_name ("start of package manifest expected");
+ auto bad_name ([&name, &nv](const string& d) {
+ throw parsing (name, nv.name_line, nv.name_column, d);});
- if (nv.value != "1")
- bad_value ("unsupported format version");
+ auto bad_value ([&name, &nv](const string& d) {
+ throw parsing (name, nv.value_line, nv.value_column, d);});
- auto parse_email = [&bad_name] (const name_value& nv,
- optional<email>& r,
- const char* what,
- const string& source_name,
- bool empty = false)
+ auto parse_email = [&bad_name, &name] (const name_value& nv,
+ optional<email>& r,
+ const char* what,
+ bool empty = false)
{
if (r)
bad_name (what + string (" email redefinition"));
- r = bpkg::parse_email (nv, what, source_name, empty);
+ r = bpkg::parse_email (nv, what, name, empty);
+ };
+
+ // Parse the [*-]build-auxiliary[-*] manifest value and append it to the
+ // specified build auxiliary list. Make sure that the list contains not
+ // more than one entry with unspecified environment name and throw parsing
+ // if that's not the case. Also make sure that there are no entry
+ // redefinitions (multiple entries with the same environment name).
+ //
+ auto parse_build_auxiliary = [&bad_name, &name] (const name_value& nv,
+ string&& en,
+ vector<build_auxiliary>& r)
+ {
+ build_auxiliary a (bpkg::parse_build_auxiliary (nv, move (en), name));
+
+ if (find_if (r.begin (), r.end (),
+ [&a] (const build_auxiliary& ba)
+ {
+ return ba.environment_name == a.environment_name;
+ }) != r.end ())
+ bad_name ("build auxiliary environment redefinition");
+
+ r.push_back (move (a));
};
- auto parse_url = [&bad_value] (const string& v, const char* what) -> url
+ auto parse_url = [&bad_value] (const string& v,
+ const char* what) -> manifest_url
{
auto p (parser::split_comment (v));
if (v.empty ())
bad_value (string ("empty ") + what + " url");
- url r;
+ manifest_url r;
try
{
- r = url (p.first, move (p.second));
+ r = manifest_url (p.first, move (p.second));
}
catch (const invalid_argument& e)
{
@@ -1703,32 +3528,227 @@ namespace bpkg
}
};
+ // Note: the n argument is the distribution name length.
+ //
+ auto parse_distribution = [&bad_name, &bad_value] (string&& nm, size_t n,
+ string&& vl)
+ {
+ size_t p (nm.find ('-'));
+
+ // Distribution-related manifest value name always has a dash-starting
+ // suffix (-name, etc).
+ //
+ assert (p != string::npos);
+
+ if (p < n)
+ bad_name ("distribution name '" + string (nm, 0, n) + "' contains '-'");
+
+ if (vl.empty ())
+ bad_value ("empty package distribution value");
+
+ return distribution_name_value (move (nm), move (vl));
+ };
+
+ auto add_distribution = [&m, &bad_name] (distribution_name_value&& nv,
+ bool unique)
+ {
+ vector<distribution_name_value>& dvs (m.distribution_values);
+
+ if (unique &&
+ find_if (dvs.begin (), dvs.end (),
+ [&nv] (const distribution_name_value& dnv)
+ {return dnv.name == nv.name;}) != dvs.end ())
+ {
+ bad_name ("package distribution value redefinition");
+ }
+
+ dvs.push_back (move (nv));
+ };
+
auto flag = [fl] (package_manifest_flags f)
{
return (fl & f) != package_manifest_flags::none;
};
+ // Based on the buildfile path specified via the `*-build[2]` value name
+ // or the `build-file` value set the manifest's alt_naming flag if absent
+ // and verify that it doesn't change otherwise. If it does, then return
+ // the error description and nullopt otherwise.
+ //
+ auto alt_naming = [&m] (const string& p) -> optional<string>
+ {
+ assert (!p.empty ());
+
+ bool an (p.back () == '2');
+
+ if (!m.alt_naming)
+ m.alt_naming = an;
+ else if (*m.alt_naming != an)
+ return string (*m.alt_naming ? "alternative" : "standard") +
+ " buildfile naming scheme is already used";
+
+ return nullopt;
+ };
+
+ // Try to parse and verify the buildfile path specified via the
+ // `*-build[2]` value name or the `build-file` value and set the
+ // manifest's alt_naming flag. On success return the normalized path with
+ // the suffix stripped and nullopt and the error description
+ // otherwise. Expects that the prefix is not empty.
+ //
+ // Specifically, verify that the path doesn't contain backslashes, is
+ // relative, doesn't refer outside the packages's build subdirectory, and
+ // was not specified yet. Also verify that the file name is not empty.
+ //
+ auto parse_buildfile_path =
+ [&m, &alt_naming] (string&& p, string& err) -> optional<path>
+ {
+ if (optional<string> e = alt_naming (p))
+ {
+ err = move (*e);
+ return nullopt;
+ }
+
+ // Verify that the path doesn't contain backslashes which would be
+ // interpreted differently on Windows and POSIX.
+ //
+ if (p.find ('\\') != string::npos)
+ {
+ err = "backslash in package buildfile path";
+ return nullopt;
+ }
+
+ // Strip the '(-|.)build' suffix.
+ //
+ size_t n (*m.alt_naming ? 7 : 6);
+ assert (p.size () > n);
+
+ p.resize (p.size () - n);
+
+ try
+ {
+ path f (move (p));
+
+ // Fail if the value name is something like `config/-build`.
+ //
+ if (f.to_directory ())
+ {
+ err = "empty package buildfile name";
+ return nullopt;
+ }
+
+ if (f.absolute ())
+ {
+ err = "absolute package buildfile path";
+ return nullopt;
+ }
+
+ // Verify that the path refers inside the package's build/
+ // subdirectory.
+ //
+ f.normalize (); // Note: can't throw since the path is relative.
+
+ if (dir_path::traits_type::parent (*f.begin ()))
+ {
+ err = "package buildfile path refers outside build/ subdirectory";
+ return nullopt;
+ }
+
+ // Check for duplicates.
+ //
+ const vector<buildfile>& bs (m.buildfiles);
+ const vector<path>& bps (m.buildfile_paths);
+
+ if (find_if (bs.begin (), bs.end (),
+ [&f] (const auto& v) {return v.path == f;})
+ != bs.end () ||
+ find (bps.begin (), bps.end (), f) != bps.end ())
+ {
+ err = "package buildfile redefinition";
+ return nullopt;
+ }
+
+ return f;
+ }
+ catch (const invalid_path&)
+ {
+ err = "invalid package buildfile path";
+ return nullopt;
+ }
+ };
+
+ // Return the package build configuration with the specified name, if
+ // already exists. If no configuration matches, then create one, if
+ // requested, and throw manifest_parsing otherwise. If the new
+ // configuration creation is not allowed, then the description for a
+ // potential manifest_parsing exception needs to also be specified.
+ //
+ auto build_conf = [&m, &bad_name] (string&& nm,
+ bool create = true,
+ const string& desc = "")
+ -> build_package_config&
+ {
+ // The error description must only be specified if the creation of the
+ // package build configuration is not allowed.
+ //
+ assert (desc.empty () == create);
+
+ small_vector<build_package_config, 1>& cs (m.build_configs);
+
+ auto i (find_if (cs.begin (), cs.end (),
+ [&nm] (const build_package_config& c)
+ {return c.name == nm;}));
+
+ if (i != cs.end ())
+ return *i;
+
+ if (!create)
+ bad_name (desc + ": no build package configuration '" + nm + '\'');
+
+ // Add the new build configuration (arguments, builds, etc will come
+ // later).
+ //
+ cs.emplace_back (move (nm));
+ return cs.back ();
+ };
+
// Cache the upstream version manifest value and validate whether it's
// allowed later, after the version value is parsed.
//
optional<name_value> upstream_version;
- // We will cache the depends, tests, examples, and benchmarks manifest
- // values to parse and, if requested, complete the version constraints
- // later, after the version value is parsed.
+ // We will cache the depends and the test dependency manifest values to
+ // parse and, if requested, complete the version constraints later, after
+ // the version value is parsed. We will also cache the requires values to
+ // parse them later, after the package name is parsed.
//
vector<name_value> dependencies;
+ vector<name_value> requirements;
small_vector<name_value, 1> tests;
- small_vector<name_value, 1> examples;
- small_vector<name_value, 1> benchmarks;
- // We will cache the description and its type values to validate them
- // later, after both are parsed.
+ // We will cache the descriptions and changes and their type values to
+ // validate them later, after all are parsed.
//
optional<name_value> description;
optional<name_value> description_type;
+ optional<name_value> package_description;
+ optional<name_value> package_description_type;
+ vector<name_value> changes;
+ optional<name_value> changes_type;
+
+ // It doesn't make sense for only emails to be specified for a package
+ // build configuration. Thus, we will cache the build configuration email
+ // manifest values to parse them later, after all other build
+ // configuration values are parsed, and to make sure that the build
+ // configurations they refer to are also specified.
+ //
+ vector<name_value> build_config_emails;
+ vector<name_value> build_config_warning_emails;
+ vector<name_value> build_config_error_emails;
- for (nv = p.next (); !nv.empty (); nv = p.next ())
+ m.build_configs.emplace_back ("default");
+
+ for (nv = next (); !nv.empty (); nv = next ())
{
string& n (nv.name);
string& v (nv.value);
@@ -1767,9 +3787,9 @@ namespace bpkg
if (m.version.release && m.version.release->empty ())
bad_value ("invalid package version release");
- if (tf)
+ if (translate)
{
- tf (m.version);
+ translate (m.version);
// Re-validate the version after the translation.
//
@@ -1795,6 +3815,55 @@ namespace bpkg
upstream_version = move (nv);
}
+ else if (n == "type")
+ {
+ if (m.type)
+ bad_name ("package type redefinition");
+
+ if (v.empty () || v.find (',') == 0)
+ bad_value ("empty package type");
+
+ m.type = move (v);
+ }
+ else if (n == "language")
+ {
+ // Strip the language extra information, if present.
+ //
+ size_t p (v.find (','));
+ if (p != string::npos)
+ v.resize (p);
+
+ // Determine the language impl flag.
+ //
+ bool impl (false);
+ p = v.find ('=');
+ if (p != string::npos)
+ {
+ string s (trim (string (v, p + 1)));
+ if (s != "impl")
+ bad_value (!s.empty ()
+ ? "unexpected '" + s + "' value after '='"
+ : "expected 'impl' after '='");
+
+ impl = true;
+
+ v.resize (p);
+ }
+
+ // Finally, validate and add the language.
+ //
+ trim_right (v);
+
+ if (v.empty ())
+ bad_value ("empty package language");
+
+ if (find_if (m.languages.begin (), m.languages.end (),
+ [&v] (const language& l) {return l.name == v;}) !=
+ m.languages.end ())
+ bad_value ("duplicate package language");
+
+ m.languages.emplace_back (move (v), impl);
+ }
else if (n == "project")
{
if (m.project)
@@ -1851,28 +3920,28 @@ namespace bpkg
if (description)
{
if (description->name == "description-file")
- bad_name ("package description and description-file are "
+ bad_name ("project description and description file are "
"mutually exclusive");
else
- bad_name ("package description redefinition");
+ bad_name ("project description redefinition");
}
if (v.empty ())
- bad_value ("empty package description");
+ bad_value ("empty project description");
description = move (nv);
}
else if (n == "description-file")
{
if (flag (package_manifest_flags::forbid_file))
- bad_name ("package description-file not allowed");
+ bad_name ("project description file not allowed");
if (description)
{
if (description->name == "description-file")
- bad_name ("package description-file redefinition");
+ bad_name ("project description file redefinition");
else
- bad_name ("package description-file and description are "
+ bad_name ("project description file and description are "
"mutually exclusive");
}
@@ -1881,32 +3950,69 @@ namespace bpkg
else if (n == "description-type")
{
if (description_type)
- bad_name ("package description-type redefinition");
+ bad_name ("project description type redefinition");
description_type = move (nv);
}
+ else if (n == "package-description")
+ {
+ if (package_description)
+ {
+ if (package_description->name == "package-description-file")
+ bad_name ("package description and description file are "
+ "mutually exclusive");
+ else
+ bad_name ("package description redefinition");
+ }
+
+ if (v.empty ())
+ bad_value ("empty package description");
+
+ package_description = move (nv);
+ }
+ else if (n == "package-description-file")
+ {
+ if (flag (package_manifest_flags::forbid_file))
+ bad_name ("package description file not allowed");
+
+ if (package_description)
+ {
+ if (package_description->name == "package-description-file")
+ bad_name ("package description file redefinition");
+ else
+ bad_name ("package description file and description are "
+ "mutually exclusive");
+ }
+
+ package_description = move (nv);
+ }
+ else if (n == "package-description-type")
+ {
+ if (package_description_type)
+ bad_name ("package description type redefinition");
+
+ package_description_type = move (nv);
+ }
else if (n == "changes")
{
if (v.empty ())
bad_value ("empty package changes specification");
- m.changes.emplace_back (move (v));
+ changes.emplace_back (move (nv));
}
else if (n == "changes-file")
{
if (flag (package_manifest_flags::forbid_file))
bad_name ("package changes-file not allowed");
- auto vc (parser::split_comment (v));
- path p (move (vc.first));
-
- if (p.empty ())
- bad_value ("no path in package changes-file");
-
- if (p.absolute ())
- bad_value ("package changes-file path is absolute");
+ changes.emplace_back (move (nv));
+ }
+ else if (n == "changes-type")
+ {
+ if (changes_type)
+ bad_name ("package changes type redefinition");
- m.changes.emplace_back (move (p), move (vc.second));
+ changes_type = move (nv);
}
else if (n == "url")
{
@@ -1917,7 +4023,7 @@ namespace bpkg
}
else if (n == "email")
{
- parse_email (nv, m.email, "project", p.name ());
+ parse_email (nv, m.email, "project");
}
else if (n == "doc-url")
{
@@ -1942,19 +4048,19 @@ namespace bpkg
}
else if (n == "package-email")
{
- parse_email (nv, m.package_email, "package", p.name ());
+ parse_email (nv, m.package_email, "package");
}
else if (n == "build-email")
{
- parse_email (nv, m.build_email, "build", p.name (), true /* empty */);
+ parse_email (nv, m.build_email, "build", true /* empty */);
}
else if (n == "build-warning-email")
{
- parse_email (nv, m.build_warning_email, "build warning", p.name ());
+ parse_email (nv, m.build_warning_email, "build warning");
}
else if (n == "build-error-email")
{
- parse_email (nv, m.build_error_email, "build error", p.name ());
+ parse_email (nv, m.build_error_email, "build error");
}
else if (n == "priority")
{
@@ -1980,74 +4086,246 @@ namespace bpkg
list_parser lp (vc.first.begin (), vc.first.end ());
for (string lv (lp.next ()); !lv.empty (); lv = lp.next ())
+ {
+          // Reserve the license schemes for future use and only recognize
+ // the 'other' scheme for now, if specified. By default, the 'spdx'
+ // scheme is implied.
+ //
+ // Note that if the substring that precedes ':' contains the
+ // 'DocumentRef-' substring, then this is not a license scheme but
+ // the license is a SPDX License Expression (see SPDX user defined
+ // license reference for details).
+ //
+ size_t p (lv.find (':'));
+
+ if (p != string::npos &&
+ lv.find ("DocumentRef-") > p &&
+ lv.compare (0, p, "other") != 0)
+ bad_value ("invalid package license scheme");
+
l.push_back (move (lv));
+ }
if (l.empty ())
bad_value ("empty package license specification");
m.license_alternatives.push_back (move (l));
}
+ else if (n == "depends")
+ {
+ dependencies.push_back (move (nv));
+ }
else if (n == "requires")
{
- // Allow specifying ?* in any order.
- //
- size_t n (v.size ());
- size_t cond ((n > 0 && v[0] == '?') || (n > 1 && v[1] == '?') ? 1 : 0);
- size_t btim ((n > 0 && v[0] == '*') || (n > 1 && v[1] == '*') ? 1 : 0);
-
+ requirements.push_back (move (nv));
+ }
+ else if (n == "builds")
+ {
+ m.builds.push_back (
+ parse_build_class_expr (nv, m.builds.empty (), name));
+ }
+ else if (n == "build-include")
+ {
+ m.build_constraints.push_back (
+ parse_build_constraint (nv, false /* exclusion */, name));
+ }
+ else if (n == "build-exclude")
+ {
+ m.build_constraints.push_back (
+ parse_build_constraint (nv, true /* exclusion */, name));
+ }
+ else if (optional<pair<string, string>> ba =
+ build_auxiliary::parse_value_name (n))
+ {
+ if (ba->first.empty ()) // build-auxiliary*?
+ {
+ parse_build_auxiliary (nv, move (ba->second), m.build_auxiliaries);
+ }
+ else // *-build-auxiliary*
+ {
+ build_package_config& bc (build_conf (move (ba->first)));
+ parse_build_auxiliary (nv, move (ba->second), bc.auxiliaries);
+ }
+ }
+ else if (n == "build-bot")
+ {
+ parse_build_bot (nv, name, m.build_bot_keys);
+ }
+ else if (n.size () > 13 &&
+ n.compare (n.size () - 13, 13, "-build-config") == 0)
+ {
auto vc (parser::split_comment (v));
- const string& vl (vc.first);
- requirement_alternatives ra (cond != 0, btim != 0, move (vc.second));
+ n.resize (n.size () - 13);
- string::const_iterator b (vl.begin ());
- string::const_iterator e (vl.end ());
+ build_package_config& bc (build_conf (move (n)));
- if (ra.conditional || ra.buildtime)
- {
- string::size_type p (vl.find_first_not_of (spaces, cond + btim));
- b = p == string::npos ? e : b + p;
- }
+ if (!bc.arguments.empty () || !bc.comment.empty ())
+ bad_name ("build configuration redefinition");
- list_parser lp (b, e, '|');
- for (string lv (lp.next ()); !lv.empty (); lv = lp.next ())
- ra.push_back (lv);
+ bc.arguments = move (vc.first);
+ bc.comment = move (vc.second);
+ }
+ else if (n.size () > 7 && n.compare (n.size () - 7, 7, "-builds") == 0)
+ {
+ n.resize (n.size () - 7);
- if (ra.empty () && ra.comment.empty ())
- bad_value ("empty package requirement specification");
+ build_package_config& bc (build_conf (move (n)));
- m.requirements.push_back (move (ra));
+ bc.builds.push_back (
+ parse_build_class_expr (nv, bc.builds.empty (), name));
}
- else if (n == "builds")
+ else if (n.size () > 14 &&
+ n.compare (n.size () - 14, 14, "-build-include") == 0)
{
- m.builds.push_back (
- parse_build_class_expr (nv, m.builds.empty (), p.name ()));
+ n.resize (n.size () - 14);
+
+ build_package_config& bc (build_conf (move (n)));
+
+ bc.constraints.push_back (
+ parse_build_constraint (nv, false /* exclusion */, name));
}
- else if (n == "build-include")
+ else if (n.size () > 14 &&
+ n.compare (n.size () - 14, 14, "-build-exclude") == 0)
{
- m.build_constraints.push_back (
- parse_build_constraint (nv, false /* exclusion */, p.name ()));
+ n.resize (n.size () - 14);
+
+ build_package_config& bc (build_conf (move (n)));
+
+ bc.constraints.push_back (
+ parse_build_constraint (nv, true /* exclusion */, name));
}
- else if (n == "build-exclude")
+ else if (n.size () > 10 &&
+ n.compare (n.size () - 10, 10, "-build-bot") == 0)
{
- m.build_constraints.push_back (
- parse_build_constraint (nv, true /* exclusion */, p.name ()));
+ n.resize (n.size () - 10);
+
+ build_package_config& bc (build_conf (move (n)));
+ parse_build_bot (nv, name, bc.bot_keys);
}
- else if (n == "depends")
+ else if (n.size () > 12 &&
+ n.compare (n.size () - 12, 12, "-build-email") == 0)
{
- dependencies.push_back (move (nv));
+ n.resize (n.size () - 12);
+ build_config_emails.push_back (move (nv));
+ }
+ else if (n.size () > 20 &&
+ n.compare (n.size () - 20, 20, "-build-warning-email") == 0)
+ {
+ n.resize (n.size () - 20);
+ build_config_warning_emails.push_back (move (nv));
}
- else if (n == "tests")
+ else if (n.size () > 18 &&
+ n.compare (n.size () - 18, 18, "-build-error-email") == 0)
{
+ n.resize (n.size () - 18);
+ build_config_error_emails.push_back (move (nv));
+ }
+ // @@ TMP time to drop *-0.14.0?
+ //
+ else if (n == "tests" || n == "tests-0.14.0" ||
+ n == "examples" || n == "examples-0.14.0" ||
+ n == "benchmarks" || n == "benchmarks-0.14.0")
+ {
+ // Strip the '-0.14.0' suffix from the value name, if present.
+ //
+ size_t p (n.find ('-'));
+ if (p != string::npos)
+ n.resize (p);
+
tests.push_back (move (nv));
}
- else if (n == "examples")
+ else if (n == "bootstrap-build" || n == "bootstrap-build2")
+ {
+ if (optional<string> e = alt_naming (n))
+ bad_name (*e);
+
+ if (m.bootstrap_build)
+ bad_name ("package " + n + " redefinition");
+
+ m.bootstrap_build = move (v);
+ }
+ else if (n == "root-build" || n == "root-build2")
+ {
+ if (optional<string> e = alt_naming (n))
+ bad_name (*e);
+
+ if (m.root_build)
+ bad_name ("package " + n + " redefinition");
+
+ m.root_build = move (v);
+ }
+ else if ((n.size () > 6 && n.compare (n.size () - 6, 6, "-build") == 0) ||
+ (n.size () > 7 && n.compare (n.size () - 7, 7, "-build2") == 0))
+ {
+ string err;
+ if (optional<path> p = parse_buildfile_path (move (n), err))
+ m.buildfiles.push_back (buildfile (move (*p), move (v)));
+ else
+ bad_name (err);
+ }
+ else if (n == "build-file")
+ {
+ if (flag (package_manifest_flags::forbid_file))
+ bad_name ("package build-file not allowed");
+
+ // Verify that the buildfile extension is either build or build2.
+ //
+ if ((v.size () > 6 && v.compare (v.size () - 6, 6, ".build") == 0) ||
+ (v.size () > 7 && v.compare (v.size () - 7, 7, ".build2") == 0))
+ {
+ string err;
+ if (optional<path> p = parse_buildfile_path (move (v), err))
+ {
+ // Verify that the resulting path differs from bootstrap and root.
+ //
+ const string& s (p->string ());
+ if (s == "bootstrap" || s == "root")
+ bad_value (s + " not allowed");
+
+ m.buildfile_paths.push_back (move (*p));
+ }
+ else
+ bad_value (err);
+ }
+ else
+ bad_value ("path with build or build2 extension expected");
+
+ }
+ else if (n.size () > 5 && n.compare (n.size () - 5, 5, "-name") == 0)
+ {
+ add_distribution (
+ parse_distribution (move (n), n.size () - 5, move (v)),
+ false /* unique */);
+ }
+ // Note: must precede the check for the "-version" suffix.
+ //
+ else if (n.size () > 22 &&
+ n.compare (n.size () - 22, 22, "-to-downstream-version") == 0)
{
- examples.push_back (move (nv));
+ add_distribution (
+ parse_distribution (move (n), n.size () - 22, move (v)),
+ false /* unique */);
}
- else if (n == "benchmarks")
+ // Note: must follow the check for "upstream-version".
+ //
+ else if (n.size () > 8 && n.compare (n.size () - 8, 8, "-version") == 0)
{
- benchmarks.push_back (move (nv));
+ // If the value is forbidden then throw, but only after the name is
+ // validated. Thus, check for that before we move the value from.
+ //
+ bool bad (v == "$" &&
+ flag (package_manifest_flags::forbid_incomplete_values));
+
+ // Can throw.
+ //
+ distribution_name_value d (
+ parse_distribution (move (n), n.size () - 8, move (v)));
+
+ if (bad)
+ bad_value ("$ not allowed");
+
+ add_distribution (move (d), true /* unique */);
}
else if (n == "location")
{
@@ -2129,23 +4407,24 @@ namespace bpkg
m.upstream_version = move (nv.value);
}
- // Verify that description is specified if the description type is
- // specified.
- //
- if (description_type && !description)
- bad_value ("no package description for specified description type");
-
- // Validate (and set) description and its type.
+ // Parse and validate a text/file manifest value and its respective type
+ // value, if present. Return a typed_text_file object.
//
- if (description)
+ auto parse_text_file = [iu, &nv, &bad_value] (name_value&& text_file,
+ optional<name_value>&& type,
+ const char* what)
+ -> typed_text_file
{
+ typed_text_file r;
+
// Restore as bad_value() uses its line/column.
//
- nv = move (*description);
+ nv = move (text_file);
string& v (nv.value);
+ const string& n (nv.name);
- if (nv.name == "description-file")
+ if (n.size () > 5 && n.compare (n.size () - 5, 5, "-file") == 0)
{
auto vc (parser::split_comment (v));
@@ -2156,177 +4435,393 @@ namespace bpkg
}
catch (const invalid_path& e)
{
- bad_value (string ("invalid package description file: ") +
- e.what ());
+ bad_value (string ("invalid ") + what + " file: " + e.what ());
}
if (p.empty ())
- bad_value ("no path in package description-file");
+ bad_value (string ("no path in ") + what + " file");
if (p.absolute ())
- bad_value ("package description-file path is absolute");
+ bad_value (string (what) + " file path is absolute");
- m.description = text_file (move (p), move (vc.second));
+ r = typed_text_file (move (p), move (vc.second));
}
else
- m.description = text_file (move (v));
+ r = typed_text_file (move (v));
- if (description_type)
- m.description_type = move (description_type->value);
+ if (type)
+ r.type = move (type->value);
- // Verify the description type.
+ // Verify the text type.
//
try
{
- m.effective_description_type (iu);
+ r.effective_type (iu);
}
catch (const invalid_argument& e)
{
- if (description_type)
+ if (type)
{
- // Restore as bad_value() uses its line/column.
+ // Restore as bad_value() uses its line/column. Note that we don't
+ // need to restore the moved out type value.
//
- nv = move (*description_type);
+ nv = move (*type);
- bad_value (string ("invalid package description type: ") +
- e.what ());
+ bad_value (string ("invalid ") + what + " type: " + e.what ());
}
else
- bad_value (string ("invalid package description file: ") +
- e.what ());
+ {
+ // Note that this can only happen due to inability to guess the
+ // type from the file extension. Let's help the user here a bit.
+ //
+ assert (r.file);
+
+ bad_value (string ("invalid ") + what + " file: " + e.what () +
+ " (use " + string (n, 0, n.size () - 5) +
+ "-type manifest value to specify explicitly)");
+ }
}
- }
- // Now, when the version manifest value is parsed, we can parse the
- // dependencies, tests, examples, and benchmarks and complete their
- // constraints, if requested.
+ return r;
+ };
+
+ // As above but also accepts nullopt as the text_file argument, in which
+    // case throws manifest_parsing if the type is specified and returns
+ // nullopt otherwise.
//
- auto parse_dependency = [&m, cd, &flag, &bad_value] (string&& d,
- const char* what)
+ auto parse_text_file_opt = [&nv, &bad_name, &parse_text_file]
+ (optional<name_value>&& text_file,
+ optional<name_value>&& type,
+ const char* what) -> optional<typed_text_file>
{
- using iterator = string::const_iterator;
-
- iterator b (d.begin ());
- iterator i (b);
- iterator ne (b); // End of name.
- iterator e (d.end ());
-
- // Find end of name (ne).
- //
- // Grep for '=<>([~^' in the bpkg source code and update, if changed.
+ // Verify that the text/file value is specified if the type value is
+ // specified.
//
- const string cb ("=<>([~^");
- for (char c; i != e && cb.find (c = *i) == string::npos; ++i)
+ if (!text_file)
{
- if (!space (c))
- ne = i + 1;
+ if (type)
+ {
+ // Restore as bad_name() uses its line/column.
+ //
+ nv = move (*type);
+
+ bad_name (string ("no ") + what + " for specified type");
+ }
+
+ return nullopt;
}
- package_name nm;
+ return parse_text_file (move (*text_file), move (type), what);
+ };
+
+ // Parse the project/package descriptions/types.
+ //
+ m.description = parse_text_file_opt (move (description),
+ move (description_type),
+ "project description");
+
+ m.package_description =
+ parse_text_file_opt (move (package_description),
+ move (package_description_type),
+ "package description");
- try
+ // Parse the package changes/types.
+ //
+ // Note: at the end of the loop the changes_type variable may contain
+    // a value in an unspecified state but we can still check for the value
+ // presence.
+ //
+ for (name_value& c: changes)
+ {
+ // Move the changes_type value from for the last changes entry.
+ //
+ m.changes.push_back (
+ parse_text_file (move (c),
+ (&c != &changes.back ()
+ ? optional<name_value> (changes_type)
+ : move (changes_type)),
+ "changes"));
+ }
+
+ // If there are multiple changes and the changes type is not explicitly
+ // specified, then verify that all changes effective types are the same.
+ // Note that in the "ignore unknown" mode there can be unresolved
+ // effective types which we just skip.
+ //
+ if (changes.size () > 1 && !changes_type)
+ {
+ optional<text_type> type;
+
+ for (size_t i (0); i != m.changes.size (); ++i)
{
- nm = package_name (i == e ? move (d) : string (b, ne));
+ const typed_text_file& c (m.changes[i]);
+
+ if (optional<text_type> t = c.effective_type (iu))
+ {
+ if (!type)
+ {
+ type = *t;
+ }
+ else if (*t != *type)
+ {
+ // Restore as bad_value() uses its line/column.
+ //
+ nv = move (changes[i]);
+
+ bad_value ("changes type '" + to_string (*t) + "' differs from " +
+ " previous type '" + to_string (*type) + "'");
+ }
+ }
}
- catch (const invalid_argument& e)
+ }
+
+ // Parse the build configuration emails.
+ //
+ // Note: the argument can only be one of the build_config_*emails
+ // variables (see above) to distinguish between the email kinds.
+ //
+ auto parse_build_config_emails = [&nv,
+ &build_config_emails,
+ &build_config_warning_emails,
+ &build_config_error_emails,
+ &build_conf,
+ &parse_email]
+ (vector<name_value>&& emails)
+ {
+ enum email_kind {build, warning, error};
+
+ email_kind ek (
+ &emails == &build_config_emails ? email_kind::build :
+ &emails == &build_config_warning_emails ? email_kind::warning :
+ email_kind::error);
+
+ // The argument can only be one of the build_config_*emails variables.
+ //
+ assert (ek != email_kind::error || &emails == &build_config_error_emails);
+
+ for (name_value& e: emails)
{
- bad_value (string ("invalid ") + what + " package name: " +
- e.what ());
+ // Restore as bad_name() and bad_value() use its line/column.
+ //
+ nv = move (e);
+
+ build_package_config& bc (
+ build_conf (move (nv.name),
+ false /* create */,
+ "stray build notification email"));
+
+ parse_email (
+ nv,
+ (ek == email_kind::build ? bc.email :
+ ek == email_kind::warning ? bc.warning_email :
+ bc.error_email),
+ (ek == email_kind::build ? "build configuration" :
+ ek == email_kind::warning ? "build configuration warning" :
+ "build configuration error"),
+ ek == email_kind::build /* empty */);
}
+ };
- dependency r;
+ parse_build_config_emails (move (build_config_emails));
+ parse_build_config_emails (move (build_config_warning_emails));
+ parse_build_config_emails (move (build_config_error_emails));
- if (i == e)
- r = dependency {move (nm), nullopt};
- else
+ // Now, when the version manifest value is parsed, we can parse the
+ // dependencies and complete their constraints, if requested.
+ //
+ auto complete_constraint = [&m, cv, &flag] (auto&& dep)
+ {
+ if (dep.constraint)
+ try
{
- try
- {
- version_constraint vc (string (i, e));
-
- if (!vc.complete () &&
- flag (package_manifest_flags::forbid_incomplete_dependencies))
- bad_value ("$ not allowed");
+ version_constraint& vc (*dep.constraint);
- // Complete the constraint.
- //
- if (cd)
- vc = vc.effective (m.version);
+ if (!vc.complete () &&
+ flag (package_manifest_flags::forbid_incomplete_values))
+ throw invalid_argument ("$ not allowed");
- r = dependency {move (nm), move (vc)};
- }
- catch (const invalid_argument& e)
- {
- bad_value (string ("invalid ") + what + " package constraint: " +
- e.what ());
- }
+ // Complete the constraint.
+ //
+ if (cv)
+ vc = vc.effective (m.version);
+ }
+ catch (const invalid_argument& e)
+ {
+ throw invalid_argument ("invalid package constraint '" +
+ dep.constraint->string () + "': " + e.what ());
}
- return r;
+ return move (dep);
};
+ // Parse the regular dependencies.
+ //
for (name_value& d: dependencies)
{
nv = move (d); // Restore as bad_value() uses its line/column.
- const string& v (nv.value);
-
- // Allow specifying ?* in any order.
+ // Parse dependency alternatives.
//
- size_t n (v.size ());
- size_t cond ((n > 0 && v[0] == '?') || (n > 1 && v[1] == '?') ? 1 : 0);
- size_t btim ((n > 0 && v[0] == '*') || (n > 1 && v[1] == '*') ? 1 : 0);
-
- auto vc (parser::split_comment (v));
-
- const string& vl (vc.first);
- dependency_alternatives da (cond != 0, btim != 0, move (vc.second));
+ try
+ {
+ dependency_alternatives das (nv.value,
+ m.name,
+ name,
+ nv.value_line,
+ nv.value_column);
- string::const_iterator b (vl.begin ());
- string::const_iterator e (vl.end ());
+ for (dependency_alternative& da: das)
+ {
+ for (dependency& d: da)
+ d = complete_constraint (move (d));
+ }
- if (da.conditional || da.buildtime)
+ m.dependencies.push_back (move (das));
+ }
+ catch (const invalid_argument& e)
{
- string::size_type p (vl.find_first_not_of (spaces, cond + btim));
- b = p == string::npos ? e : b + p;
+ bad_value (e.what ());
}
+ }
- list_parser lp (b, e, '|');
- for (string lv (lp.next ()); !lv.empty (); lv = lp.next ())
- da.push_back (parse_dependency (move (lv), "prerequisite"));
-
- if (da.empty ())
- bad_value ("empty package dependency specification");
-
- m.dependencies.push_back (da);
+ // Parse the requirements.
+ //
+ for (const name_value& r: requirements)
+ {
+ m.requirements.push_back (
+ requirement_alternatives (r.value,
+ m.name,
+ name,
+ r.value_line,
+ r.value_column));
}
- auto parse_deps = [&nv, &parse_dependency]
- (small_vector<name_value, 1>&& nvs, const char* what)
+ // Parse the test dependencies.
+ //
+ for (name_value& t: tests)
{
- small_vector<dependency, 1> r;
- for (name_value& v: nvs)
+ nv = move (t); // Restore as bad_value() uses its line/column.
+
+ try
{
- nv = move (v); // Restore as bad_value() uses its line/column.
- r.push_back (parse_dependency (move (nv.value), what));
+ m.tests.push_back (
+ complete_constraint (
+ test_dependency (move (nv.value),
+ to_test_dependency_type (nv.name))));
}
- return r;
- };
+ catch (const invalid_argument& e)
+ {
+ bad_value (e.what ());
+ }
+ }
- m.tests = parse_deps (move (tests), "tests");
- m.examples = parse_deps (move (examples), "examples");
- m.benchmarks = parse_deps (move (benchmarks), "benchmarks");
+ // Now, when the version manifest value is parsed, we complete the
+ // <distribution>-version values, if requested.
+ //
+ if (cv)
+ {
+ for (distribution_name_value& nv: m.distribution_values)
+ {
+ const string& n (nv.name);
+ string& v (nv.value);
- if (m.description &&
- !m.description_type &&
- flag (package_manifest_flags::require_description_type))
- bad_name ("no package description type specified");
+ if (v == "$" &&
+ (n.size () > 8 && n.compare (n.size () - 8, 8, "-version") == 0) &&
+ n.find ('-') == n.size () - 8)
+ {
+ v = version (default_epoch (m.version),
+ move (m.version.upstream),
+ nullopt /* release */,
+ nullopt /* revision */,
+ 0 /* iteration */).string ();
+ }
+ }
+ }
if (!m.location && flag (package_manifest_flags::require_location))
bad_name ("no package location specified");
if (!m.sha256sum && flag (package_manifest_flags::require_sha256sum))
bad_name ("no package sha256sum specified");
+
+ if (flag (package_manifest_flags::require_text_type))
+ {
+ if (m.description && !m.description->type)
+ bad_name ("no project description type specified");
+
+ if (m.package_description && !m.package_description->type)
+ bad_name ("no package description type specified");
+
+ // Note that changes either all have the same explicitly specified type
+ // or have no type.
+ //
+ if (!m.changes.empty () && !m.changes.front ().type)
+ {
+ // @@ TMP To support older repositories allow absent changes type
+ // until toolchain 0.16.0 is released.
+ //
+ // Note that for such repositories the packages may not have
+      //          changes values other than plain text. Thus, we can safely set
+ // this type, if they are absent, so that the caller can always
+ // be sure that these values are always present for package
+ // manifest lists.
+ //bad_name ("no package changes type specified");
+ for (typed_text_file& c: m.changes)
+ c.type = "text/plain";
+ }
+ }
+
+ if (!m.bootstrap_build &&
+ flag (package_manifest_flags::require_bootstrap_build))
+ {
+ // @@ TMP To support older repositories allow absent bootstrap build
+ // and alt_naming until toolchain 0.15.0 is released.
+ //
+ // Note that for such repositories the packages may not have any
+ // need for the bootstrap buildfile (may not have any dependency
+ // clauses, etc). Thus, we can safely set the bootstrap build and
+ // alt_naming values to an empty string and false, respectively,
+ // if they are absent, so that the caller can always be sure that
+ // these values are always present for package manifest lists.
+ //
+ // Note: don't forget to uncomment no-bootstrap test in
+ // tests/manifest/testscript when removing this workaround.
+ //
+ // bad_name ("no package bootstrap build specified");
+ m.bootstrap_build = "project = " + m.name.string () + '\n';
+ m.alt_naming = false;
+ }
+ }
+
+ static void
+ parse_package_manifest (
+ parser& p,
+ name_value nv,
+ const function<package_manifest::translate_function>& tf,
+ bool iu,
+ bool cv,
+ package_manifest_flags fl,
+ package_manifest& m)
+ {
+ // Make sure this is the start and we support the version.
+ //
+ if (!nv.name.empty ())
+ throw parsing (p.name (), nv.name_line, nv.name_column,
+ "start of package manifest expected");
+
+ if (nv.value != "1")
+ throw parsing (p.name (), nv.value_line, nv.value_column,
+ "unsupported format version");
+
+ // Note that we rely on "small function object" optimization here.
+ //
+ parse_package_manifest (p.name (),
+ [&p] () {return p.next ();},
+ tf,
+ iu,
+ cv,
+ fl,
+ m);
}
package_manifest
@@ -2336,12 +4831,13 @@ namespace bpkg
p,
move (nv),
iu,
- false /* complete_depends */,
+ false /* complete_values */,
package_manifest_flags::forbid_file |
- package_manifest_flags::require_description_type |
- package_manifest_flags::require_location |
package_manifest_flags::forbid_fragment |
- package_manifest_flags::forbid_incomplete_dependencies);
+ package_manifest_flags::forbid_incomplete_values |
+ package_manifest_flags::require_location |
+ package_manifest_flags::require_text_type |
+ package_manifest_flags::require_bootstrap_build);
}
// package_manifest
@@ -2350,10 +4846,10 @@ namespace bpkg
package_manifest (manifest_parser& p,
const function<translate_function>& tf,
bool iu,
- bool cd,
+ bool cv,
package_manifest_flags fl)
{
- parse_package_manifest (p, p.next (), tf, iu, cd, fl, *this);
+ parse_package_manifest (p, p.next (), tf, iu, cv, fl, *this);
// Make sure this is the end.
//
@@ -2364,11 +4860,42 @@ namespace bpkg
}
package_manifest::
- package_manifest (manifest_parser& p,
+ package_manifest (const string& name,
+ vector<name_value>&& vs,
+ const function<translate_function>& tf,
+ bool iu,
+ bool cv,
+ package_manifest_flags fl)
+ {
+ auto i (vs.begin ());
+ auto e (vs.end ());
+
+ // Note that we rely on "small function object" optimization here.
+ //
+ parse_package_manifest (name,
+ [&i, &e] ()
+ {
+ return i != e ? move (*i++) : name_value ();
+ },
+ tf,
+ iu,
+ cv,
+ fl,
+ *this);
+ }
+
+ package_manifest::
+ package_manifest (const string& name,
+ vector<name_value>&& vs,
bool iu,
- bool cd,
+ bool cv,
package_manifest_flags fl)
- : package_manifest (p, function<translate_function> (), iu, cd, fl)
+ : package_manifest (name,
+ move (vs),
+ function<translate_function> (),
+ iu,
+ cv,
+ fl)
{
}
@@ -2376,185 +4903,748 @@ namespace bpkg
package_manifest (manifest_parser& p,
name_value nv,
bool iu,
- bool cd,
+ bool cv,
package_manifest_flags fl)
{
parse_package_manifest (
- p, move (nv), function<translate_function> (), iu, cd, fl, *this);
+ p, move (nv), function<translate_function> (), iu, cv, fl, *this);
}
- optional<text_type> package_manifest::
- effective_description_type (bool iu) const
+ strings package_manifest::
+ effective_type_sub_options (const optional<string>& t)
{
- if (!description)
- throw logic_error ("absent description");
+ strings r;
- optional<text_type> r;
-
- if (description_type)
- r = to_text_type (*description_type);
- else if (description->file)
+ if (t)
{
- string ext (description->path.extension ());
- if (ext.empty () || icasecmp (ext, "txt") == 0)
- r = text_type::plain;
- else if (icasecmp (ext, "md") == 0 || icasecmp (ext, "markdown") == 0)
- r = text_type::github_mark;
+ for (size_t b (0), e (0); next_word (*t, b, e, ','); )
+ {
+ if (b != 0)
+ r.push_back (trim (string (*t, b, e - b)));
+ }
}
- else
- r = text_type::plain;
-
- if (!r && !iu)
- throw invalid_argument ("unknown text type");
return r;
}
- void package_manifest::
- override (const vector<manifest_name_value>& nvs, const string& name)
+ // If validate_only is true, then the package manifest is assumed to be
+ // default constructed and is used as a storage for convenience of the
+ // validation implementation.
+ //
+ static void
+ override (const vector<manifest_name_value>& nvs,
+ const string& name,
+ package_manifest& m,
+ bool validate_only)
{
- // Reset the builds value group on the first call.
+ // The first {builds, build-{include,exclude}} override value.
//
- bool rb (true);
- auto reset_builds = [&rb, this] ()
- {
- if (rb)
- {
- builds.clear ();
- build_constraints.clear ();
- rb = false;
- }
- };
+ const manifest_name_value* cbc (nullptr);
+
+ // The first builds override value.
+ //
+ const manifest_name_value* cb (nullptr);
+
+ // The first {*-builds, *-build-{include,exclude}} override value.
+ //
+ const manifest_name_value* pbc (nullptr);
+
+ // The first {build-bot} override value.
+ //
+ const manifest_name_value* cbb (nullptr);
+
+ // The first {*-build-bot} override value.
+ //
+ const manifest_name_value* pbb (nullptr);
+
+ // The first {build-*email} override value.
+ //
+ const manifest_name_value* cbe (nullptr);
- // Reset the build emails value group on the first call.
+ // The first {*-build-*email} override value.
//
- bool rbe (true);
- auto reset_build_emails = [&rbe, this] ()
+ const manifest_name_value* pbe (nullptr);
+
+ // List of indexes of the build configurations with the overridden build
+ // constraints together with flags which indicate if the *-builds override
+ // value was encountered for this configuration.
+ //
+ vector<pair<size_t, bool>> obcs;
+
+ // List of indexes of the build configurations with the overridden bots.
+ //
+ vector<size_t> obbs;
+
+ // List of indexes of the build configurations with the overridden emails.
+ //
+ vector<size_t> obes;
+
+ // Return true if the specified package build configuration is newly
+ // created by the *-build-config override.
+ //
+ auto config_created = [&m, confs_num = m.build_configs.size ()]
+ (const build_package_config& c)
{
- if (rbe)
- {
- build_email = nullopt;
- build_warning_email = nullopt;
- build_error_email = nullopt;
- rbe = false;
- }
+ return &c >= m.build_configs.data () + confs_num;
};
+ // Apply overrides.
+ //
for (const manifest_name_value& nv: nvs)
{
+ auto bad_name = [&name, &nv] (const string& d)
+ {
+ throw !name.empty ()
+ ? parsing (name, nv.name_line, nv.name_column, d)
+ : parsing (d);
+ };
+
+ // Reset the build-{include,exclude} value sub-group on the first call
+ // but throw if any of the {*-builds, *-build-{include,exclude}}
+ // override values are already encountered.
+ //
+ auto reset_build_constraints = [&cbc, &pbc, &nv, &bad_name, &m] ()
+ {
+ if (cbc == nullptr)
+ {
+ if (pbc != nullptr)
+ bad_name ('\'' + nv.name + "' override specified together with '" +
+ pbc->name + "' override");
+
+ m.build_constraints.clear ();
+ cbc = &nv;
+ }
+ };
+
+ // Reset the {builds, build-{include,exclude}} value group on the first
+ // call.
+ //
+ auto reset_builds = [&cb, &nv, &reset_build_constraints, &m] ()
+ {
+ if (cb == nullptr)
+ {
+ reset_build_constraints ();
+
+ m.builds.clear ();
+ cb = &nv;
+ }
+ };
+
+ // Return the reference to the package build configuration which matches
+ // the build config value override, if one exists. If no configuration
+ // matches, then create one, if requested, and throw manifest_parsing
+ // otherwise.
+ //
+ // The n argument specifies the length of the configuration name in
+ // *-build-config, *-builds, *-build-{include,exclude}, *-build-bot, and
+ // *-build-*email values.
+ //
+ auto build_conf =
+ [&nv, &bad_name, &m] (size_t n, bool create) -> build_package_config&
+ {
+ const string& nm (nv.name);
+ small_vector<build_package_config, 1>& cs (m.build_configs);
+
+ // Find the build package configuration. If no configuration is found,
+ // then create one, if requested, and throw otherwise.
+ //
+ auto i (find_if (cs.begin (), cs.end (),
+ [&nm, n] (const build_package_config& c)
+ {return nm.compare (0, n, c.name) == 0;}));
+
+ if (i == cs.end ())
+ {
+ string cn (nm, 0, n);
+
+ if (create)
+ {
+ cs.emplace_back (move (cn));
+ return cs.back ();
+ }
+ else
+ bad_name ("cannot override '" + nm + "' value: no build " +
+ "package configuration '" + cn + '\'');
+ }
+
+ return *i;
+ };
+
+ // Return the reference to the package build configuration which matches
+ // the build config-specific builds group value override, if one exists. If
+ // no configuration matches, then throw manifest_parsing, except for the
+ // validate-only mode in which case just add an empty configuration with
+ // this name and return the reference to it. Also verify that no common
+ // build constraints group value overrides are applied yet and throw if
+ // that's not the case.
+ //
+ auto build_conf_constr =
+ [&pbc, &cbc, &nv, &obcs, &bad_name, &build_conf, &m, validate_only]
+ (size_t n) -> build_package_config&
+ {
+ const string& nm (nv.name);
+
+ // If this is the first build config override value, then save its
+ // address. But first verify that no common build constraints group
+ // value overrides are applied yet and throw if that's not the case.
+ //
+ if (pbc == nullptr)
+ {
+ if (cbc != nullptr)
+ bad_name ('\'' + nm + "' override specified together with '" +
+ cbc->name + "' override");
+
+ pbc = &nv;
+ }
+
+ small_vector<build_package_config, 1>& cs (m.build_configs);
+
+ // Find the build package configuration. If there is no such
+ // configuration then throw, except for the validate-only mode in
+ // which case just add an empty configuration with this name.
+ //
+ // Note that we are using indexes rather than configuration addresses
+ // due to potential reallocations.
+ //
+ build_package_config& r (build_conf (n, validate_only));
+ size_t ci (&r - cs.data ());
+ bool bv (nm.compare (n, nm.size () - n, "-builds") == 0);
+
+ // If this is the first encountered
+ // {*-builds, *-build-{include,exclude}} override for this build
+ // config, then clear this config's constraints member and add an entry
+ // to the overridden configs list.
+ //
+ auto i (find_if (obcs.begin (), obcs.end (),
+ [ci] (const auto& c) {return c.first == ci;}));
+
+ bool first (i == obcs.end ());
+
+ if (first)
+ {
+ r.constraints.clear ();
+
+ obcs.push_back (make_pair (ci, bv));
+ }
+
+ // If this is the first encountered *-builds override, then also clear
+ // this config's builds member.
+ //
+ if (bv && (first || !i->second))
+ {
+ r.builds.clear ();
+
+ if (!first)
+ i->second = true;
+ }
+
+ return r;
+ };
+
+ // Reset the {build-bot} value group on the first call but throw if any
+ // of the {*-build-bot} override values are already encountered.
+ //
+ auto reset_build_bots = [&cbb, &pbb, &nv, &bad_name, &m] ()
+ {
+ if (cbb == nullptr)
+ {
+ if (pbb != nullptr)
+ bad_name ('\'' + nv.name + "' override specified together with '" +
+ pbb->name + "' override");
+
+ m.build_bot_keys.clear ();
+ cbb = &nv;
+ }
+ };
+
+ // Return the reference to the package build configuration which matches
+ // the build config-specific build bot value override, if one exists. If no
+ // configuration matches, then throw manifest_parsing, except for the
+ // validate-only mode in which case just add an empty configuration with
+ // this name and return the reference to it. Also verify that no common
+ // build bot value overrides are applied yet and throw if that's not the
+ // case.
+ //
+ auto build_conf_bot =
+ [&pbb, &cbb, &nv, &obbs, &bad_name, &build_conf, &m, validate_only]
+ (size_t n) -> build_package_config&
+ {
+ const string& nm (nv.name);
+
+ // If this is the first build config override value, then save its
+ // address. But first verify that no common build bot value overrides
+ // are applied yet and throw if that's not the case.
+ //
+ if (pbb == nullptr)
+ {
+ if (cbb != nullptr)
+ bad_name ('\'' + nm + "' override specified together with '" +
+ cbb->name + "' override");
+
+ pbb = &nv;
+ }
+
+ small_vector<build_package_config, 1>& cs (m.build_configs);
+
+ // Find the build package configuration. If there is no such
+ // configuration then throw, except for the validate-only mode in
+ // which case just add an empty configuration with this name.
+ //
+ // Note that we are using indexes rather than configuration addresses
+ // due to potential reallocations.
+ //
+ build_package_config& r (build_conf (n, validate_only));
+ size_t ci (&r - cs.data ());
+
+ // If this is the first encountered {*-build-bot} override for this
+ // build config, then clear this config's bot_keys member and add an
+ // entry to the overridden configs list.
+ //
+ if (find (obbs.begin (), obbs.end (), ci) == obbs.end ())
+ {
+ r.bot_keys.clear ();
+
+ obbs.push_back (ci);
+ }
+
+ return r;
+ };
+
+ // Reset the {build-*email} value group on the first call but throw if
+ // any of the {*-build-*email} override values are already encountered.
+ //
+ auto reset_build_emails = [&cbe, &pbe, &nv, &bad_name, &m] ()
+ {
+ if (cbe == nullptr)
+ {
+ if (pbe != nullptr)
+ bad_name ('\'' + nv.name + "' override specified together with '" +
+ pbe->name + "' override");
+
+ m.build_email = nullopt;
+ m.build_warning_email = nullopt;
+ m.build_error_email = nullopt;
+ cbe = &nv;
+ }
+ };
+
+ // Return the reference to the package build configuration which matches
+ // the build config-specific emails group value override, if one exists. If
+ // no configuration matches, then throw manifest_parsing, except for the
+ // validate-only mode in which case just add an empty configuration with
+ // this name and return the reference to it. Also verify that no common
+ // build emails group value overrides are applied yet and throw if
+ // that's not the case.
+ //
+ auto build_conf_email =
+ [&pbe, &cbe, &nv, &obes, &bad_name, &build_conf, &m, validate_only]
+ (size_t n) -> build_package_config&
+ {
+ const string& nm (nv.name);
+
+ // If this is the first build config override value, then save its
+ // address. But first verify that no common build emails group value
+ // overrides are applied yet and throw if that's not the case.
+ //
+ if (pbe == nullptr)
+ {
+ if (cbe != nullptr)
+ bad_name ('\'' + nm + "' override specified together with '" +
+ cbe->name + "' override");
+
+ pbe = &nv;
+ }
+
+ small_vector<build_package_config, 1>& cs (m.build_configs);
+
+ // Find the build package configuration. If there is no such
+ // configuration then throw, except for the validate-only mode in
+ // which case just add an empty configuration with this name.
+ //
+ // Note that we are using indexes rather than configuration addresses
+ // due to potential reallocations.
+ //
+ build_package_config& r (build_conf (n, validate_only));
+ size_t ci (&r - cs.data ());
+
+ // If this is the first encountered {*-build-*email} override for this
+ // build config, then clear this config's email members and add an
+ // entry to the overridden configs list.
+ //
+ if (find (obes.begin (), obes.end (), ci) == obes.end ())
+ {
+ r.email = nullopt;
+ r.warning_email = nullopt;
+ r.error_email = nullopt;
+
+ obes.push_back (ci);
+ }
+
+ return r;
+ };
+
+ // Parse the [*-]build-auxiliary[-*] value override. If the mode is not
+ // validate-only, then override the matching value and throw
+ // manifest_parsing if there is no match, unless this is a
+ // configuration-specific override (build_config is not NULL) for a
+ // newly created configuration, in which case add the value instead.
+ //
+ auto override_build_auxiliary =
+ [&bad_name,
+ &name,
+ &config_created,
+ validate_only] (const name_value& nv,
+ string&& en,
+ vector<build_auxiliary>& r,
+ build_package_config* build_config = nullptr)
+ {
+ build_auxiliary a (bpkg::parse_build_auxiliary (nv, move (en), name));
+
+ if (!validate_only)
+ {
+ auto i (find_if (r.begin (), r.end (),
+ [&a] (const build_auxiliary& ba)
+ {
+ return ba.environment_name == a.environment_name;
+ }));
+
+ if (i != r.end ())
+ {
+ *i = move (a);
+ }
+ else
+ {
+ if (build_config != nullptr && config_created (*build_config))
+ r.emplace_back (move (a));
+ else
+ bad_name ("no match for '" + nv.name + "' value override");
+ }
+ }
+ };
+
const string& n (nv.name);
if (n == "builds")
{
reset_builds ();
- builds.push_back (parse_build_class_expr (nv, builds.empty (), name));
+
+ m.builds.push_back (
+ parse_build_class_expr (nv, m.builds.empty (), name));
}
else if (n == "build-include")
{
- reset_builds ();
+ reset_build_constraints ();
- build_constraints.push_back (
+ m.build_constraints.push_back (
parse_build_constraint (nv, false /* exclusion */, name));
}
else if (n == "build-exclude")
{
- reset_builds ();
+ reset_build_constraints ();
+
+ m.build_constraints.push_back (
+ parse_build_constraint (nv, true /* exclusion */, name));
+ }
+ else if (n == "build-bot")
+ {
+ reset_build_bots ();
+
+ parse_build_bot (nv, name, m.build_bot_keys);
+ }
+ else if ((n.size () > 13 &&
+ n.compare (n.size () - 13, 13, "-build-config") == 0))
+ {
+ build_package_config& bc (
+ build_conf (n.size () - 13, true /* create */));
+
+ auto vc (parser::split_comment (nv.value));
+
+ bc.arguments = move (vc.first);
+ bc.comment = move (vc.second);
+ }
+ else if (n.size () > 7 && n.compare (n.size () - 7, 7, "-builds") == 0)
+ {
+ build_package_config& bc (build_conf_constr (n.size () - 7));
+
+ bc.builds.push_back (
+ parse_build_class_expr (nv, bc.builds.empty (), name));
+ }
+ else if (n.size () > 14 &&
+ n.compare (n.size () - 14, 14, "-build-include") == 0)
+ {
+ build_package_config& bc (build_conf_constr (n.size () - 14));
- build_constraints.push_back (
+ bc.constraints.push_back (
+ parse_build_constraint (nv, false /* exclusion */, name));
+ }
+ else if (n.size () > 14 &&
+ n.compare (n.size () - 14, 14, "-build-exclude") == 0)
+ {
+ build_package_config& bc (build_conf_constr (n.size () - 14));
+
+ bc.constraints.push_back (
parse_build_constraint (nv, true /* exclusion */, name));
}
+ else if (n.size () > 10 &&
+ n.compare (n.size () - 10, 10, "-build-bot") == 0)
+ {
+ build_package_config& bc (build_conf_bot (n.size () - 10));
+ parse_build_bot (nv, name, bc.bot_keys);
+ }
else if (n == "build-email")
{
reset_build_emails ();
- build_email = parse_email (nv, "build", name, true /* empty */);
+ m.build_email = parse_email (nv, "build", name, true /* empty */);
}
else if (n == "build-warning-email")
{
reset_build_emails ();
- build_warning_email = parse_email (nv, "build warning", name);
+ m.build_warning_email = parse_email (nv, "build warning", name);
}
else if (n == "build-error-email")
{
reset_build_emails ();
- build_error_email = parse_email (nv, "build error", name);
+ m.build_error_email = parse_email (nv, "build error", name);
+ }
+ else if (n.size () > 12 &&
+ n.compare (n.size () - 12, 12, "-build-email") == 0)
+ {
+ build_package_config& bc (build_conf_email (n.size () - 12));
+
+ bc.email = parse_email (
+ nv, "build configuration", name, true /* empty */);
+ }
+ else if (n.size () > 20 &&
+ n.compare (n.size () - 20, 20, "-build-warning-email") == 0)
+ {
+ build_package_config& bc (build_conf_email (n.size () - 20));
+
+ bc.warning_email = parse_email (
+ nv, "build configuration warning", name);
+ }
+ else if (n.size () > 18 &&
+ n.compare (n.size () - 18, 18, "-build-error-email") == 0)
+ {
+ build_package_config& bc (build_conf_email (n.size () - 18));
+
+ bc.error_email = parse_email (nv, "build configuration error", name);
+ }
+ else if (optional<pair<string, string>> ba =
+ build_auxiliary::parse_value_name (n))
+ {
+ if (ba->first.empty ()) // build-auxiliary*?
+ {
+ override_build_auxiliary (nv, move (ba->second), m.build_auxiliaries);
+ }
+ else // *-build-auxiliary*
+ {
+ build_package_config& bc (
+ build_conf (ba->first.size (), validate_only));
+
+ override_build_auxiliary (nv, move (ba->second), bc.auxiliaries, &bc);
+ }
}
else
+ bad_name ("cannot override '" + n + "' value");
+ }
+
+ // Common build constraints and build config overrides are mutually
+ // exclusive.
+ //
+ assert (cbc == nullptr || pbc == nullptr);
+
+ // Now, if not in the validate-only mode, as all the potential build
+ // constraint, bot keys, and email overrides are applied, perform the
+ // final adjustments to the build config constraints, bot keys, and
+ // emails.
+ //
+ if (!validate_only)
+ {
+ if (cbc != nullptr) // Common build constraints are overridden?
{
- string d ("cannot override '" + n + "' value");
+ for (build_package_config& c: m.build_configs)
+ {
+ c.builds.clear ();
+ c.constraints.clear ();
+ }
+ }
+ else if (pbc != nullptr) // Build config constraints are overridden?
+ {
+ for (size_t i (0); i != m.build_configs.size (); ++i)
+ {
+ if (find_if (obcs.begin (), obcs.end (),
+ [i] (const auto& pc) {return pc.first == i;}) ==
+ obcs.end ())
+ {
+ build_package_config& c (m.build_configs[i]);
- throw !name.empty ()
- ? parsing (name, nv.name_line, nv.name_column, d)
- : parsing (d);
+ c.builds.clear ();
+ c.constraints.clear ();
+ c.builds.emplace_back ("none", "" /* comment */);
+ }
+ }
+ }
+
+ if (cbb != nullptr) // Common build bots are overridden?
+ {
+ for (build_package_config& c: m.build_configs)
+ c.bot_keys.clear ();
+ }
+
+ if (cbe != nullptr) // Common build emails are overridden?
+ {
+ for (build_package_config& c: m.build_configs)
+ {
+ c.email = nullopt;
+ c.warning_email = nullopt;
+ c.error_email = nullopt;
+ }
+ }
+ else if (pbe != nullptr) // Build config emails are overridden?
+ {
+ for (size_t i (0); i != m.build_configs.size (); ++i)
+ {
+ if (find (obes.begin (), obes.end (), i) == obes.end ())
+ {
+ build_package_config& c (m.build_configs[i]);
+
+ c.email = email ();
+ c.warning_email = nullopt;
+ c.error_email = nullopt;
+ }
+ }
}
}
}
void package_manifest::
+ override (const vector<manifest_name_value>& nvs, const string& name)
+ {
+ bpkg::override (nvs, name, *this, false /* validate_only */);
+ }
+
+ void package_manifest::
validate_overrides (const vector<manifest_name_value>& nvs,
const string& name)
{
package_manifest p;
- p.override (nvs, name);
+ bpkg::override (nvs, name, p, true /* validate_only */);
}
- static const string description_file ("description-file");
- static const string changes_file ("changes-file");
+ static const string description_file ("description-file");
+ static const string package_description_file ("package-description-file");
+ static const string changes_file ("changes-file");
+ static const string build_file ("build-file");
void package_manifest::
load_files (const function<load_function>& loader, bool iu)
{
- auto load = [&loader] (const string& n, const path& p)
+ // If required, load a file and verify that its content is not empty, if
+ // the loader returns the content. Make the text type explicit.
+ //
+ auto load = [iu, &loader] (typed_text_file& text,
+ const string& file_value_name)
{
- string r (loader (n, p));
+ // Make the type explicit.
+ //
+ optional<text_type> t;
- if (r.empty ())
- throw parsing ("package " + n + " references empty file");
+ // Convert the potential invalid_argument exception to the
+ // manifest_parsing exception similar to what we do in the manifest
+ // parser.
+ //
+ try
+ {
+ t = text.effective_type (iu);
+ }
+ catch (const invalid_argument& e)
+ {
+ if (text.type)
+ {
+ // Strip trailing "-file".
+ //
+ string prefix (file_value_name, 0, file_value_name.size () - 5);
- return r;
- };
+ throw parsing ("invalid " + prefix + "-type package manifest " +
+ "value: " + e.what ());
+ }
+ else
+ {
+ throw parsing ("invalid " + file_value_name + " package " +
+ "manifest value: " + e.what ());
+ }
+ }
- // Load the description-file manifest value.
- //
- if (description)
- {
- // Make the description type explicit.
- //
- optional<text_type> t (effective_description_type (iu)); // Can throw.
assert (t || iu); // Can only be absent if we ignore unknown.
- if (!description_type && t)
- description_type = to_string (*t);
+ if (!text.type && t)
+ text.type = to_string (*t);
- // At this point the description type can only be absent if the
- // description comes from a file. Otherwise, we would end up with the
- // plain text.
+ // At this point the type can only be absent if the text comes from a
+ // file. Otherwise, we would end up with the plain text.
//
- assert (description_type || description->file);
+ assert (text.type || text.file);
- if (description->file)
+ if (text.file)
{
- if (!description_type)
- description_type = "text/unknown; extension=" +
- description->path.extension ();
+ if (!text.type)
+ text.type = "text/unknown; extension=" + text.path.extension ();
- description = text_file (load (description_file, description->path));
+ if (optional<string> fc = loader (file_value_name, text.path))
+ {
+ if (fc->empty ())
+ throw parsing ("package manifest value " + file_value_name +
+ " references empty file");
+
+ text = typed_text_file (move (*fc), move (text.type));
+ }
}
- }
+ };
+
+ // Load the descriptions and changes, if present.
+ //
+ if (description)
+ load (*description, description_file);
+
+ if (package_description)
+ load (*package_description, package_description_file);
+
+ for (typed_text_file& c: changes)
+ load (c, changes_file);
- // Load the changes-file manifest values.
+ // Load the build-file manifest values.
//
- for (text_file& c: changes)
+ if (!buildfile_paths.empty ())
{
- if (c.file)
- c = text_file (load (changes_file, c.path));
+ // Must already be set if the build-file value is parsed.
+ //
+ assert (alt_naming);
+
+ dir_path d (*alt_naming ? "build2" : "build");
+
+ for (auto i (buildfile_paths.begin ()); i != buildfile_paths.end (); )
+ {
+ path& p (*i);
+ path f (d / p);
+ f += *alt_naming ? ".build2" : ".build";
+
+ if (optional<string> fc = loader (build_file, f))
+ {
+ buildfiles.emplace_back (move (p), move (*fc));
+ i = buildfile_paths.erase (i); // Moved to buildfiles.
+ }
+ else
+ ++i;
+ }
}
}
static void
- serialize_package_manifest (manifest_serializer& s,
- const package_manifest& m,
- bool header_only)
+ serialize_package_manifest (
+ manifest_serializer& s,
+ const package_manifest& m,
+ bool header_only,
+ const optional<standard_version>& min_ver = nullopt)
{
// @@ Should we check that all non-optional values are specified ?
// @@ Should we check that values are valid: version release is not empty,
@@ -2578,6 +5668,12 @@ namespace bpkg
if (m.upstream_version)
s.next ("upstream-version", *m.upstream_version);
+ if (m.type)
+ s.next ("type", *m.type);
+
+ for (const language& l: m.languages)
+ s.next ("language", !l.impl ? l.name : l.name + "=impl");
+
if (m.project)
s.next ("project", m.project->string ());
@@ -2605,26 +5701,46 @@ namespace bpkg
if (!m.keywords.empty ())
s.next ("keywords", concatenate (m.keywords, " "));
- if (m.description)
+ auto serialize_text_file = [&s] (const text_file& v, const string& n)
{
- if (m.description->file)
- s.next ("description-file",
- serializer::merge_comment (m.description->path.string (),
- m.description->comment));
+ if (v.file)
+ s.next (n + "-file",
+ serializer::merge_comment (v.path.string (), v.comment));
else
- s.next ("description", m.description->text);
+ s.next (n, v.text);
+ };
- if (m.description_type)
- s.next ("description-type", *m.description_type);
- }
+ auto serialize_description = [&s, &serialize_text_file]
+ (const optional<typed_text_file>& desc,
+ const char* prefix)
+ {
+ if (desc)
+ {
+ string p (prefix);
+ serialize_text_file (*desc, p + "description");
+
+ if (desc->type)
+ s.next (p + "description-type", *desc->type);
+ }
+ };
+
+ serialize_description (m.description, "" /* prefix */);
+ serialize_description (m.package_description, "package-");
for (const auto& c: m.changes)
+ serialize_text_file (c, "changes");
+
+ // If there are any changes, then serialize the type of the first
+ // changes entry, if present. Note that if it is present, then we assume
+ // that the type was specified explicitly and so it is the same for all
+ // entries.
+ //
+ if (!m.changes.empty ())
{
- if (c.file)
- s.next ("changes-file",
- serializer::merge_comment (c.path.string (), c.comment));
- else
- s.next ("changes", c.text);
+ const typed_text_file& c (m.changes.front ());
+
+ if (c.type)
+ s.next ("changes-type", *c.type);
}
if (m.url)
@@ -2671,27 +5787,29 @@ namespace bpkg
m.build_error_email->comment));
for (const dependency_alternatives& d: m.dependencies)
- s.next ("depends",
- (d.conditional
- ? (d.buildtime ? "?* " : "? ")
- : (d.buildtime ? "* " : "")) +
- serializer::merge_comment (concatenate (d, " | "), d.comment));
+ s.next ("depends", d.string ());
for (const requirement_alternatives& r: m.requirements)
- s.next ("requires",
- (r.conditional
- ? (r.buildtime ? "?* " : "? ")
- : (r.buildtime ? "* " : "")) +
- serializer::merge_comment (concatenate (r, " | "), r.comment));
+ s.next ("requires", r.string ());
- for (const dependency& t: m.tests)
- s.next ("tests", t.string ());
+ for (const test_dependency& t: m.tests)
+ {
+ string n (to_string (t.type));
- for (const dependency& t: m.examples)
- s.next ("examples", t.string ());
+ // If we generate the manifest for parsing by clients of libbpkg
+ // versions less than 0.14.0-, then replace the introduced in 0.14.0
+ // build-time tests, examples, and benchmarks values with
+ // tests-0.14.0, examples-0.14.0, and benchmarks-0.14.0,
+ // respectively. This way such clients will still be able to parse it,
+ // ignoring unknown values.
+ //
+ // @@ TMP time to drop?
+ // 0.14.0-
+ if (t.buildtime && min_ver && min_ver->version < 13999990001ULL)
+ n += "-0.14.0";
- for (const dependency& t: m.benchmarks)
- s.next ("benchmarks", t.string ());
+ s.next (n, t.string ());
+ }
for (const build_class_expr& e: m.builds)
s.next ("builds", serializer::merge_comment (e.string (), e.comment));
@@ -2700,9 +5818,97 @@ namespace bpkg
s.next (c.exclusion ? "build-exclude" : "build-include",
serializer::merge_comment (!c.target
? c.config
- : c.config + "/" + *c.target,
+ : c.config + '/' + *c.target,
c.comment));
+ for (const build_auxiliary& ba: m.build_auxiliaries)
+ s.next ((!ba.environment_name.empty ()
+ ? "build-auxiliary-" + ba.environment_name
+ : "build-auxiliary"),
+ serializer::merge_comment (ba.config, ba.comment));
+
+ for (const string& k: m.build_bot_keys)
+ s.next ("build-bot", k);
+
+ for (const build_package_config& bc: m.build_configs)
+ {
+ if (!bc.builds.empty ())
+ {
+ string n (bc.name + "-builds");
+ for (const build_class_expr& e: bc.builds)
+ s.next (n, serializer::merge_comment (e.string (), e.comment));
+ }
+
+ if (!bc.constraints.empty ())
+ {
+ string in (bc.name + "-build-include");
+ string en (bc.name + "-build-exclude");
+
+ for (const build_constraint& c: bc.constraints)
+ s.next (c.exclusion ? en : in,
+ serializer::merge_comment (!c.target
+ ? c.config
+ : c.config + '/' + *c.target,
+ c.comment));
+ }
+
+ if (!bc.auxiliaries.empty ())
+ {
+ string n (bc.name + "-build-auxiliary");
+
+ for (const build_auxiliary& ba: bc.auxiliaries)
+ s.next ((!ba.environment_name.empty ()
+ ? n + '-' + ba.environment_name
+ : n),
+ serializer::merge_comment (ba.config, ba.comment));
+ }
+
+ if (!bc.bot_keys.empty ())
+ {
+ string n (bc.name + "-build-bot");
+
+ for (const string& k: bc.bot_keys)
+ s.next (n, k);
+ }
+
+ if (!bc.arguments.empty () || !bc.comment.empty ())
+ s.next (bc.name + "-build-config",
+ serializer::merge_comment (bc.arguments, bc.comment));
+
+ if (bc.email)
+ s.next (bc.name + "-build-email",
+ serializer::merge_comment (*bc.email, bc.email->comment));
+
+ if (bc.warning_email)
+ s.next (bc.name + "-build-warning-email",
+ serializer::merge_comment (*bc.warning_email,
+ bc.warning_email->comment));
+
+ if (bc.error_email)
+ s.next (bc.name + "-build-error-email",
+ serializer::merge_comment (*bc.error_email,
+ bc.error_email->comment));
+ }
+
+ bool an (m.alt_naming && *m.alt_naming);
+
+ if (m.bootstrap_build)
+ s.next (an ? "bootstrap-build2" : "bootstrap-build",
+ *m.bootstrap_build);
+
+ if (m.root_build)
+ s.next (an ? "root-build2" : "root-build", *m.root_build);
+
+ for (const auto& bf: m.buildfiles)
+ s.next (bf.path.posix_string () + (an ? "-build2" : "-build"),
+ bf.content);
+
+ for (const path& f: m.buildfile_paths)
+ s.next ("build-file", f.posix_string () + (an ? ".build2" : ".build"));
+
+ for (const distribution_name_value& nv: m.distribution_values)
+ s.next (nv.name, nv.value);
+
if (m.location)
s.next ("location", m.location->posix_string ());
@@ -2717,9 +5923,9 @@ namespace bpkg
}
void package_manifest::
- serialize (serializer& s) const
+ serialize (serializer& s, const optional<standard_version>& min_ver) const
{
- serialize_package_manifest (s, *this, false);
+ serialize_package_manifest (s, *this, false, min_ver);
}
void package_manifest::
@@ -2933,7 +6139,7 @@ namespace bpkg
}
void pkg_package_manifests::
- serialize (serializer& s) const
+ serialize (serializer& s, const optional<standard_version>& min_ver) const
{
// Serialize the package list manifest.
//
@@ -2951,21 +6157,32 @@ namespace bpkg
{
throw serialization (
s.name (),
- d + " for " + p.name.string () + "-" + p.version.string ());
+ d + " for " + p.name.string () + '-' + p.version.string ());
};
- if (p.description)
+ // Throw manifest_serialization if the text is in a file or untyped.
+ //
+ auto verify_text_file = [&bad_value] (const typed_text_file& v,
+ const string& n)
{
- if (p.description->file)
- bad_value ("forbidden description-file");
+ if (v.file)
+ bad_value ("forbidden " + n + "-file");
- if (!p.description_type)
- bad_value ("no valid description-type");
- }
+ if (!v.type)
+ bad_value ("no valid " + n + "-type");
+ };
+
+ if (p.description)
+ verify_text_file (*p.description, "description");
+
+ if (p.package_description)
+ verify_text_file (*p.package_description, "package-description");
for (const auto& c: p.changes)
- if (c.file)
- bad_value ("forbidden changes-file");
+ verify_text_file (c, "changes");
+
+ if (!p.buildfile_paths.empty ())
+ bad_value ("forbidden build-file");
if (!p.location)
bad_value ("no valid location");
@@ -2973,7 +6190,7 @@ namespace bpkg
if (!p.sha256sum)
bad_value ("no valid sha256sum");
- pkg_package_manifest (s, p);
+ pkg_package_manifest (s, p, min_ver);
}
s.next ("", ""); // End of stream.
@@ -3102,12 +6319,9 @@ namespace bpkg
if (!authority || authority->host.empty ())
bad_url ("invalid host");
- if (authority->host.kind != url_host_kind::name)
- bad_url ("unsupported host type");
-
- // Normalize the host name.
+ // Normalize the host name/address.
//
- lcase (authority->host.value);
+ authority->host.normalize ();
// We don't distinguish between the absent and empty paths for the
// remote repository URLs.
@@ -3258,7 +6472,7 @@ namespace bpkg
{
try
{
- return path_type (butl::url::decode (path));
+ return path_type (url::decode (path));
}
catch (const invalid_path&)
{
@@ -3269,8 +6483,6 @@ namespace bpkg
repository_url_traits::string_type repository_url_traits::
translate_path (const path_type& path)
{
- using butl::url;
-
// If the path is absolute then this is a local URL object and the file://
// URL notation is being produced. Thus, on POSIX we need to make the path
// relative (to the authority "root"). On Windows the path should stay
@@ -3324,7 +6536,7 @@ namespace bpkg
if (optional<repository_type> r = parse_repository_type (t))
return *r;
- throw invalid_argument ("invalid repository type '" + t + "'");
+ throw invalid_argument ("invalid repository type '" + t + '\'');
}
repository_type
@@ -3365,7 +6577,7 @@ namespace bpkg
typed_repository_url::
typed_repository_url (const string& s)
{
- using traits = butl::url::traits_type;
+ using traits = url::traits_type;
if (traits::find (s) == 0) // Looks like a non-rootless URL?
{
@@ -3720,6 +6932,19 @@ namespace bpkg
//
path sp;
+ // Convert the local repository location path to lower case on Windows.
+ //
+ // Note that we need to do that prior to stripping the special path
+ // components to match them case-insensitively, so, for example, the
+ // c:\pkg\1\stable and c:\Pkg\1\stable (or c:\repo.git and c:\repo.Git)
+ // repository locations end up with the same canonical name.
+ //
+ #ifdef _WIN32
+ const path& p (local () ? path (lcase (up.string ())) : up);
+ #else
+ const path& p (up);
+ #endif
+
switch (type_)
{
case repository_type::pkg:
@@ -3727,7 +6952,7 @@ namespace bpkg
// Produce the pkg repository canonical name <prefix>/<path> part (see
// the Repository Chaining documentation for more details).
//
- sp = strip_path (up,
+ sp = strip_path (p,
remote ()
? strip_mode::component
: strip_mode::path);
@@ -3737,7 +6962,7 @@ namespace bpkg
// stripping just the version component.
//
if (absolute () && sp.empty ())
- sp = strip_path (up, strip_mode::version);
+ sp = strip_path (p, strip_mode::version);
break;
}
@@ -3745,7 +6970,7 @@ namespace bpkg
{
// For dir repository we use the absolute (normalized) path.
//
- sp = up;
+ sp = p;
break;
}
case repository_type::git:
@@ -3753,7 +6978,7 @@ namespace bpkg
// For git repository we use the absolute (normalized) path, stripping
// the .git extension if present.
//
- sp = strip_path (up, strip_mode::extension);
+ sp = strip_path (p, strip_mode::extension);
break;
}
}
@@ -3970,7 +7195,8 @@ namespace bpkg
parse_repository_manifest (parser& p,
name_value nv,
repository_type base_type,
- bool iu)
+ bool iu,
+ bool verify_version = true)
{
auto bad_name ([&p, &nv](const string& d) {
throw parsing (p.name (), nv.name_line, nv.name_column, d);});
@@ -3980,11 +7206,16 @@ namespace bpkg
// Make sure this is the start and we support the version.
//
- if (!nv.name.empty ())
- bad_name ("start of repository manifest expected");
+ if (verify_version)
+ {
+ if (!nv.name.empty ())
+ bad_name ("start of repository manifest expected");
- if (nv.value != "1")
- bad_value ("unsupported format version");
+ if (nv.value != "1")
+ bad_value ("unsupported format version");
+
+ nv = p.next ();
+ }
repository_manifest r;
@@ -3995,7 +7226,7 @@ namespace bpkg
optional<repository_type> type;
optional<name_value> location;
- for (nv = p.next (); !nv.empty (); nv = p.next ())
+ for (; !nv.empty (); nv = p.next ())
{
string& n (nv.name);
string& v (nv.value);
@@ -4340,13 +7571,126 @@ namespace bpkg
parse_repository_manifests (parser& p,
repository_type base_type,
bool iu,
+ optional<repositories_manifest_header>& header,
vector<repository_manifest>& ms)
{
+ // Return nullopt on eos. Otherwise, parse and verify the
+ // manifest-starting format version value and return the subsequent
+ // manifest value, that can potentially be empty (for an empty manifest).
+ //
+ // Also save the manifest-starting position (start_nv) for the
+ // diagnostics.
+ //
+ name_value start_nv;
+ auto next_manifest = [&p, &start_nv] () -> optional<name_value>
+ {
+ start_nv = p.next ();
+
+ if (start_nv.empty ())
+ return nullopt;
+
+ // Make sure this is the start and we support the version.
+ //
+ if (!start_nv.name.empty ())
+ throw parsing (p.name (), start_nv.name_line, start_nv.name_column,
+ "start of repository manifest expected");
+
+ if (start_nv.value != "1")
+ throw parsing (p.name (), start_nv.value_line, start_nv.value_column,
+ "unsupported format version");
+
+ return p.next ();
+ };
+
+ optional<name_value> nv (next_manifest ());
+
+ if (!nv)
+ throw parsing (p.name (), start_nv.name_line, start_nv.name_column,
+ "start of repository manifest expected");
+
+ auto bad_name ([&p, &nv](const string& d) {
+ throw parsing (p.name (), nv->name_line, nv->name_column, d);});
+
+ auto bad_value ([&p, &nv](const string& d) {
+ throw parsing (p.name (), nv->value_line, nv->value_column, d);});
+
+ // First check if this a header manifest, if any manifest is present.
+ //
+ // Note that if this is none of the known header values, then we assume
+ // this is a repository manifest (rather than a header that starts with an
+ // unknown value; so use one of the original names to make sure it's
+ // recognized as such, for example `compression:none`).
+ //
+ if (nv->name == "min-bpkg-version" ||
+ nv->name == "compression")
+ {
+ header = repositories_manifest_header ();
+
+ // First verify the version, if any.
+ //
+ if (nv->name == "min-bpkg-version")
+ try
+ {
+ const string& v (nv->value);
+ standard_version mbv (v, standard_version::allow_earliest);
+
+ if (mbv > standard_version (LIBBPKG_VERSION_STR))
+ bad_value (
+ "incompatible repositories manifest: minimum bpkg version is " + v);
+
+ header->min_bpkg_version = move (mbv);
+
+ nv = p.next ();
+ }
+ catch (const invalid_argument& e)
+ {
+ bad_value (string ("invalid minimum bpkg version: ") + e.what ());
+ }
+
+ // Parse the remaining header values, failing if min-bpkg-version is
+ // encountered (should be first).
+ //
+ for (; !nv->empty (); nv = p.next ())
+ {
+ const string& n (nv->name);
+ string& v (nv->value);
+
+ if (n == "min-bpkg-version")
+ {
+ bad_name ("minimum bpkg version must be first in repositories "
+ "manifest header");
+ }
+ else if (n == "compression")
+ {
+ header->compression = move (v);
+ }
+ else if (!iu)
+ bad_name ("unknown name '" + n + "' in repositories manifest header");
+ }
+
+ nv = next_manifest ();
+ }
+
+ // Parse the manifest list.
+ //
+ // Note that if nv is present, then it contains the manifest's first
+ // value, which can potentially be empty (for an empty manifest, which is
+ // recognized as a base manifest).
+ //
+ // Also note that if the header is present but is not followed by
+ // repository manifests (there is no ':' line after the header values),
+ // then the empty manifest list is returned (no base manifest is
+ // automatically added).
+ //
bool base (false);
- for (name_value nv (p.next ()); !nv.empty (); nv = p.next ())
+ while (nv)
{
- ms.push_back (parse_repository_manifest (p, nv, base_type, iu));
+ ms.push_back (parse_repository_manifest (p,
+ *nv,
+ base_type,
+ iu,
+ false /* verify_version */));
// Make sure that there is a single base repository manifest in the
// list.
@@ -4354,19 +7698,38 @@ namespace bpkg
if (ms.back ().effective_role () == repository_role::base)
{
if (base)
- throw parsing (p.name (), nv.name_line, nv.name_column,
+ throw parsing (p.name (), start_nv.name_line, start_nv.name_column,
"base repository manifest redefinition");
base = true;
}
+
+ nv = next_manifest ();
}
}
// Serialize the repository manifest list.
//
static void
- serialize_repository_manifests (serializer& s,
- const vector<repository_manifest>& ms)
+ serialize_repository_manifests (
+ serializer& s,
+ const optional<repositories_manifest_header>& header,
+ const vector<repository_manifest>& ms)
{
+ if (header)
+ {
+ s.next ("", "1"); // Start of manifest.
+
+ const repositories_manifest_header& h (*header);
+
+ if (h.min_bpkg_version)
+ s.next ("min-bpkg-version", h.min_bpkg_version->string ());
+
+ if (h.compression)
+ s.next ("compression", *h.compression);
+
+ s.next ("", ""); // End of manifest.
+ }
+
for (const repository_manifest& r: ms)
r.serialize (s);
@@ -4378,13 +7741,13 @@ namespace bpkg
pkg_repository_manifests::
pkg_repository_manifests (parser& p, bool iu)
{
- parse_repository_manifests (p, repository_type::pkg, iu, *this);
+ parse_repository_manifests (p, repository_type::pkg, iu, header, *this);
}
void pkg_repository_manifests::
serialize (serializer& s) const
{
- serialize_repository_manifests (s, *this);
+ serialize_repository_manifests (s, header, *this);
}
// dir_repository_manifests
@@ -4392,13 +7755,13 @@ namespace bpkg
dir_repository_manifests::
dir_repository_manifests (parser& p, bool iu)
{
- parse_repository_manifests (p, repository_type::dir, iu, *this);
+ parse_repository_manifests (p, repository_type::dir, iu, header, *this);
}
void dir_repository_manifests::
serialize (serializer& s) const
{
- serialize_repository_manifests (s, *this);
+ serialize_repository_manifests (s, header, *this);
}
// git_repository_manifests
@@ -4406,13 +7769,13 @@ namespace bpkg
git_repository_manifests::
git_repository_manifests (parser& p, bool iu)
{
- parse_repository_manifests (p, repository_type::git, iu, *this);
+ parse_repository_manifests (p, repository_type::git, iu, header, *this);
}
void git_repository_manifests::
serialize (serializer& s) const
{
- serialize_repository_manifests (s, *this);
+ serialize_repository_manifests (s, header, *this);
}
// signature_manifest
@@ -4514,4 +7877,41 @@ namespace bpkg
s.next ("", ""); // End of manifest.
}
+
+ // extract_package_*()
+ //
+ package_name
+ extract_package_name (const char* s, bool allow_version)
+ {
+ if (!allow_version)
+ return package_name (s);
+
+ // Calculate the package name length as a length of the prefix that
+ // doesn't contain spaces, slashes and the version constraint starting
+ // characters. Note that none of them are valid package name characters.
+ //
+ size_t n (strcspn (s, " /=<>([~^"));
+ return package_name (string (s, n));
+ }
+
+ version
+ extract_package_version (const char* s, version::flags fl)
+ {
+ using traits = string::traits_type;
+
+ if (const char* p = traits::find (s, traits::length (s), '/'))
+ {
+ version r (p + 1, fl);
+
+ if (r.release && r.release->empty ())
+ throw invalid_argument ("earliest version");
+
+ if (r.compare (stub_version, true /* ignore_revision */) == 0)
+ throw invalid_argument ("stub version");
+
+ return r;
+ }
+
+ return version ();
+ }
}