aboutsummaryrefslogtreecommitdiff
path: root/mod/mod-ci-github-gq.cxx
diff options
context:
space:
mode:
Diffstat (limited to 'mod/mod-ci-github-gq.cxx')
-rw-r--r--mod/mod-ci-github-gq.cxx249
1 files changed, 225 insertions, 24 deletions
diff --git a/mod/mod-ci-github-gq.cxx b/mod/mod-ci-github-gq.cxx
index e5ea0c5..bcf9e55 100644
--- a/mod/mod-ci-github-gq.cxx
+++ b/mod/mod-ci-github-gq.cxx
@@ -114,10 +114,12 @@ namespace brep
}
else if (p.name () == "errors")
{
- // Don't stop parsing because the error semantics depends on whether
- // or not `data` is present.
+ // Skip the errors object but don't stop parsing because the error
+ // semantics depends on whether or not `data` is present.
//
err = true; // Handled below.
+
+ p.next_expect_value_skip ();
}
else
{
@@ -595,6 +597,10 @@ namespace brep
//
bool found = false;
+ // Non-fatal error message issued during the parse.
+ //
+ string parse_error;
+
// The response value. Absent if the merge commit is still being
// generated.
//
@@ -622,13 +628,23 @@ namespace brep
value = move (oid);
}
- else if (ma == "CONFLICTING")
+ else
{
- value = "";
- }
- else if (ma == "UNKNOWN")
- {
- // Still being generated; leave value absent.
+            if (ma == "CONFLICTING")
+              value = "";
+            else if (ma == "UNKNOWN")
+              ; // Still being generated; leave value absent.
+ else
+ {
+ parse_error = "unexpected mergeable value '" + ma + '\'';
+
+ // Carry on as if it were UNKNOWN.
+ }
+
+ // Skip the merge commit ID (which should be null).
+ //
+ p.next_expect_name ("potentialMergeCommit");
+ p.next_expect_value_skip ();
}
p.next_expect (event::end_object); // node
@@ -650,6 +666,8 @@ namespace brep
{
if (!rs.found)
error << "pull request '" << nid << "' not found";
+ else if (!rs.parse_error.empty ())
+ error << rs.parse_error;
return rs.value;
}
@@ -685,22 +703,205 @@ namespace brep
return nullopt;
}
- // bool
- // gq_fetch_branch_open_pull_requests ()
- // {
- // // query {
- // // repository(owner:"francoisk" name:"libb2")
- // // {
- // // pullRequests (last:100 states:OPEN baseRefName:"master") {
- // // edges {
- // // node {
- // // id
- // // }
- // // }
- // // }
- // // }
- // // }
- // }
+ // Serialize a GraphQL query that fetches the last 100 (the maximum per
+ // page) open pull requests with the specified base branch from the
+ // repository with the specified node ID.
+ //
+ // @@ TMP Should we support more/less than 100?
+ //
+ // Doing more (or even 100) could waste a lot of CI resources on
+ // re-testing stale PRs. Maybe we should create a failed synthetic
+ // conclusion check run asking the user to re-run the CI manually if/when
+ // needed.
+ //
+  // Note that we cannot request more than 100 at a time (will need to
+  // do multiple requests with paging, etc). Note also that the response
+  // parser below iterates totalCount times over the returned edges, so
+  // it will fail to parse the response if there are more than 100 open
+  // pull requests (paging would be required to handle that case).
+ //
+ // Also, maybe we should limit the result to "fresh" PRs, e.g., those
+ // that have been "touched" in the last week.
+ //
+ // Example query:
+ //
+ // query {
+ // node(id:"R_kgDOLc8CoA")
+ // {
+ // ... on Repository {
+ // pullRequests (last:100 states:OPEN baseRefName:"master") {
+ // edges {
+ // node {
+ // id
+ // number
+ // headRefOid
+ // }
+ // }
+ // }
+ // }
+ // }
+ // }
+ //
+ static string
+ gq_query_fetch_open_pull_requests (const string& rid, const string& br)
+ {
+ ostringstream os;
+
+ os << "query {" << '\n'
+ << " node(id:" << gq_str (rid) << ") {" << '\n'
+ << " ... on Repository {" << '\n'
+ << " pullRequests (last:100" << '\n'
+ << " states:" << gq_enum ("OPEN") << '\n'
+ << " baseRefName:" << gq_str (br) << '\n'
+ << " ) {" << '\n'
+ << " totalCount" << '\n'
+ << " edges { node { id number headRefOid } }" << '\n'
+ << " }" << '\n'
+ << " }" << '\n'
+ << " }" << '\n'
+ << "}" << '\n';
+
+ return os.str ();
+ }
+
+ optional<vector<gh_pull_request>>
+ gq_fetch_open_pull_requests (const basic_mark& error,
+ const string& iat,
+ const string& nid,
+ const string& br)
+ {
+ string rq (
+ gq_serialize_request (gq_query_fetch_open_pull_requests (nid, br)));
+
+ try
+ {
+ // Response parser.
+ //
+ // Example response (only the part we need to parse here):
+ //
+ // {
+ // "node": {
+ // "pullRequests": {
+ // "totalCount": 2,
+ // "edges": [
+ // {
+ // "node": {
+ // "id": "PR_kwDOLc8CoM5vRS0y",
+ // "number": 7,
+ // "headRefOid": "cf72888be9484d6946a1340264e7abf18d31cc92"
+ // }
+ // },
+ // {
+ // "node": {
+ // "id": "PR_kwDOLc8CoM5vRzHs",
+ // "number": 8,
+ // "headRefOid": "626d25b318aad27bc0005277afefe3e8d6b2d434"
+ // }
+ // }
+ // ]
+ // }
+ // }
+ // }
+ //
+ struct resp
+ {
+ bool found = false;
+
+ vector<gh_pull_request> pull_requests;
+
+ resp (json::parser& p)
+ {
+ using event = json::event;
+
+ auto parse_data = [this] (json::parser& p)
+ {
+ p.next_expect (event::begin_object);
+
+ if (p.next_expect_member_object_null ("node"))
+ {
+ found = true;
+
+ p.next_expect_member_object ("pullRequests");
+
+ uint16_t n (p.next_expect_member_number<uint16_t> ("totalCount"));
+
+ p.next_expect_member_array ("edges");
+ for (size_t i (0); i != n; ++i)
+ {
+ p.next_expect (event::begin_object); // edges[i]
+
+ p.next_expect_member_object ("node");
+ {
+ gh_pull_request pr;
+ pr.node_id = p.next_expect_member_string ("id");
+ pr.number = p.next_expect_member_number<unsigned int> ("number");
+ pr.head_sha = p.next_expect_member_string ("headRefOid");
+ pull_requests.push_back (move (pr));
+ }
+ p.next_expect (event::end_object); // node
+
+ p.next_expect (event::end_object); // edges[i]
+ }
+ p.next_expect (event::end_array); // edges
+
+ p.next_expect (event::end_object); // pullRequests
+ p.next_expect (event::end_object); // node
+ }
+
+ p.next_expect (event::end_object);
+ };
+
+ gq_parse_response (p, move (parse_data));
+ }
+
+ resp () = default;
+ } rs;
+
+ uint16_t sc (github_post (rs,
+ "graphql", // API Endpoint.
+ strings {"Authorization: Bearer " + iat},
+ move (rq)));
+
+ if (sc == 200)
+ {
+ if (!rs.found)
+ {
+ error << "repository '" << nid << "' not found";
+
+ return nullopt;
+ }
+
+ return rs.pull_requests;
+ }
+ else
+ error << "failed to fetch repository pull requests: "
+ << "error HTTP response status " << sc;
+ }
+ catch (const json::invalid_json_input& e)
+ {
+ // Note: e.name is the GitHub API endpoint.
+ //
+ error << "malformed JSON in response from " << e.name << ", line: "
+ << e.line << ", column: " << e.column << ", byte offset: "
+ << e.position << ", error: " << e;
+ }
+ catch (const invalid_argument& e)
+ {
+ error << "malformed header(s) in response: " << e;
+ }
+ catch (const system_error& e)
+ {
+ error << "unable to fetch repository pull requests (errno=" << e.code ()
+ << "): " << e.what ();
+ }
+ catch (const runtime_error& e) // From response type's parsing constructor.
+ {
+ // GitHub response contained error(s) (could be ours or theirs at this
+ // point).
+ //
+ error << "unable to fetch repository pull requests: " << e;
+ }
+
+ return nullopt;
+ }
+
// GraphQL serialization functions.
//