#!/usr/bin/env python3
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright 2022 sysmocom - s.f.m.c. GmbH <info@sysmocom.de>
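#
# Illustrative invocation (the build URL is the example from the --help text
# below; the output filename is just a placeholder):
#   ./pipeline_summary.py \
#       https://jenkins.osmocom.org/jenkins/job/gerrit-osmo-bsc-nat/17/ \
#       -o summary.json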
import argparse
import io
import json
import re
import urllib.request

jenkins_url = "https://jenkins.osmocom.org"
re_start_build = re.compile("Starting building: gerrit-[a-zA-Z-_0-9]* #[0-9]*")
re_result = re.compile("^PIPELINE_[A-Z]*_PASSED=[01]$")
re_job_type = re.compile("JOB_TYPE=([a-zA-Z-_0-9]*),")
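# Console log lines the above regexes are meant to match; the examples are
# inferred from the expressions themselves, not copied from a real log:
#   re_start_build: "Starting building: gerrit-osmo-bsc-nat-build #7"
#   re_result:      "PIPELINE_BUILD_PASSED=1"
#   re_job_type:    a URL fragment such as "JOB_TYPE=software,"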


def parse_args():
    parser = argparse.ArgumentParser(
        description="Get a summary of failed / successful builds from the CI"
                    " pipeline we run for patches submitted to gerrit.")
    parser.add_argument("build_url",
                        help="$BUILD_URL of the pipeline job, e.g."
                             " https://jenkins.osmocom.org/jenkins/job/gerrit-osmo-bsc-nat/17/")
    parser.add_argument("-o", "--output", help="output json file")
    parser.add_argument("-n", "--notify-on-success", action="store_true",
                        help="always indicate in json that the owner should be"
                             " notified via mail, not only on failure")
    return parser.parse_args()


def stage_from_job_name(job_name):
    if job_name == "gerrit-pipeline-result":
        # The job that runs this script. Don't include it in the summary.
        return None
    if job_name == "gerrit-lint":
        return "lint"
    if job_name == "gerrit-binpkgs-deb":
        return "deb"
    if job_name == "gerrit-binpkgs-rpm":
        return "rpm"
    if job_name.endswith("-build"):
        return "build"
    assert False, f"couldn't figure out stage from job_name: {job_name}"


def parse_pipeline(build_url):
    """ Parse started jobs and result from the pipeline log.
       :returns: a dict that looks like:
                 {"build": {"name": "gerrit-osmo-bsc-nat-build", id=7,
                            "passed": True, "url": "https://..."},
                  "lint": {...},
                  "deb": {...},
                  "rpm: {...}} """
    global re_start_build
    global re_result
    global jenkins_url
    ret = {}

    url = f"{build_url}/consoleText"
    with urllib.request.urlopen(url) as response:
        for line in io.TextIOWrapper(response, encoding='utf-8'):
            # Parse start build lines
            for match in re_start_build.findall(line):
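                # match looks like "Starting building: <job_name> #<id>", so
                # word 2 is the job name and word 3 is "#<id>"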
                job_name = match.split(" ")[2]
                job_id = int(match.split(" ")[3].replace("#", ""))
                job_url = f"{jenkins_url}/jenkins/job/{job_name}/{job_id}"
                stage = stage_from_job_name(job_name)
                if stage:
                    ret[stage] = {"url": job_url, "name": job_name, "id": job_id}

            # Parse result lines
            if re_result.match(line):
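                # e.g. "PIPELINE_BUILD_PASSED=1" -> stage "build", passed True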
                stage = line.split("_")[1].lower()
                assert stage in ret, f"found result for stage {stage}, but" \
                        " didn't find where it was started. The" \
                        " re_start_build regex probably needs to be adjusted" \
                        " to match the related gerrit-*-build job."
                passed = line.split("=")[1].rstrip() == "1"
                ret[stage]["passed"] = passed

    return ret


def parse_build_matrix(job):
    """ Parse started jobs and result from the matrix of the build job. Usually
        it is only one job, but for some projects we build for multiple arches
        (x86_64, arm) or build multiple times with different configure flags.
        :param job: "build" dict from parse_pipeline()
        :returns: a list of jobs in the matrix, looks like:
                  [{"passed": True, "url": "https://..."}, ...]
    """
    global jenkins_url

    ret = []
    url = f"{job['url']}/consoleFull"
    with urllib.request.urlopen(url) as response:
        for line in io.TextIOWrapper(response, encoding='utf-8'):
            if " completed with result " in line:
                url = line.split("<a href='", 1)[1].split("'", 1)[0]
                url = f"{jenkins_url}{url}{job['id']}"
                result = line.split(" completed with result ")[1].rstrip()
                passed = result == "SUCCESS"
                ret += [{"passed": passed, "url": url}]
    return ret


def jobs_for_summary(pipeline, build_matrix):
    """ Sort the jobs from pipeline and build matrix into passed/failed lists.
        :returns: a dict that looks like:
                  {"passed": [{"stage": "build", "url": "https://..."}, ...],
                   "failed": [...]} """
    ret = {"passed": [], "failed": []}

    # Build errors are most interesting, display them first
    for job in build_matrix:
        category = "passed" if job["passed"] else "failed"
        ret[category] += [{"stage": "build", "url": job["url"]}]

    # Hide the build matrix job itself (we show the jobs it started instead),
    # unless the matrix failed without any of its started jobs failing
    matrix_failed = "build" in pipeline and not pipeline["build"]["passed"]
    show_build_matrix_job = matrix_failed and not ret["failed"]

    # Add jobs from the pipeline
    for stage, job in pipeline.items():
        if stage == "build" and not show_build_matrix_job:
            continue
        category = "passed" if job["passed"] else "failed"
        ret[category] += [{"stage": stage, "url": job["url"]}]

    return ret


def get_job_short_name(job):
    """ :returns: a short job name, usually the stage (lint, deb, rpm, build).
                  Or in case of build a more useful name like the JOB_TYPE part
                  of the URL if it is found. For osmo-e1-hardware it could be
                  one of: manuals, gateware, firmware, software """
    global re_job_type
    stage = job["stage"]

    if stage == "build":
        match = re_job_type.search(job["url"])
        if match:
            return match.group(1)

    return stage


def get_jobs_list_str(jobs):
    ret = ""
    for job in jobs:
        ret += f"  [{get_job_short_name(job)}] {job['url']}/consoleFull\n"
    return ret


def get_pipeline_summary(build_url, notify_on_success):
    """ Generate a summary of failed and successful builds for gerrit.
        :returns: a dict that is expected by gerrit's set-review api, e.g.
                  {"tag": "jenkins",
                   "message": "...",
                   "labels": {"Code-Review": -1},
                   "notify": "OWNER"} """
    summary = ""
    pipeline = parse_pipeline(build_url)

    build_matrix = []
    if "build" in pipeline:
        build_matrix = parse_build_matrix(pipeline["build"])

    jobs = jobs_for_summary(pipeline, build_matrix)

    if jobs["failed"]:
        summary += f"{len(jobs['failed'])} failed:\n"
        summary += get_jobs_list_str(jobs["failed"])
        summary += "\n"

    summary += f"{len(jobs['passed'])} passed:\n"
    summary += get_jobs_list_str(jobs["passed"])

    if "build" in pipeline and "deb" in pipeline and "rpm" in pipeline and \
            not pipeline["build"]["passed"] and pipeline["deb"]["passed"] \
            and pipeline["rpm"]["passed"]:
        summary += "\n"
        summary += "The build job(s) failed, but deb/rpm jobs passed.\n"
        summary += "We don't enable external/vty tests when building\n"
        summary += "packages, so maybe those failed. Check the logs.\n"

    if "lint" in pipeline and not pipeline["lint"]["passed"]:
        summary += "\n"
        summary += "Please fix the linting errors. More information:\n"
        summary += "https://osmocom.org/projects/cellular-infrastructure/wiki/Linting\n"

    summary += "\n"
    if jobs["failed"]:
        summary += "Build Failed\n"
        summary += "\n"
        summary += f"Find the Retrigger button here:\n{build_url}\n"
        vote = -1
        notify = "OWNER"
    else:
        summary += "Build Successful\n"
        vote = 1
        notify = "OWNER" if notify_on_success else "NONE"

    # Reference:
    # https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#set-review
    # https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#review-input
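    # Note: this script only prints the dict and optionally writes it to a
    # JSON file; posting it to gerrit's set-review endpoint as a ReviewInput
    # presumably happens in a separate step outside this script.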
    return {"tag": "jenkins",
            "message": summary,
            "labels": {"Verified": vote},
            "notify": notify}


def main():
    args = parse_args()
    summary = get_pipeline_summary(args.build_url, args.notify_on_success)

    print()
    print(summary["message"])
    print(f"notify: {summary['notify']}")

    if args.output:
        with open(args.output, "w") as handle:
            json.dump(summary, handle, indent=4)

if __name__ == "__main__":
    main()