aboutsummaryrefslogtreecommitdiff
path: root/pgo_tools/merge_profdata_and_upload.py
blob: bb53ed6c4765b4f9bef4ac3141e0f8416edfb993 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2019 The ChromiumOS Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Download profdata from different arches, merge them and upload to gs.

The script is used for updating the PGO profiles for LLVM. The workflow
is that the script will download profdata from different PGO builds, merge
them and then upload it to a gs location that LLVM can access.

The simplest way of using this script, is to run:
    ./merge_profdata_and_upload.py --all_latest_profiles
which will automatically grab profdata from latest PGO generate builders
for three different architectures and merge them. LLVM hash is also
detected automatically from the artifacts.

If you want to specify certain llvm hash, run it with:
    ./merge_profdata_and_upload.py --all_latest_profiles --llvm_hash LLVM_HASH
Note that hash checking will fail if the llvm hash you provided is not the
same as those in artifacts, or llvm hash in different artifacts are not the
same.

To only use profiles from buildbucket tasks for PGO generate, run it with:
    ./merge_profdata_and_upload.py -b amd64/bb_id1 -b arm/bb_id2 ...
The buildbucket id can be found using `bb ls` command after manually launched
builder finishes.

There is a chance that builders only succeeded partially. In this case, you
can run this script to merge both the profdata from scheduled builders and
from manually launched builds:
    ./merge_profdata_and_upload.py -l arm -l amd64 -b arm64/bb_id
In this example, the script will merge profdata from arm and amd64 builder, and
profdata from an arm64 buildbucket task.
"""


import argparse
import collections
import distutils.spawn
import json
import os
import os.path
import shutil
import subprocess
import sys
import tempfile


# Absolute path of the llvm-profdata binary inside the ChromeOS chroot.
_LLVM_PROFDATA = "/usr/bin/llvm-profdata"
# URL scheme prefix identifying Google Storage locations.
_GS_PREFIX = "gs://"

# The subset of the llvm_metadata.json contents this script consumes.
_LLVMMetadata = collections.namedtuple("_LLVMMetadata", ["head_sha"])


def _fetch_gs_artifact(remote_name, local_name):
    """Download one file from a gs:// location to a local path.

    Args:
      remote_name: full gs:// URL of the remote file.
      local_name: path the file is copied to locally.
    """
    assert remote_name.startswith(_GS_PREFIX)
    copy_cmd = ["gsutil", "cp", remote_name, local_name]
    subprocess.check_call(copy_cmd)


def _get_gs_profdata(remote_profdata, arch):
    """Fetch a profdata tarball from gs and unpack it locally.

    Args:
      remote_profdata: remote gs location of the profdata tarball.
      arch: directory named with arch to separate each profdata.

    Returns:
      Local location of the extracted profdata.

    Raises:
      RuntimeError: if the tarball does not contain a profdata file.
    """
    tarball = "llvm_profdata.tar.xz"
    _fetch_gs_artifact(remote_profdata, tarball)

    # `tar -xvf` echoes the extracted file name(s); the tarball is expected
    # to hold exactly one *.llvm.profdata file (output is bytes since no
    # encoding is requested).
    extracted_name = subprocess.check_output(["tar", "-xvf", tarball]).strip()
    if b".llvm.profdata" not in extracted_name:
        raise RuntimeError("No profdata in the tarball: %s" % remote_profdata)

    os.mkdir(arch)
    local_profdata = os.path.join(arch, "llvm.profdata")
    os.rename(extracted_name, local_profdata)
    print("Profdata extracted to: %s" % local_profdata)
    return local_profdata


def _get_gs_metadata(remote_metadata):
    """Fetch the metadata json from gs and parse out the LLVM head_sha.

    Args:
      remote_metadata: remote gs location of the metadata json file.

    Returns:
      An _LLVMMetadata holding the head_sha read from the file.
    """
    local_json = "llvm_metadata.json"
    _fetch_gs_artifact(remote_metadata, local_json)

    with open(local_json) as fp:
        parsed = json.load(fp)

    return _LLVMMetadata(head_sha=parsed["head_sha"])


def _find_latest_artifacts(gs_url, arch):
    """Fetch the latest profdata and metadata from a given gs location.

    Args:
      gs_url: a gs location containing one or more artifacts to fetch.
      arch: the arch profdata collected from.

    Returns:
      A tuple of (metadata, local profdata location).

    Raises:
      RuntimeError: if the gs location cannot be listed, or no profdata
        or metadata artifact is found in it.
    """
    assert gs_url.startswith(_GS_PREFIX)
    try:
        # List all artifacts in the gs location and sort by time
        # (newest first; the timestamp is the second whitespace field).
        output = (
            subprocess.check_output(
                ["gsutil", "ls", "-l", gs_url], encoding="utf-8"
            )
            .strip()
            .split("\n")
        )
        lines = sorted(output, key=lambda x: x.split()[1], reverse=True)
    except subprocess.CalledProcessError:
        raise RuntimeError("Artifacts not found: %s" % gs_url)

    # Use a loop to go through all artifacts to find the latest profdata.
    # An example of the output of latest builder bucket:
    # pylint: disable=line-too-long
    #   5006528  2020-05-31T10:08:48Z  gs://chromeos-toolchain-artifacts/llvm-pgo/arm/llvm-11.0_pre387436_p20200403-r7-a8e5dcb072b1f794883ae8125fb08c06db678d56.llvm.profdata.tar.xz
    #   56  2020-05-31T10:08:48Z  gs://chromeos-toolchain-artifacts/llvm-pgo/arm/llvm-11.0_pre387436_p20200403-r7-a8e5dcb072b1f794883ae8125fb08c06db678d56.llvm_metadata.json
    #   5005952  2020-05-24T10:53:34Z  gs://chromeos-toolchain-artifacts/llvm-pgo/arm/llvm-11.0_pre387436_p20200403-r5-a8e5dcb072b1f794883ae8125fb08c06db678d56.llvm.profdata.tar.xz
    #   56  2020-05-24T10:53:34Z  gs://chromeos-toolchain-artifacts/llvm-pgo/arm/llvm-11.0_pre387436_p20200403-r5-a8e5dcb072b1f794883ae8125fb08c06db678d56.llvm_metadata.json
    # An example for the lines of buildbucket location:
    #   5004260  2020-05-29T09:48:04Z  gs://chromeos-image-archive/arm-pgo-generate-llvm-next-toolchain/R85-13254.0.0-1-8879010326583123168/llvm-11.0_pre387436_p20200403-r7-a8e5dcb072b1f794883ae8125fb08c06db678d56.llvm.profdata.tar.xz
    #   56  2020-05-29T09:48:04Z  gs://chromeos-image-archive/arm-pgo-generate-llvm-next-toolchain/R85-13254.0.0-1-8879010326583123168/llvm-11.0_pre387436_p20200403-r7-a8e5dcb072b1f794883ae8125fb08c06db678d56.llvm_metadata.json
    # pylint: enable=line-too-long
    profdata_url = ""
    # Bug fix: profile_path was previously unbound when no profdata URL
    # matched, raising UnboundLocalError instead of the intended
    # RuntimeError below.
    profile_path = None
    for line in lines:
        url = line.split()[-1]
        if ".llvm.profdata.tar.xz" in url:
            profile_path = _get_gs_profdata(url, arch)
            profdata_url = url
            break
    if not profile_path or not profdata_url:
        raise RuntimeError("No profdata found from %s" % gs_url)

    # The metadata json sits next to the tarball with the same basename.
    metadata_url = profdata_url.replace(
        ".llvm.profdata.tar.xz", ".llvm_metadata.json"
    )
    metadata = _get_gs_metadata(metadata_url)
    if not metadata:
        raise RuntimeError("No metadata found from %s" % gs_url)
    return metadata, profile_path


def _fetch_from_latest(arch):
    """Fetch artifacts from the latest scheduled builder for an arch.

    Args:
      arch: the arch profdata collected from.

    Returns:
      A tuple of (metadata, local profdata location).
    """
    print("\nFETCHING LATEST PROFDATA ON %s..." % arch.upper())
    gs_location = "%schromeos-toolchain-artifacts/llvm-pgo/%s" % (
        _GS_PREFIX,
        arch,
    )
    return _find_latest_artifacts(gs_location, arch)


def _fetch_from_buildbucket(arch, bb):
    """Fetch artifacts from a manually launched buildbucket task.

    Args:
      arch: the arch profdata collected from.
      bb: buildbucket id.

    Returns:
      A tuple of (metadata, local profdata location).

    Raises:
      RuntimeError: if no bucket under the arch archive matches bb.
    """
    print("\nFETCHING BUILDBUCKET PROFDATA ON %s..." % arch.upper())
    archive_url = (
        "%schromeos-image-archive/%s-pgo-generate-llvm-next-toolchain"
        % (
            _GS_PREFIX,
            arch,
        )
    )
    # List all buckets under {arch}-pgo-generate-llvm-next-toolchain and
    # grep with buildbucket id.
    listing = subprocess.check_output(
        ["gsutil", "ls", archive_url], encoding="utf-8"
    )
    for bucket in listing.strip().split("\n"):
        if bb in bucket:
            return _find_latest_artifacts(bucket, arch)
    raise RuntimeError(
        "No matched results found in %s with bb: %s" % (arch, bb)
    )


def _merge_profdata(profdata_list, output_name):
    """Merge the per-arch profdata files into one profile.

    Args:
      profdata_list: list of profdata location of each arch.
      output_name: name of merged profdata.
    """
    cmd = [_LLVM_PROFDATA, "merge", "-output", output_name]
    cmd.extend(profdata_list)
    print("\nMerging PGO profiles.\nCMD: %s" % cmd)
    subprocess.check_call(cmd)


def _tar_and_upload_profdata(profdata, name_suffix):
    """Pack the merged profdata into a tarball and upload it to gs.

    Args:
      profdata: location of merged profdata.
      name_suffix: usually the LLVM head_sha.

    Raises:
      ValueError: if a profile already exists at the upload location.
    """
    tarball = "llvm-profdata-%s.tar.xz" % name_suffix
    print("Making profdata tarball: %s" % tarball)
    tar_cmd = ["tar", "--sparse", "-I", "xz", "-cf", tarball, profdata]
    subprocess.check_call(tar_cmd)

    upload_location = "%schromeos-localmirror/distfiles/%s" % (
        _GS_PREFIX,
        tarball,
    )

    # TODO: it's better to create a subdir: distfiles/llvm_pgo_profile, but
    # now llvm could only recognize distfiles.
    upload_cmd = [
        "gsutil",
        "-m",
        "cp",
        "-n",
        "-a",
        "public-read",
        tarball,
        upload_location,
    ]
    print("\nUploading tarball to gs.\nCMD: %s\n" % upload_cmd)

    # gsutil prints all status to stderr, oddly enough.
    upload_output = subprocess.check_output(
        upload_cmd, stderr=subprocess.STDOUT, encoding="utf-8"
    )

    # gsutil exits successfully even if it uploaded nothing ("-n" skips
    # existing files); a skipped upload is only visible in its status text.
    if "Skipping existing item" in upload_output:
        raise ValueError(
            "Profile upload failed: would overwrite an existing "
            "profile at %s" % upload_location
        )


def main():
    """Parse arguments, then fetch, merge and upload PGO profiles.

    Fetches profdata artifacts from latest builders and/or buildbucket
    tasks into a temporary directory, verifies they share one LLVM HEAD
    (unless --llvm_hash overrides), merges them with llvm-profdata, and
    uploads the tarball to the distfiles mirror. The temp directory is
    kept on failure for debugging.
    """
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "-a",
        "--all_latest_profiles",
        action="store_true",
        help="Merge and upload profiles from the latest builders.",
    )
    parser.add_argument(
        "-l",
        "--latest",
        default=[],
        action="append",
        help="User can specify the profdata from which builder with specific "
        "architecture to download. By default, we merge profdata from arm, "
        "arm64, amd64.",
    )
    parser.add_argument(
        "-b",
        "--buildbucket",
        default=[],
        action="append",
        help="Extra pgo-generate-llvm-next-toolchain buildbucket results to be "
        "used. Format should be: {arch}/{bb_id}.",
    )
    parser.add_argument(
        "-o",
        "--output",
        default="llvm.profdata",
        help="Where to put merged PGO profile. The default is to not save it "
        "anywhere.",
    )
    parser.add_argument(
        "--llvm_hash",
        help="The LLVM hash to select for the profiles. Generally autodetected.",
    )
    args = parser.parse_args()

    if not args.all_latest_profiles and not (args.latest or args.buildbucket):
        parser.error(
            "Please specify whether to use latest profiles or "
            "profiles from buildbucket"
        )

    if args.all_latest_profiles and (args.latest or args.buildbucket):
        parser.error(
            "--all_latest_profiles cannot be specified together "
            "with --latest or --buildbucket"
        )

    latest = (
        ["arm", "arm64", "amd64"] if args.all_latest_profiles else args.latest
    )

    # Collect every requested arch (builder + buildbucket) so duplicates
    # can be rejected below.
    all_arch_list = latest.copy()
    arch_bb_list = []
    if args.buildbucket:
        for arch_bb in args.buildbucket:
            arch, bb = arch_bb.split("/")
            arch_bb_list.append((arch, bb))
            all_arch_list.append(arch)

    if len(set(all_arch_list)) != len(all_arch_list):
        parser.error("Each arch can be only passed once.")

    # shutil.which replaces distutils.spawn.find_executable: distutils is
    # deprecated (PEP 632) and removed in Python 3.12; shutil is already
    # imported by this script.
    if not shutil.which(_LLVM_PROFDATA):
        sys.exit(_LLVM_PROFDATA + " not found; are you in the chroot?")

    initial_dir = os.getcwd()
    temp_dir = tempfile.mkdtemp(prefix="merge_pgo")
    success = True
    try:
        os.chdir(temp_dir)
        profdata_list = []
        heads = set()

        def append_artifacts(fetched_tuple):
            # Sanity-check the fetched profile size; a truncated download
            # would otherwise silently poison the merged profile.
            llvm_metadata, profdata_loc = fetched_tuple
            if os.path.getsize(profdata_loc) < 512 * 1024:
                raise RuntimeError(
                    "The PGO profile in local path %s is suspiciously "
                    "small. Something might have gone "
                    "wrong." % profdata_loc
                )
            heads.add(llvm_metadata.head_sha)
            profdata_list.append(profdata_loc)

        for arch in latest:
            append_artifacts(_fetch_from_latest(arch))

        for arch, bb in arch_bb_list:
            append_artifacts(_fetch_from_buildbucket(arch, bb))

        assert heads, "Didn't fetch anything?"

        def die_with_head_complaint(complaint):
            extra = " (HEADs found: %s)" % sorted(heads)
            raise RuntimeError(complaint.rstrip() + extra)

        llvm_hash = args.llvm_hash
        if not llvm_hash:
            if len(heads) != 1:
                die_with_head_complaint(
                    "%d LLVM HEADs were found, which is more than one. You probably "
                    "want a consistent set of HEADs for a profile. If you know you "
                    "don't, please specify --llvm_hash, and note that *all* profiles "
                    "will be merged into this final profile, regardless of their "
                    "reported HEAD." % len(heads)
                )
            (llvm_hash,) = heads

        if llvm_hash not in heads:
            # Only reachable when the user supplied --llvm_hash themselves.
            assert llvm_hash == args.llvm_hash
            die_with_head_complaint(
                "HEAD %s wasn't found in any fetched artifacts." % llvm_hash
            )

        print("\nUsing LLVM hash: %s" % llvm_hash)

        _merge_profdata(profdata_list, args.output)
        print("Merged profdata locates at %s" % os.path.abspath(args.output))
        _tar_and_upload_profdata(args.output, name_suffix=llvm_hash)
        print("\nMerged profdata uploaded successfully.")
    except BaseException:
        # Flag failure (so the temp dir is kept for debugging) and re-raise.
        success = False
        raise
    finally:
        os.chdir(initial_dir)
        if success:
            print("Clearing temp directory.")
            shutil.rmtree(temp_dir, ignore_errors=True)
        else:
            print("Script fails, temp directory is at: %s" % temp_dir)


# Script entry point: main() implicitly returns None on success, which
# sys.exit treats as exit status 0.
if __name__ == "__main__":
    sys.exit(main())