path: root/import_release_prebuilts.py
#!/usr/bin/python3

import os, sys, zipfile
import argparse
import subprocess
from shutil import rmtree
from distutils.dir_util import copy_tree
from distutils.errors import DistutilsFileError

# cd into directory of script
os.chdir(os.path.dirname(os.path.abspath(__file__)))

# See go/fetch_artifact for details on this script.
FETCH_ARTIFACT = '/google/data/ro/projects/android/fetch_artifact'
PUBLISHDOCSRULES_REL = './buildSrc/src/main/kotlin/androidx/build/PublishDocsRules.kt'
FRAMEWORKS_SUPPORT_FP = os.path.abspath(os.path.join(os.getcwd(), '..', '..', '..', 'frameworks', 'support'))
PUBLISHDOCSRULES_FP = os.path.join(FRAMEWORKS_SUPPORT_FP, PUBLISHDOCSRULES_REL)
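# GIT_TREE_ARGS lets the git commands below operate on the frameworks/support checkout
# while this script's working directory stays inside the prebuilts repository.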
GIT_TREE_ARGS = '--git-dir=./../../../frameworks/support/.git/ --work-tree=./../../../frameworks/support/'
summary_log = []
publish_docs_log = []
prebuilts_log = []


def print_e(*args, **kwargs):
	print(*args, file=sys.stderr, **kwargs)

def cp(src_path, dst_path):
	if not os.path.exists(dst_path):
		os.makedirs(dst_path)
	if not os.path.exists(src_path):
		print_e('cp error: Source path %s does not exist.' % src_path)
		return None
	try:
		copy_tree(src_path, dst_path)
	except DistutilsFileError as err:
		print_e('FAIL: Unable to copy %s to destination %s' % (src_path, dst_path))
		return None
	return dst_path

def rm(path):
	if os.path.isdir(path):
		rmtree(path)
	elif os.path.exists(path):
		os.remove(path)

def fetch_artifact(target, build_id, artifact_path):
	download_to = os.path.join('.', os.path.dirname(artifact_path))
	print('Fetching %s from %s with build ID %s ...' % (artifact_path, target, build_id))
	print("download_to: ", download_to)
	if not os.path.exists(download_to):
		os.makedirs(download_to)
	print("If this script hangs, try running glogin or gcert.")
	fetch_cmd = [FETCH_ARTIFACT, '--bid', str(build_id), '--target', target, artifact_path,
				 download_to]
	try:
		subprocess.check_output(fetch_cmd, stderr=subprocess.STDOUT)
	except subprocess.CalledProcessError:
		print_e('FAIL: Unable to retrieve %s artifact for build ID %s' % (artifact_path, build_id))
		print_e('Please make sure you are authenticated for build server access!')
		return None
	return artifact_path

def extract_artifact(artifact_path):
	# Unzip the repo archive into a separate directory.
	repo_dir = os.path.basename(artifact_path)[:-4]
	with zipfile.ZipFile(artifact_path) as zipFile:
		zipFile.extractall(repo_dir)
	return repo_dir

def get_repo_androidx_path(repo_dir):
	# Check that ${repo_path}/m2repository/androidx exists
	repo_androidx_path = os.path.join(os.getcwd(), "./%s/m2repository/androidx" % repo_dir)
	if not os.path.exists(repo_androidx_path):
		print_e("FAIL: Downloaded artifact zip %s.zip does not contain m2repository/androidx" % repo_dir)
		return None
	return repo_androidx_path

def get_groupId_from_artifactId(artifactId):
	# By convention, androidx namespace is declared as:
	# androidx.${groupId}:${groupId}-${optionalArtifactIdSuffix}:${version}
	# Here, artifactId == "${groupId}-${optionalArtifactIdSuffix}"
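	# For example (illustrative): "paging-common" -> "paging", "core" -> "core"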
	return artifactId.split('-')[0]

def copy_and_merge_artifacts(repo_dir, dest_dir, groupIds, artifactIds):
	repo_androidx_path = get_repo_androidx_path(repo_dir)
	if not repo_androidx_path: return None
	if not groupIds and not artifactIds:
		return cp(repo_androidx_path, dest_dir)
	if groupIds:
		# Copy over groupIds that were specified on the command line
		for group in groupIds:
			repo_group_path = os.path.join(repo_androidx_path, group)
			if not os.path.exists(repo_group_path):
				print_e("Failed to find groupId %s in the artifact zip file" % group)
				return None
			dest_group_path = os.path.join(dest_dir, group)
			if not cp(repo_group_path, dest_group_path):
				print_e("Failed to find copy %s to %s" % (repo_group_path, dest_group_path))
				return None
	if artifactIds:
		# Copy over artifactIds that were specified on the command line
		for artifact in artifactIds:
			# Get the groupId from the artifactId (in AndroidX, the groupId must be based on the artifactId)
			artifact_groupId = get_groupId_from_artifactId(artifact)
			repo_artifact_path = os.path.join(repo_androidx_path, artifact_groupId, artifact)
			if not os.path.exists(repo_artifact_path):
				print_e("Failed to find artifactId %s in the artifact zip file" % artifact)
				return None
			dest_artifact_path = os.path.join(dest_dir, artifact_groupId, artifact)
			if not cp(repo_artifact_path, dest_artifact_path):
				print_e("Failed to find copy %s to %s" % (repo_artifact_path, dest_artifact_path))
				return None
	return dest_dir

def fetch_and_extract(target, build_id, file, artifact_path=None):
	if not artifact_path:
		artifact_path = fetch_artifact(target, build_id, file)
	if not artifact_path:
		return None
	return extract_artifact(artifact_path)

def remove_type_aar_from_pom_files():
	print("Removing <type>aar</type> from the pom files...", end = '')
	try:
		# Comment out <type>aar</type> in our pom files
		# This is being done as a workaround for b/118385540
		# TODO: Remove this method once https://github.com/gradle/gradle/issues/7594 is fixed
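		# e.g. "      <type>aar</type>" becomes "      <!--<type>aar</type>-->"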
		subprocess.check_output("find -name *.pom | xargs sed 's|^      <type>aar</type>$|      <!--<type>aar</type>-->|' -i", shell=True)
	except subprocess.CalledProcessError:
		print("failed!")
		print_e("FAIL: Failed to remove <type>aar</type> from the pom files")
		summary_log.append("FAILED to remove <type>aar</type> from the pom files")
		return
	print("Successful")
	summary_log.append("<type>aar</type> was removed from the pom files")

def update_new_artifacts(group_id_file_path, groupId_ver_map, artifactId_ver_map, groupId):
	# Finds each new library having groupId <groupId> under <group_id_file_path> and
	# updates <groupId_ver_map> and <artifactId_ver_map> with this new library
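	# e.g. (illustrative) walking "androidx/paging/" may find "androidx/paging/paging-common/2.1.0/",
	# which maps artifactId "paging-common" to version "2.1.0"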
	success = False
	# Walk filepath to get versions for each artifactId
	for parent_file_path, dirs, _ in os.walk(group_id_file_path):
		for dir_name in dirs:
			if dir_name[0].isnumeric():
				# Version directories are named after the version itself, for example: 1.1.0-alpha06
				version = dir_name
				# Get artifactId from filepath
				artifactId = parent_file_path.strip('/').split('/')[-1]
				update_version_maps(groupId_ver_map, artifactId_ver_map, groupId, artifactId, version)
				success = True
	if not success:
		print_e("Failed to find any artifactIds in filepath: %s" % group_id_file_path)
	return success

def should_update_artifact(groupId, artifactId):
	# If a group or artifact list was specified on the command line, only update artifacts whose
	# groupId or artifactId appears in one of those lists; otherwise, update everything.
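	# e.g. with "--groups paging", should_update_artifact("paging", "paging-common") returns True (illustrative)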
	should_update = False
	if (args.groups) or (args.artifacts):
		if (args.groups) and (groupId in args.groups):
			should_update = True
		if (args.artifacts) and (artifactId in args.artifacts):
			should_update = True
	else:
		should_update = True
	return should_update

def update_version_maps(groupId_ver_map, artifactId_ver_map, groupId, artifactId, version):
	if should_update_artifact(groupId, artifactId):
		if groupId.upper() not in groupId_ver_map:
			groupId_ver_map[groupId.upper()] = version
		if artifactId not in artifactId_ver_map:
			artifactId_ver_map[artifactId] = version
			summary_log.append("Prebuilts: %s --> %s" % (artifactId, version))
			prebuilts_log.append(artifactId+'-'+version)

def get_updated_version_maps():
	try:
		# Run git status --porcelain to get the names of the libraries that have changed
		# (cut -c4- removes the change-type-character from git status output)
		gitdiff_output = subprocess.check_output('git status --porcelain | cut -c4-', shell=True)
	except subprocess.CalledProcessError:
		print_e('FAIL: No artifacts to import from source %s' % args.source)
		return None
	# Iterate through the git diff output to map libraries to their new versions
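	# Each line (after cut -c4-) is a path under the prebuilts checkout, e.g. (illustrative):
	#   androidx/paging/paging-common/2.1.0/paging-common-2.1.0.pom
	# which yields groupId "paging", artifactId "paging-common", version "2.1.0".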
	artifactId_ver_map = {}
	groupId_ver_map = {}
	diff = iter(gitdiff_output.splitlines())
	for line in diff:
		file_path_list = line.decode().split('/')
		if len(file_path_list) < 3:
			continue
		groupId = file_path_list[1]
		artifactId = file_path_list[2]
		# For new libraries/groupIds, git status may not list the versioned subdirectory,
		# so we walk the directory tree to find the version(s)
		if len(file_path_list) <= 3 or file_path_list[3] == "":
			# New library, so we need to check full directory tree to get version(s)
			if not update_new_artifacts(line.decode(), groupId_ver_map, artifactId_ver_map, groupId):
				continue
		else:
			version = file_path_list[3]
			update_version_maps(groupId_ver_map, artifactId_ver_map, groupId, artifactId, version)
	return groupId_ver_map, artifactId_ver_map

# Inserts a new groupId into PublishDocsRules.kt
def insert_new_groupId_into_pdr(pdr_lines, num_lines, new_groupId, groupId_ver_map):
	new_groupId_insert_line = 0
	for i in range(num_lines):
		cur_line = pdr_lines[i]
		# Skip any line that doesn't declare a version
		if 'LibraryGroups' not in cur_line: continue
		groupId = cur_line.split('LibraryGroups.')[1].split(',')[0]
		# Skip any line that doesn't contain a quoted version
		cur_line_split = cur_line.split('\"')
		if len(cur_line_split) < 2: continue
		# Iterate until we find the alphabetical place to insert the new groupId
		if new_groupId <= groupId:
			new_groupId_insert_line = i
			break
		else:
			new_groupId_insert_line = i + 1
	# Insert the new groupId at the position found (or after the last LibraryGroups line if none was found)
	pdr_lines.insert(new_groupId_insert_line, "    prebuilts(LibraryGroups." \
				+ new_groupId.upper() + ", \"" \
				+ groupId_ver_map[new_groupId] + "\")\n")
	summary_log.append("PublishDocsRules.kt: ADDED %s with version %s" %(new_groupId.lower(), groupId_ver_map[new_groupId]))
	publish_docs_log.append(new_groupId.lower()+'-'+groupId_ver_map[new_groupId])

def update_publish_doc_rules():
	groupId_ver_map, artifactId_ver_map = get_updated_version_maps()
	groupId_found = {}
	for key in groupId_ver_map:
		groupId_found[key] = False
	# Check that PublishDocsRules.kt exists at its hard-coded path - this isn't great, open to a better solution
	if not os.path.exists(PUBLISHDOCSRULES_FP):
		print_e("PublishDocsRules.kt not in expected location.")
		return None
	# Open file for reading and get all lines
	with open(PUBLISHDOCSRULES_FP, 'r') as f:
		pdr_lines = f.readlines()
	num_lines = len(pdr_lines)
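	# The parsing below assumes rule lines shaped roughly like (illustrative):
	#   prebuilts(LibraryGroups.PAGING, "2.1.0")                    <- groupId-wide rule
	#   prebuilts(LibraryGroups.PAGING, "paging-rxjava2", "2.1.0")  <- artifactId-specific rule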
	for i in range(num_lines):
		cur_line = pdr_lines[i]
		# Skip any line that doesn't declare a version
		if 'LibraryGroups' not in cur_line: continue
		groupId = cur_line.split('LibraryGroups.')[1].split(',')[0]
		# Get the artifactId (if it exists)
		cur_line_split = cur_line.split('\"')
		# Skip any line that doesn't contain a quoted version
		if len(cur_line_split) < 2: continue
		artifactId = ""
		if len(cur_line_split) >= 4:
			artifactId = cur_line_split[-4]
		# Split lines based on quotes and get second to last string - this will be the version
		outdated_ver = cur_line.split('\"')[-2]
		ver_index = cur_line.find(outdated_ver)
		# Skip any line whose quoted value isn't a version number
		if not outdated_ver[0].isnumeric(): continue
		### Update groupId or artifactId ###
		if artifactId in artifactId_ver_map:
			groupId_found[groupId] = True
			# Update version of artifactId
			if artifactId_ver_map[artifactId] != outdated_ver:
				pdr_lines[i] = cur_line[:ver_index] \
					+ artifactId_ver_map[artifactId] \
					+ cur_line[ver_index+len(outdated_ver):]
				summary_log.append("PublishDocsRules.kt: Updated %s from %s to %s" %(artifactId, outdated_ver, artifactId_ver_map[artifactId]))
				publish_docs_log.append(artifactId+'-'+artifactId_ver_map[artifactId])
		if not artifactId and groupId in groupId_ver_map:
			groupId_found[groupId] = True
			# Update version of groupId
			if groupId_ver_map[groupId] != outdated_ver:
				pdr_lines[i] = cur_line[:ver_index] \
					+ groupId_ver_map[groupId] \
					+ cur_line[ver_index+len(outdated_ver):]
				summary_log.append("PublishDocsRules.kt: Updated %s from %s to %s" %(groupId.lower(), outdated_ver, groupId_ver_map[groupId]))
				publish_docs_log.append(groupId.lower()+'-'+groupId_ver_map[groupId])
	for groupId in groupId_found:
		if not groupId_found[groupId]:
			insert_new_groupId_into_pdr(pdr_lines, num_lines, groupId, groupId_ver_map)
	# Open file for writing and update all lines
	with open(PUBLISHDOCSRULES_FP, 'w') as f:
		f.writelines(pdr_lines)
	return True

def update_androidx(target, build_id, local_file, update_all_prebuilts):
	repo_dir = None
	try:
		if build_id:
			if update_all_prebuilts:
				artifact_zip_file = 'top-of-tree-m2repository-all-%s.zip' % build_id
			else:
				artifact_zip_file = 'gmaven-diff-all-%s.zip' % build_id
			repo_dir = fetch_and_extract("androidx", build_id, artifact_zip_file, None)
		else:
			repo_dir = fetch_and_extract("androidx", None, None, local_file)
		if not repo_dir:
			print_e('Failed to extract AndroidX repository')
			return False
		print("Download and extract artifacts... Successful")
		if not copy_and_merge_artifacts(repo_dir, './androidx', args.groups, args.artifacts):
			print_e('Failed to copy and merge AndroidX repository')
			return False
		print("Copy and merge artifacts... Successful")
		if not args.skip_publishdocrules:
			if not update_publish_doc_rules():
				print_e('Failed to update PublishDocsRules.kt')
				return False
			print("Update PublishDocsRules.kt... Successful")
		return True
	finally:
		# Remove temp directories and temp files we've created
		if repo_dir:
			rm(repo_dir)
			rm('%s.zip' % repo_dir)
		rm('.fetch_artifact2.dat')

def print_change_summary():
	print("\n ---  SUMMARY --- ")
	for change in summary_log:
		print(change)

# Return the build ID if the source argument is a number, otherwise None
def getBuildId(args):
	source = args.source
	if not source.isnumeric():
		return None
	args.file = False
	return source

# Return the local file path if the source argument is not a number, otherwise None
def getFile(args):
	source = args.source
	if not source.isnumeric():
		return args.source
	return None

def commit_prebuilts():
	subprocess.check_call(['git', 'add', './androidx'])
	# ensure that we've actually made a change:
	staged_changes = subprocess.check_output('git diff --cached', stderr=subprocess.STDOUT, shell=True)
	if not staged_changes:
		print_e("There are no prebuilts changes to commit!  Check build id.")
		return False
	if not args.source.isnumeric():
		src_msg = "local Maven ZIP %s" % getFile(args)
	else:
		src_msg = "build %s" % (getBuildId(args))
	msg = "Import prebuilts %s from %s\n\nThis commit was generated from the command:\n%s\n\n%s" % (", ".join(prebuilts_log), src_msg, " ".join(sys.argv), 'Test: ./gradlew buildOnServer')
	subprocess.check_call(['git', 'commit', '-m', msg])
	summary_log.append("1 Commit was made in prebuilts/androidx/internal to commit prebuilts")
	print("Create commit for prebuilts... Successful")
	return True

def commit_publish_docs_rules():
	git_add_cmd = "git %s add %s" % (GIT_TREE_ARGS, PUBLISHDOCSRULES_REL)
	subprocess.check_output(git_add_cmd, stderr=subprocess.STDOUT, shell=True)
	git_cached_cmd = "git %s diff --cached" % GIT_TREE_ARGS
	staged_changes = subprocess.check_output(git_cached_cmd, stderr=subprocess.STDOUT, shell=True)
	if not staged_changes:
		summary_log.append("NO CHANGES were made to PublishDocsRules.kt")
		return False
	pdr_msg = "Updated PublishDocsRules.kt for %s \n\nThis commit was generated from the command:\n%s\n\n%s" % (", ".join(publish_docs_log), " ".join(sys.argv), 'Test: ./gradlew buildOnServer')
	git_commit_cmd = "git %s commit -m \"%s\"" % (GIT_TREE_ARGS, pdr_msg)
	subprocess.check_output(git_commit_cmd, stderr=subprocess.STDOUT, shell=True)
	summary_log.append("1 Commit was made in frameworks/support to commmit changes to PublishDocsRules.kt")
	print("Create commit for PublishDocsRules.kt... Successful")


# Set up input arguments
parser = argparse.ArgumentParser(
	description=('Import AndroidX prebuilts from the Android Build Server and, if necessary, update PublishDocsRules.kt. By default, uses gmaven-diff-all-<BUILDID>.zip to get artifacts.'))
parser.add_argument(
	'source',
	help='Build server build ID or local Maven ZIP file')
parser.add_argument(
	'--all-prebuilts', action="store_true",
	help='If specified, updates all AndroidX prebuilts with artifacts from the build ID')
parser.add_argument(
	'--skip-publishdocrules', action="store_true",
	help='If specified, PublishDocsRules.kt will NOT be updated')
parser.add_argument(
	'--groups', metavar='groupId', nargs='+',
	help="""If specified, only update libraries whose groupId contains the listed text.
	For example, if you specify \"--groups paging slice lifecycle\", then this
	script will import each library with groupId beginning with \"androidx.paging\", \"androidx.slice\",
	or \"androidx.lifecycle\"""")
parser.add_argument(
	'--artifacts', metavar='artifactId', nargs='+',
	help="""If specified, only update libraries whose artifactId contains the listed text.
	For example, if you specify \"--artifacts core slice-view lifecycle-common\", then this
	script will import specific artifacts \"androidx.core:core\", \"androidx.slice:slice-view\",
	and \"androidx.lifecycle:lifecycle-common\"""")
parser.add_argument(
	'--no-commit', action="store_true",
	help='If specified, this script will not commit the changes')
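# Example invocations (illustrative):
#   ./import_release_prebuilts.py 1234567
#   ./import_release_prebuilts.py 1234567 --artifacts core slice-view --no-commit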

# Parse arguments and check for existence of build ID or file
args = parser.parse_args()
args.file = True
if not args.source:
	parser.error("You must specify a build ID or local Maven ZIP file")
	sys.exit(1)

if not update_androidx('androidx', getBuildId(args), getFile(args), args.all_prebuilts):
	print_e('Failed to update AndroidX, aborting...')
	sys.exit(1)

if args.no_commit:
	summary_log.append("These changes were NOT committed.")
else:
	if not commit_prebuilts(): sys.exit(1)
	commit_publish_docs_rules()

remove_type_aar_from_pom_files()
print_change_summary()
print("Test and check these changes before uploading to Gerrit")