# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Generic presubmit checks that can be reused by other presubmit checks."""

import os as _os
_HERE = _os.path.dirname(_os.path.abspath(__file__))

# Justifications for each filter:
#
# - build/include           : Too many; fix in the future.
# - build/include_order     : Not happening; #ifdefed includes.
# - build/namespace         : I'm surprised by how often we violate this rule.
# - readability/casting     : Mistakes a whole bunch of function pointers.
# - runtime/int             : Can be fixed long term; volume of errors too high.
# - runtime/virtual         : Broken now, but can be fixed in the future?
# - whitespace/braces       : We have a lot of explicit scoping in chrome code.
# - readability/inheritance : Temporary, while the OVERRIDE and FINAL fixup
#                             is in progress.
DEFAULT_LINT_FILTERS = [
  '-build/include',
  '-build/include_order',
  '-build/namespace',
  '-readability/casting',
  '-runtime/int',
  '-runtime/virtual',
  '-whitespace/braces',
  '-readability/inheritance'
]


### Description checks

def CheckChangeHasTestField(input_api, output_api):
  """Requires that the changelist have a TEST= field."""
  if input_api.change.TEST:
    return []
  else:
    return [output_api.PresubmitNotifyResult(
        'If this change requires manual test instructions to QA team, add '
        'TEST=[instructions].')]


def CheckChangeHasBugField(input_api, output_api):
  """Requires that the changelist have a BUG= field."""
  if input_api.change.BUG:
    return []
  else:
    return [output_api.PresubmitNotifyResult(
        'If this change has an associated bug, add BUG=[bug number].')]


def CheckChangeHasTestedField(input_api, output_api):
  """Requires that the changelist have a TESTED= field."""
  if input_api.change.TESTED:
    return []
  else:
    return [output_api.PresubmitError('Changelist must have a TESTED= field.')]


def CheckChangeHasQaField(input_api, output_api):
  """Requires that the changelist have a QA= field."""
  if input_api.change.QA:
    return []
  else:
    return [output_api.PresubmitError('Changelist must have a QA= field.')]


def CheckDoNotSubmitInDescription(input_api, output_api):
  """Checks that the user didn't add 'DO NOT ''SUBMIT' to the CL description.
  """
  keyword = 'DO NOT ''SUBMIT'
  if keyword in input_api.change.DescriptionText():
    return [output_api.PresubmitError(
        keyword + ' is present in the changelist description.')]
  else:
    return []


def CheckChangeHasDescription(input_api, output_api):
  """Checks the CL description is not empty."""
  text = input_api.change.DescriptionText()
  if text.strip() == '':
    if input_api.is_committing:
      return [output_api.PresubmitError('Add a description to the CL.')]
    else:
      return [output_api.PresubmitNotifyResult('Add a description to the CL.')]
  return []


def CheckChangeWasUploaded(input_api, output_api):
  """Checks that the issue was uploaded before committing."""
  if input_api.is_committing and not input_api.change.issue:
    return [output_api.PresubmitError(
        'Issue wasn\'t uploaded. Please upload first.')]
  return []
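# Example usage (illustrative, not part of this module): a project's
# PRESUBMIT.py normally reaches the description checks above through
# input_api.canned_checks rather than importing this file directly. The hook
# names below follow the usual depot_tools convention; which checks to run is
# up to the project.
#
#   def CheckChangeOnUpload(input_api, output_api):
#     results = []
#     results.extend(input_api.canned_checks.CheckChangeHasBugField(
#         input_api, output_api))
#     results.extend(input_api.canned_checks.CheckChangeHasDescription(
#         input_api, output_api))
#     return results
#
#   def CheckChangeOnCommit(input_api, output_api):
#     results = CheckChangeOnUpload(input_api, output_api)
#     results.extend(input_api.canned_checks.CheckChangeWasUploaded(
#         input_api, output_api))
#     return results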
### Content checks

def CheckDoNotSubmitInFiles(input_api, output_api):
  """Checks that the user didn't add 'DO NOT ''SUBMIT' to any files."""
  # We want to check every text file, not just source files.
  file_filter = lambda x: x
  keyword = 'DO NOT ''SUBMIT'
  errors = _FindNewViolationsOfRule(lambda _, line: keyword not in line,
                                    input_api, file_filter)
  text = '\n'.join('Found %s in %s' % (keyword, loc) for loc in errors)
  if text:
    return [output_api.PresubmitError(text)]
  return []


def CheckChangeLintsClean(input_api, output_api, source_file_filter=None,
                          lint_filters=None, verbose_level=None):
  """Checks that all '.cc' and '.h' files pass cpplint.py."""
  _RE_IS_TEST = input_api.re.compile(r'.*tests?\.(cc|h)$')
  result = []

  cpplint = input_api.cpplint
  # Access to a protected member _XX of a client class
  # pylint: disable=W0212
  cpplint._cpplint_state.ResetErrorCounts()

  lint_filters = lint_filters or DEFAULT_LINT_FILTERS
  cpplint._SetFilters(','.join(lint_filters))

  # We currently are more strict with normal code than unit tests; 4 and 5 are
  # the verbosity level that would normally be passed to cpplint.py through
  # --verbose=#. Hopefully, in the future, we can be more verbose.
  files = [f.AbsoluteLocalPath() for f in
           input_api.AffectedSourceFiles(source_file_filter)]
  for file_name in files:
    if _RE_IS_TEST.match(file_name):
      level = 5
    else:
      level = 4

    verbose_level = verbose_level or level
    cpplint.ProcessFile(file_name, verbose_level)

  if cpplint._cpplint_state.error_count > 0:
    if input_api.is_committing:
      res_type = output_api.PresubmitError
    else:
      res_type = output_api.PresubmitPromptWarning
    result = [res_type('Changelist failed cpplint.py check.')]

  return result


def CheckChangeHasNoCR(input_api, output_api, source_file_filter=None):
  """Checks no '\r' (CR) character is in any source files."""
  cr_files = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    if '\r' in input_api.ReadFile(f, 'rb'):
      cr_files.append(f.LocalPath())
  if cr_files:
    return [output_api.PresubmitPromptWarning(
        'Found a CR character in these files:', items=cr_files)]
  return []


def CheckSvnModifiedDirectories(input_api, output_api, source_file_filter=None):
  """Checks for files in svn modified directories.

  They will get submitted on accident because svn commits recursively by
  default, and that's very dangerous.
  """
  if input_api.change.scm != 'svn':
    return []

  errors = []
  current_cl_files = input_api.change.GetModifiedFiles()
  all_modified_files = input_api.change.GetAllModifiedFiles()
  # Filter out files in the current CL.
  modified_files = [f for f in all_modified_files if f not in current_cl_files]
  modified_abspaths = [input_api.os_path.abspath(f) for f in modified_files]

  for f in input_api.AffectedFiles(file_filter=source_file_filter):
    if f.Action() == 'M' and f.IsDirectory():
      curpath = f.AbsoluteLocalPath()
      bad_files = []
      # Check if any of the modified files in other CLs are under curpath.
      for i in xrange(len(modified_files)):
        abspath = modified_abspaths[i]
        if input_api.os_path.commonprefix([curpath, abspath]) == curpath:
          bad_files.append(modified_files[i])
      if bad_files:
        if input_api.is_committing:
          error_type = output_api.PresubmitPromptWarning
        else:
          error_type = output_api.PresubmitNotifyResult
        errors.append(error_type(
            'Potential accidental commits in changelist %s:' % f.LocalPath(),
            items=bad_files))
  return errors
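# Example usage (illustrative): CheckChangeLintsClean above accepts an
# optional lint_filters list; a PRESUBMIT.py that wants different cpplint
# categories can pass its own list instead of relying on
# DEFAULT_LINT_FILTERS. The specific filters and verbosity shown here are
# only an example.
#
#   def CheckChangeOnUpload(input_api, output_api):
#     return input_api.canned_checks.CheckChangeLintsClean(
#         input_api, output_api,
#         lint_filters=['-build/include', '-whitespace/braces'],
#         verbose_level=4)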
def CheckChangeHasOnlyOneEol(input_api, output_api, source_file_filter=None):
  """Checks that files end with one and only one \n (LF)."""
  eof_files = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    # Check that the file ends in one and only one newline character.
    if len(contents) > 1 and (contents[-1:] != '\n' or contents[-2:-1] == '\n'):
      eof_files.append(f.LocalPath())

  if eof_files:
    return [output_api.PresubmitPromptWarning(
        'These files should end in one (and only one) newline character:',
        items=eof_files)]
  return []


def CheckChangeHasNoCrAndHasOnlyOneEol(input_api, output_api,
                                       source_file_filter=None):
  """Runs both CheckChangeHasNoCR and CheckChangeHasOnlyOneEol in one pass.

  It is faster because it is reading the file only once.
  """
  cr_files = []
  eof_files = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    if '\r' in contents:
      cr_files.append(f.LocalPath())
    # Check that the file ends in one and only one newline character.
    if len(contents) > 1 and (contents[-1:] != '\n' or contents[-2:-1] == '\n'):
      eof_files.append(f.LocalPath())
  outputs = []
  if cr_files:
    outputs.append(output_api.PresubmitPromptWarning(
        'Found a CR character in these files:', items=cr_files))
  if eof_files:
    outputs.append(output_api.PresubmitPromptWarning(
        'These files should end in one (and only one) newline character:',
        items=eof_files))
  return outputs


def _ReportErrorFileAndLine(filename, line_num, dummy_line):
  """Default error formatter for _FindNewViolationsOfRule."""
  return '%s:%s' % (filename, line_num)


def _FindNewViolationsOfRule(callable_rule, input_api, source_file_filter=None,
                             error_formatter=_ReportErrorFileAndLine):
  """Find all newly introduced violations of a per-line rule (a callable).

  Arguments:
    callable_rule: a callable taking a file extension and line of input and
      returning True if the rule is satisfied and False if there was a
      problem.
    input_api: object to enumerate the affected files.
    source_file_filter: a filter to be passed to the input api.
    error_formatter: a callable taking (filename, line_number, line) and
      returning a formatted error string.

  Returns:
    A list of the newly-introduced violations reported by the rule.
  """
  errors = []
  for f in input_api.AffectedFiles(include_deletes=False,
                                   file_filter=source_file_filter):
    # For speed, we do two passes, checking first the full file. Shelling out
    # to the SCM to determine the changed region can be quite expensive on
    # Win32. Assuming that most files will be kept problem-free, we can
    # skip the SCM operations most of the time.
    extension = str(f.LocalPath()).rsplit('.', 1)[-1]
    if all(callable_rule(extension, line) for line in f.NewContents()):
      continue  # No violation found in full text: can skip considering diff.

    for line_num, line in f.ChangedContents():
      if not callable_rule(extension, line):
        errors.append(error_formatter(f.LocalPath(), line_num, line))

  return errors
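# Example usage (illustrative): _FindNewViolationsOfRule is the building block
# the per-line checks below are written with. A hypothetical new check built
# the same way might look like this; the banned token and the message are
# made up for the sake of the example.
#
#   def CheckChangeHasNoFixme(input_api, output_api, source_file_filter=None):
#     errors = _FindNewViolationsOfRule(lambda _, line: 'FIXME' not in line,
#                                       input_api, source_file_filter)
#     if errors:
#       return [output_api.PresubmitPromptWarning(
#           'Found FIXME in:', long_text='\n'.join(errors))]
#     return []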
def CheckChangeHasNoTabs(input_api, output_api, source_file_filter=None):
  """Checks that there are no tab characters in any of the text files to be
  submitted.
  """
  # In addition to the filter, make sure that makefiles are blacklisted.
  if not source_file_filter:
    # It's the default filter.
    source_file_filter = input_api.FilterSourceFile
  def filter_more(affected_file):
    basename = input_api.os_path.basename(affected_file.LocalPath())
    return (not (basename in ('Makefile', 'makefile') or
                 basename.endswith('.mk')) and
            source_file_filter(affected_file))

  tabs = _FindNewViolationsOfRule(lambda _, line: '\t' not in line,
                                  input_api, filter_more)

  if tabs:
    return [output_api.PresubmitPromptWarning('Found a tab character in:',
                                              long_text='\n'.join(tabs))]
  return []


def CheckChangeTodoHasOwner(input_api, output_api, source_file_filter=None):
  """Checks that the user didn't add TODO(name) without an owner."""

  unowned_todo = input_api.re.compile('TO''DO[^(]')
  errors = _FindNewViolationsOfRule(lambda _, x: not unowned_todo.search(x),
                                    input_api, source_file_filter)
  errors = ['Found TO''DO with no owner in ' + x for x in errors]
  if errors:
    return [output_api.PresubmitPromptWarning('\n'.join(errors))]
  return []


def CheckChangeHasNoStrayWhitespace(input_api, output_api,
                                    source_file_filter=None):
  """Checks that there is no stray whitespace at the end of source lines."""
  errors = _FindNewViolationsOfRule(lambda _, line: line.rstrip() == line,
                                    input_api, source_file_filter)
  if errors:
    return [output_api.PresubmitPromptWarning(
        'Found line ending with white spaces in:',
        long_text='\n'.join(errors))]
  return []
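# Example usage (illustrative): every per-line check above takes an optional
# source_file_filter, which is simply a callable receiving an affected file.
# A PRESUBMIT.py can narrow the checks to particular paths with a plain
# lambda; the extension list here is only an example.
#
#   def CheckChangeOnUpload(input_api, output_api):
#     cc_filter = lambda f: f.LocalPath().endswith(('.cc', '.h'))
#     results = []
#     results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
#         input_api, output_api, source_file_filter=cc_filter))
#     results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
#         input_api, output_api, source_file_filter=cc_filter))
#     return results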
def CheckLongLines(input_api, output_api, maxlen, source_file_filter=None):
  """Checks that there aren't any lines longer than maxlen characters in any
  of the text files to be submitted.
  """
  maxlens = {
      'java': 100,
      # This is specifically for Android's handwritten makefiles (Android.mk).
      'mk': 200,
      '': maxlen,
  }

  # Language specific exceptions to max line length.
  # '.h' is considered an obj-c file extension, since OBJC_EXCEPTIONS are a
  # superset of CPP_EXCEPTIONS.
  CPP_FILE_EXTS = ('c', 'cc')
  CPP_EXCEPTIONS = ('#define', '#endif', '#if', '#include', '#pragma')
  JAVA_FILE_EXTS = ('java',)
  JAVA_EXCEPTIONS = ('import ', 'package ')
  JS_FILE_EXTS = ('js',)
  JS_EXCEPTIONS = ("GEN('#include",)
  OBJC_FILE_EXTS = ('h', 'm', 'mm')
  OBJC_EXCEPTIONS = ('#define', '#endif', '#if', '#import', '#include',
                     '#pragma')
  PY_FILE_EXTS = ('py',)
  PY_EXCEPTIONS = ('import', 'from')
  LANGUAGE_EXCEPTIONS = [
      (CPP_FILE_EXTS, CPP_EXCEPTIONS),
      (JAVA_FILE_EXTS, JAVA_EXCEPTIONS),
      (JS_FILE_EXTS, JS_EXCEPTIONS),
      (OBJC_FILE_EXTS, OBJC_EXCEPTIONS),
      (PY_FILE_EXTS, PY_EXCEPTIONS),
  ]

  def no_long_lines(file_extension, line):
    # Check for language specific exceptions.
    if any(file_extension in exts and line.startswith(exceptions)
           for exts, exceptions in LANGUAGE_EXCEPTIONS):
      return True

    file_maxlen = maxlens.get(file_extension, maxlens[''])
    # Stupidly long symbols that need to be worked around if they take 66% of
    # the line.
    long_symbol = file_maxlen * 2 / 3
    # Hard line length limit at 50% more.
    extra_maxlen = file_maxlen * 3 / 2

    line_len = len(line)
    if line_len <= file_maxlen:
      return True

    # Allow long URLs of any length.
    if any((url in line) for url in ('file://', 'http://', 'https://')):
      return True

    if line_len > extra_maxlen:
      return False

    if 'url(' in line and file_extension == 'css':
      return True

    if ' max_pendings:
      out.append('%s has %d build(s) pending' %
                 (builder_name, pending_builds_len))
  if out:
    return [output_api.PresubmitPromptWarning(
        'Build(s) pending. It is suggested to wait that no more than %d '
        'builds are pending.' % max_pendings,
        long_text='\n'.join(out))]
  return []


def CheckOwners(input_api, output_api, source_file_filter=None):
  if input_api.is_committing:
    if input_api.tbr:
      return [output_api.PresubmitNotifyResult(
          '--tbr was specified, skipping OWNERS check')]
    if input_api.change.issue:
      if _GetRietveldIssueProps(input_api, None).get('cq_dry_run', False):
        return [output_api.PresubmitNotifyResult(
            'This is a CQ dry run, skipping OWNERS check')]
    else:
      return [output_api.PresubmitError("OWNERS check failed: this change has "
          "no Rietveld issue number, so we can't check it for approvals.")]
    needed = 'LGTM from an OWNER'
    output = output_api.PresubmitError
  else:
    needed = 'OWNER reviewers'
    output = output_api.PresubmitNotifyResult

  affected_files = set([f.LocalPath() for f in
      input_api.change.AffectedFiles(file_filter=source_file_filter)])

  owners_db = input_api.owners_db
  owner_email, reviewers = _RietveldOwnerAndReviewers(
      input_api,
      owners_db.email_regexp,
      approval_needed=input_api.is_committing)

  owner_email = owner_email or input_api.change.author_email

  if owner_email:
    reviewers_plus_owner = set([owner_email]).union(reviewers)
    missing_files = owners_db.files_not_covered_by(affected_files,
                                                   reviewers_plus_owner)
  else:
    missing_files = owners_db.files_not_covered_by(affected_files, reviewers)

  if missing_files:
    output_list = [
        output('Missing %s for these files:\n %s' %
               (needed, '\n '.join(sorted(missing_files))))]
    if not input_api.is_committing:
      suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
      output_list.append(output('Suggested OWNERS: ' +
          '(Use "git-cl owners" to interactively select owners.)\n %s' %
          ('\n '.join(suggested_owners or []))))
    return output_list

  if input_api.is_committing and not reviewers:
    return [output('Missing LGTM from someone other than %s' % owner_email)]
  return []


def _GetRietveldIssueProps(input_api, messages):
  """Gets the issue properties from rietveld."""
  issue = input_api.change.issue
  if issue and input_api.rietveld:
    return input_api.rietveld.get_issue_properties(
        issue=int(issue), messages=messages)


def _ReviewersFromChange(change):
  """Return the reviewers specified in the |change|, if any."""
  reviewers = set()
  if change.R:
    reviewers.update(set([r.strip() for r in change.R.split(',')]))
  if change.TBR:
    reviewers.update(set([r.strip() for r in change.TBR.split(',')]))

  # Drop reviewers that aren't specified in email address format.
  return set(reviewer for reviewer in reviewers if '@' in reviewer)


def _RietveldOwnerAndReviewers(input_api, email_regexp, approval_needed=False):
  """Return the owner and reviewers of a change, if any.

  If approval_needed is True, only reviewers who have approved the change
  will be returned.
  """
  issue_props = _GetRietveldIssueProps(input_api, True)
  if not issue_props:
    reviewers = set()
    if not approval_needed:
      reviewers = _ReviewersFromChange(input_api.change)
    return None, reviewers

  if not approval_needed:
    return issue_props['owner_email'], set(issue_props['reviewers'])

  owner_email = issue_props['owner_email']

  def match_reviewer(r):
    return email_regexp.match(r) and r != owner_email

  messages = issue_props.get('messages', [])
  approvers = set(
      m['sender'] for m in messages
      if m.get('approval') and match_reviewer(m['sender']))

  return owner_email, approvers
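# Example usage (illustrative): CheckOwners and CheckLongLines above are
# invoked from a PRESUBMIT.py like the other canned checks; maxlen=80 is only
# an illustrative value. As the code above shows, CheckOwners skips itself at
# commit time when --tbr was given or the run is a CQ dry run.
#
#   def CheckChangeOnCommit(input_api, output_api):
#     results = []
#     results.extend(input_api.canned_checks.CheckOwners(
#         input_api, output_api))
#     results.extend(input_api.canned_checks.CheckLongLines(
#         input_api, output_api, 80))
#     return results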
def _CheckConstNSObject(input_api, output_api, source_file_filter):
  """Checks to make sure no objective-c files have |const NSSomeClass*|."""
  pattern = input_api.re.compile(
      r'(?
500: print " %s took a long time: %dms" % (snapshot_memory[1], delta_ms)
    snapshot_memory[:] = (dt2, msg)

  if owners_check:
    snapshot("checking owners")
    results.extend(input_api.canned_checks.CheckOwners(
        input_api, output_api, source_file_filter=None))

  snapshot("checking long lines")
  results.extend(input_api.canned_checks.CheckLongLines(
      input_api, output_api, maxlen, source_file_filter=sources))
  snapshot("checking tabs")
  results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
      input_api, output_api, source_file_filter=sources))
  snapshot("checking stray whitespace")
  results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
      input_api, output_api, source_file_filter=sources))
  snapshot("checking nsobjects")
  results.extend(_CheckConstNSObject(
      input_api, output_api, source_file_filter=sources))

  # The following checks are only done on commit, since the commit bot will
  # auto-fix most of these.
  if input_api.is_committing:
    snapshot("checking eol style")
    results.extend(input_api.canned_checks.CheckChangeSvnEolStyle(
        input_api, output_api, source_file_filter=text_files))
    snapshot("checking svn mime types")
    results.extend(input_api.canned_checks.CheckSvnForCommonMimeTypes(
        input_api, output_api))
    snapshot("checking license")
    results.extend(input_api.canned_checks.CheckLicense(
        input_api, output_api, license_header, source_file_filter=sources))
    snapshot("checking was uploaded")
    results.extend(input_api.canned_checks.CheckChangeWasUploaded(
        input_api, output_api))
    snapshot("checking description")
    results.extend(input_api.canned_checks.CheckChangeHasDescription(
        input_api, output_api))
    results.extend(input_api.canned_checks.CheckDoNotSubmitInDescription(
        input_api, output_api))
    snapshot("checking do not submit in files")
    results.extend(input_api.canned_checks.CheckDoNotSubmitInFiles(
        input_api, output_api))
  snapshot("done")
  return results


def CheckPatchFormatted(input_api, output_api):
  import git_cl
  cmd = ['cl', 'format', '--dry-run', input_api.PresubmitLocalPath()]
  code, _ = git_cl.RunGitWithCode(cmd, suppress_stderr=True)
  if code == 2:
    short_path = input_api.basename(input_api.PresubmitLocalPath())
    full_path = input_api.os_path.relpath(input_api.PresubmitLocalPath(),
                                          input_api.change.RepositoryRoot())
    return [output_api.PresubmitPromptWarning(
        'The %s directory requires source formatting. '
        'Please run git cl format %s' % (short_path, full_path))]
  # As this is just a warning, ignore all other errors if the user
  # happens to have a broken clang-format, doesn't use git, etc etc.
  return []


def CheckGNFormatted(input_api, output_api):
  import gn
  affected_files = input_api.AffectedFiles(
      include_deletes=False,
      file_filter=lambda x: x.LocalPath().endswith('.gn') or
                            x.LocalPath().endswith('.gni'))
  warnings = []
  for f in affected_files:
    cmd = ['gn', 'format', '--dry-run', f.AbsoluteLocalPath()]
    rc = gn.main(cmd)
    if rc == 2:
      warnings.append(output_api.PresubmitPromptWarning(
          '%s requires formatting. Please run `gn format --in-place %s`.' % (
              f.AbsoluteLocalPath(), f.LocalPath())))
  # It's just a warning, so ignore other types of failures assuming they'll be
  # caught elsewhere.
  return warnings
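# Example usage (illustrative): the formatting checks are typically run as
# upload-time warnings from a PRESUBMIT.py; both only emit
# PresubmitPromptWarning results, so a stale clang-format or gn binary does
# not hard-block a change. Which checks to combine is up to the project.
#
#   def CheckChangeOnUpload(input_api, output_api):
#     results = []
#     results.extend(input_api.canned_checks.CheckPatchFormatted(
#         input_api, output_api))
#     results.extend(input_api.canned_checks.CheckGNFormatted(
#         input_api, output_api))
#     return results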