author     Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2021-10-19 23:09:45 +0000
committer  Android Build Coastguard Worker <android-build-coastguard-worker@google.com>  2021-10-19 23:09:45 +0000
commit     09698708138dc3cb3f7ddf94d919aaeaced30a94 (patch)
tree       92df91ba2e8af1e8571f88635559b6012f9dfa4c
parent     5197623930b8c7b7cad418f2bee37e233e50da90 (diff)
parent     0a7575139704852003c31ba7bf4fa443632f3b8c (diff)
Snap for 7838477 from 0a7575139704852003c31ba7bf4fa443632f3b8c to sc-d2-release
Change-Id: I8aaef7b887b192320fb0c99bcfcff5313f63a2da
-rw-r--r--  rh/config.py                                  |   48
-rw-r--r--  rh/utils.py                                   |   52
-rwxr-xr-x  rh/utils_unittest.py                          |   54
-rwxr-xr-x  tools/android_test_mapping_format.py          |  154
-rwxr-xr-x  tools/android_test_mapping_format_unittest.py |  122
-rwxr-xr-x  tools/cpplint.py                              | 1027
-rwxr-xr-x  tools/cpplint.py-update                       |   12

7 files changed, 1083 insertions(+), 386 deletions(-)
diff --git a/rh/config.py b/rh/config.py index ed75007..1eb93a7 100644 --- a/rh/config.py +++ b/rh/config.py @@ -63,22 +63,10 @@ class RawConfigParser(configparser.RawConfigParser): return default raise - def get(self, section, option, default=_UNSET): - """Return the value for |option| in |section| (with |default|).""" - try: - return configparser.RawConfigParser.get(self, section, option) - except (configparser.NoSectionError, configparser.NoOptionError): - if default is not _UNSET: - return default - raise - def items(self, section=_UNSET, default=_UNSET): """Return a list of (key, value) tuples for the options in |section|.""" if section is _UNSET: - # Python 3 compat logic. Return a dict of section-to-options. - if sys.version_info.major < 3: - return [(x, self.items(x)) for x in self.sections()] - return super(RawConfigParser, self).items() + return super().items() try: return configparser.RawConfigParser.items(self, section) @@ -87,15 +75,6 @@ class RawConfigParser(configparser.RawConfigParser): return default raise - if sys.version_info.major < 3: - def read_dict(self, dictionary): - """Store |dictionary| into ourselves.""" - for section, settings in dictionary.items(): - for option, value in settings: - if not self.has_section(section): - self.add_section(section) - self.set(section, option, value) - class PreUploadConfig(object): """A single (abstract) config used for `repo upload` hooks.""" @@ -138,7 +117,8 @@ class PreUploadConfig(object): def custom_hook(self, hook): """The command to execute for |hook|.""" - return shlex.split(self.config.get(self.CUSTOM_HOOKS_SECTION, hook, '')) + return shlex.split(self.config.get( + self.CUSTOM_HOOKS_SECTION, hook, fallback='')) @property def builtin_hooks(self): @@ -148,13 +128,13 @@ class PreUploadConfig(object): def builtin_hook_option(self, hook): """The options to pass to |hook|.""" - return shlex.split(self.config.get(self.BUILTIN_HOOKS_OPTIONS_SECTION, - hook, '')) + return shlex.split(self.config.get( + self.BUILTIN_HOOKS_OPTIONS_SECTION, hook, fallback='')) def builtin_hook_exclude_paths(self, hook): """List of paths for which |hook| should not be executed.""" - return shlex.split(self.config.get(self.BUILTIN_HOOKS_EXCLUDE_SECTION, - hook, '')) + return shlex.split(self.config.get( + self.BUILTIN_HOOKS_EXCLUDE_SECTION, hook, fallback='')) @property def tool_paths(self): @@ -186,7 +166,7 @@ class PreUploadConfig(object): """Whether to skip hooks for merged commits.""" return rh.shell.boolean_shell_value( self.config.get(self.OPTIONS_SECTION, - self.OPTION_IGNORE_MERGED_COMMITS, None), + self.OPTION_IGNORE_MERGED_COMMITS, fallback=None), False) def update(self, preupload_config): @@ -234,7 +214,7 @@ class PreUploadConfig(object): self.custom_hook(hook) except ValueError as e: raise ValidationError('%s: hook "%s" command line is invalid: ' - '%s' % (self.source, hook, e)) + '%s' % (self.source, hook, e)) from e # Verify hook options are valid shell strings. for hook in self.builtin_hooks: @@ -242,7 +222,7 @@ class PreUploadConfig(object): self.builtin_hook_option(hook) except ValueError as e: raise ValidationError('%s: hook options "%s" are invalid: %s' % - (self.source, hook, e)) + (self.source, hook, e)) from e # Reject unknown tools. valid_tools = set(rh.hooks.TOOL_PATHS.keys()) @@ -279,13 +259,13 @@ class PreUploadFile(PreUploadConfig): Args: path: The config file to load. 
""" - super(PreUploadFile, self).__init__(source=path) + super().__init__(source=path) self.path = path try: self.config.read(path) except configparser.ParsingError as e: - raise ValidationError('%s: %s' % (path, e)) + raise ValidationError('%s: %s' % (path, e)) from e self._validate() @@ -310,7 +290,7 @@ class LocalPreUploadFile(PreUploadFile): FILENAME = 'PREUPLOAD.cfg' def _validate(self): - super(LocalPreUploadFile, self)._validate() + super()._validate() # Reject Exclude Paths section for local config. if self.config.has_section(self.BUILTIN_HOOKS_EXCLUDE_SECTION): @@ -340,7 +320,7 @@ class PreUploadSettings(PreUploadConfig): paths: The directories to look for config files. global_paths: The directories to look for global config files. """ - super(PreUploadSettings, self).__init__() + super().__init__() self.paths = [] for config in itertools.chain( diff --git a/rh/utils.py b/rh/utils.py index 1b36c7a..aeab52f 100644 --- a/rh/utils.py +++ b/rh/utils.py @@ -66,7 +66,7 @@ class CompletedProcess(getattr(subprocess, 'CompletedProcess', object)): self.stderr = stderr self.returncode = returncode else: - super(CompletedProcess, self).__init__( + super().__init__( args=args, returncode=returncode, stdout=stdout, stderr=stderr) @property @@ -99,7 +99,7 @@ class CalledProcessError(subprocess.CalledProcessError): raise TypeError('exception must be an exception instance; got %r' % (exception,)) - super(CalledProcessError, self).__init__(returncode, cmd, stdout) + super().__init__(returncode, cmd, stdout) # The parent class will set |output|, so delete it. del self.output # TODO(vapier): When we're Python 3-only, delete this assignment as the @@ -183,12 +183,8 @@ def _kill_child_process(proc, int_timeout, kill_timeout, cmd, original_handler, print('Ignoring unhandled exception in _kill_child_process: %s' % e, file=sys.stderr) - # Ensure our child process has been reaped. - kwargs = {} - if sys.version_info.major >= 3: - # ... but don't wait forever. - kwargs['timeout'] = 60 - proc.wait_lock_breaker(**kwargs) + # Ensure our child process has been reaped, but don't wait forever. + proc.wait_lock_breaker(timeout=60) if not rh.signals.relay_signal(original_handler, signum, frame): # Mock up our own, matching exit code for signaling. @@ -310,13 +306,8 @@ def run(cmd, redirect_stdout=False, redirect_stderr=False, cwd=None, input=None, kill_timeout = float(kill_timeout) def _get_tempfile(): - kwargs = {} - if sys.version_info.major < 3: - kwargs['bufsize'] = 0 - else: - kwargs['buffering'] = 0 try: - return tempfile.TemporaryFile(**kwargs) + return tempfile.TemporaryFile(buffering=0) except EnvironmentError as e: if e.errno != errno.ENOENT: raise @@ -325,7 +316,7 @@ def run(cmd, redirect_stdout=False, redirect_stderr=False, cwd=None, input=None, # issue in this particular case since our usage gurantees deletion, # and since this is primarily triggered during hard cgroups # shutdown. - return tempfile.TemporaryFile(dir='/tmp', **kwargs) + return tempfile.TemporaryFile(dir='/tmp', buffering=0) # Modify defaults based on parameters. 
# Note that tempfiles must be unbuffered else attempts to read @@ -370,6 +361,12 @@ def run(cmd, redirect_stdout=False, redirect_stderr=False, cwd=None, input=None, env = env.copy() if env is not None else os.environ.copy() env.update(extra_env if extra_env else {}) + def ensure_text(s): + """Make sure |s| is a string if it's bytes.""" + if isinstance(s, bytes): + s = s.decode('utf-8', 'replace') + return s + result.args = cmd proc = None @@ -415,19 +412,26 @@ def run(cmd, redirect_stdout=False, redirect_stderr=False, cwd=None, input=None, if extra_env: msg += ', extra env=%s' % extra_env raise CalledProcessError( - result.returncode, result.cmd, stdout=result.stdout, - stderr=result.stderr, msg=msg) + result.returncode, result.cmd, msg=msg, + stdout=ensure_text(result.stdout), + stderr=ensure_text(result.stderr)) except OSError as e: + # Avoid leaking tempfiles. + if popen_stdout is not None and not isinstance(popen_stdout, int): + popen_stdout.close() + if popen_stderr is not None and not isinstance(popen_stderr, int): + popen_stderr.close() + estr = str(e) if e.errno == errno.EACCES: estr += '; does the program need `chmod a+x`?' if not check: - result = CompletedProcess( - args=cmd, stderr=estr.encode('utf-8'), returncode=255) + result = CompletedProcess(args=cmd, stderr=estr, returncode=255) else: raise CalledProcessError( - result.returncode, result.cmd, stdout=result.stdout, - stderr=result.stderr, msg=estr, exception=e) + result.returncode, result.cmd, msg=estr, exception=e, + stdout=ensure_text(result.stdout), + stderr=ensure_text(result.stderr)) from e finally: if proc is not None: # Ensure the process is dead. @@ -437,10 +441,8 @@ def run(cmd, redirect_stdout=False, redirect_stderr=False, cwd=None, input=None, None, None) # Make sure output is returned as a string rather than bytes. 
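The utils.py hunks above funnel stdout/stderr through a small `ensure_text()` helper so both the success and error paths hand back `str` even when the pipe delivered `bytes`. A self-contained sketch of that pattern (plain subprocess here, not the rh.utils wrapper itself):

```python
import subprocess

def ensure_text(s):
    """Decode bytes to str with replacement; pass str and None through untouched."""
    if isinstance(s, bytes):
        return s.decode('utf-8', 'replace')
    return s

# Captured output arrives as bytes when text=False (the default), so the helper
# normalizes it once at the end instead of decoding in every code path.
result = subprocess.run(['echo', 'hello'],
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
print(type(result.stdout))           # <class 'bytes'>
print(ensure_text(result.stdout))    # 'hello\n'
print(ensure_text(None))             # None survives unchanged
print(ensure_text('already text'))   # str passes straight through
```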
- if result.stdout is not None: - result.stdout = result.stdout.decode('utf-8', 'replace') - if result.stderr is not None: - result.stderr = result.stderr.decode('utf-8', 'replace') + result.stdout = ensure_text(result.stdout) + result.stderr = ensure_text(result.stderr) return result # pylint: enable=redefined-builtin,input-builtin diff --git a/rh/utils_unittest.py b/rh/utils_unittest.py index f3098a9..ea2ddaa 100755 --- a/rh/utils_unittest.py +++ b/rh/utils_unittest.py @@ -161,6 +161,60 @@ class RunCommandTests(unittest.TestCase): self.assertEqual(u'ß', ret.stdout) self.assertIsNone(ret.stderr) + def test_check_false(self): + """Verify handling of check=False.""" + ret = rh.utils.run(['false'], check=False) + self.assertNotEqual(0, ret.returncode) + self.assertIn('false', str(ret)) + + ret = rh.utils.run(['true'], check=False) + self.assertEqual(0, ret.returncode) + self.assertIn('true', str(ret)) + + def test_check_true(self): + """Verify handling of check=True.""" + with self.assertRaises(rh.utils.CalledProcessError) as e: + rh.utils.run(['false'], check=True) + err = e.exception + self.assertNotEqual(0, err.returncode) + self.assertIn('false', str(err)) + + ret = rh.utils.run(['true'], check=True) + self.assertEqual(0, ret.returncode) + self.assertIn('true', str(ret)) + + def test_check_false_output(self): + """Verify handling of output capturing w/check=False.""" + with self.assertRaises(rh.utils.CalledProcessError) as e: + rh.utils.run(['sh', '-c', 'echo out; echo err >&2; false'], + check=True, capture_output=True) + err = e.exception + self.assertNotEqual(0, err.returncode) + self.assertIn('false', str(err)) + + def test_check_true_missing_prog_output(self): + """Verify handling of output capturing w/missing progs.""" + with self.assertRaises(rh.utils.CalledProcessError) as e: + rh.utils.run(['./!~a/b/c/d/'], check=True, capture_output=True) + err = e.exception + self.assertNotEqual(0, err.returncode) + self.assertIn('a/b/c/d', str(err)) + + def test_check_false_missing_prog_output(self): + """Verify handling of output capturing w/missing progs.""" + ret = rh.utils.run(['./!~a/b/c/d/'], check=False, capture_output=True) + self.assertNotEqual(0, ret.returncode) + self.assertIn('a/b/c/d', str(ret)) + + def test_check_false_missing_prog_combined_output(self): + """Verify handling of combined output capturing w/missing progs.""" + with self.assertRaises(rh.utils.CalledProcessError) as e: + rh.utils.run(['./!~a/b/c/d/'], check=True, + combine_stdout_stderr=True) + err = e.exception + self.assertNotEqual(0, err.returncode) + self.assertIn('a/b/c/d', str(err)) + if __name__ == '__main__': unittest.main() diff --git a/tools/android_test_mapping_format.py b/tools/android_test_mapping_format.py index b87b886..ae784cf 100755 --- a/tools/android_test_mapping_format.py +++ b/tools/android_test_mapping_format.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Validate TEST_MAPPING files in Android source code. +"""Validates TEST_MAPPING files in Android source code. The goal of this script is to validate the format of TEST_MAPPING files: 1. It must be a valid json file. 
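TEST_MAPPING files may carry `//` line comments even though JSON itself does not, so the validator blanks those lines before calling json.loads. A minimal sketch of that filtering step using the same line-oriented regex idea (the real helper added in this change is `_filter_comments`; the standalone name and sample data below are illustrative):

```python
import json
import re

_COMMENTS_RE = re.compile(r'^\s*//')

def filter_comments(json_data: str) -> str:
    """Replace whole-line '//' comments with blank lines so json.loads accepts the rest."""
    return ''.join(
        '\n' if _COMMENTS_RE.match(line) else line
        for line in json_data.splitlines(keepends=True))

text = '''
// tests to run before submission
{
  "presubmit": [
    {"name": "SomeModuleTests"}
  ]
}
'''
data = json.loads(filter_comments(text))
print(data['presubmit'][0]['name'])  # SomeModuleTests
```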
@@ -27,6 +27,7 @@ import json import os import re import sys +from typing import Any, Dict _path = os.path.realpath(__file__ + '/../..') if sys.path[0] != _path: @@ -38,14 +39,16 @@ del _path # pylint: disable=wrong-import-position import rh.git -IMPORTS = 'imports' -NAME = 'name' -OPTIONS = 'options' -PATH = 'path' -HOST = 'host' -PREFERRED_TARGETS = 'preferred_targets' -FILE_PATTERNS = 'file_patterns' -TEST_MAPPING_URL = ( +_IMPORTS = 'imports' +_NAME = 'name' +_OPTIONS = 'options' +_PATH = 'path' +_HOST = 'host' +_PREFERRED_TARGETS = 'preferred_targets' +_FILE_PATTERNS = 'file_patterns' +_INVALID_IMPORT_CONFIG = 'Invalid import config in TEST_MAPPING file' +_INVALID_TEST_CONFIG = 'Invalid test config in TEST_MAPPING file' +_TEST_MAPPING_URL = ( 'https://source.android.com/compatibility/tests/development/' 'test-mapping') @@ -53,13 +56,6 @@ TEST_MAPPING_URL = ( _COMMENTS_RE = re.compile(r'^\s*//') -if sys.version_info.major < 3: - # pylint: disable=basestring-builtin,undefined-variable - string_types = basestring -else: - string_types = str - - class Error(Exception): """Base exception for all custom exceptions in this module.""" @@ -68,8 +64,8 @@ class InvalidTestMappingError(Error): """Exception to raise when detecting an invalid TEST_MAPPING file.""" -def filter_comments(json_data): - """Remove '//'-format comments in TEST_MAPPING file to valid format. +def _filter_comments(json_data: str) -> str: + """Removes '//'-format comments in TEST_MAPPING file to valid format. Args: json_data: TEST_MAPPING file content (as a string). @@ -77,12 +73,12 @@ def filter_comments(json_data): Returns: Valid json string without comments. """ - return ''.join('\n' if _COMMENTS_RE.match(x) else x for x in - json_data.splitlines()) + return ''.join( + '\n' if _COMMENTS_RE.match(x) else x for x in json_data.splitlines()) -def _validate_import(entry, test_mapping_file): - """Validate an import setting. +def _validate_import(entry: Dict[str, Any], test_mapping_file: str): + """Validates an import setting. Args: entry: A dictionary of an import setting. @@ -93,85 +89,84 @@ def _validate_import(entry, test_mapping_file): """ if len(entry) != 1: raise InvalidTestMappingError( - 'Invalid import config in test mapping file %s. each import can ' - 'only have one `path` setting. Failed entry: %s' % - (test_mapping_file, entry)) - if list(entry.keys())[0] != PATH: + f'{_INVALID_IMPORT_CONFIG} {test_mapping_file}. Each import can ' + f'only have one `path` setting. Failed entry: {entry}') + if _PATH not in entry: raise InvalidTestMappingError( - 'Invalid import config in test mapping file %s. import can only ' - 'have one `path` setting. Failed entry: %s' % - (test_mapping_file, entry)) + f'{_INVALID_IMPORT_CONFIG} {test_mapping_file}. Import can ' + f'only have one `path` setting. Failed entry: {entry}') -def _validate_test(test, test_mapping_file): - """Validate a test declaration. +def _validate_test(test: Dict[str, Any], test_mapping_file: str) -> bool: + """Returns whether a test declaration is valid. Args: - entry: A dictionary of a test declaration. + test: A dictionary of a test declaration. test_mapping_file: Path to the TEST_MAPPING file to be validated. Raises: InvalidTestMappingError: if the a test declaration is invalid. """ - if NAME not in test: - raise InvalidTestMappingError( - 'Invalid test config in test mapping file %s. test config must ' - 'a `name` setting. 
Failed test config: %s' % - (test_mapping_file, test)) - if not isinstance(test.get(HOST, False), bool): + if _NAME not in test: raise InvalidTestMappingError( - 'Invalid test config in test mapping file %s. `host` setting in ' - 'test config can only have boolean value of `true` or `false`. ' - 'Failed test config: %s' % (test_mapping_file, test)) - preferred_targets = test.get(PREFERRED_TARGETS, []) - if (not isinstance(preferred_targets, list) or - any(not isinstance(t, string_types) for t in preferred_targets)): - raise InvalidTestMappingError( - 'Invalid test config in test mapping file %s. `preferred_targets` ' - 'setting in test config can only be a list of strings. Failed test ' - 'config: %s' % (test_mapping_file, test)) - file_patterns = test.get(FILE_PATTERNS, []) - if (not isinstance(file_patterns, list) or - any(not isinstance(p, string_types) for p in file_patterns)): + + f'{_INVALID_TEST_CONFIG} {test_mapping_file}. Test config must ' + f'have a `name` setting. Failed test config: {test}') + + if not isinstance(test.get(_HOST, False), bool): raise InvalidTestMappingError( - 'Invalid test config in test mapping file %s. `file_patterns` ' - 'setting in test config can only be a list of strings. Failed test ' - 'config: %s' % (test_mapping_file, test)) - for option in test.get(OPTIONS, []): + f'{_INVALID_TEST_CONFIG} {test_mapping_file}. `host` setting in ' + f'test config can only have boolean value of `true` or `false`. ' + f'Failed test config: {test}') + + for key in (_PREFERRED_TARGETS, _FILE_PATTERNS): + value = test.get(key, []) + if (not isinstance(value, list) or + any(not isinstance(t, str) for t in value)): + raise InvalidTestMappingError( + f'{_INVALID_TEST_CONFIG} {test_mapping_file}. `{key}` setting ' + f'in test config can only be a list of strings. ' + f'Failed test config: {test}') + + for option in test.get(_OPTIONS, []): + if not isinstance(option, dict): + raise InvalidTestMappingError( + f'{_INVALID_TEST_CONFIG} {test_mapping_file}. Option setting ' + f'in test config can only be a dictionary of key-val setting. ' + f'Failed entry: {option}') if len(option) != 1: raise InvalidTestMappingError( - 'Invalid option setting in test mapping file %s. each option ' - 'setting can only have one key-val setting. Failed entry: %s' % - (test_mapping_file, option)) + f'{_INVALID_TEST_CONFIG} {test_mapping_file}. Each option ' + f'setting can only have one key-val setting. ' + f'Failed entry: {option}') -def _load_file(test_mapping_file): - """Load a TEST_MAPPING file as a json file.""" +def process_file(test_mapping_file: str): + """Validates a TEST_MAPPING file content.""" try: - return json.loads(filter_comments(test_mapping_file)) - except ValueError as e: + test_mapping_data = json.loads(_filter_comments(test_mapping_file)) + except ValueError as exception: # The file is not a valid JSON file. print( - 'Failed to parse JSON file %s, error: %s' % (test_mapping_file, e), + f'Invalid JSON data in TEST_MAPPING file ' + f'Failed to parse JSON data: {test_mapping_file}, ' + f'error: {exception}', file=sys.stderr) raise - -def process_file(test_mapping_file): - """Validate a TEST_MAPPING file.""" - test_mapping = _load_file(test_mapping_file) - # Validate imports. - for import_entry in test_mapping.get(IMPORTS, []): - _validate_import(import_entry, test_mapping_file) - # Validate tests. 
- all_tests = [test for group, tests in test_mapping.items() - if group != IMPORTS for test in tests] - for test in all_tests: - _validate_test(test, test_mapping_file) + for group, value in test_mapping_data.items(): + if group == _IMPORTS: + # Validate imports. + for test in value: + _validate_import(test, test_mapping_file) + else: + # Validate tests. + for test in value: + _validate_test(test, test_mapping_file) def get_parser(): - """Return a command line parser.""" + """Returns a command line parser.""" parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('--commit', type=str, help='Specify the commit to validate.') @@ -181,6 +176,7 @@ def get_parser(): def main(argv): + """Main function.""" parser = get_parser() opts = parser.parse_args(argv) try: @@ -188,12 +184,12 @@ def main(argv): if opts.commit: json_data = rh.git.get_file_content(opts.commit, filename) else: - with open(os.path.join(opts.project_dir, filename)) as f: - json_data = f.read() + with open(os.path.join(opts.project_dir, filename)) as file: + json_data = file.read() process_file(json_data) except: print('Visit %s for details about the format of TEST_MAPPING ' - 'file.' % TEST_MAPPING_URL, file=sys.stderr) + 'file.' % _TEST_MAPPING_URL, file=sys.stderr) raise diff --git a/tools/android_test_mapping_format_unittest.py b/tools/android_test_mapping_format_unittest.py index 9bef300..14bae32 100755 --- a/tools/android_test_mapping_format_unittest.py +++ b/tools/android_test_mapping_format_unittest.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Unittests for android_test_mapping_format.""" + import os import shutil import tempfile @@ -21,7 +23,7 @@ import unittest import android_test_mapping_format -VALID_TEST_MAPPING = r""" +_VALID_TEST_MAPPING = r""" { "presubmit": [ { @@ -52,11 +54,11 @@ VALID_TEST_MAPPING = r""" } """ -BAD_JSON = """ +_BAD_JSON = """ {wrong format} """ -BAD_TEST_WRONG_KEY = """ +_BAD_TEST_WRONG_KEY = """ { "presubmit": [ { @@ -66,7 +68,7 @@ BAD_TEST_WRONG_KEY = """ } """ -BAD_TEST_WRONG_HOST_VALUE = """ +_BAD_TEST_WRONG_HOST_VALUE = """ { "presubmit": [ { @@ -78,7 +80,7 @@ BAD_TEST_WRONG_HOST_VALUE = """ """ -BAD_TEST_WRONG_PREFERRED_TARGETS_VALUE_NONE_LIST = """ +_BAD_TEST_WRONG_PREFERRED_TARGETS_VALUE_NONE_LIST = """ { "presubmit": [ { @@ -89,7 +91,7 @@ BAD_TEST_WRONG_PREFERRED_TARGETS_VALUE_NONE_LIST = """ } """ -BAD_TEST_WRONG_PREFERRED_TARGETS_VALUE_WRONG_TYPE = """ +_BAD_TEST_WRONG_PREFERRED_TARGETS_VALUE_WRONG_TYPE = """ { "presubmit": [ { @@ -100,7 +102,7 @@ BAD_TEST_WRONG_PREFERRED_TARGETS_VALUE_WRONG_TYPE = """ } """ -BAD_TEST_WRONG_OPTION = """ +_BAD_TEST_WRONG_OPTION = """ { "presubmit": [ { @@ -116,7 +118,7 @@ BAD_TEST_WRONG_OPTION = """ } """ -BAD_IMPORT_WRONG_KEY = """ +_BAD_IMPORT_WRONG_KEY = """ { "imports": [ { @@ -126,7 +128,7 @@ BAD_IMPORT_WRONG_KEY = """ } """ -BAD_IMPORT_WRONG_IMPORT_VALUE = """ +_BAD_IMPORT_WRONG_IMPORT_VALUE = """ { "imports": [ { @@ -137,7 +139,7 @@ BAD_IMPORT_WRONG_IMPORT_VALUE = """ } """ -BAD_FILE_PATTERNS = """ +_BAD_FILE_PATTERNS = """ { "presubmit": [ { @@ -148,7 +150,7 @@ BAD_FILE_PATTERNS = """ } """ -TEST_MAPPING_WITH_SUPPORTED_COMMENTS = r""" +_TEST_MAPPING_WITH_SUPPORTED_COMMENTS = r""" // supported comment { // supported comment!@#$%^&*()_ @@ -171,7 +173,7 @@ TEST_MAPPING_WITH_SUPPORTED_COMMENTS = r""" } """ -TEST_MAPPING_WITH_NON_SUPPORTED_COMMENTS = """ +_TEST_MAPPING_WITH_NON_SUPPORTED_COMMENTS = """ { #non-supported comments // supported 
comments "presubmit": [#non-supported comments @@ -196,112 +198,112 @@ class AndroidTestMappingFormatTests(unittest.TestCase): def test_valid_test_mapping(self): """Verify that the check doesn't raise any error for valid test mapping. """ - with open(self.test_mapping_file, 'w') as f: - f.write(VALID_TEST_MAPPING) - with open(self.test_mapping_file, 'r') as f: - android_test_mapping_format.process_file(f.read()) + with open(self.test_mapping_file, 'w') as file: + file.write(_VALID_TEST_MAPPING) + with open(self.test_mapping_file, 'r') as file: + android_test_mapping_format.process_file(file.read()) def test_invalid_test_mapping_bad_json(self): """Verify that TEST_MAPPING file with bad json can be detected.""" - with open(self.test_mapping_file, 'w') as f: - f.write(BAD_JSON) - with open(self.test_mapping_file, 'r') as f: + with open(self.test_mapping_file, 'w') as file: + file.write(_BAD_JSON) + with open(self.test_mapping_file, 'r') as file: self.assertRaises( ValueError, android_test_mapping_format.process_file, - f.read()) + file.read()) def test_invalid_test_mapping_wrong_test_key(self): """Verify that test config using wrong key can be detected.""" - with open(self.test_mapping_file, 'w') as f: - f.write(BAD_TEST_WRONG_KEY) - with open(self.test_mapping_file, 'r') as f: + with open(self.test_mapping_file, 'w') as file: + file.write(_BAD_TEST_WRONG_KEY) + with open(self.test_mapping_file, 'r') as file: self.assertRaises( android_test_mapping_format.InvalidTestMappingError, android_test_mapping_format.process_file, - f.read()) + file.read()) def test_invalid_test_mapping_wrong_test_value(self): """Verify that test config using wrong host value can be detected.""" - with open(self.test_mapping_file, 'w') as f: - f.write(BAD_TEST_WRONG_HOST_VALUE) - with open(self.test_mapping_file, 'r') as f: + with open(self.test_mapping_file, 'w') as file: + file.write(_BAD_TEST_WRONG_HOST_VALUE) + with open(self.test_mapping_file, 'r') as file: self.assertRaises( android_test_mapping_format.InvalidTestMappingError, android_test_mapping_format.process_file, - f.read()) + file.read()) def test_invalid_test_mapping_wrong_preferred_targets_value(self): """Verify invalid preferred_targets are rejected.""" - with open(self.test_mapping_file, 'w') as f: - f.write(BAD_TEST_WRONG_PREFERRED_TARGETS_VALUE_NONE_LIST) - with open(self.test_mapping_file, 'r') as f: + with open(self.test_mapping_file, 'w') as file: + file.write(_BAD_TEST_WRONG_PREFERRED_TARGETS_VALUE_NONE_LIST) + with open(self.test_mapping_file, 'r') as file: self.assertRaises( android_test_mapping_format.InvalidTestMappingError, android_test_mapping_format.process_file, - f.read()) - with open(self.test_mapping_file, 'w') as f: - f.write(BAD_TEST_WRONG_PREFERRED_TARGETS_VALUE_WRONG_TYPE) - with open(self.test_mapping_file, 'r') as f: + file.read()) + with open(self.test_mapping_file, 'w') as file: + file.write(_BAD_TEST_WRONG_PREFERRED_TARGETS_VALUE_WRONG_TYPE) + with open(self.test_mapping_file, 'r') as file: self.assertRaises( android_test_mapping_format.InvalidTestMappingError, android_test_mapping_format.process_file, - f.read()) + file.read()) def test_invalid_test_mapping_wrong_test_option(self): """Verify that test config using wrong option can be detected.""" - with open(self.test_mapping_file, 'w') as f: - f.write(BAD_TEST_WRONG_OPTION) - with open(self.test_mapping_file, 'r') as f: + with open(self.test_mapping_file, 'w') as file: + file.write(_BAD_TEST_WRONG_OPTION) + with open(self.test_mapping_file, 'r') as file: self.assertRaises( 
android_test_mapping_format.InvalidTestMappingError, android_test_mapping_format.process_file, - f.read()) + file.read()) def test_invalid_test_mapping_wrong_import_key(self): """Verify that import setting using wrong key can be detected.""" - with open(self.test_mapping_file, 'w') as f: - f.write(BAD_IMPORT_WRONG_KEY) - with open(self.test_mapping_file, 'r') as f: + with open(self.test_mapping_file, 'w') as file: + file.write(_BAD_IMPORT_WRONG_KEY) + with open(self.test_mapping_file, 'r') as file: self.assertRaises( android_test_mapping_format.InvalidTestMappingError, android_test_mapping_format.process_file, - f.read()) + file.read()) def test_invalid_test_mapping_wrong_import_value(self): """Verify that import setting using wrong value can be detected.""" - with open(self.test_mapping_file, 'w') as f: - f.write(BAD_IMPORT_WRONG_IMPORT_VALUE) - with open(self.test_mapping_file, 'r') as f: + with open(self.test_mapping_file, 'w') as file: + file.write(_BAD_IMPORT_WRONG_IMPORT_VALUE) + with open(self.test_mapping_file, 'r') as file: self.assertRaises( android_test_mapping_format.InvalidTestMappingError, android_test_mapping_format.process_file, - f.read()) + file.read()) def test_invalid_test_mapping_file_patterns_value(self): """Verify that file_patterns using wrong value can be detected.""" - with open(self.test_mapping_file, 'w') as f: - f.write(BAD_FILE_PATTERNS) - with open(self.test_mapping_file, 'r') as f: + with open(self.test_mapping_file, 'w') as file: + file.write(_BAD_FILE_PATTERNS) + with open(self.test_mapping_file, 'r') as file: self.assertRaises( android_test_mapping_format.InvalidTestMappingError, android_test_mapping_format.process_file, - f.read()) + file.read()) def test_valid_test_mapping_file_with_supported_comments(self): """Verify that '//'-format comment can be filtered.""" - with open(self.test_mapping_file, 'w') as f: - f.write(TEST_MAPPING_WITH_SUPPORTED_COMMENTS) - with open(self.test_mapping_file, 'r') as f: - android_test_mapping_format.process_file(f.read()) + with open(self.test_mapping_file, 'w') as file: + file.write(_TEST_MAPPING_WITH_SUPPORTED_COMMENTS) + with open(self.test_mapping_file, 'r') as file: + android_test_mapping_format.process_file(file.read()) def test_valid_test_mapping_file_with_non_supported_comments(self): """Verify that non-supported comment can be detected.""" - with open(self.test_mapping_file, 'w') as f: - f.write(TEST_MAPPING_WITH_NON_SUPPORTED_COMMENTS) - with open(self.test_mapping_file, 'r') as f: + with open(self.test_mapping_file, 'w') as file: + file.write(_TEST_MAPPING_WITH_NON_SUPPORTED_COMMENTS) + with open(self.test_mapping_file, 'r') as file: self.assertRaises( ValueError, android_test_mapping_format.process_file, - f.read()) + file.read()) if __name__ == '__main__': diff --git a/tools/cpplint.py b/tools/cpplint.py index e99d661..c5db879 100755 --- a/tools/cpplint.py +++ b/tools/cpplint.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # pylint: skip-file # # Copyright (c) 2009 Google Inc. All rights reserved. @@ -45,30 +45,49 @@ same line, but it is far from perfect (in either direction). 
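The truncated docstring fragment above refers to cpplint's long-standing heuristic for `//` inside string literals: a `//` is only treated as a comment opener when no double quote follows it on the same line. A toy version of that heuristic (illustrative only, not cpplint's actual comment-cleansing code):

```python
def strip_line_comment(line: str) -> str:
    """Drop a trailing // comment unless a '"' appears after the // marker."""
    pos = line.find('//')
    if pos == -1:
        return line
    # The hack described above: if a double quote shows up after the //,
    # assume the // sits inside a string literal and leave the line alone.
    if '"' in line[pos:]:
        return line
    return line[:pos].rstrip()

print(strip_line_comment('int x = 1;  // counter'))
# int x = 1;
print(strip_line_comment('const char* url = "http://example.com";'))
# unchanged: the // is (correctly) assumed to be inside the string
```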
import codecs import copy import getopt +import glob +import itertools import math # for log import os import re import sre_compile import string import sys -import unicodedata import sysconfig +import unicodedata +import xml.etree.ElementTree + +# if empty, use defaults +_valid_extensions = set([]) + +__VERSION__ = '1.5.5' try: xrange # Python 2 except NameError: + # -- pylint: disable=redefined-builtin xrange = range # Python 3 _USAGE = """ -Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...] +Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit|sed|gsed] + [--filter=-x,+y,...] [--counting=total|toplevel|detailed] [--root=subdir] + [--repository=path] [--linelength=digits] [--headers=x,y,...] + [--recursive] + [--exclude=path] + [--extensions=hpp,cpp,...] + [--includeorder=default|standardcfirst] [--quiet] + [--version] <file> [file] ... + Style checker for C/C++ source files. + This is a fork of the Google style checker with minor extensions. + The style guidelines this tries to follow are those in - https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml + https://google.github.io/styleguide/cppguide.html Every problem is given a confidence score from 1-5, with 5 meaning we are certain of the problem, and 1 meaning it could be a legitimate construct. @@ -79,17 +98,27 @@ Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...] suppresses errors of all categories on that line. The files passed in will be linted; at least one file must be provided. - Default linted extensions are .cc, .cpp, .cu, .cuh and .h. Change the - extensions with the --extensions flag. + Default linted extensions are %s. + Other file types will be ignored. + Change the extensions with the --extensions flag. Flags: - output=vs7 + output=emacs|eclipse|vs7|junit|sed|gsed By default, the output is formatted to ease emacs parsing. Visual Studio - compatible output (vs7) may also be used. Other formats are unsupported. + compatible output (vs7) may also be used. Further support exists for + eclipse (eclipse), and JUnit (junit). XML parsers such as those used + in Jenkins and Bamboo may also be used. + The sed format outputs sed commands that should fix some of the errors. + Note that this requires gnu sed. If that is installed as gsed on your + system (common e.g. on macOS with homebrew) you can use the gsed output + format. Sed commands are written to stdout, not stderr, so you should be + able to pipe output straight to a shell to run the fixes. verbose=# Specify a number 0-5 to restrict errors to certain verbosity levels. + Errors with lower verbosity levels have lower confidence and are more + likely to be false positives. quiet Don't print anything if no errors are found. @@ -99,11 +128,11 @@ Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...] error messages whose category names pass the filters will be printed. (Category names are printed with the message and look like "[whitespace/indent]".) Filters are evaluated left to right. - "-FOO" and "FOO" means "do not print categories that start with FOO". + "-FOO" means "do not print categories that start with FOO". "+FOO" means "do print categories that start with FOO". 
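The `--filter` flag documented above is a left-to-right prefix match: each `-FOO`/`+FOO` entry overrides earlier entries for every category it prefixes, and unmatched categories stay enabled. A small sketch of those documented semantics (this mirrors the help text, not cpplint's internal _ShouldPrintError implementation):

```python
def should_print(category, filters):
    """Apply cpplint-style filters left to right; the last matching entry wins."""
    result = True                    # categories are printed by default
    for flt in filters:
        sign, prefix = flt[0], flt[1:]
        if category.startswith(prefix):
            result = (sign == '+')   # '+' re-enables, '-' suppresses
    return result

filters = ['-whitespace', '+whitespace/braces']
print(should_print('whitespace/indent', filters))  # False: suppressed by -whitespace
print(should_print('whitespace/braces', filters))  # True: re-enabled by the later + entry
print(should_print('build/include', filters))      # True: no filter matches it
```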
Examples: --filter=-whitespace,+whitespace/braces - --filter=whitespace,runtime/printf,+runtime/printf_format + --filter=-whitespace,-runtime/printf,+runtime/printf_format --filter=-,+build/include_what_you_use To see a list of all the categories used in cpplint, pass no arg: @@ -116,17 +145,41 @@ Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...] also be printed. If 'detailed' is provided, then a count is provided for each category like 'build/class'. + repository=path + The top level directory of the repository, used to derive the header + guard CPP variable. By default, this is determined by searching for a + path that contains .git, .hg, or .svn. When this flag is specified, the + given path is used instead. This option allows the header guard CPP + variable to remain consistent even if members of a team have different + repository root directories (such as when checking out a subdirectory + with SVN). In addition, users of non-mainstream version control systems + can use this flag to ensure readable header guard CPP variables. + + Examples: + Assuming that Alice checks out ProjectName and Bob checks out + ProjectName/trunk and trunk contains src/chrome/ui/browser.h, then + with no --repository flag, the header guard CPP variable will be: + + Alice => TRUNK_SRC_CHROME_BROWSER_UI_BROWSER_H_ + Bob => SRC_CHROME_BROWSER_UI_BROWSER_H_ + + If Alice uses the --repository=trunk flag and Bob omits the flag or + uses --repository=. then the header guard CPP variable will be: + + Alice => SRC_CHROME_BROWSER_UI_BROWSER_H_ + Bob => SRC_CHROME_BROWSER_UI_BROWSER_H_ + root=subdir The root directory used for deriving header guard CPP variable. - By default, the header guard CPP variable is calculated as the relative - path to the directory that contains .git, .hg, or .svn. When this flag - is specified, the relative path is calculated from the specified - directory. If the specified directory does not exist, this flag is - ignored. + This directory is relative to the top level directory of the repository + which by default is determined by searching for a directory that contains + .git, .hg, or .svn but can also be controlled with the --repository flag. + If the specified directory does not exist, this flag is ignored. Examples: - Assuming that top/src/.git exists (and cwd=top/src), the header guard - CPP variables for top/src/chrome/browser/ui/browser.h are: + Assuming that src is the top level directory of the repository (and + cwd=top/src), the header guard CPP variables for + src/chrome/browser/ui/browser.h are: No flag => CHROME_BROWSER_UI_BROWSER_H_ --root=chrome => BROWSER_UI_BROWSER_H_ @@ -140,17 +193,45 @@ Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...] Examples: --linelength=120 + recursive + Search for files to lint recursively. Each directory given in the list + of files to be linted is replaced by all files that descend from that + directory. Files with extensions not in the valid extensions list are + excluded. + + exclude=path + Exclude the given path from the list of files to be linted. Relative + paths are evaluated relative to the current directory and shell globbing + is performed. This flag can be provided multiple times to exclude + multiple files. + + Examples: + --exclude=one.cc + --exclude=src/*.cc + --exclude=src/*.cc --exclude=test/*.cc + extensions=extension,extension,... 
The allowed file extensions that cpplint will check Examples: - --extensions=hpp,cpp + --extensions=%s + + includeorder=default|standardcfirst + For the build/include_order rule, the default is to blindly assume angle + bracket includes with file extension are c-system-headers (default), + even knowing this will have false classifications. + The default is established at google. + standardcfirst means to instead use an allow-list of known c headers and + treat all others as separate group of "other system headers". The C headers + included are those of the C-standard lib and closely related ones. headers=x,y,... The header extensions that cpplint will treat as .h in checks. Values are automatically added to --extensions list. + (by default, only files with extensions %s will be assumed to be headers) Examples: + --headers=%s --headers=hpp,hxx --headers=hpp @@ -175,7 +256,7 @@ Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...] "exclude_files" allows to specify a regular expression to be matched against a file name. If the expression matches, the file is skipped and not run - through liner. + through the linter. "linelength" allows to specify the allowed line length for the project. @@ -190,7 +271,7 @@ Syntax: cpplint.py [--verbose=#] [--output=vs7] [--filter=-x,+y,...] Example file: filter=-build/include_order,+build/include_alpha - exclude_files=.*\.cc + exclude_files=.*\\.cc The above example disables build/include_order warning and enables build/include_alpha as well as excludes all .cc from being @@ -213,9 +294,12 @@ _ERROR_CATEGORIES = [ 'build/forward_decl', 'build/header_guard', 'build/include', + 'build/include_subdir', 'build/include_alpha', 'build/include_order', 'build/include_what_you_use', + 'build/namespaces_headers', + 'build/namespaces_literals', 'build/namespaces', 'build/printf_format', 'build/storage_class', @@ -271,6 +355,13 @@ _ERROR_CATEGORIES = [ 'whitespace/todo', ] +# keywords to use with --outputs which generate stdout for machine processing +_MACHINE_OUTPUTS = [ + 'junit', + 'sed', + 'gsed' +] + # These error categories are no longer enforced by cpplint, but for backwards- # compatibility they may still appear in NOLINT comments. 
_LEGACY_ERROR_CATEGORIES = [ @@ -406,6 +497,18 @@ _CPP_HEADERS = frozenset([ 'utility', 'valarray', 'vector', + # 17.6.1.2 C++14 headers + 'shared_mutex', + # 17.6.1.2 C++17 headers + 'any', + 'charconv', + 'codecvt', + 'execution', + 'filesystem', + 'memory_resource', + 'optional', + 'string_view', + 'variant', # 17.6.1.2 C++ headers for C library facilities 'cassert', 'ccomplex', @@ -435,6 +538,186 @@ _CPP_HEADERS = frozenset([ 'cwctype', ]) +# C headers +_C_HEADERS = frozenset([ + # System C headers + 'assert.h', + 'complex.h', + 'ctype.h', + 'errno.h', + 'fenv.h', + 'float.h', + 'inttypes.h', + 'iso646.h', + 'limits.h', + 'locale.h', + 'math.h', + 'setjmp.h', + 'signal.h', + 'stdalign.h', + 'stdarg.h', + 'stdatomic.h', + 'stdbool.h', + 'stddef.h', + 'stdint.h', + 'stdio.h', + 'stdlib.h', + 'stdnoreturn.h', + 'string.h', + 'tgmath.h', + 'threads.h', + 'time.h', + 'uchar.h', + 'wchar.h', + 'wctype.h', + # additional POSIX C headers + 'aio.h', + 'arpa/inet.h', + 'cpio.h', + 'dirent.h', + 'dlfcn.h', + 'fcntl.h', + 'fmtmsg.h', + 'fnmatch.h', + 'ftw.h', + 'glob.h', + 'grp.h', + 'iconv.h', + 'langinfo.h', + 'libgen.h', + 'monetary.h', + 'mqueue.h', + 'ndbm.h', + 'net/if.h', + 'netdb.h', + 'netinet/in.h', + 'netinet/tcp.h', + 'nl_types.h', + 'poll.h', + 'pthread.h', + 'pwd.h', + 'regex.h', + 'sched.h', + 'search.h', + 'semaphore.h', + 'setjmp.h', + 'signal.h', + 'spawn.h', + 'strings.h', + 'stropts.h', + 'syslog.h', + 'tar.h', + 'termios.h', + 'trace.h', + 'ulimit.h', + 'unistd.h', + 'utime.h', + 'utmpx.h', + 'wordexp.h', + # additional GNUlib headers + 'a.out.h', + 'aliases.h', + 'alloca.h', + 'ar.h', + 'argp.h', + 'argz.h', + 'byteswap.h', + 'crypt.h', + 'endian.h', + 'envz.h', + 'err.h', + 'error.h', + 'execinfo.h', + 'fpu_control.h', + 'fstab.h', + 'fts.h', + 'getopt.h', + 'gshadow.h', + 'ieee754.h', + 'ifaddrs.h', + 'libintl.h', + 'mcheck.h', + 'mntent.h', + 'obstack.h', + 'paths.h', + 'printf.h', + 'pty.h', + 'resolv.h', + 'shadow.h', + 'sysexits.h', + 'ttyent.h', + # Additional linux glibc headers + 'dlfcn.h', + 'elf.h', + 'features.h', + 'gconv.h', + 'gnu-versions.h', + 'lastlog.h', + 'libio.h', + 'link.h', + 'malloc.h', + 'memory.h', + 'netash/ash.h', + 'netatalk/at.h', + 'netax25/ax25.h', + 'neteconet/ec.h', + 'netipx/ipx.h', + 'netiucv/iucv.h', + 'netpacket/packet.h', + 'netrom/netrom.h', + 'netrose/rose.h', + 'nfs/nfs.h', + 'nl_types.h', + 'nss.h', + 're_comp.h', + 'regexp.h', + 'sched.h', + 'sgtty.h', + 'stab.h', + 'stdc-predef.h', + 'stdio_ext.h', + 'syscall.h', + 'termio.h', + 'thread_db.h', + 'ucontext.h', + 'ustat.h', + 'utmp.h', + 'values.h', + 'wait.h', + 'xlocale.h', + # Hardware specific headers + 'arm_neon.h', + 'emmintrin.h', + 'xmmintin.h', + ]) + +# Folders of C libraries so commonly used in C++, +# that they have parity with standard C libraries. 
+C_STANDARD_HEADER_FOLDERS = frozenset([ + # standard C library + "sys", + # glibc for linux + "arpa", + "asm-generic", + "bits", + "gnu", + "net", + "netinet", + "protocols", + "rpc", + "rpcsvc", + "scsi", + # linux kernel header + "drm", + "linux", + "misc", + "mtd", + "rdma", + "sound", + "video", + "xen", + ]) + # Type names _TYPES = re.compile( r'^(?:' @@ -458,7 +741,8 @@ _THIRD_PARTY_HEADERS_PATTERN = re.compile( r'^(?:[^/]*[A-Z][^/]*\.h|lua\.h|lauxlib\.h|lualib\.h)$') # Pattern for matching FileInfo.BaseName() against test file name -_TEST_FILE_SUFFIX = r'(_test|_unittest|_regtest)$' +_test_suffixes = ['_test', '_regtest', '_unittest'] +_TEST_FILE_SUFFIX = '(' + '|'.join(_test_suffixes) + r')$' # Pattern that matches only complete whitespace, possibly across multiple lines. _EMPTY_CONDITIONAL_BODY_PATTERN = re.compile(r'^\s*$', re.DOTALL) @@ -472,7 +756,7 @@ _CHECK_MACROS = [ ] # Replacement macros for CHECK/DCHECK/EXPECT_TRUE/EXPECT_FALSE -_CHECK_REPLACEMENT = dict([(m, {}) for m in _CHECK_MACROS]) +_CHECK_REPLACEMENT = dict([(macro_var, {}) for macro_var in _CHECK_MACROS]) for op, replacement in [('==', 'EQ'), ('!=', 'NE'), ('>=', 'GE'), ('>', 'GT'), @@ -520,9 +804,10 @@ _ALT_TOKEN_REPLACEMENT_PATTERN = re.compile( # _IncludeState.CheckNextIncludeOrder(). _C_SYS_HEADER = 1 _CPP_SYS_HEADER = 2 -_LIKELY_MY_HEADER = 3 -_POSSIBLE_MY_HEADER = 4 -_OTHER_HEADER = 5 +_OTHER_SYS_HEADER = 3 +_LIKELY_MY_HEADER = 4 +_POSSIBLE_MY_HEADER = 5 +_OTHER_HEADER = 6 # These constants define the current inline assembly state _NO_ASM = 0 # Outside of inline assembly block @@ -542,6 +827,22 @@ _SEARCH_C_FILE = re.compile(r'\b(?:LINT_C_FILE|' # Match string that indicates we're working on a Linux Kernel file. _SEARCH_KERNEL_FILE = re.compile(r'\b(?:LINT_KERNEL_FILE)') +# Commands for sed to fix the problem +_SED_FIXUPS = { + 'Remove spaces around =': r's/ = /=/', + 'Remove spaces around !=': r's/ != /!=/', + 'Remove space before ( in if (': r's/if (/if(/', + 'Remove space before ( in for (': r's/for (/for(/', + 'Remove space before ( in while (': r's/while (/while(/', + 'Remove space before ( in switch (': r's/switch (/switch(/', + 'Should have a space between // and comment': r's/\/\//\/\/ /', + 'Missing space before {': r's/\([^ ]\){/\1 {/', + 'Tab found, replace by spaces': r's/\t/ /g', + 'Line ends in whitespace. Consider deleting these extra spaces.': r's/\s*$//', + 'You don\'t need a ; after a }': r's/};/}/', + 'Missing space after ,': r's/,\([^ ]\)/, \1/g', +} + _regexp_compile_cache = {} # {str, set(int)}: a map from error categories to sets of linenumbers @@ -553,17 +854,55 @@ _error_suppressions = {} _root = None _root_debug = False +# The top level repository directory. If set, _root is calculated relative to +# this directory instead of the directory containing version control artifacts. +# This is set by the --repository flag. +_repository = None + +# Files to exclude from linting. This is set by the --exclude flag. +_excludes = None + +# Whether to supress all PrintInfo messages, UNRELATED to --quiet flag +_quiet = False + # The allowed line length of files. # This is set by --linelength flag. _line_length = 80 -# The allowed extensions for file names -# This is set by --extensions flag. 
-_valid_extensions = set(['cc', 'h', 'cpp', 'cu', 'cuh']) +# This allows to use different include order rule than default +_include_order = "default" + +try: + unicode +except NameError: + # -- pylint: disable=redefined-builtin + basestring = unicode = str + +try: + long +except NameError: + # -- pylint: disable=redefined-builtin + long = int + +if sys.version_info < (3,): + # -- pylint: disable=no-member + # BINARY_TYPE = str + itervalues = dict.itervalues + iteritems = dict.iteritems +else: + # BINARY_TYPE = bytes + itervalues = dict.values + iteritems = dict.items + +def unicode_escape_decode(x): + if sys.version_info < (3,): + return codecs.unicode_escape_decode(x)[0] + else: + return x # Treat all headers starting with 'h' equally: .h, .hpp, .hxx etc. # This is set by --headers flag. -_hpp_headers = set(['h']) +_hpp_headers = set([]) # {str, bool}: a map from error categories to booleans which indicate if the # category should be suppressed for every line. @@ -572,14 +911,47 @@ _global_error_suppressions = {} def ProcessHppHeadersOption(val): global _hpp_headers try: - _hpp_headers = set(val.split(',')) - # Automatically append to extensions list so it does not have to be set 2 times - _valid_extensions.update(_hpp_headers) + _hpp_headers = {ext.strip() for ext in val.split(',')} except ValueError: PrintUsage('Header extensions must be comma separated list.') +def ProcessIncludeOrderOption(val): + if val is None or val == "default": + pass + elif val == "standardcfirst": + global _include_order + _include_order = val + else: + PrintUsage('Invalid includeorder value %s. Expected default|standardcfirst') + def IsHeaderExtension(file_extension): - return file_extension in _hpp_headers + return file_extension in GetHeaderExtensions() + +def GetHeaderExtensions(): + if _hpp_headers: + return _hpp_headers + if _valid_extensions: + return {h for h in _valid_extensions if 'h' in h} + return set(['h', 'hh', 'hpp', 'hxx', 'h++', 'cuh']) + +# The allowed extensions for file names +# This is set by --extensions flag +def GetAllExtensions(): + return GetHeaderExtensions().union(_valid_extensions or set( + ['c', 'cc', 'cpp', 'cxx', 'c++', 'cu'])) + +def ProcessExtensionsOption(val): + global _valid_extensions + try: + extensions = [ext.strip() for ext in val.split(',')] + _valid_extensions = set(extensions) + except ValueError: + PrintUsage('Extensions should be a comma-separated list of values;' + 'for example: extensions=hpp,cpp\n' + 'This could not be parsed: "%s"' % (val,)) + +def GetNonHeaderExtensions(): + return GetAllExtensions().difference(GetHeaderExtensions()) def ParseNolintSuppressions(filename, raw_line, linenum, error): """Updates the global list of line error-suppressions. 
@@ -692,7 +1064,7 @@ def Search(pattern, s): def _IsSourceExtension(s): """File extension (excluding dot) matches a source file extension.""" - return s in ('c', 'cc', 'cpp', 'cxx') + return s in GetNonHeaderExtensions() class _IncludeState(object): @@ -713,11 +1085,13 @@ class _IncludeState(object): _MY_H_SECTION = 1 _C_SECTION = 2 _CPP_SECTION = 3 - _OTHER_H_SECTION = 4 + _OTHER_SYS_SECTION = 4 + _OTHER_H_SECTION = 5 _TYPE_NAMES = { _C_SYS_HEADER: 'C system header', _CPP_SYS_HEADER: 'C++ system header', + _OTHER_SYS_HEADER: 'other system header', _LIKELY_MY_HEADER: 'header this file implements', _POSSIBLE_MY_HEADER: 'header this file may implement', _OTHER_HEADER: 'other header', @@ -727,11 +1101,14 @@ class _IncludeState(object): _MY_H_SECTION: 'a header this file implements', _C_SECTION: 'C system header', _CPP_SECTION: 'C++ system header', + _OTHER_SYS_SECTION: 'other system header', _OTHER_H_SECTION: 'other header', } def __init__(self): self.include_list = [[]] + self._section = None + self._last_header = None self.ResetSection('') def FindHeader(self, header): @@ -838,6 +1215,12 @@ class _IncludeState(object): else: self._last_header = '' return error_message + elif header_type == _OTHER_SYS_HEADER: + if self._section <= self._OTHER_SYS_SECTION: + self._section = self._OTHER_SYS_SECTION + else: + self._last_header = '' + return error_message elif header_type == _LIKELY_MY_HEADER: if self._section <= self._MY_H_SECTION: self._section = self._MY_H_SECTION @@ -876,9 +1259,18 @@ class _CppLintState(object): # output format: # "emacs" - format that emacs can parse (default) + # "eclipse" - format that eclipse can parse # "vs7" - format that Microsoft Visual Studio 7 can parse + # "junit" - format that Jenkins, Bamboo, etc can parse + # "sed" - returns a gnu sed command to fix the problem + # "gsed" - like sed, but names the command gsed, e.g. for macOS homebrew users self.output_format = 'emacs' + # For JUnit output, save errors and failures until the end so that they + # can be written into the XML + self._junit_errors = [] + self._junit_failures = [] + def SetOutputFormat(self, output_format): """Sets the output format for errors.""" self.output_format = output_format @@ -953,10 +1345,71 @@ class _CppLintState(object): def PrintErrorCounts(self): """Print a summary of errors by category, and the total.""" - for category, count in self.errors_by_category.iteritems(): - sys.stderr.write('Category \'%s\' errors found: %d\n' % + for category, count in sorted(iteritems(self.errors_by_category)): + self.PrintInfo('Category \'%s\' errors found: %d\n' % (category, count)) - sys.stdout.write('Total errors found: %d\n' % self.error_count) + if self.error_count > 0: + self.PrintInfo('Total errors found: %d\n' % self.error_count) + + def PrintInfo(self, message): + # _quiet does not represent --quiet flag. 
+ # Hide infos from stdout to keep stdout pure for machine consumption + if not _quiet and self.output_format not in _MACHINE_OUTPUTS: + sys.stdout.write(message) + + def PrintError(self, message): + if self.output_format == 'junit': + self._junit_errors.append(message) + else: + sys.stderr.write(message) + + def AddJUnitFailure(self, filename, linenum, message, category, confidence): + self._junit_failures.append((filename, linenum, message, category, + confidence)) + + def FormatJUnitXML(self): + num_errors = len(self._junit_errors) + num_failures = len(self._junit_failures) + + testsuite = xml.etree.ElementTree.Element('testsuite') + testsuite.attrib['errors'] = str(num_errors) + testsuite.attrib['failures'] = str(num_failures) + testsuite.attrib['name'] = 'cpplint' + + if num_errors == 0 and num_failures == 0: + testsuite.attrib['tests'] = str(1) + xml.etree.ElementTree.SubElement(testsuite, 'testcase', name='passed') + + else: + testsuite.attrib['tests'] = str(num_errors + num_failures) + if num_errors > 0: + testcase = xml.etree.ElementTree.SubElement(testsuite, 'testcase') + testcase.attrib['name'] = 'errors' + error = xml.etree.ElementTree.SubElement(testcase, 'error') + error.text = '\n'.join(self._junit_errors) + if num_failures > 0: + # Group failures by file + failed_file_order = [] + failures_by_file = {} + for failure in self._junit_failures: + failed_file = failure[0] + if failed_file not in failed_file_order: + failed_file_order.append(failed_file) + failures_by_file[failed_file] = [] + failures_by_file[failed_file].append(failure) + # Create a testcase for each file + for failed_file in failed_file_order: + failures = failures_by_file[failed_file] + testcase = xml.etree.ElementTree.SubElement(testsuite, 'testcase') + testcase.attrib['name'] = failed_file + failure = xml.etree.ElementTree.SubElement(testcase, 'failure') + template = '{0}: {1} [{2}] [{3}]' + texts = [template.format(f[1], f[2], f[3], f[4]) for f in failures] + failure.text = '\n'.join(texts) + + xml_decl = '<?xml version="1.0" encoding="UTF-8" ?>\n' + return xml_decl + xml.etree.ElementTree.tostring(testsuite, 'utf-8').decode('utf-8') + _cpplint_state = _CppLintState() @@ -1110,12 +1563,12 @@ class FileInfo(object): return os.path.abspath(self._filename).replace('\\', '/') def RepositoryName(self): - """FullName after removing the local path to the repository. + r"""FullName after removing the local path to the repository. If we have a real absolute path name here we can try to do something smart: detecting the root of the checkout and truncating /path/to/checkout from the name so that we get header guards that don't include things like - "C:\Documents and Settings\..." or "/home/username/..." in them and thus + "C:\\Documents and Settings\\..." or "/home/username/..." in them and thus people on different computers who have checked the source out to different locations won't see bogus errors. 
""" @@ -1124,6 +1577,20 @@ class FileInfo(object): if os.path.exists(fullname): project_dir = os.path.dirname(fullname) + # If the user specified a repository path, it exists, and the file is + # contained in it, use the specified repository path + if _repository: + repo = FileInfo(_repository).FullName() + root_dir = project_dir + while os.path.exists(root_dir): + # allow case insensitive compare on Windows + if os.path.normcase(root_dir) == os.path.normcase(repo): + return os.path.relpath(fullname, root_dir).replace('\\', '/') + one_up_dir = os.path.dirname(root_dir) + if one_up_dir == root_dir: + break + root_dir = one_up_dir + if os.path.exists(os.path.join(project_dir, ".svn")): # If there's a .svn file in the current directory, we recursively look # up the directory tree for the top of the SVN checkout @@ -1174,7 +1641,7 @@ class FileInfo(object): return self.Split()[1] def Extension(self): - """File extension - text following the final period.""" + """File extension - text following the final period, includes that period.""" return self.Split()[2] def NoExtension(self): @@ -1239,15 +1706,25 @@ def Error(filename, linenum, category, confidence, message): if _ShouldPrintError(category, confidence, linenum): _cpplint_state.IncrementErrorCount(category) if _cpplint_state.output_format == 'vs7': - sys.stderr.write('%s(%s): error cpplint: [%s] %s [%d]\n' % ( + _cpplint_state.PrintError('%s(%s): error cpplint: [%s] %s [%d]\n' % ( filename, linenum, category, message, confidence)) elif _cpplint_state.output_format == 'eclipse': sys.stderr.write('%s:%s: warning: %s [%s] [%d]\n' % ( filename, linenum, message, category, confidence)) + elif _cpplint_state.output_format == 'junit': + _cpplint_state.AddJUnitFailure(filename, linenum, message, category, + confidence) + elif _cpplint_state.output_format in ['sed', 'gsed']: + if message in _SED_FIXUPS: + sys.stdout.write(_cpplint_state.output_format + " -i '%s%s' %s # %s [%s] [%d]\n" % ( + linenum, _SED_FIXUPS[message], filename, message, category, confidence)) + else: + sys.stderr.write('# %s:%s: "%s" [%s] [%d]\n' % ( + filename, linenum, message, category, confidence)) else: - sys.stderr.write('%s:%s: %s [%s] [%d]\n' % ( - filename, linenum, message, category, confidence)) - + final_message = '%s:%s: %s [%s] [%d]\n' % ( + filename, linenum, message, category, confidence) + sys.stderr.write(final_message) # Matches standard C++ escape sequences per 2.13.2.3 of the C++ standard. _RE_PATTERN_CLEANSE_LINE_ESCAPES = re.compile( @@ -1794,10 +2271,10 @@ def PathSplitToList(path): lst = [] while True: (head, tail) = os.path.split(path) - if head == path: # absolute paths end + if head == path: # absolute paths end lst.append(head) break - if tail == path: # relative paths end + if tail == path: # relative paths end lst.append(tail) break @@ -1832,7 +2309,7 @@ def GetHeaderGuardCPPVariable(filename): def FixupPathFromRoot(): if _root_debug: sys.stderr.write("\n_root fixup, _root = '%s', repository name = '%s'\n" - %(_root, fileinfo.RepositoryName())) + % (_root, fileinfo.RepositoryName())) # Process the file path with the --root flag if it was set. if not _root: @@ -1854,27 +2331,28 @@ def GetHeaderGuardCPPVariable(filename): if _root_debug: sys.stderr.write(("_root lstrip (maybe_path=%s, file_path_from_root=%s," + - " _root=%s)\n") %(maybe_path, file_path_from_root, _root)) + " _root=%s)\n") % (maybe_path, file_path_from_root, _root)) if maybe_path: return os.path.join(*maybe_path) # --root=.. 
, will prepend the outer directory to the header guard full_path = fileinfo.FullName() - root_abspath = os.path.abspath(_root) + # adapt slashes for windows + root_abspath = os.path.abspath(_root).replace('\\', '/') maybe_path = StripListPrefix(PathSplitToList(full_path), PathSplitToList(root_abspath)) if _root_debug: sys.stderr.write(("_root prepend (maybe_path=%s, full_path=%s, " + - "root_abspath=%s)\n") %(maybe_path, full_path, root_abspath)) + "root_abspath=%s)\n") % (maybe_path, full_path, root_abspath)) if maybe_path: return os.path.join(*maybe_path) if _root_debug: - sys.stderr.write("_root ignore, returning %s\n" %(file_path_from_root)) + sys.stderr.write("_root ignore, returning %s\n" % (file_path_from_root)) # --root=FAKE_DIR is ignored return file_path_from_root @@ -1906,6 +2384,11 @@ def CheckForHeaderGuard(filename, clean_lines, error): if Search(r'//\s*NOLINT\(build/header_guard\)', i): return + # Allow pragma once instead of header guards + for i in raw_lines: + if Search(r'^\s*#pragma\s+once', i): + return + cppvar = GetHeaderGuardCPPVariable(filename) ifndef = '' @@ -1982,28 +2465,36 @@ def CheckForHeaderGuard(filename, clean_lines, error): def CheckHeaderFileIncluded(filename, include_state, error): - """Logs an error if a .cc file does not include its header.""" + """Logs an error if a source file does not include its header.""" # Do not check test files fileinfo = FileInfo(filename) if Search(_TEST_FILE_SUFFIX, fileinfo.BaseName()): return - headerfile = filename[0:len(filename) - len(fileinfo.Extension())] + '.h' - if not os.path.exists(headerfile): - return - headername = FileInfo(headerfile).RepositoryName() - first_include = 0 - for section_list in include_state.include_list: - for f in section_list: - if headername in f[0] or f[0] in headername: - return - if not first_include: - first_include = f[1] + for ext in GetHeaderExtensions(): + basefilename = filename[0:len(filename) - len(fileinfo.Extension())] + headerfile = basefilename + '.' + ext + if not os.path.exists(headerfile): + continue + headername = FileInfo(headerfile).RepositoryName() + first_include = None + include_uses_unix_dir_aliases = False + for section_list in include_state.include_list: + for f in section_list: + include_text = f[0] + if "./" in include_text: + include_uses_unix_dir_aliases = True + if headername in include_text or include_text in headername: + return + if not first_include: + first_include = f[1] + + message = '%s should include its header file %s' % (fileinfo.RepositoryName(), headername) + if include_uses_unix_dir_aliases: + message += ". Relative paths like . and .. are not allowed." - error(filename, first_include, 'build/include', 5, - '%s should include its header file %s' % (fileinfo.RepositoryName(), - headername)) + error(filename, first_include, 'build/include', 5, message) def CheckForBadCharacters(filename, lines, error): @@ -2024,7 +2515,7 @@ def CheckForBadCharacters(filename, lines, error): error: The function to call with any errors found. """ for linenum, line in enumerate(lines): - if u'\ufffd' in line: + if unicode_escape_decode('\ufffd') in line: error(filename, linenum, 'readability/utf8', 5, 'Line contains invalid UTF-8 (or Unicode replacement character).') if '\0' in line: @@ -2653,8 +3144,8 @@ class NestingState(object): # class LOCKABLE API Object { # }; class_decl_match = Match( - r'^(\s*(?:template\s*<[\w\s<>,:]*>\s*)?' - r'(class|struct)\s+(?:[A-Z_]+\s+)*(\w+(?:::\w+)*))' + r'^(\s*(?:template\s*<[\w\s<>,:=]*>\s*)?' 
+ r'(class|struct)\s+(?:[a-zA-Z0-9_]+\s+)*(\w+(?:::\w+)*))' r'(.*)$', line) if (class_decl_match and (not self.stack or self.stack[-1].open_parentheses == 0)): @@ -2902,6 +3393,7 @@ def CheckForNonStandardConstructs(filename, clean_lines, linenum, constructor_args[i] = constructor_arg i += 1 + variadic_args = [arg for arg in constructor_args if '&&...' in arg] defaulted_args = [arg for arg in constructor_args if '=' in arg] noarg_constructor = (not constructor_args or # empty arg list # 'void' arg specifier @@ -2912,20 +3404,24 @@ def CheckForNonStandardConstructs(filename, clean_lines, linenum, # all but at most one arg defaulted (len(constructor_args) >= 1 and not noarg_constructor and - len(defaulted_args) >= len(constructor_args) - 1)) + len(defaulted_args) >= len(constructor_args) - 1) or + # variadic arguments with zero or one argument + (len(constructor_args) <= 2 and + len(variadic_args) >= 1)) initializer_list_constructor = bool( onearg_constructor and Search(r'\bstd\s*::\s*initializer_list\b', constructor_args[0])) copy_constructor = bool( onearg_constructor and - Match(r'(const\s+)?%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&' + Match(r'((const\s+(volatile\s+)?)?|(volatile\s+(const\s+)?))?' + r'%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&' % re.escape(base_classname), constructor_args[0].strip())) if (not is_marked_explicit and onearg_constructor and not initializer_list_constructor and not copy_constructor): - if defaulted_args: + if defaulted_args or variadic_args: error(filename, linenum, 'runtime/explicit', 5, 'Constructors callable with one argument ' 'should be marked explicit.') @@ -2977,7 +3473,7 @@ def CheckSpacingForFunctionCall(filename, clean_lines, linenum, error): # Note that we assume the contents of [] to be short enough that # they'll never need to wrap. if ( # Ignore control structures. - not Search(r'\b(if|for|while|switch|return|new|delete|catch|sizeof)\b', + not Search(r'\b(if|elif|for|while|switch|return|new|delete|catch|sizeof)\b', fncall) and # Ignore pointers/references to functions. not Search(r' \([^)]+\)\([^)]*(\)|,$)', fncall) and @@ -3090,7 +3586,7 @@ def CheckForFunctionLengths(filename, clean_lines, linenum, if Search(r'(;|})', start_line): # Declarations and trivial functions body_found = True break # ... ignore - elif Search(r'{', start_line): + if Search(r'{', start_line): body_found = True function = Search(r'((\w|:)*)\(', line).group(1) if Match(r'TEST', function): # Handle TEST... macros @@ -3283,9 +3779,10 @@ def CheckSpacing(filename, clean_lines, linenum, nesting_state, error): # get rid of comments and strings line = clean_lines.elided[linenum] - # You shouldn't have spaces before your brackets, except maybe after - # 'delete []', 'return []() {};', or 'auto [abc, ...] = ...;'. - if Search(r'\w\s+\[', line) and not Search(r'(?:auto&?|delete|return)\s+\[', line): + # You shouldn't have spaces before your brackets, except for C++11 attributes + # or maybe after 'delete []', 'return []() {};', or 'auto [abc, ...] = ...;'. + if (Search(r'\w\s+\[(?!\[)', line) and + not Search(r'(?:auto&?|delete|return)\s+\[', line)): error(filename, linenum, 'whitespace/braces', 5, 'Extra space before [') @@ -3655,7 +4152,6 @@ def IsDecltype(clean_lines, linenum, column): return True return False - def CheckSectionSpacing(filename, clean_lines, class_info, linenum, error): """Checks for additional blank line issues related to sections. 
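
The two behavior changes in the hunks above (accepting #pragma once in place of a classic #ifndef include guard, and extending the runtime/explicit check to constructors callable with a single variadic argument) can be exercised through ProcessFileData(), whose updated signature appears later in this diff. The sketch below is illustrative only: the header name, the sample lines, and the collect() callback are invented, while the callback's parameter list mirrors cpplint's own Error() function.

import cpplint

findings = []
def collect(filename, linenum, category, confidence, message):
    # Same parameter list as cpplint's Error(); gather every reported issue.
    findings.append('%s:%d: %s [%s] [%d]' %
                    (filename, linenum, message, category, confidence))

header_lines = [
    '#pragma once',               # now accepted instead of an #ifndef guard
    'class Widget {',
    ' public:',
    '  template <typename... Args>',
    '  Widget(Args&&... args);',  # callable with one argument, not marked explicit
    '};',
    '',
]
cpplint.ProcessFileData('widget.h', 'h', header_lines, collect)
# Expect a runtime/explicit finding for the variadic constructor (plus
# unrelated ones such as legal/copyright for this bare snippet), but no
# build/header_guard complaint thanks to the #pragma once allowance.
print('\n'.join(findings))
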
@@ -3804,11 +4300,11 @@ def CheckBraces(filename, clean_lines, linenum, error): # its line, and the line after that should have an indent level equal to or # lower than the if. We also check for ambiguous if/else nesting without # braces. - if_else_match = Search(r'\b(if\s*\(|else\b)', line) + if_else_match = Search(r'\b(if\s*(|constexpr)\s*\(|else\b)', line) if if_else_match and not Match(r'\s*#', line): if_indent = GetIndentLevel(line) endline, endlinenum, endpos = line, linenum, if_else_match.end() - if_match = Search(r'\bif\s*\(', line) + if_match = Search(r'\bif\s*(|constexpr)\s*\(', line) if if_match: # This could be a multiline if condition, so find the end first. pos = if_match.end() - 1 @@ -4073,12 +4569,12 @@ def CheckEmptyBlockBody(filename, clean_lines, linenum, error): return if closing_linenum > opening_linenum: # Opening line after the {. Ignore comments here since we checked above. - body = list(opening_line[opening_pos+1:]) + bodylist = list(opening_line[opening_pos+1:]) # All lines until closing line, excluding closing line, with comments. - body.extend(clean_lines.raw_lines[opening_linenum+1:closing_linenum]) + bodylist.extend(clean_lines.raw_lines[opening_linenum+1:closing_linenum]) # Closing line before the }. Won't (and can't) have comments. - body.append(clean_lines.elided[closing_linenum][:closing_pos-1]) - body = '\n'.join(body) + bodylist.append(clean_lines.elided[closing_linenum][:closing_pos-1]) + body = '\n'.join(bodylist) else: # If statement has brackets and fits on a single line. body = opening_line[opening_pos+1:closing_pos-1] @@ -4302,7 +4798,7 @@ def GetLineWidth(line): is_low_surrogate = 0xDC00 <= ord(uc) <= 0xDFFF if not is_wide_build and is_low_surrogate: width -= 1 - + width += 1 return width else: @@ -4350,7 +4846,7 @@ def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state, # if(match($0, " <<")) complain = 0; # if(match(prev, " +for \\(")) complain = 0; # if(prevodd && match(prevprev, " +for \\(")) complain = 0; - scope_or_label_pattern = r'\s*\w+\s*:\s*\\?$' + scope_or_label_pattern = r'\s*(?:public|private|protected|signals)(?:\s+(?:slots\s*)?)?:\s*\\?$' classinfo = nesting_state.InnermostClass() initial_spaces = 0 cleansed_line = clean_lines.elided[linenum] @@ -4390,16 +4886,23 @@ def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state, # # The "$Id:...$" comment may also get very long without it being the # developers fault. + # + # Doxygen documentation copying can get pretty long when using an overloaded + # function declaration if (not line.startswith('#include') and not is_header_guard and not Match(r'^\s*//.*http(s?)://\S*$', line) and not Match(r'^\s*//\s*[^\s]*$', line) and - not Match(r'^// \$Id:.*#[0-9]+ \$$', line)): + not Match(r'^// \$Id:.*#[0-9]+ \$$', line) and + not Match(r'^\s*/// [@\\](copydoc|copydetails|copybrief) .*$', line)): line_width = GetLineWidth(line) if line_width > _line_length: error(filename, linenum, 'whitespace/line_length', 2, 'Lines should be <= %i characters long' % _line_length) if (cleansed_line.count(';') > 1 and + # allow simple single line lambdas + not Match(r'^[^{};]*\[[^\[\]]*\][^{}]*\{[^{}\n\r]*\}', + line) and # for loops are allowed two ;'s (and may run over two lines). cleansed_line.find('for') == -1 and (GetPreviousNonBlankLine(clean_lines, linenum)[0].find('for') == -1 or @@ -4456,21 +4959,25 @@ def _DropCommonSuffixes(filename): Returns: The filename with the common suffix removed. 
""" - for suffix in ('test.cc', 'regtest.cc', 'unittest.cc', - 'inl.h', 'impl.h', 'internal.h'): + for suffix in itertools.chain( + ('%s.%s' % (test_suffix.lstrip('_'), ext) + for test_suffix, ext in itertools.product(_test_suffixes, GetNonHeaderExtensions())), + ('%s.%s' % (suffix, ext) + for suffix, ext in itertools.product(['inl', 'imp', 'internal'], GetHeaderExtensions()))): if (filename.endswith(suffix) and len(filename) > len(suffix) and filename[-len(suffix) - 1] in ('-', '_')): return filename[:-len(suffix) - 1] return os.path.splitext(filename)[0] -def _ClassifyInclude(fileinfo, include, is_system): +def _ClassifyInclude(fileinfo, include, used_angle_brackets, include_order="default"): """Figures out what kind of header 'include' is. Args: fileinfo: The current file cpplint is running over. A FileInfo instance. include: The path to a #included file. - is_system: True if the #include used <> rather than "". + used_angle_brackets: True if the #include used <> rather than "". + include_order: "default" or other value allowed in program arguments Returns: One of the _XXX_HEADER constants. @@ -4480,6 +4987,8 @@ def _ClassifyInclude(fileinfo, include, is_system): _C_SYS_HEADER >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True) _CPP_SYS_HEADER + >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', True, "standardcfirst") + _OTHER_SYS_HEADER >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False) _LIKELY_MY_HEADER >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'), @@ -4490,13 +4999,23 @@ def _ClassifyInclude(fileinfo, include, is_system): """ # This is a list of all standard c++ header files, except # those already checked for above. - is_cpp_h = include in _CPP_HEADERS + is_cpp_header = include in _CPP_HEADERS + + # Mark include as C header if in list or in a known folder for standard-ish C headers. + is_std_c_header = (include_order == "default") or (include in _C_HEADERS + # additional linux glibc header folders + or Search(r'(?:%s)\/.*\.h' % "|".join(C_STANDARD_HEADER_FOLDERS), include)) + + # Headers with C++ extensions shouldn't be considered C system headers + is_system = used_angle_brackets and not os.path.splitext(include)[1] in ['.hpp', '.hxx', '.h++'] if is_system: - if is_cpp_h: + if is_cpp_header: return _CPP_SYS_HEADER - else: + if is_std_c_header: return _C_SYS_HEADER + else: + return _OTHER_SYS_HEADER # If the target file and the include we're checking share a # basename when we drop common extensions, and the include @@ -4504,9 +5023,11 @@ def _ClassifyInclude(fileinfo, include, is_system): target_dir, target_base = ( os.path.split(_DropCommonSuffixes(fileinfo.RepositoryName()))) include_dir, include_base = os.path.split(_DropCommonSuffixes(include)) + target_dir_pub = os.path.normpath(target_dir + '/../public') + target_dir_pub = target_dir_pub.replace('\\', '/') if target_base == include_base and ( include_dir == target_dir or - include_dir == os.path.normpath(target_dir + '/../public')): + include_dir == target_dir_pub): return _LIKELY_MY_HEADER # If the target and include share some initial basename @@ -4550,7 +5071,7 @@ def CheckIncludeLine(filename, clean_lines, linenum, include_state, error): # naming convention but not the include convention. 
match = Match(r'#include\s*"([^/]+\.h)"', line) if match and not _THIRD_PARTY_HEADERS_PATTERN.match(match.group(1)): - error(filename, linenum, 'build/include', 4, + error(filename, linenum, 'build/include_subdir', 4, 'Include the directory when naming .h files') # we shouldn't include a file more than once. actually, there are a @@ -4559,17 +5080,34 @@ def CheckIncludeLine(filename, clean_lines, linenum, include_state, error): match = _RE_PATTERN_INCLUDE.search(line) if match: include = match.group(2) - is_system = (match.group(1) == '<') + used_angle_brackets = (match.group(1) == '<') duplicate_line = include_state.FindHeader(include) if duplicate_line >= 0: error(filename, linenum, 'build/include', 4, '"%s" already included at %s:%s' % (include, filename, duplicate_line)) - elif (include.endswith('.cc') and + return + + for extension in GetNonHeaderExtensions(): + if (include.endswith('.' + extension) and os.path.dirname(fileinfo.RepositoryName()) != os.path.dirname(include)): - error(filename, linenum, 'build/include', 4, - 'Do not include .cc files from other packages') - elif not _THIRD_PARTY_HEADERS_PATTERN.match(include): + error(filename, linenum, 'build/include', 4, + 'Do not include .' + extension + ' files from other packages') + return + + # We DO want to include a 3rd party looking header if it matches the + # filename. Otherwise we get an erroneous error "...should include its + # header" error later. + third_src_header = False + for ext in GetHeaderExtensions(): + basefilename = filename[0:len(filename) - len(fileinfo.Extension())] + headerfile = basefilename + '.' + ext + headername = FileInfo(headerfile).RepositoryName() + if headername in include or include in headername: + third_src_header = True + break + + if third_src_header or not _THIRD_PARTY_HEADERS_PATTERN.match(include): include_state.include_list[-1].append((include, linenum)) # We want to ensure that headers appear in the right order: @@ -4584,7 +5122,7 @@ def CheckIncludeLine(filename, clean_lines, linenum, include_state, error): # track of the highest type seen, and complains if we see a # lower type after that. error_message = include_state.CheckNextIncludeOrder( - _ClassifyInclude(fileinfo, include, is_system)) + _ClassifyInclude(fileinfo, include, used_angle_brackets, _include_order)) if error_message: error(filename, linenum, 'build/include_order', 4, '%s. Should be: %s.h, c system, c++ system, other.' % @@ -4623,7 +5161,7 @@ def _GetTextInside(text, start_pattern): # Give opening punctuations to get the matching close-punctuations. matching_punctuation = {'(': ')', '{': '}', '[': ']'} - closing_punctuation = set(matching_punctuation.itervalues()) + closing_punctuation = set(itervalues(matching_punctuation)) # Find the position to start extracting text. match = re.search(start_pattern, text, re.M) @@ -4717,8 +5255,6 @@ def CheckLanguage(filename, clean_lines, linenum, file_extension, if match: include_state.ResetSection(match.group(1)) - # Make Windows paths like Unix. - fullname = os.path.abspath(filename).replace('\\', '/') # Perform other checks now that we are sure that this is not an include line CheckCasts(filename, clean_lines, linenum, error) @@ -4786,9 +5322,14 @@ def CheckLanguage(filename, clean_lines, linenum, file_extension, % (match.group(1), match.group(2))) if Search(r'\busing namespace\b', line): - error(filename, linenum, 'build/namespaces', 5, - 'Do not use namespace using-directives. 
' - 'Use using-declarations instead.') + if Search(r'\bliterals\b', line): + error(filename, linenum, 'build/namespaces_literals', 5, + 'Do not use namespace using-directives. ' + 'Use using-declarations instead.') + else: + error(filename, linenum, 'build/namespaces', 5, + 'Do not use namespace using-directives. ' + 'Use using-declarations instead.') # Detect variable-length arrays. match = Match(r'\s*(.+::)?(\w+) [a-z]\w*\[(.+)];', line) @@ -4835,7 +5376,7 @@ def CheckLanguage(filename, clean_lines, linenum, file_extension, if (IsHeaderExtension(file_extension) and Search(r'\bnamespace\s*{', line) and line[-1] != '\\'): - error(filename, linenum, 'build/namespaces', 4, + error(filename, linenum, 'build/namespaces_headers', 4, 'Do not use unnamed namespaces in header files. See ' 'https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces' ' for more information.') @@ -5212,7 +5753,7 @@ def CheckCasts(filename, clean_lines, linenum, error): if not expecting_function: CheckCStyleCast(filename, clean_lines, linenum, 'static_cast', - r'\((int|float|double|bool|char|u?int(16|32|64))\)', error) + r'\((int|float|double|bool|char|u?int(16|32|64)|size_t)\)', error) # This doesn't catch all cases. Consider (const char * const)"hello". # @@ -5365,11 +5906,11 @@ _HEADERS_CONTAINING_TEMPLATES = ( )), ('<limits>', ('numeric_limits',)), ('<list>', ('list',)), - ('<map>', ('map', 'multimap',)), + ('<map>', ('multimap',)), ('<memory>', ('allocator', 'make_shared', 'make_unique', 'shared_ptr', 'unique_ptr', 'weak_ptr')), ('<queue>', ('queue', 'priority_queue',)), - ('<set>', ('set', 'multiset',)), + ('<set>', ('multiset',)), ('<stack>', ('stack',)), ('<string>', ('char_traits', 'basic_string',)), ('<tuple>', ('tuple',)), @@ -5398,11 +5939,21 @@ _re_pattern_headers_maybe_templates = [] for _header, _templates in _HEADERS_MAYBE_TEMPLATES: for _template in _templates: # Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or - # type::max(). + # 'type::max()'. _re_pattern_headers_maybe_templates.append( (re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'), _template, _header)) +# Match set<type>, but not foo->set<type>, foo.set<type> +_re_pattern_headers_maybe_templates.append( + (re.compile(r'[^>.]\bset\s*\<'), + 'set<>', + '<set>')) +# Match 'map<type> var' and 'std::map<type>(...)', but not 'map<type>(...)'' +_re_pattern_headers_maybe_templates.append( + (re.compile(r'(std\b::\bmap\s*\<)|(^(std\b::\b)map\b\(\s*\<)'), + 'map<>', + '<map>')) # Other scripts may reach in and modify this pattern. _re_pattern_templates = [] @@ -5435,7 +5986,7 @@ def FilesBelongToSameModule(filename_cc, filename_h): some false positives. This should be sufficiently rare in practice. Args: - filename_cc: is the path for the .cc file + filename_cc: is the path for the source (e.g. .cc) file filename_h: is the path for the header path Returns: @@ -5443,20 +5994,23 @@ def FilesBelongToSameModule(filename_cc, filename_h): bool: True if filename_cc and filename_h belong to the same module. string: the additional prefix needed to open the header file. 
""" + fileinfo_cc = FileInfo(filename_cc) + if not fileinfo_cc.Extension().lstrip('.') in GetNonHeaderExtensions(): + return (False, '') - fileinfo = FileInfo(filename_cc) - if not fileinfo.IsSource(): + fileinfo_h = FileInfo(filename_h) + if not IsHeaderExtension(fileinfo_h.Extension().lstrip('.')): return (False, '') - filename_cc = filename_cc[:-len(fileinfo.Extension())] - matched_test_suffix = Search(_TEST_FILE_SUFFIX, fileinfo.BaseName()) + + filename_cc = filename_cc[:-(len(fileinfo_cc.Extension()))] + matched_test_suffix = Search(_TEST_FILE_SUFFIX, fileinfo_cc.BaseName()) if matched_test_suffix: filename_cc = filename_cc[:-len(matched_test_suffix.group(1))] + filename_cc = filename_cc.replace('/public/', '/') filename_cc = filename_cc.replace('/internal/', '/') - if not filename_h.endswith('.h'): - return (False, '') - filename_h = filename_h[:-len('.h')] + filename_h = filename_h[:-(len(fileinfo_h.Extension()))] if filename_h.endswith('-inl'): filename_h = filename_h[:-len('-inl')] filename_h = filename_h.replace('/public/', '/') @@ -5482,18 +6036,19 @@ def UpdateIncludeState(filename, include_dict, io=codecs): """ headerfile = None try: - headerfile = io.open(filename, 'r', 'utf8', 'replace') + with io.open(filename, 'r', 'utf8', 'replace') as headerfile: + linenum = 0 + for line in headerfile: + linenum += 1 + clean_line = CleanseComments(line) + match = _RE_PATTERN_INCLUDE.search(clean_line) + if match: + include = match.group(2) + include_dict.setdefault(include, linenum) + return True except IOError: return False - linenum = 0 - for line in headerfile: - linenum += 1 - clean_line = CleanseComments(line) - match = _RE_PATTERN_INCLUDE.search(clean_line) - if match: - include = match.group(2) - include_dict.setdefault(include, linenum) - return True + def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error, @@ -5571,7 +6126,7 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error, # include_dict is modified during iteration, so we iterate over a copy of # the keys. - header_keys = include_dict.keys() + header_keys = list(include_dict.keys()) for header in header_keys: (same_module, common_path) = FilesBelongToSameModule(abs_filename, header) fullpath = common_path + header @@ -5583,11 +6138,13 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error, # didn't include it in the .h file. # TODO(unknown): Do a better job of finding .h files so we are confident that # not having the .h file means there isn't one. - if filename.endswith('.cc') and not header_found: - return + if not header_found: + for extension in GetNonHeaderExtensions(): + if filename.endswith('.' + extension): + return # All the lines have been processed, report the errors found. - for required_header_unstripped in required: + for required_header_unstripped in sorted(required, key=required.__getitem__): template = required[required_header_unstripped][1] if required_header_unstripped.strip('<>"') not in include_dict: error(filename, required[required_header_unstripped][0], @@ -5726,11 +6283,9 @@ def IsBlockInNameSpace(nesting_state, is_forward_declaration): Whether or not the new block is directly in a namespace. 
""" if is_forward_declaration: - if len(nesting_state.stack) >= 1 and ( - isinstance(nesting_state.stack[-1], _NamespaceInfo)): - return True - else: - return False + return len(nesting_state.stack) >= 1 and ( + isinstance(nesting_state.stack[-1], _NamespaceInfo)) + return (len(nesting_state.stack) > 1 and nesting_state.stack[-1].check_namespace_indentation and @@ -5780,7 +6335,7 @@ def CheckItemIndentationInNamespace(filename, raw_lines_no_comments, linenum, def ProcessLine(filename, file_extension, clean_lines, line, include_state, function_state, nesting_state, error, - extra_check_functions=[]): + extra_check_functions=None): """Processes a single line in the file. Args: @@ -5819,8 +6374,9 @@ def ProcessLine(filename, file_extension, clean_lines, line, CheckMakePairUsesDeduction(filename, clean_lines, line, error) CheckRedundantVirtual(filename, clean_lines, line, error) CheckRedundantOverrideOrFinal(filename, clean_lines, line, error) - for check_fn in extra_check_functions: - check_fn(filename, clean_lines, line, error) + if extra_check_functions: + for check_fn in extra_check_functions: + check_fn(filename, clean_lines, line, error) def FlagCxx11Features(filename, clean_lines, linenum, error): """Flag those c++11 features that we only allow in certain places. @@ -5894,7 +6450,7 @@ def FlagCxx14Features(filename, clean_lines, linenum, error): def ProcessFileData(filename, file_extension, lines, error, - extra_check_functions=[]): + extra_check_functions=None): """Performs lint checks and reports any errors to the given error function. Args: @@ -5994,7 +6550,7 @@ def ProcessConfigOverrides(filename): if _cpplint_state.quiet: # Suppress "Ignoring file" warning when using --quiet. return False - sys.stderr.write('Ignoring "%s": file excluded by "%s". ' + _cpplint_state.PrintInfo('Ignoring "%s": file excluded by "%s". ' 'File path component "%s" matches ' 'pattern "%s"\n' % (filename, cfg_file, base_name, val)) @@ -6002,34 +6558,38 @@ def ProcessConfigOverrides(filename): elif name == 'linelength': global _line_length try: - _line_length = int(val) + _line_length = int(val) except ValueError: - sys.stderr.write('Line length must be numeric.') + _cpplint_state.PrintError('Line length must be numeric.') + elif name == 'extensions': + ProcessExtensionsOption(val) elif name == 'root': global _root # root directories are specified relative to CPPLINT.cfg dir. _root = os.path.join(os.path.dirname(cfg_file), val) elif name == 'headers': ProcessHppHeadersOption(val) + elif name == 'includeorder': + ProcessIncludeOrderOption(val) else: - sys.stderr.write( + _cpplint_state.PrintError( 'Invalid configuration option (%s) in file %s\n' % (name, cfg_file)) except IOError: - sys.stderr.write( + _cpplint_state.PrintError( "Skipping config file '%s': Can't open for reading\n" % cfg_file) keep_looking = False # Apply all the accumulated filters in reverse order (top-level directory # config options having the least priority). - for filter in reversed(cfg_filters): - _AddFilters(filter) + for cfg_filter in reversed(cfg_filters): + _AddFilters(cfg_filter) return True -def ProcessFile(filename, vlevel, extra_check_functions=[]): +def ProcessFile(filename, vlevel, extra_check_functions=None): """Does google-lint on a single file. 
Args: @@ -6067,7 +6627,8 @@ def ProcessFile(filename, vlevel, extra_check_functions=[]): codecs.getwriter('utf8'), 'replace').read().split('\n') else: - lines = codecs.open(filename, 'r', 'utf8', 'replace').read().split('\n') + with codecs.open(filename, 'r', 'utf8', 'replace') as target_file: + lines = target_file.read().split('\n') # Remove trailing '\r'. # The -1 accounts for the extra trailing blank line we get from split() @@ -6079,7 +6640,7 @@ def ProcessFile(filename, vlevel, extra_check_functions=[]): lf_lines.append(linenum + 1) except IOError: - sys.stderr.write( + _cpplint_state.PrintError( "Skipping input '%s': Can't open for reading\n" % filename) _RestoreFilters() return @@ -6089,9 +6650,9 @@ def ProcessFile(filename, vlevel, extra_check_functions=[]): # When reading from stdin, the extension is unknown, so no cpplint tests # should rely on the extension. - if filename != '-' and file_extension not in _valid_extensions: - sys.stderr.write('Ignoring %s; not a valid file name ' - '(%s)\n' % (filename, ', '.join(_valid_extensions))) + if filename != '-' and file_extension not in GetAllExtensions(): + _cpplint_state.PrintError('Ignoring %s; not a valid file name ' + '(%s)\n' % (filename, ', '.join(GetAllExtensions()))) else: ProcessFileData(filename, file_extension, lines, Error, extra_check_functions) @@ -6117,7 +6678,7 @@ def ProcessFile(filename, vlevel, extra_check_functions=[]): # Suppress printing anything if --quiet was passed unless the error # count has increased after processing this file. if not _cpplint_state.quiet or old_errors != _cpplint_state.error_count: - sys.stdout.write('Done processing %s\n' % filename) + _cpplint_state.PrintInfo('Done processing %s\n' % filename) _RestoreFilters() @@ -6127,12 +6688,21 @@ def PrintUsage(message): Args: message: The optional error message. """ - sys.stderr.write(_USAGE) + sys.stderr.write(_USAGE % (sorted(list(GetAllExtensions())), + ','.join(sorted(list(GetAllExtensions()))), + sorted(GetHeaderExtensions()), + ','.join(sorted(GetHeaderExtensions())))) + if message: sys.exit('\nFATAL ERROR: ' + message) else: - sys.exit(1) + sys.exit(0) +def PrintVersion(): + sys.stdout.write('Cpplint fork (https://github.com/cpplint/cpplint)\n') + sys.stdout.write('cpplint ' + __VERSION__ + '\n') + sys.stdout.write('Python ' + sys.version + '\n') + sys.exit(0) def PrintCategories(): """Prints a list of all the error-categories used by error messages. 
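
ProcessConfigOverrides(), patched in the hunks above, now recognizes extensions= and includeorder= keys in CPPLINT.cfg and routes diagnostics through _cpplint_state.PrintError(). A minimal sketch of that lookup follows; the temporary directory and the option values are invented, while the recognized keys are the ones handled in the patched function.

import os
import tempfile

import cpplint

tmpdir = tempfile.mkdtemp()
with open(os.path.join(tmpdir, 'CPPLINT.cfg'), 'w') as cfg:
    cfg.write('set noparent\n')                 # stop searching parent directories
    cfg.write('linelength=100\n')
    cfg.write('extensions=cc,h,cu\n')           # handled by ProcessExtensionsOption()
    cfg.write('includeorder=standardcfirst\n')  # handled by ProcessIncludeOrderOption()

# Config files are searched from the linted file's directory upwards, so any
# file placed under tmpdir picks up the settings written above.
cpplint.ProcessConfigOverrides(os.path.join(tmpdir, 'example.cc'))
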
@@ -6156,12 +6726,18 @@ def ParseArguments(args): """ try: (opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=', + 'v=', + 'version', 'counting=', 'filter=', 'root=', + 'repository=', 'linelength=', 'extensions=', + 'exclude=', + 'recursive', 'headers=', + 'includeorder=', 'quiet']) except getopt.GetoptError: PrintUsage('Invalid arguments.') @@ -6171,17 +6747,21 @@ def ParseArguments(args): filters = '' quiet = _Quiet() counting_style = '' + recursive = False for (opt, val) in opts: if opt == '--help': PrintUsage(None) + if opt == '--version': + PrintVersion() elif opt == '--output': - if val not in ('emacs', 'vs7', 'eclipse'): - PrintUsage('The only allowed output formats are emacs, vs7 and eclipse.') + if val not in ('emacs', 'vs7', 'eclipse', 'junit', 'sed', 'gsed'): + PrintUsage('The only allowed output formats are emacs, vs7, eclipse ' + 'sed, gsed and junit.') output_format = val elif opt == '--quiet': quiet = True - elif opt == '--verbose': + elif opt == '--verbose' or opt == '--v': verbosity = int(val) elif opt == '--filter': filters = val @@ -6194,49 +6774,126 @@ def ParseArguments(args): elif opt == '--root': global _root _root = val + elif opt == '--repository': + global _repository + _repository = val elif opt == '--linelength': global _line_length try: - _line_length = int(val) + _line_length = int(val) except ValueError: - PrintUsage('Line length must be digits.') + PrintUsage('Line length must be digits.') + elif opt == '--exclude': + global _excludes + if not _excludes: + _excludes = set() + _excludes.update(glob.glob(val)) elif opt == '--extensions': - global _valid_extensions - try: - _valid_extensions = set(val.split(',')) - except ValueError: - PrintUsage('Extensions must be comma separated list.') + ProcessExtensionsOption(val) elif opt == '--headers': ProcessHppHeadersOption(val) + elif opt == '--recursive': + recursive = True + elif opt == '--includeorder': + ProcessIncludeOrderOption(val) if not filenames: PrintUsage('No files were specified.') + if recursive: + filenames = _ExpandDirectories(filenames) + + if _excludes: + filenames = _FilterExcludedFiles(filenames) + _SetOutputFormat(output_format) _SetQuiet(quiet) _SetVerboseLevel(verbosity) _SetFilters(filters) _SetCountingStyle(counting_style) + filenames.sort() return filenames +def _ExpandDirectories(filenames): + """Searches a list of filenames and replaces directories in the list with + all files descending from those directories. Files with extensions not in + the valid extensions list are excluded. -def main(): - filenames = ParseArguments(sys.argv[1:]) - - # Change stderr to write with replacement characters so we don't die - # if we try to print something containing non-ASCII characters. - sys.stderr = codecs.StreamReaderWriter(sys.stderr, - codecs.getreader('utf8'), - codecs.getwriter('utf8'), - 'replace') + Args: + filenames: A list of files or directories - _cpplint_state.ResetErrorCounts() + Returns: + A list of all files that are members of filenames or descended from a + directory in filenames + """ + expanded = set() for filename in filenames: - ProcessFile(filename, _cpplint_state.verbose_level) - # If --quiet is passed, suppress printing error count unless there are errors. 
- if not _cpplint_state.quiet or _cpplint_state.error_count > 0: - _cpplint_state.PrintErrorCounts() + if not os.path.isdir(filename): + expanded.add(filename) + continue + + for root, _, files in os.walk(filename): + for loopfile in files: + fullname = os.path.join(root, loopfile) + if fullname.startswith('.' + os.path.sep): + fullname = fullname[len('.' + os.path.sep):] + expanded.add(fullname) + + filtered = [] + for filename in expanded: + if os.path.splitext(filename)[1][1:] in GetAllExtensions(): + filtered.append(filename) + return filtered + +def _FilterExcludedFiles(fnames): + """Filters out files listed in the --exclude command line switch. File paths + in the switch are evaluated relative to the current working directory + """ + exclude_paths = [os.path.abspath(f) for f in _excludes] + # because globbing does not work recursively, exclude all subpath of all excluded entries + return [f for f in fnames + if not any(e for e in exclude_paths + if _IsParentOrSame(e, os.path.abspath(f)))] + +def _IsParentOrSame(parent, child): + """Return true if child is subdirectory of parent. + Assumes both paths are absolute and don't contain symlinks. + """ + parent = os.path.normpath(parent) + child = os.path.normpath(child) + if parent == child: + return True + + prefix = os.path.commonprefix([parent, child]) + if prefix != parent: + return False + # Note: os.path.commonprefix operates on character basis, so + # take extra care of situations like '/foo/ba' and '/foo/bar/baz' + child_suffix = child[len(prefix):] + child_suffix = child_suffix.lstrip(os.sep) + return child == os.path.join(prefix, child_suffix) + +def main(): + filenames = ParseArguments(sys.argv[1:]) + backup_err = sys.stderr + try: + # Change stderr to write with replacement characters so we don't die + # if we try to print something containing non-ASCII characters. + sys.stderr = codecs.StreamReader(sys.stderr, 'replace') + + _cpplint_state.ResetErrorCounts() + for filename in filenames: + ProcessFile(filename, _cpplint_state.verbose_level) + # If --quiet is passed, suppress printing error count unless there are errors. + if not _cpplint_state.quiet or _cpplint_state.error_count > 0: + _cpplint_state.PrintErrorCounts() + + if _cpplint_state.output_format == 'junit': + sys.stderr.write(_cpplint_state.FormatJUnitXML()) + + finally: + sys.stderr = backup_err sys.exit(_cpplint_state.error_count > 0) diff --git a/tools/cpplint.py-update b/tools/cpplint.py-update index 4af4389..3d32330 100755 --- a/tools/cpplint.py-update +++ b/tools/cpplint.py-update @@ -15,7 +15,10 @@ set -eu -GITHUB_URL="https://github.com/google/styleguide/raw/gh-pages" +# The outdated Google version that only supports Python 2. +GITHUB_URL="https://github.com/google/styleguide/raw/gh-pages/cpplint" +# The forked version with Python 3 support. +GITHUB_URL="https://github.com/cpplint/cpplint/raw/develop" SCRIPT_DIR="$(dirname "$(readlink -f -- "$0")")" usage() { @@ -46,8 +49,11 @@ main() { # Download cpplint.py from upstream. local cpplint_py="${SCRIPT_DIR}/cpplint.py" - wget "${GITHUB_URL}/cpplint/cpplint.py" -O "${cpplint_py}" - sed -i '2i# pylint: skip-file' "${cpplint_py}" + wget "${GITHUB_URL}/cpplint.py" -O "${cpplint_py}" + sed -i \ + -e '1s|python$|python3|' \ + -e '2i# pylint: skip-file' \ + "${cpplint_py}" chmod +x "${cpplint_py}" } |
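
To round off the cpplint.py changes, the new --recursive, --exclude, and --output=junit handling added to ParseArguments(), together with the _IsParentOrSame() helper used by _FilterExcludedFiles(), can be exercised as below. This is a hypothetical invocation: the src and out paths are made up, and the comments only restate the behavior of the helpers added above.

import cpplint

# --recursive expands directories into the files whose extensions cpplint
# accepts; --exclude globs are resolved against the current working directory
# and dropped via _FilterExcludedFiles().
files = cpplint.ParseArguments(
    ['--recursive', '--exclude=out/*', '--output=junit', 'src'])

# Exclusion only matches whole path components, not arbitrary string prefixes:
cpplint._IsParentOrSame('/src/lib', '/src/lib/util/foo.cc')   # True
cpplint._IsParentOrSame('/src/li', '/src/lib/util/foo.cc')    # False
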