author    Luis Hector Chavez <lhchavez@google.com>    2019-03-27 15:41:03 -0700
committer android-build-merger <android-build-merger@google.com>    2019-03-27 15:41:03 -0700
commit    bdc3a2ff93946aa96ff9f2d7fb80bd6b003b734f (patch)
tree      fbeebfbaf36b82f45be3202f0cec9db66a2e929c
parent    b9f69fb285422eefe2e692b5c1a168b3d06055f7 (diff)
parent    3f99b8c7397a1a9235fcd477d9ca241508c3e03f (diff)
download  minijail-bdc3a2ff93946aa96ff9f2d7fb80bd6b003b734f.tar.gz

tools/compile_seccomp_policy: Support long lines
am: d0fd13d56b
am: 3f99b8c739

Change-Id: I628a0446fe1e46655eb89a22539353da9913f2af

-rw-r--r--  tools/parser.py           120
-rwxr-xr-x  tools/parser_unittest.py   46
2 files changed, 103 insertions(+), 63 deletions(-)
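
This merge teaches the Minijail policy tokenizer to treat a trailing backslash
as a line continuation, so a long filter statement can be split across several
physical lines. A minimal sketch of a policy file that relies on the new
syntax (the syscall names and argument values are illustrative, not taken from
this commit):

    # One logical filter statement split across two physical lines.
    ioctl: arg1 == 0xdead || \
           arg1 == 0xbeef
    read: allow
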
diff --git a/tools/parser.py b/tools/parser.py
index 65297f9..7179717 100644
--- a/tools/parser.py
+++ b/tools/parser.py
@@ -27,13 +27,14 @@ import re
import bpf
-Token = collections.namedtuple('token',
- ['type', 'value', 'filename', 'line', 'column'])
+Token = collections.namedtuple(
+ 'token', ['type', 'value', 'filename', 'line', 'line_number', 'column'])
# A regex that can tokenize a Minijail policy file line.
_TOKEN_SPECIFICATION = (
('COMMENT', r'#.*$'),
('WHITESPACE', r'\s+'),
+ ('CONTINUATION', r'\\$'),
('DEFAULT', r'@default'),
('INCLUDE', r'@include'),
('FREQUENCY', r'@frequency'),
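
The new CONTINUATION pattern only fires on a backslash that is the last
character of the line (the tokenizer strips the trailing newline first), so a
backslash in the middle of a line still falls through to the invalid-token
error. A standalone sketch of the anchored pattern's behavior, assuming the
same regex as in the hunk above:

    import re

    continuation = re.compile(r'\\$')

    assert continuation.search('read: \\') is not None   # trailing backslash
    assert continuation.search('read: \\ allow') is None  # mid-line backslash
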
@@ -67,8 +68,16 @@ class ParseException(Exception):
"""An exception that is raised when parsing fails."""
# pylint: disable=too-many-arguments
- def __init__(self, message, filename, line, line_number=1, token=None):
+ def __init__(self,
+ message,
+ filename,
+ *,
+ line='',
+ line_number=1,
+ token=None):
if token:
+ line = token.line
+ line_number = token.line_number
column = token.column
length = len(token.value)
else:
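
The bare * in the new signature makes line, line_number, and token
keyword-only, so the growing argument list cannot be mis-bound positionally;
and when a token is supplied, its embedded line and line number now override
the defaults. A sketch of the new calling convention (message and filename are
illustrative):

    # Positional extras now raise TypeError:
    #   ParseException('invalid token', 'test.policy', 'read: allow', 3)
    # Callers must name them instead:
    raise ParseException(
        'invalid token', 'test.policy', line='read: allow', line_number=3)
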
@@ -105,42 +114,58 @@ class ParserState:
"""Return the current line number being processed."""
return self._line_number
- def set_line(self, line):
- """Update the current line being processed."""
- self._line = line
- self._line_number += 1
-
def error(self, message, token=None):
"""Raise a ParserException with the provided message."""
- raise ParseException(message, self.filename, self.line,
- self.line_number, token)
-
- def tokenize(self):
+ raise ParseException(
+ message,
+ self.filename,
+ line=self._line,
+ line_number=self._line_number,
+ token=token)
+
+ def tokenize(self, lines):
"""Return a list of tokens for the current line."""
tokens = []
- last_end = 0
- for token in _TOKEN_RE.finditer(self.line):
- if token.start() != last_end:
+ for line_number, line in enumerate(lines):
+ self._line_number = line_number + 1
+ self._line = line.rstrip('\r\n')
+
+ last_end = 0
+ for token in _TOKEN_RE.finditer(self._line):
+ if token.start() != last_end:
+ self.error(
+ 'invalid token',
+ token=Token('INVALID',
+ self._line[last_end:token.start()],
+ self.filename, self._line,
+ self._line_number, last_end))
+ last_end = token.end()
+
+ # Omit whitespace and comments now to avoid sprinkling this logic
+ # elsewhere.
+ if token.lastgroup in ('WHITESPACE', 'COMMENT',
+ 'CONTINUATION'):
+ continue
+ tokens.append(
+ Token(token.lastgroup, token.group(), self.filename,
+ self._line, self._line_number, token.start()))
+ if last_end != len(self._line):
self.error(
'invalid token',
- token=Token('INVALID', self.line[last_end:token.start()],
- self.filename, self.line_number, last_end))
- last_end = token.end()
+ token=Token('INVALID', self._line[last_end:],
+ self.filename, self._line, self._line_number,
+ last_end))
- # Omit whitespace and comments now to avoid sprinkling this logic
- # elsewhere.
- if token.lastgroup in ('WHITESPACE', 'COMMENT'):
+ if self._line.endswith('\\'):
+ # This line is not finished yet.
continue
- tokens.append(
- Token(token.lastgroup, token.group(), self.filename,
- self.line_number, token.start()))
- if last_end != len(self.line):
- self.error(
- 'invalid token',
- token=Token('INVALID', self.line[last_end:], self.filename,
- self.line_number, last_end))
- return tokens
+
+ if tokens:
+ # Return a copy of the token list so that the caller can be free
+ # to modify it.
+ yield tokens[::]
+ tokens.clear()
Atom = collections.namedtuple('Atom', ['argument_index', 'op', 'value'])
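
With this hunk, tokenize() changes from a per-line method driven by set_line()
into a generator over any iterable of lines (a file object works): it joins
backslash-continued physical lines into one logical line, filters out
whitespace, comments, and the continuation marker itself, and yields one
non-empty token list per logical line. A usage sketch, assuming the module is
imported as parser the way the unit tests below do:

    state = parser.ParserState('<memory>')
    lines = [
        'read: \\',       # continued onto the next physical line
        '    allow',
        '',               # blank lines yield nothing
        'write: allow',
    ]
    for tokens in state.tokenize(lines):
        print([t.value for t in tokens])
    # ['read', ':', 'allow']
    # ['write', ':', 'allow']
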
@@ -580,13 +605,7 @@ class PolicyParser:
try:
frequency_mapping = collections.defaultdict(int)
with open(filename) as frequency_file:
- for line in frequency_file:
- self._parser_state.set_line(line.rstrip())
- tokens = self._parser_state.tokenize()
-
- if not tokens:
- continue
-
+ for tokens in self._parser_state.tokenize(frequency_file):
syscall_numbers = self._parse_syscall_descriptor(tokens)
if not tokens:
self._parser_state.error('missing colon')
@@ -653,14 +672,7 @@ class PolicyParser:
try:
statements = []
with open(filename) as policy_file:
- for line in policy_file:
- self._parser_state.set_line(line.rstrip())
- tokens = self._parser_state.tokenize()
-
- if not tokens:
- # Allow empty lines.
- continue
-
+ for tokens in self._parser_state.tokenize(policy_file):
if tokens[0].type == 'INCLUDE':
statements.extend(
self._parse_include_statement(tokens))
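
Both file-reading call sites (the frequency parser above and the policy parser
here) collapse to the same shape: the manual set_line()/tokenize()/empty-check
loop becomes a single for over the generator, and the explicit blank-line
checks disappear because the generator never yields an empty token list.
Reduced to a sketch:

    with open(filename) as policy_file:
        for tokens in self._parser_state.tokenize(policy_file):
            # tokens is guaranteed non-empty; dispatch on tokens[0].type.
            ...
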
@@ -693,8 +705,10 @@ class PolicyParser:
try:
statements = [x for x in self._parse_policy_file(filename)]
except RecursionError:
- raise ParseException('recursion limit exceeded', filename,
- self._parser_states[-1].line)
+ raise ParseException(
+ 'recursion limit exceeded',
+ filename,
+ line=self._parser_states[-1].line)
# Collapse statements into a single syscall-to-filter-list.
syscall_filter_mapping = {}
@@ -717,11 +731,13 @@ class PolicyParser:
# to add another one.
continue
if len(unconditional_actions_suffix) > 1:
- raise ParseException(('Syscall %s (number %d) already had '
- 'an unconditional action applied') %
- (filter_statement.syscall.name,
- filter_statement.syscall.number),
- filename, self._parser_states[-1].line)
+ raise ParseException(
+ ('Syscall %s (number %d) already had '
+ 'an unconditional action applied') %
+ (filter_statement.syscall.name,
+ filter_statement.syscall.number),
+ filename,
+ line=self._parser_states[-1].line)
assert not unconditional_actions_suffix
filter_statement.filters.append(
Filter(expression=None, action=self._default_action))
diff --git a/tools/parser_unittest.py b/tools/parser_unittest.py
index 1173f89..e9f0ce2 100755
--- a/tools/parser_unittest.py
+++ b/tools/parser_unittest.py
@@ -40,8 +40,7 @@ class TokenizerTests(unittest.TestCase):
@staticmethod
def _tokenize(line):
parser_state = parser.ParserState('<memory>')
- parser_state.set_line(line)
- return parser_state.tokenize()
+ return list(parser_state.tokenize([line]))[0]
def test_tokenize(self):
"""Accept valid tokens."""
@@ -105,8 +104,7 @@ class ParseConstantTests(unittest.TestCase):
def _tokenize(self, line):
# pylint: disable=protected-access
- self.parser._parser_state.set_line(line)
- return self.parser._parser_state.tokenize()
+ return list(self.parser._parser_state.tokenize([line]))[0]
def test_parse_constant_unsigned(self):
"""Accept reasonably-sized unsigned constants."""
@@ -232,7 +230,7 @@ class ParseConstantTests(unittest.TestCase):
def test_parse_empty_constant(self):
"""Reject parsing nothing."""
with self.assertRaisesRegex(parser.ParseException, 'empty constant'):
- self.parser.parse_value(self._tokenize(''))
+ self.parser.parse_value([])
with self.assertRaisesRegex(parser.ParseException, 'empty constant'):
self.parser.parse_value(self._tokenize('0|'))
@@ -252,8 +250,7 @@ class ParseFilterExpressionTests(unittest.TestCase):
def _tokenize(self, line):
# pylint: disable=protected-access
- self.parser._parser_state.set_line(line)
- return self.parser._parser_state.tokenize()
+ return list(self.parser._parser_state.tokenize([line]))[0]
def test_parse_argument_expression(self):
"""Accept valid argument expressions."""
@@ -313,8 +310,7 @@ class ParseFilterTests(unittest.TestCase):
def _tokenize(self, line):
# pylint: disable=protected-access
- self.parser._parser_state.set_line(line)
- return self.parser._parser_state.tokenize()
+ return list(self.parser._parser_state.tokenize([line]))[0]
def test_parse_filter(self):
"""Accept valid filters."""
@@ -393,8 +389,7 @@ class ParseFilterStatementTests(unittest.TestCase):
def _tokenize(self, line):
# pylint: disable=protected-access
- self.parser._parser_state.set_line(line)
- return self.parser._parser_state.tokenize()
+ return list(self.parser._parser_state.tokenize([line]))[0]
def test_parse_filter_statement(self):
"""Accept valid filter statements."""
@@ -522,6 +517,35 @@ class ParseFileTests(unittest.TestCase):
]),
]))
+ def test_parse_multiline(self):
+ """Allow simple multi-line policy files."""
+ path = self._write_file(
+ 'test.policy', """
+ # Comment.
+ read: \
+ allow
+ write: allow
+ """)
+
+ self.assertEqual(
+ self.parser.parse_file(path),
+ parser.ParsedPolicy(
+ default_action=bpf.KillProcess(),
+ filter_statements=[
+ parser.FilterStatement(
+ syscall=parser.Syscall('read', 0),
+ frequency=1,
+ filters=[
+ parser.Filter(None, bpf.Allow()),
+ ]),
+ parser.FilterStatement(
+ syscall=parser.Syscall('write', 1),
+ frequency=1,
+ filters=[
+ parser.Filter(None, bpf.Allow()),
+ ]),
+ ]))
+
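
One subtlety when reading the new test: inside a regular (non-raw)
triple-quoted string, a trailing backslash is consumed by Python itself as a
string-literal line continuation, so the policy file written to disk has
read: and allow joined on one physical line before the policy tokenizer ever
sees them. To put a literal backslash-newline into the file, the source would
need a doubled backslash. A standalone sketch of the difference:

    joined = """a \
    b"""
    literal = """a \\
    b"""
    assert '\n' not in joined     # Python joined the physical lines
    assert '\\\n' in literal      # the file would contain a real continuation
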
def test_parse_default(self):
"""Allow defining a default action."""
path = self._write_file(