author     serpilliere <serpilliere@users.noreply.github.com>  2017-03-10 15:02:00 +0100
committer  Eli Bendersky <eliben@users.noreply.github.com>     2017-03-10 06:02:00 -0800
commit     471442fe06b095c085d0219a95fbf1f177b6449b (patch)
tree       ca5f4fa2fecce3b869f5c04d5f52f4cd0382ee6b
parent     2129f5fb1ec2d06000f10c96510874c5303d9f8c (diff)
download   pycparser-471442fe06b095c085d0219a95fbf1f177b6449b.tar.gz
Add column support in c_parser (#178)
-rw-r--r--   pycparser/c_parser.py    112
-rw-r--r--   pycparser/plyparser.py    11
-rwxr-xr-x   tests/test_c_parser.py    63
3 files changed, 100 insertions, 86 deletions
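
For context, a minimal usage sketch (illustrative only, not part of this patch; the filename and snippet are made up): once the parser records columns, a node's coord carries file, line and column, matching the updated test expectations below.

    from pycparser.c_parser import CParser

    ast = CParser().parse('int a;', filename='<example>')
    print(ast.ext[0].coord)   # expected to print something like: <example>:1:5
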
diff --git a/pycparser/c_parser.py b/pycparser/c_parser.py
index 7124b66..f84d6bc 100644
--- a/pycparser/c_parser.py
+++ b/pycparser/c_parser.py
@@ -563,16 +563,16 @@ class CParser(PLYParser):
""" pp_directive : PPHASH
"""
self._parse_error('Directives not supported yet',
- self._coord(p.lineno(1)))
+ self._token_coord(p, 1))
def p_pppragma_directive(self, p):
""" pppragma_directive : PPPRAGMA
| PPPRAGMA PPPRAGMASTR
"""
if len(p) == 3:
- p[0] = c_ast.Pragma(p[2], self._coord(p.lineno(2)))
+ p[0] = c_ast.Pragma(p[2], self._token_coord(p, 2))
else:
- p[0] = c_ast.Pragma("", self._coord(p.lineno(1)))
+ p[0] = c_ast.Pragma("", self._token_coord(p, 1))
# In function definitions, the declarator can be followed by
# a declaration list, for old "K&R style" function definitions.
@@ -585,7 +585,7 @@ class CParser(PLYParser):
qual=[],
storage=[],
type=[c_ast.IdentifierType(['int'],
- coord=self._coord(p.lineno(1)))],
+ coord=self._token_coord(p, 1))],
function=[])
p[0] = self._build_function_definition(
@@ -780,7 +780,7 @@ class CParser(PLYParser):
| UNSIGNED
| __INT128
"""
- p[0] = c_ast.IdentifierType([p[1]], coord=self._coord(p.lineno(1)))
+ p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
def p_type_specifier(self, p):
""" type_specifier : typedef_name
@@ -858,7 +858,7 @@ class CParser(PLYParser):
p[0] = klass(
name=p[2],
decls=None,
- coord=self._coord(p.lineno(2)))
+ coord=self._token_coord(p, 2))
def p_struct_or_union_specifier_2(self, p):
""" struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close
@@ -867,7 +867,7 @@ class CParser(PLYParser):
p[0] = klass(
name=None,
decls=p[3],
- coord=self._coord(p.lineno(2)))
+ coord=self._token_coord(p, 2))
def p_struct_or_union_specifier_3(self, p):
""" struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close
@@ -877,7 +877,7 @@ class CParser(PLYParser):
p[0] = klass(
name=p[2],
decls=p[4],
- coord=self._coord(p.lineno(2)))
+ coord=self._token_coord(p, 2))
def p_struct_or_union(self, p):
""" struct_or_union : STRUCT
@@ -966,18 +966,18 @@ class CParser(PLYParser):
""" enum_specifier : ENUM ID
| ENUM TYPEID
"""
- p[0] = c_ast.Enum(p[2], None, self._coord(p.lineno(1)))
+ p[0] = c_ast.Enum(p[2], None, self._token_coord(p, 1))
def p_enum_specifier_2(self, p):
""" enum_specifier : ENUM brace_open enumerator_list brace_close
"""
- p[0] = c_ast.Enum(None, p[3], self._coord(p.lineno(1)))
+ p[0] = c_ast.Enum(None, p[3], self._token_coord(p, 1))
def p_enum_specifier_3(self, p):
""" enum_specifier : ENUM ID brace_open enumerator_list brace_close
| ENUM TYPEID brace_open enumerator_list brace_close
"""
- p[0] = c_ast.Enum(p[2], p[4], self._coord(p.lineno(1)))
+ p[0] = c_ast.Enum(p[2], p[4], self._token_coord(p, 1))
def p_enumerator_list(self, p):
""" enumerator_list : enumerator
@@ -999,11 +999,11 @@ class CParser(PLYParser):
if len(p) == 2:
enumerator = c_ast.Enumerator(
p[1], None,
- self._coord(p.lineno(1)))
+ self._token_coord(p, 1))
else:
enumerator = c_ast.Enumerator(
p[1], p[3],
- self._coord(p.lineno(1)))
+ self._token_coord(p, 1))
self._add_identifier(enumerator.name, enumerator.coord)
p[0] = enumerator
@@ -1034,7 +1034,7 @@ class CParser(PLYParser):
declname=p[1],
type=None,
quals=None,
- coord=self._coord(p.lineno(1)))
+ coord=self._token_coord(p, 1))
@parameterized(('id', 'ID'), ('typeid', 'TYPEID'))
def p_direct_xxx_declarator_2(self, p):
@@ -1085,7 +1085,7 @@ class CParser(PLYParser):
"""
arr = c_ast.ArrayDecl(
type=None,
- dim=c_ast.ID(p[4], self._coord(p.lineno(4))),
+ dim=c_ast.ID(p[4], self._token_coord(p, 4)),
dim_quals=p[3] if p[3] != None else [],
coord=p[1].coord)
@@ -1124,7 +1124,7 @@ class CParser(PLYParser):
""" pointer : TIMES type_qualifier_list_opt
| TIMES type_qualifier_list_opt pointer
"""
- coord = self._coord(p.lineno(1))
+ coord = self._token_coord(p, 1)
# Pointer decls nest from inside out. This is important when different
# levels have different qualifiers. For example:
#
@@ -1161,7 +1161,7 @@ class CParser(PLYParser):
| parameter_list COMMA ELLIPSIS
"""
if len(p) > 2:
- p[1].params.append(c_ast.EllipsisParam(self._coord(p.lineno(3))))
+ p[1].params.append(c_ast.EllipsisParam(self._token_coord(p, 3)))
p[0] = p[1]
@@ -1192,7 +1192,7 @@ class CParser(PLYParser):
spec = p[1]
if not spec['type']:
spec['type'] = [c_ast.IdentifierType(['int'],
- coord=self._coord(p.lineno(1)))]
+ coord=self._token_coord(p, 1))]
p[0] = self._build_declarations(
spec=spec,
decls=[dict(decl=p[2])])[0]
@@ -1203,7 +1203,7 @@ class CParser(PLYParser):
spec = p[1]
if not spec['type']:
spec['type'] = [c_ast.IdentifierType(['int'],
- coord=self._coord(p.lineno(1)))]
+ coord=self._token_coord(p, 1))]
# Parameters can have the same names as typedefs. The trouble is that
# the parameter's name gets grouped into declaration_specifiers, making
@@ -1222,7 +1222,7 @@ class CParser(PLYParser):
name='',
quals=spec['qual'],
type=p[2] or c_ast.TypeDecl(None, None, None),
- coord=self._coord(p.lineno(2)))
+ coord=self._token_coord(p, 2))
typename = spec['type']
decl = self._fix_decl_name_type(decl, typename)
@@ -1248,7 +1248,7 @@ class CParser(PLYParser):
| brace_open initializer_list COMMA brace_close
"""
if p[2] is None:
- p[0] = c_ast.InitList([], self._coord(p.lineno(1)))
+ p[0] = c_ast.InitList([], self._token_coord(p, 1))
else:
p[0] = p[2]
@@ -1291,7 +1291,7 @@ class CParser(PLYParser):
name='',
quals=p[1]['qual'],
type=p[2] or c_ast.TypeDecl(None, None, None),
- coord=self._coord(p.lineno(2)))
+ coord=self._token_coord(p, 2))
p[0] = self._fix_decl_name_type(typename, p[1]['type'])
@@ -1340,14 +1340,14 @@ class CParser(PLYParser):
type=c_ast.TypeDecl(None, None, None),
dim=p[2],
dim_quals=[],
- coord=self._coord(p.lineno(1)))
+ coord=self._token_coord(p, 1))
def p_direct_abstract_declarator_4(self, p):
""" direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET
"""
arr = c_ast.ArrayDecl(
type=None,
- dim=c_ast.ID(p[3], self._coord(p.lineno(3))),
+ dim=c_ast.ID(p[3], self._token_coord(p, 3)),
dim_quals=[],
coord=p[1].coord)
@@ -1358,9 +1358,9 @@ class CParser(PLYParser):
"""
p[0] = c_ast.ArrayDecl(
type=c_ast.TypeDecl(None, None, None),
- dim=c_ast.ID(p[3], self._coord(p.lineno(3))),
+ dim=c_ast.ID(p[3], self._token_coord(p, 3)),
dim_quals=[],
- coord=self._coord(p.lineno(1)))
+ coord=self._token_coord(p, 1))
def p_direct_abstract_declarator_6(self, p):
""" direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN
@@ -1378,7 +1378,7 @@ class CParser(PLYParser):
p[0] = c_ast.FuncDecl(
args=p[2],
type=c_ast.TypeDecl(None, None, None),
- coord=self._coord(p.lineno(1)))
+ coord=self._token_coord(p, 1))
# declaration is a list, statement isn't. To make it consistent, block_item
# will always be a list
@@ -1402,72 +1402,72 @@ class CParser(PLYParser):
""" compound_statement : brace_open block_item_list_opt brace_close """
p[0] = c_ast.Compound(
block_items=p[2],
- coord=self._coord(p.lineno(1)))
+ coord=self._token_coord(p, 1))
def p_labeled_statement_1(self, p):
""" labeled_statement : ID COLON statement """
- p[0] = c_ast.Label(p[1], p[3], self._coord(p.lineno(1)))
+ p[0] = c_ast.Label(p[1], p[3], self._token_coord(p, 1))
def p_labeled_statement_2(self, p):
""" labeled_statement : CASE constant_expression COLON statement """
- p[0] = c_ast.Case(p[2], [p[4]], self._coord(p.lineno(1)))
+ p[0] = c_ast.Case(p[2], [p[4]], self._token_coord(p, 1))
def p_labeled_statement_3(self, p):
""" labeled_statement : DEFAULT COLON statement """
- p[0] = c_ast.Default([p[3]], self._coord(p.lineno(1)))
+ p[0] = c_ast.Default([p[3]], self._token_coord(p, 1))
def p_selection_statement_1(self, p):
""" selection_statement : IF LPAREN expression RPAREN statement """
- p[0] = c_ast.If(p[3], p[5], None, self._coord(p.lineno(1)))
+ p[0] = c_ast.If(p[3], p[5], None, self._token_coord(p, 1))
def p_selection_statement_2(self, p):
""" selection_statement : IF LPAREN expression RPAREN statement ELSE statement """
- p[0] = c_ast.If(p[3], p[5], p[7], self._coord(p.lineno(1)))
+ p[0] = c_ast.If(p[3], p[5], p[7], self._token_coord(p, 1))
def p_selection_statement_3(self, p):
""" selection_statement : SWITCH LPAREN expression RPAREN statement """
p[0] = fix_switch_cases(
- c_ast.Switch(p[3], p[5], self._coord(p.lineno(1))))
+ c_ast.Switch(p[3], p[5], self._token_coord(p, 1)))
def p_iteration_statement_1(self, p):
""" iteration_statement : WHILE LPAREN expression RPAREN statement """
- p[0] = c_ast.While(p[3], p[5], self._coord(p.lineno(1)))
+ p[0] = c_ast.While(p[3], p[5], self._token_coord(p, 1))
def p_iteration_statement_2(self, p):
""" iteration_statement : DO statement WHILE LPAREN expression RPAREN SEMI """
- p[0] = c_ast.DoWhile(p[5], p[2], self._coord(p.lineno(1)))
+ p[0] = c_ast.DoWhile(p[5], p[2], self._token_coord(p, 1))
def p_iteration_statement_3(self, p):
""" iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement """
- p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._coord(p.lineno(1)))
+ p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._token_coord(p, 1))
def p_iteration_statement_4(self, p):
""" iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement """
- p[0] = c_ast.For(c_ast.DeclList(p[3], self._coord(p.lineno(1))),
- p[4], p[6], p[8], self._coord(p.lineno(1)))
+ p[0] = c_ast.For(c_ast.DeclList(p[3], self._token_coord(p, 1)),
+ p[4], p[6], p[8], self._token_coord(p, 1))
def p_jump_statement_1(self, p):
""" jump_statement : GOTO ID SEMI """
- p[0] = c_ast.Goto(p[2], self._coord(p.lineno(1)))
+ p[0] = c_ast.Goto(p[2], self._token_coord(p, 1))
def p_jump_statement_2(self, p):
""" jump_statement : BREAK SEMI """
- p[0] = c_ast.Break(self._coord(p.lineno(1)))
+ p[0] = c_ast.Break(self._token_coord(p, 1))
def p_jump_statement_3(self, p):
""" jump_statement : CONTINUE SEMI """
- p[0] = c_ast.Continue(self._coord(p.lineno(1)))
+ p[0] = c_ast.Continue(self._token_coord(p, 1))
def p_jump_statement_4(self, p):
""" jump_statement : RETURN expression SEMI
| RETURN SEMI
"""
- p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._coord(p.lineno(1)))
+ p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._token_coord(p, 1))
def p_expression_statement(self, p):
""" expression_statement : expression_opt SEMI """
if p[1] is None:
- p[0] = c_ast.EmptyStatement(self._coord(p.lineno(2)))
+ p[0] = c_ast.EmptyStatement(self._token_coord(p, 2))
else:
p[0] = p[1]
@@ -1486,7 +1486,7 @@ class CParser(PLYParser):
def p_typedef_name(self, p):
""" typedef_name : TYPEID """
- p[0] = c_ast.IdentifierType([p[1]], coord=self._coord(p.lineno(1)))
+ p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
def p_assignment_expression(self, p):
""" assignment_expression : conditional_expression
@@ -1562,7 +1562,7 @@ class CParser(PLYParser):
def p_cast_expression_2(self, p):
""" cast_expression : LPAREN type_name RPAREN cast_expression """
- p[0] = c_ast.Cast(p[2], p[4], self._coord(p.lineno(1)))
+ p[0] = c_ast.Cast(p[2], p[4], self._token_coord(p, 1))
def p_unary_expression_1(self, p):
""" unary_expression : postfix_expression """
@@ -1582,7 +1582,7 @@ class CParser(PLYParser):
p[0] = c_ast.UnaryOp(
p[1],
p[2] if len(p) == 3 else p[3],
- self._coord(p.lineno(1)))
+ self._token_coord(p, 1))
def p_unary_operator(self, p):
""" unary_operator : AND
@@ -1614,7 +1614,7 @@ class CParser(PLYParser):
| postfix_expression ARROW ID
| postfix_expression ARROW TYPEID
"""
- field = c_ast.ID(p[3], self._coord(p.lineno(3)))
+ field = c_ast.ID(p[3], self._token_coord(p, 3))
p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord)
def p_postfix_expression_5(self, p):
@@ -1650,7 +1650,7 @@ class CParser(PLYParser):
def p_primary_expression_5(self, p):
""" primary_expression : OFFSETOF LPAREN type_name COMMA offsetof_member_designator RPAREN
"""
- coord = self._coord(p.lineno(1))
+ coord = self._token_coord(p, 1)
p[0] = c_ast.FuncCall(c_ast.ID(p[1], coord),
c_ast.ExprList([p[3], p[5]], coord),
coord)
@@ -1663,7 +1663,7 @@ class CParser(PLYParser):
if len(p) == 2:
p[0] = p[1]
elif len(p) == 4:
- field = c_ast.ID(p[3], self._coord(p.lineno(3)))
+ field = c_ast.ID(p[3], self._token_coord(p, 3))
p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord)
elif len(p) == 5:
p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord)
@@ -1682,7 +1682,7 @@ class CParser(PLYParser):
def p_identifier(self, p):
""" identifier : ID """
- p[0] = c_ast.ID(p[1], self._coord(p.lineno(1)))
+ p[0] = c_ast.ID(p[1], self._token_coord(p, 1))
def p_constant_1(self, p):
""" constant : INT_CONST_DEC
@@ -1691,21 +1691,21 @@ class CParser(PLYParser):
| INT_CONST_BIN
"""
p[0] = c_ast.Constant(
- 'int', p[1], self._coord(p.lineno(1)))
+ 'int', p[1], self._token_coord(p, 1))
def p_constant_2(self, p):
""" constant : FLOAT_CONST
| HEX_FLOAT_CONST
"""
p[0] = c_ast.Constant(
- 'float', p[1], self._coord(p.lineno(1)))
+ 'float', p[1], self._token_coord(p, 1))
def p_constant_3(self, p):
""" constant : CHAR_CONST
| WCHAR_CONST
"""
p[0] = c_ast.Constant(
- 'char', p[1], self._coord(p.lineno(1)))
+ 'char', p[1], self._token_coord(p, 1))
# The "unified" string and wstring literal rules are for supporting
# concatenation of adjacent string literals.
@@ -1718,7 +1718,7 @@ class CParser(PLYParser):
"""
if len(p) == 2: # single literal
p[0] = c_ast.Constant(
- 'string', p[1], self._coord(p.lineno(1)))
+ 'string', p[1], self._token_coord(p, 1))
else:
p[1].value = p[1].value[:-1] + p[2][1:]
p[0] = p[1]
@@ -1729,7 +1729,7 @@ class CParser(PLYParser):
"""
if len(p) == 2: # single literal
p[0] = c_ast.Constant(
- 'string', p[1], self._coord(p.lineno(1)))
+ 'string', p[1], self._token_coord(p, 1))
else:
p[1].value = p[1].value.rstrip()[:-1] + p[2][2:]
p[0] = p[1]
diff --git a/pycparser/plyparser.py b/pycparser/plyparser.py
index 42ad3fb..af91922 100644
--- a/pycparser/plyparser.py
+++ b/pycparser/plyparser.py
@@ -51,6 +51,17 @@ class PLYParser(object):
line=lineno,
column=column)
+ def _token_coord(self, p, token_idx):
+ """ Returns the coordinates for the YaccProduction objet 'p' indexed
+ with 'token_idx'. The coordinate includes the 'lineno' and
+ 'column'. Both follow the lex semantic, starting from 1.
+ """
+ last_cr = p.lexer.lexer.lexdata.rfind('\n', 0, p.lexpos(token_idx))
+ if last_cr < 0:
+ last_cr = -1
+ column = (p.lexpos(token_idx) - (last_cr))
+ return self._coord(p.lineno(token_idx), column)
+
def _parse_error(self, msg, coord):
raise ParseError("%s: %s" % (coord, msg))
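
The new helper derives the column from the token's absolute position in the lexer input: the column is the 1-based distance from the last newline before the token. A standalone sketch of the same arithmetic (column_of is a hypothetical name, not pycparser API):

    def column_of(lexdata, lexpos):
        # rfind() returns -1 when the token sits on the first line, which
        # makes the subtraction below yield a 1-based column directly.
        last_cr = lexdata.rfind('\n', 0, lexpos)
        return lexpos - last_cr

    assert column_of('int a;', 4) == 5        # 'a' sits at column 5
    assert column_of('x;\nint a;', 7) == 5    # same declaration on line 2
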
diff --git a/tests/test_c_parser.py b/tests/test_c_parser.py
index 0835013..8b09a3f 100755
--- a/tests/test_c_parser.py
+++ b/tests/test_c_parser.py
@@ -88,12 +88,15 @@ class TestCParser_base(unittest.TestCase):
def setUp(self):
self.cparser = _c_parser
- def assert_coord(self, node, line, file=None):
+ def assert_coord(self, node, line, column=None, file=None):
self.assertEqual(node.coord.line, line)
+ if column is not None:
+ self.assertEqual(node.coord.column, column)
if file:
self.assertEqual(node.coord.file, file)
+
class TestCParser_fundamentals(TestCParser_base):
def get_decl(self, txt, index=0):
""" Given a source and an index returns the expanded
@@ -134,10 +137,10 @@ class TestCParser_fundamentals(TestCParser_base):
def test_coords(self):
""" Tests the "coordinates" of parsed elements - file
- name and line numbers, with modification insterted by
- #line directives.
+ name, line and column numbers, with modification
+ inserted by #line directives.
"""
- self.assert_coord(self.parse('int a;').ext[0], 1)
+ self.assert_coord(self.parse('int a;').ext[0], 1, 5)
t1 = """
int a;
@@ -145,9 +148,9 @@ class TestCParser_fundamentals(TestCParser_base):
int c;
"""
f1 = self.parse(t1, filename='test.c')
- self.assert_coord(f1.ext[0], 2, 'test.c')
- self.assert_coord(f1.ext[1], 3, 'test.c')
- self.assert_coord(f1.ext[2], 6, 'test.c')
+ self.assert_coord(f1.ext[0], 2, 13, 'test.c')
+ self.assert_coord(f1.ext[1], 3, 13, 'test.c')
+ self.assert_coord(f1.ext[2], 6, 13, 'test.c')
t1_1 = '''
int main() {
@@ -156,8 +159,8 @@ class TestCParser_fundamentals(TestCParser_base):
return 0;
}'''
f1_1 = self.parse(t1_1, filename='test.c')
- self.assert_coord(f1_1.ext[0].body.block_items[0], 3, 'test.c')
- self.assert_coord(f1_1.ext[0].body.block_items[1], 4, 'test.c')
+ self.assert_coord(f1_1.ext[0].body.block_items[0], 3, 13, 'test.c')
+ self.assert_coord(f1_1.ext[0].body.block_items[1], 4, 13, 'test.c')
t1_2 = '''
int main () {
@@ -165,13 +168,13 @@ class TestCParser_fundamentals(TestCParser_base):
}'''
f1_2 = self.parse(t1_2, filename='test.c')
# make sure that the Cast has a coord (issue 23)
- self.assert_coord(f1_2.ext[0].body.block_items[0].init, 3, 'test.c')
+ self.assert_coord(f1_2.ext[0].body.block_items[0].init, 3, 21, file='test.c')
t2 = """
#line 99
int c;
"""
- self.assert_coord(self.parse(t2).ext[0], 99)
+ self.assert_coord(self.parse(t2).ext[0], 99, 13)
t3 = """
int dsf;
@@ -180,9 +183,9 @@ class TestCParser_fundamentals(TestCParser_base):
char d;
"""
f3 = self.parse(t3, filename='test.c')
- self.assert_coord(f3.ext[0], 2, 'test.c')
- self.assert_coord(f3.ext[1], 3, 'test.c')
- self.assert_coord(f3.ext[2], 3000, 'in.h')
+ self.assert_coord(f3.ext[0], 2, 13, 'test.c')
+ self.assert_coord(f3.ext[1], 3, 14, 'test.c')
+ self.assert_coord(f3.ext[2], 3000, 14, 'in.h')
t4 = """
#line 20 "restore.h"
@@ -195,17 +198,17 @@ class TestCParser_fundamentals(TestCParser_base):
char* ro;
"""
f4 = self.parse(t4, filename='myb.c')
- self.assert_coord(f4.ext[0], 20, 'restore.h')
- self.assert_coord(f4.ext[1], 30, 'includes/daween.ph')
- self.assert_coord(f4.ext[2], 30, 'includes/daween.ph')
- self.assert_coord(f4.ext[3], 50000, 'includes/daween.ph')
+ self.assert_coord(f4.ext[0], 20, 13, 'restore.h')
+ self.assert_coord(f4.ext[1], 30, 14, 'includes/daween.ph')
+ self.assert_coord(f4.ext[2], 30, 17, 'includes/daween.ph')
+ self.assert_coord(f4.ext[3], 50000, 13, 'includes/daween.ph')
t5 = """
int
#line 99
c;
"""
- self.assert_coord(self.parse(t5).ext[0], 99)
+ self.assert_coord(self.parse(t5).ext[0], 99, 9)
# coord for ellipsis
t6 = """
@@ -213,7 +216,7 @@ class TestCParser_fundamentals(TestCParser_base):
...) {
}"""
f6 = self.parse(t6, filename='z.c')
- self.assert_coord(self.parse(t6).ext[0].decl.type.args.params[1], 3)
+ self.assert_coord(self.parse(t6).ext[0].decl.type.args.params[1], 3, 17)
def test_forloop_coord(self):
t = '''\
@@ -224,9 +227,9 @@ class TestCParser_fundamentals(TestCParser_base):
'''
s = self.parse(t, filename='f.c')
forloop = s.ext[0].body.block_items[0]
- self.assert_coord(forloop.init, 2, 'f.c')
- self.assert_coord(forloop.cond, 2, 'f.c')
- self.assert_coord(forloop.next, 3, 'f.c')
+ self.assert_coord(forloop.init, 2, 13, 'f.c')
+ self.assert_coord(forloop.cond, 2, 26, 'f.c')
+ self.assert_coord(forloop.next, 3, 17, 'f.c')
def test_simple_decls(self):
self.assertEqual(self.get_decl('int a;'),
@@ -510,7 +513,7 @@ class TestCParser_fundamentals(TestCParser_base):
"""
compound = self.parse(e).ext[0].body
self.assertTrue(isinstance(compound, Compound))
- self.assert_coord(compound, 2, '')
+ self.assert_coord(compound, 2)
# The C99 compound literal feature
#
@@ -747,8 +750,8 @@ class TestCParser_fundamentals(TestCParser_base):
"""
s7_ast = self.parse(s7, filename='test.c')
- self.assert_coord(s7_ast.ext[0].type.decls[2], 6, 'test.c')
- self.assert_coord(s7_ast.ext[0].type.decls[3], 78,
+ self.assert_coord(s7_ast.ext[0].type.decls[2], 6, 22, 'test.c')
+ self.assert_coord(s7_ast.ext[0].type.decls[3], 78, 22,
r'D:\eli\cpp_stuff\libc_include/sys/reent.h')
s8 = """
@@ -1566,10 +1569,10 @@ class TestCParser_whole_code(TestCParser_base):
ps1 = self.parse(s1)
self.assert_num_klass_nodes(ps1, EmptyStatement, 3)
self.assert_num_klass_nodes(ps1, Return, 1)
- self.assert_coord(ps1.ext[0].body.block_items[0], 3, '')
- self.assert_coord(ps1.ext[0].body.block_items[1], 4, '')
- self.assert_coord(ps1.ext[0].body.block_items[2], 4, '')
- self.assert_coord(ps1.ext[0].body.block_items[3], 6, '')
+ self.assert_coord(ps1.ext[0].body.block_items[0], 3)
+ self.assert_coord(ps1.ext[0].body.block_items[1], 4)
+ self.assert_coord(ps1.ext[0].body.block_items[2], 4)
+ self.assert_coord(ps1.ext[0].body.block_items[3], 6)
def test_switch_statement(self):
def assert_case_node(node, const_value):