aboutsummaryrefslogtreecommitdiff
path: root/catapult/common/py_vulcanize/py_vulcanize/strip_js_comments_unittest.py
blob: 685cb824a24f6c274616b4fd237cff260ecc7e0c (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Tests for strip_js_comments module."""

import unittest

from py_vulcanize import strip_js_comments


# This test case tests a protected method.
# pylint: disable=W0212
class JavaScriptStripCommentTests(unittest.TestCase):
  """Test case for _strip_js_comments and _TokenizeJS.

  Uses assertEqual rather than the legacy assertEquals alias, which was
  deprecated in Python 3.2 and removed in Python 3.12.
  """

  def test_strip_comments(self):
    """StripJSComments removes // and /* */ comments, keeping other text."""
    self.assertEqual(
        'A ', strip_js_comments.StripJSComments('A // foo'))
    self.assertEqual(
        'A bar', strip_js_comments.StripJSComments('A // foo\nbar'))
    self.assertEqual(
        'A  b', strip_js_comments.StripJSComments('A /* foo */ b'))
    self.assertEqual(
        'A  b', strip_js_comments.StripJSComments('A /* foo\n */ b'))

  def test_tokenize_empty(self):
    """Empty input yields no tokens."""
    tokens = list(strip_js_comments._TokenizeJS(''))
    self.assertEqual([], tokens)

  def test_tokenize_nl(self):
    """A bare newline is emitted as its own token."""
    tokens = list(strip_js_comments._TokenizeJS('\n'))
    self.assertEqual(['\n'], tokens)

  def test_tokenize_slashslash_comment(self):
    """A // comment splits into text, '//' marker, and comment body."""
    tokens = list(strip_js_comments._TokenizeJS('A // foo'))
    self.assertEqual(['A ', '//', ' foo'], tokens)

  def test_tokenize_slashslash_comment_then_newline(self):
    """The newline terminating a // comment is its own token."""
    tokens = list(strip_js_comments._TokenizeJS('A // foo\nbar'))
    self.assertEqual(['A ', '//', ' foo', '\n', 'bar'], tokens)

  def test_tokenize_cstyle_comment_one_line(self):
    """A /* */ comment splits into '/*', body, and '*/' tokens."""
    tokens = list(strip_js_comments._TokenizeJS('A /* foo */'))
    self.assertEqual(['A ', '/*', ' foo ', '*/'], tokens)

  def test_tokenize_cstyle_comment_multi_line(self):
    """Newlines inside a /* */ comment are emitted as separate tokens."""
    tokens = list(strip_js_comments._TokenizeJS('A /* foo\n*bar\n*/'))
    self.assertEqual(['A ', '/*', ' foo', '\n', '*bar', '\n', '*/'], tokens)


# Run all tests in this module when executed directly as a script.
if __name__ == '__main__':
  unittest.main()