  1. """Test lexers module"""
  2. # Copyright (c) IPython Development Team.
  3. # Distributed under the terms of the Modified BSD License.
  4. from unittest import TestCase
  5. from pygments.token import Token
  6. from pygments.lexers import BashLexer
  7. from .. import lexers
  8. class TestLexers(TestCase):
  9. """Collection of lexers tests"""
  10. def setUp(self):
  11. self.lexer = lexers.IPythonLexer()
  12. self.bash_lexer = BashLexer()
  13. def testIPythonLexer(self):
  14. fragment = '!echo $HOME\n'
  15. tokens = [
  16. (Token.Operator, '!'),
  17. ]
  18. tokens.extend(self.bash_lexer.get_tokens(fragment[1:]))
  19. self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
  20. fragment_2 = '!' + fragment
  21. tokens_2 = [
  22. (Token.Operator, '!!'),
  23. ] + tokens[1:]
  24. self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
  25. fragment_2 = '\t %%!\n' + fragment[1:]
  26. tokens_2 = [
  27. (Token.Text, '\t '),
  28. (Token.Operator, '%%!'),
  29. (Token.Text, '\n'),
  30. ] + tokens[1:]
  31. self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
  32. fragment_2 = 'x = ' + fragment
  33. tokens_2 = [
  34. (Token.Name, 'x'),
  35. (Token.Text, ' '),
  36. (Token.Operator, '='),
  37. (Token.Text, ' '),
  38. ] + tokens
  39. self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
  40. fragment_2 = 'x, = ' + fragment
  41. tokens_2 = [
  42. (Token.Name, 'x'),
  43. (Token.Punctuation, ','),
  44. (Token.Text, ' '),
  45. (Token.Operator, '='),
  46. (Token.Text, ' '),
  47. ] + tokens
  48. self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
  49. fragment_2 = 'x, = %sx ' + fragment[1:]
  50. tokens_2 = [
  51. (Token.Name, 'x'),
  52. (Token.Punctuation, ','),
  53. (Token.Text, ' '),
  54. (Token.Operator, '='),
  55. (Token.Text, ' '),
  56. (Token.Operator, '%'),
  57. (Token.Keyword, 'sx'),
  58. (Token.Text, ' '),
  59. ] + tokens[1:]
  60. self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
  61. fragment_2 = 'f = %R function () {}\n'
  62. tokens_2 = [
  63. (Token.Name, 'f'),
  64. (Token.Text, ' '),
  65. (Token.Operator, '='),
  66. (Token.Text, ' '),
  67. (Token.Operator, '%'),
  68. (Token.Keyword, 'R'),
  69. (Token.Text, ' function () {}\n'),
  70. ]
  71. self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
  72. fragment_2 = '\t%%xyz\n$foo\n'
  73. tokens_2 = [
  74. (Token.Text, '\t'),
  75. (Token.Operator, '%%'),
  76. (Token.Keyword, 'xyz'),
  77. (Token.Text, '\n$foo\n'),
  78. ]
  79. self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
  80. fragment_2 = '%system?\n'
  81. tokens_2 = [
  82. (Token.Operator, '%'),
  83. (Token.Keyword, 'system'),
  84. (Token.Operator, '?'),
  85. (Token.Text, '\n'),
  86. ]
  87. self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
  88. fragment_2 = 'x != y\n'
  89. tokens_2 = [
  90. (Token.Name, 'x'),
  91. (Token.Text, ' '),
  92. (Token.Operator, '!='),
  93. (Token.Text, ' '),
  94. (Token.Name, 'y'),
  95. (Token.Text, '\n'),
  96. ]
  97. self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
  98. fragment_2 = ' ?math.sin\n'
  99. tokens_2 = [
  100. (Token.Text, ' '),
  101. (Token.Operator, '?'),
  102. (Token.Text, 'math.sin'),
  103. (Token.Text, '\n'),
  104. ]
  105. self.assertEqual(tokens_2, list(self.lexer.get_tokens(fragment_2)))
  106. fragment = ' *int*?\n'
  107. tokens = [
  108. (Token.Text, ' *int*'),
  109. (Token.Operator, '?'),
  110. (Token.Text, '\n'),
  111. ]
  112. self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))