  1. """Token constants (from "token.h")."""
  2. __all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']
  3. # This file is automatically generated; please don't muck it up!
  4. #
  5. # To update the symbols in this file, 'cd' to the top directory of
  6. # the python source tree after building the interpreter and run:
  7. #
  8. # ./python Lib/token.py
  9. #--start constants--
ENDMARKER = 0
NAME = 1
NUMBER = 2
STRING = 3
NEWLINE = 4
INDENT = 5
DEDENT = 6
LPAR = 7
RPAR = 8
LSQB = 9
RSQB = 10
COLON = 11
COMMA = 12
SEMI = 13
PLUS = 14
MINUS = 15
STAR = 16
SLASH = 17
VBAR = 18
AMPER = 19
LESS = 20
GREATER = 21
EQUAL = 22
DOT = 23
PERCENT = 24
LBRACE = 25
RBRACE = 26
EQEQUAL = 27
NOTEQUAL = 28
LESSEQUAL = 29
GREATEREQUAL = 30
TILDE = 31
CIRCUMFLEX = 32
LEFTSHIFT = 33
RIGHTSHIFT = 34
DOUBLESTAR = 35
PLUSEQUAL = 36
MINEQUAL = 37
STAREQUAL = 38
SLASHEQUAL = 39
PERCENTEQUAL = 40
AMPEREQUAL = 41
VBAREQUAL = 42
CIRCUMFLEXEQUAL = 43
LEFTSHIFTEQUAL = 44
RIGHTSHIFTEQUAL = 45
DOUBLESTAREQUAL = 46
DOUBLESLASH = 47
DOUBLESLASHEQUAL = 48
AT = 49
ATEQUAL = 50
RARROW = 51
ELLIPSIS = 52
# Don't forget to update the table _PyParser_TokenNames in tokenizer.c!
OP = 53
ERRORTOKEN = 54
# These aren't used by the C tokenizer but are needed for tokenize.py
COMMENT = 55
NL = 56
ENCODING = 57
N_TOKENS = 58
# Special definitions for cooperation with parser
NT_OFFSET = 256
#--end constants--

tok_name = {value: name
            for name, value in globals().items()
            if isinstance(value, int) and not name.startswith('_')}
__all__.extend(tok_name.values())


def ISTERMINAL(x):
    return x < NT_OFFSET

def ISNONTERMINAL(x):
    return x >= NT_OFFSET

def ISEOF(x):
    return x == ENDMARKER
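
# Quick illustrative examples (not part of the generated constants; the
# results follow directly from the definitions above):
#
#     >>> tok_name[NAME]
#     'NAME'
#     >>> ISTERMINAL(NAME), ISNONTERMINAL(NT_OFFSET), ISEOF(ENDMARKER)
#     (True, True, True)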

def _main():
    import re
    import sys
    args = sys.argv[1:]
    inFileName = args and args[0] or "Include/token.h"
    outFileName = "Lib/token.py"
    if len(args) > 1:
        outFileName = args[1]
    try:
        fp = open(inFileName)
    except OSError as err:
        sys.stderr.write("I/O error: %s\n" % str(err))
        sys.exit(1)
    with fp:
        lines = fp.read().split("\n")
    prog = re.compile(
        r"#define[ \t][ \t]*([A-Z0-9][A-Z0-9_]*)[ \t][ \t]*([0-9][0-9]*)",
        re.IGNORECASE)
    comment_regex = re.compile(
        r"^\s*/\*\s*(.+?)\s*\*/\s*$",
        re.IGNORECASE)
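    # For reference, prog is meant to match header lines of the form
    #     #define NAME            1
    # while comment_regex matches standalone C comment lines such as
    #     /* Special definitions for cooperation with parser */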
    tokens = {}
    prev_val = None
    for line in lines:
        match = prog.match(line)
        if match:
            name, val = match.group(1, 2)
            val = int(val)
            tokens[val] = {'token': name}  # reverse so we can sort them...
            prev_val = val
        else:
            comment_match = comment_regex.match(line)
            if comment_match and prev_val is not None:
                comment = comment_match.group(1)
                tokens[prev_val]['comment'] = comment
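    # tokens now maps each value to its name (and, when a standalone C
    # comment followed the #define, that comment), e.g. (illustrative):
    #     {0: {'token': 'ENDMARKER'}, 1: {'token': 'NAME'}, ...}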
    keys = sorted(tokens.keys())
    # load the output skeleton from the target:
    try:
        fp = open(outFileName)
    except OSError as err:
        sys.stderr.write("I/O error: %s\n" % str(err))
        sys.exit(2)
    with fp:
        format = fp.read().split("\n")
    try:
        start = format.index("#--start constants--") + 1
        end = format.index("#--end constants--")
    except ValueError:
        sys.stderr.write("target does not contain format markers\n")
        sys.exit(3)
    lines = []
    for key in keys:
        lines.append("%s = %d" % (tokens[key]["token"], key))
        if "comment" in tokens[key]:
            lines.append("# %s" % tokens[key]["comment"])
    format[start:end] = lines
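    # format now holds the complete new file: everything outside the two
    # marker lines is carried over verbatim, and only the constants
    # region between them has been regenerated.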
    try:
        fp = open(outFileName, 'w')
    except OSError as err:
        sys.stderr.write("I/O error: %s\n" % str(err))
        sys.exit(4)
    with fp:
        fp.write("\n".join(format))


if __name__ == "__main__":
    _main()
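
# Example regeneration run (the arguments shown are just the defaults
# wired in above, so this is equivalent to running with no arguments):
#
#     ./python Lib/token.py Include/token.h Lib/token.py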