# CPython standard-library module: Lib/token.py
"""Token constants (from "token.h")."""

__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']

# This file is automatically generated; please don't muck it up!
#
# To update the symbols in this file, 'cd' to the top directory of
# the python source tree after building the interpreter and run:
#
# ./python Lib/token.py

# The region between the start/end markers below is rewritten in place by
# _main(); anything added inside it will be lost on regeneration.  Numbers
# below NT_OFFSET are terminal token types; NT_OFFSET marks the start of
# the non-terminal grammar-symbol range (see ISTERMINAL/ISNONTERMINAL).
#--start constants--
ENDMARKER = 0
NAME = 1
NUMBER = 2
STRING = 3
NEWLINE = 4
INDENT = 5
DEDENT = 6
LPAR = 7
RPAR = 8
LSQB = 9
RSQB = 10
COLON = 11
COMMA = 12
SEMI = 13
PLUS = 14
MINUS = 15
STAR = 16
SLASH = 17
VBAR = 18
AMPER = 19
LESS = 20
GREATER = 21
EQUAL = 22
DOT = 23
PERCENT = 24
LBRACE = 25
RBRACE = 26
EQEQUAL = 27
NOTEQUAL = 28
LESSEQUAL = 29
GREATEREQUAL = 30
TILDE = 31
CIRCUMFLEX = 32
LEFTSHIFT = 33
RIGHTSHIFT = 34
DOUBLESTAR = 35
PLUSEQUAL = 36
MINEQUAL = 37
STAREQUAL = 38
SLASHEQUAL = 39
PERCENTEQUAL = 40
AMPEREQUAL = 41
VBAREQUAL = 42
CIRCUMFLEXEQUAL = 43
LEFTSHIFTEQUAL = 44
RIGHTSHIFTEQUAL = 45
DOUBLESTAREQUAL = 46
DOUBLESLASH = 47
DOUBLESLASHEQUAL = 48
AT = 49
ATEQUAL = 50
RARROW = 51
ELLIPSIS = 52
OP = 53
AWAIT = 54
ASYNC = 55
ERRORTOKEN = 56
N_TOKENS = 57
NT_OFFSET = 256
#--end constants--
  70. tok_name = {value: name
  71. for name, value in globals().items()
  72. if isinstance(value, int) and not name.startswith('_')}
  73. __all__.extend(tok_name.values())
  74. def ISTERMINAL(x):
  75. return x < NT_OFFSET
  76. def ISNONTERMINAL(x):
  77. return x >= NT_OFFSET
  78. def ISEOF(x):
  79. return x == ENDMARKER
  80. def _main():
  81. import re
  82. import sys
  83. args = sys.argv[1:]
  84. inFileName = args and args[0] or "Include/token.h"
  85. outFileName = "Lib/token.py"
  86. if len(args) > 1:
  87. outFileName = args[1]
  88. try:
  89. fp = open(inFileName)
  90. except OSError as err:
  91. sys.stdout.write("I/O error: %s\n" % str(err))
  92. sys.exit(1)
  93. with fp:
  94. lines = fp.read().split("\n")
  95. prog = re.compile(
  96. "#define[ \t][ \t]*([A-Z0-9][A-Z0-9_]*)[ \t][ \t]*([0-9][0-9]*)",
  97. re.IGNORECASE)
  98. tokens = {}
  99. for line in lines:
  100. match = prog.match(line)
  101. if match:
  102. name, val = match.group(1, 2)
  103. val = int(val)
  104. tokens[val] = name # reverse so we can sort them...
  105. keys = sorted(tokens.keys())
  106. # load the output skeleton from the target:
  107. try:
  108. fp = open(outFileName)
  109. except OSError as err:
  110. sys.stderr.write("I/O error: %s\n" % str(err))
  111. sys.exit(2)
  112. with fp:
  113. format = fp.read().split("\n")
  114. try:
  115. start = format.index("#--start constants--") + 1
  116. end = format.index("#--end constants--")
  117. except ValueError:
  118. sys.stderr.write("target does not contain format markers")
  119. sys.exit(3)
  120. lines = []
  121. for val in keys:
  122. lines.append("%s = %d" % (tokens[val], val))
  123. format[start:end] = lines
  124. try:
  125. fp = open(outFileName, 'w')
  126. except OSError as err:
  127. sys.stderr.write("I/O error: %s\n" % str(err))
  128. sys.exit(4)
  129. with fp:
  130. fp.write("\n".join(format))
# When run as a script, regenerate this file's constants from token.h.
if __name__ == "__main__":
    _main()