# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.

"""Convert graminit.[ch] spit out by pgen to Python code.

Pgen is the Python parser generator.  It is useful to quickly create a
parser from a grammar file in Python's grammar notation.  But I don't
want my parsers to be written in C (yet), so I'm translating the
parsing tables to Python data structures and writing a Python parse
engine.

Note that the token numbers are constants determined by the standard
Python tokenizer.  The standard token module defines these numbers and
their names (the names are not used much).  The token numbers are
hardcoded into the Python tokenizer and into pgen.  A Python
implementation of the Python tokenizer is also available, in the
standard tokenize module.

On the other hand, symbol numbers (representing the grammar's
non-terminals) are assigned by pgen based on the actual grammar
input.

Note: this module is pretty much obsolete; the pgen module generates
equivalent grammar tables directly from the Grammar.txt input file
without having to invoke the Python pgen C program.
"""

# Python imports
import re

# Local imports
from pgen2 import grammar, token
  26. class Converter(grammar.Grammar):
  27. """Grammar subclass that reads classic pgen output files.
  28. The run() method reads the tables as produced by the pgen parser
  29. generator, typically contained in two C files, graminit.h and
  30. graminit.c. The other methods are for internal use only.
  31. See the base class for more documentation.
  32. """
  33. def run(self, graminit_h, graminit_c):
  34. """Load the grammar tables from the text files written by pgen."""
  35. self.parse_graminit_h(graminit_h)
  36. self.parse_graminit_c(graminit_c)
  37. self.finish_off()
  38. def parse_graminit_h(self, filename):
  39. """Parse the .h file written by pgen. (Internal)
  40. This file is a sequence of #define statements defining the
  41. nonterminals of the grammar as numbers. We build two tables
  42. mapping the numbers to names and back.
  43. """
  44. try:
  45. f = open(filename)
  46. except OSError as err:
  47. print("Can't open %s: %s" % (filename, err))
  48. return False
  49. self.symbol2number = {}
  50. self.number2symbol = {}
  51. lineno = 0
  52. for line in f:
  53. lineno += 1
  54. mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line)
  55. if not mo and line.strip():
  56. print("%s(%s): can't parse %s" % (filename, lineno,
  57. line.strip()))
  58. else:
  59. symbol, number = mo.groups()
  60. number = int(number)
  61. assert symbol not in self.symbol2number
  62. assert number not in self.number2symbol
  63. self.symbol2number[symbol] = number
  64. self.number2symbol[number] = symbol
  65. return True
  66. def parse_graminit_c(self, filename):
  67. """Parse the .c file written by pgen. (Internal)
  68. The file looks as follows. The first two lines are always this:
  69. #include "pgenheaders.h"
  70. #include "grammar.h"
  71. After that come four blocks:
  72. 1) one or more state definitions
  73. 2) a table defining dfas
  74. 3) a table defining labels
  75. 4) a struct defining the grammar
  76. A state definition has the following form:
  77. - one or more arc arrays, each of the form:
  78. static arc arcs_<n>_<m>[<k>] = {
  79. {<i>, <j>},
  80. ...
  81. };
  82. - followed by a state array, of the form:
  83. static state states_<s>[<t>] = {
  84. {<k>, arcs_<n>_<m>},
  85. ...
  86. };
  87. """
  88. try:
  89. f = open(filename)
  90. except OSError as err:
  91. print("Can't open %s: %s" % (filename, err))
  92. return False
  93. # The code below essentially uses f's iterator-ness!
  94. lineno = 0
  95. # Expect the two #include lines
  96. lineno, line = lineno+1, next(f)
  97. assert line == '#include "pgenheaders.h"\n', (lineno, line)
  98. lineno, line = lineno+1, next(f)
  99. assert line == '#include "grammar.h"\n', (lineno, line)
  100. # Parse the state definitions
  101. lineno, line = lineno+1, next(f)
  102. allarcs = {}
  103. states = []
  104. while line.startswith("static arc "):
  105. while line.startswith("static arc "):
  106. mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$",
  107. line)
  108. assert mo, (lineno, line)
  109. n, m, k = list(map(int, mo.groups()))
  110. arcs = []
  111. for _ in range(k):
  112. lineno, line = lineno+1, next(f)
  113. mo = re.match(r"\s+{(\d+), (\d+)},$", line)
  114. assert mo, (lineno, line)
  115. i, j = list(map(int, mo.groups()))
  116. arcs.append((i, j))
  117. lineno, line = lineno+1, next(f)
  118. assert line == "};\n", (lineno, line)
  119. allarcs[(n, m)] = arcs
  120. lineno, line = lineno+1, next(f)
  121. mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line)
  122. assert mo, (lineno, line)
  123. s, t = list(map(int, mo.groups()))
  124. assert s == len(states), (lineno, line)
  125. state = []
  126. for _ in range(t):
  127. lineno, line = lineno+1, next(f)
  128. mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line)
  129. assert mo, (lineno, line)
  130. k, n, m = list(map(int, mo.groups()))
  131. arcs = allarcs[n, m]
  132. assert k == len(arcs), (lineno, line)
  133. state.append(arcs)
  134. states.append(state)
  135. lineno, line = lineno+1, next(f)
  136. assert line == "};\n", (lineno, line)
  137. lineno, line = lineno+1, next(f)
  138. self.states = states
  139. # Parse the dfas
  140. dfas = {}
  141. mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line)
  142. assert mo, (lineno, line)
  143. ndfas = int(mo.group(1))
  144. for i in range(ndfas):
  145. lineno, line = lineno+1, next(f)
  146. mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$',
  147. line)
  148. assert mo, (lineno, line)
  149. symbol = mo.group(2)
  150. number, x, y, z = list(map(int, mo.group(1, 3, 4, 5)))
  151. assert self.symbol2number[symbol] == number, (lineno, line)
  152. assert self.number2symbol[number] == symbol, (lineno, line)
  153. assert x == 0, (lineno, line)
  154. state = states[z]
  155. assert y == len(state), (lineno, line)
  156. lineno, line = lineno+1, next(f)
  157. mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line)
  158. assert mo, (lineno, line)
  159. first = {}
  160. rawbitset = eval(mo.group(1))
  161. for i, c in enumerate(rawbitset):
  162. byte = ord(c)
  163. for j in range(8):
  164. if byte & (1<<j):
  165. first[i*8 + j] = 1
  166. dfas[number] = (state, first)
  167. lineno, line = lineno+1, next(f)
  168. assert line == "};\n", (lineno, line)
  169. self.dfas = dfas
  170. # Parse the labels
  171. labels = []
  172. lineno, line = lineno+1, next(f)
  173. mo = re.match(r"static label labels\[(\d+)\] = {$", line)
  174. assert mo, (lineno, line)
  175. nlabels = int(mo.group(1))
  176. for i in range(nlabels):
  177. lineno, line = lineno+1, next(f)
  178. mo = re.match(r'\s+{(\d+), (0|"\w+")},$', line)
  179. assert mo, (lineno, line)
  180. x, y = mo.groups()
  181. x = int(x)
  182. if y == "0":
  183. y = None
  184. else:
  185. y = eval(y)
  186. labels.append((x, y))
  187. lineno, line = lineno+1, next(f)
  188. assert line == "};\n", (lineno, line)
  189. self.labels = labels
  190. # Parse the grammar struct
  191. lineno, line = lineno+1, next(f)
  192. assert line == "grammar _PyParser_Grammar = {\n", (lineno, line)
  193. lineno, line = lineno+1, next(f)
  194. mo = re.match(r"\s+(\d+),$", line)
  195. assert mo, (lineno, line)
  196. ndfas = int(mo.group(1))
  197. assert ndfas == len(self.dfas)
  198. lineno, line = lineno+1, next(f)
  199. assert line == "\tdfas,\n", (lineno, line)
  200. lineno, line = lineno+1, next(f)
  201. mo = re.match(r"\s+{(\d+), labels},$", line)
  202. assert mo, (lineno, line)
  203. nlabels = int(mo.group(1))
  204. assert nlabels == len(self.labels), (lineno, line)
  205. lineno, line = lineno+1, next(f)
  206. mo = re.match(r"\s+(\d+)$", line)
  207. assert mo, (lineno, line)
  208. start = int(mo.group(1))
  209. assert start in self.number2symbol, (lineno, line)
  210. self.start = start
  211. lineno, line = lineno+1, next(f)
  212. assert line == "};\n", (lineno, line)
  213. try:
  214. lineno, line = lineno+1, next(f)
  215. except StopIteration:
  216. pass
  217. else:
  218. assert 0, (lineno, line)
  219. def finish_off(self):
  220. """Create additional useful structures. (Internal)."""
  221. self.keywords = {} # map from keyword strings to arc labels
  222. self.tokens = {} # map from numeric token values to arc labels
  223. for ilabel, (type, value) in enumerate(self.labels):
  224. if type == token.NAME and value is not None:
  225. self.keywords[value] = ilabel
  226. elif value is None:
  227. self.tokens[type] = ilabel