# pygments/lexers/grammar_notation.py
  1. """
  2. pygments.lexers.grammar_notation
  3. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
  4. Lexers for grammar notations like BNF.
  5. :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
  6. :license: BSD, see LICENSE for details.
  7. """
  8. import re
  9. from pygments.lexer import RegexLexer, bygroups, include, this, using, words
  10. from pygments.token import Comment, Keyword, Literal, Name, Number, \
  11. Operator, Punctuation, String, Text
  12. __all__ = ['BnfLexer', 'AbnfLexer', 'JsgfLexer', 'PegLexer']
  13. class BnfLexer(RegexLexer):
  14. """
  15. This lexer is for grammar notations which are similar to
  16. original BNF.
  17. In order to maximize a number of targets of this lexer,
  18. let's decide some designs:
  19. * We don't distinguish `Terminal Symbol`.
  20. * We do assume that `NonTerminal Symbol` are always enclosed
  21. with arrow brackets.
  22. * We do assume that `NonTerminal Symbol` may include
  23. any printable characters except arrow brackets and ASCII 0x20.
  24. This assumption is for `RBNF <http://www.rfc-base.org/txt/rfc-5511.txt>`_.
  25. * We do assume that target notation doesn't support comment.
  26. * We don't distinguish any operators and punctuation except
  27. `::=`.
  28. Though these desision making might cause too minimal highlighting
  29. and you might be disappointed, but it is reasonable for us.
  30. .. versionadded:: 2.1
  31. """
  32. name = 'BNF'
  33. aliases = ['bnf']
  34. filenames = ['*.bnf']
  35. mimetypes = ['text/x-bnf']
  36. tokens = {
  37. 'root': [
  38. (r'(<)([ -;=?-~]+)(>)',
  39. bygroups(Punctuation, Name.Class, Punctuation)),
  40. # an only operator
  41. (r'::=', Operator),
  42. # fallback
  43. (r'[^<>:]+', Text), # for performance
  44. (r'.', Text),
  45. ],
  46. }
  47. class AbnfLexer(RegexLexer):
  48. """
  49. Lexer for `IETF 7405 ABNF
  50. <http://www.ietf.org/rfc/rfc7405.txt>`_
  51. (Updates `5234 <http://www.ietf.org/rfc/rfc5234.txt>`_)
  52. grammars.
  53. .. versionadded:: 2.1
  54. """
  55. name = 'ABNF'
  56. aliases = ['abnf']
  57. filenames = ['*.abnf']
  58. mimetypes = ['text/x-abnf']
  59. _core_rules = (
  60. 'ALPHA', 'BIT', 'CHAR', 'CR', 'CRLF', 'CTL', 'DIGIT',
  61. 'DQUOTE', 'HEXDIG', 'HTAB', 'LF', 'LWSP', 'OCTET',
  62. 'SP', 'VCHAR', 'WSP')
  63. tokens = {
  64. 'root': [
  65. # comment
  66. (r';.*$', Comment.Single),
  67. # quoted
  68. # double quote itself in this state, it is as '%x22'.
  69. (r'(%[si])?"[^"]*"', Literal),
  70. # binary (but i have never seen...)
  71. (r'%b[01]+\-[01]+\b', Literal), # range
  72. (r'%b[01]+(\.[01]+)*\b', Literal), # concat
  73. # decimal
  74. (r'%d[0-9]+\-[0-9]+\b', Literal), # range
  75. (r'%d[0-9]+(\.[0-9]+)*\b', Literal), # concat
  76. # hexadecimal
  77. (r'%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b', Literal), # range
  78. (r'%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b', Literal), # concat
  79. # repetition (<a>*<b>element) including nRule
  80. (r'\b[0-9]+\*[0-9]+', Operator),
  81. (r'\b[0-9]+\*', Operator),
  82. (r'\b[0-9]+', Operator),
  83. (r'\*', Operator),
  84. # Strictly speaking, these are not keyword but
  85. # are called `Core Rule'.
  86. (words(_core_rules, suffix=r'\b'), Keyword),
  87. # nonterminals (ALPHA *(ALPHA / DIGIT / "-"))
  88. (r'[a-zA-Z][a-zA-Z0-9-]+\b', Name.Class),
  89. # operators
  90. (r'(=/|=|/)', Operator),
  91. # punctuation
  92. (r'[\[\]()]', Punctuation),
  93. # fallback
  94. (r'\s+', Text),
  95. (r'.', Text),
  96. ],
  97. }
  98. class JsgfLexer(RegexLexer):
  99. """
  100. For `JSpeech Grammar Format <https://www.w3.org/TR/jsgf/>`_
  101. grammars.
  102. .. versionadded:: 2.2
  103. """
  104. name = 'JSGF'
  105. aliases = ['jsgf']
  106. filenames = ['*.jsgf']
  107. mimetypes = ['application/jsgf', 'application/x-jsgf', 'text/jsgf']
  108. flags = re.MULTILINE | re.UNICODE
  109. tokens = {
  110. 'root': [
  111. include('comments'),
  112. include('non-comments'),
  113. ],
  114. 'comments': [
  115. (r'/\*\*(?!/)', Comment.Multiline, 'documentation comment'),
  116. (r'/\*[\w\W]*?\*/', Comment.Multiline),
  117. (r'//.*', Comment.Single),
  118. ],
  119. 'non-comments': [
  120. (r'\A#JSGF[^;]*', Comment.Preproc),
  121. (r'\s+', Text),
  122. (r';', Punctuation),
  123. (r'[=|()\[\]*+]', Operator),
  124. (r'/[^/]+/', Number.Float),
  125. (r'"', String.Double, 'string'),
  126. (r'\{', String.Other, 'tag'),
  127. (words(('import', 'public'), suffix=r'\b'), Keyword.Reserved),
  128. (r'grammar\b', Keyword.Reserved, 'grammar name'),
  129. (r'(<)(NULL|VOID)(>)',
  130. bygroups(Punctuation, Name.Builtin, Punctuation)),
  131. (r'<', Punctuation, 'rulename'),
  132. (r'\w+|[^\s;=|()\[\]*+/"{<\w]+', Text),
  133. ],
  134. 'string': [
  135. (r'"', String.Double, '#pop'),
  136. (r'\\.', String.Escape),
  137. (r'[^\\"]+', String.Double),
  138. ],
  139. 'tag': [
  140. (r'\}', String.Other, '#pop'),
  141. (r'\\.', String.Escape),
  142. (r'[^\\}]+', String.Other),
  143. ],
  144. 'grammar name': [
  145. (r';', Punctuation, '#pop'),
  146. (r'\s+', Text),
  147. (r'\.', Punctuation),
  148. (r'[^;\s.]+', Name.Namespace),
  149. ],
  150. 'rulename': [
  151. (r'>', Punctuation, '#pop'),
  152. (r'\*', Punctuation),
  153. (r'\s+', Text),
  154. (r'([^.>]+)(\s*)(\.)', bygroups(Name.Namespace, Text, Punctuation)),
  155. (r'[^.>]+', Name.Constant),
  156. ],
  157. 'documentation comment': [
  158. (r'\*/', Comment.Multiline, '#pop'),
  159. (r'(^\s*\*?\s*)(@(?:example|see)\s+)'
  160. r'([\w\W]*?(?=(?:^\s*\*?\s*@|\*/)))',
  161. bygroups(Comment.Multiline, Comment.Special,
  162. using(this, state='example'))),
  163. (r'(^\s*\*?\s*)(@\S*)',
  164. bygroups(Comment.Multiline, Comment.Special)),
  165. (r'[^*\n@]+|\w|\W', Comment.Multiline),
  166. ],
  167. 'example': [
  168. (r'\n\s*\*', Comment.Multiline),
  169. include('non-comments'),
  170. (r'.', Comment.Multiline),
  171. ],
  172. }
  173. class PegLexer(RegexLexer):
  174. """
  175. This lexer is for `Parsing Expression Grammars
  176. <https://bford.info/pub/lang/peg.pdf>`_ (PEG).
  177. Various implementations of PEG have made different decisions
  178. regarding the syntax, so let's try to be accommodating:
  179. * `<-`, `←`, `:`, and `=` are all accepted as rule operators.
  180. * Both `|` and `/` are choice operators.
  181. * `^`, `↑`, and `~` are cut operators.
  182. * A single `a-z` character immediately before a string, or
  183. multiple `a-z` characters following a string, are part of the
  184. string (e.g., `r"..."` or `"..."ilmsuxa`).
  185. .. versionadded:: 2.6
  186. """
  187. name = 'PEG'
  188. aliases = ['peg']
  189. filenames = ['*.peg']
  190. mimetypes = ['text/x-peg']
  191. tokens = {
  192. 'root': [
  193. # Comments
  194. (r'#.*', Comment.Single),
  195. # All operators
  196. (r'<-|[←:=/|&!?*+^↑~]', Operator),
  197. # Other punctuation
  198. (r'[()]', Punctuation),
  199. # Keywords
  200. (r'\.', Keyword),
  201. # Character classes
  202. (r'(\[)([^\]]*(?:\\.[^\]\\]*)*)(\])',
  203. bygroups(Punctuation, String, Punctuation)),
  204. # Single and double quoted strings (with optional modifiers)
  205. (r'[a-z]?"[^"\\]*(?:\\.[^"\\]*)*"[a-z]*', String.Double),
  206. (r"[a-z]?'[^'\\]*(?:\\.[^'\\]*)*'[a-z]*", String.Single),
  207. # Nonterminals are not whitespace, operators, or punctuation
  208. (r'[^\s<←:=/|&!?*+\^↑~()\[\]"\'#]+', Name.Class),
  209. # Fallback
  210. (r'.', Text),
  211. ],
  212. }