  1. """
  2. pygments.lexers.dylan
  3. ~~~~~~~~~~~~~~~~~~~~~
  4. Lexers for the Dylan language.
  5. :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
  6. :license: BSD, see LICENSE for details.
  7. """
  8. import re
  9. from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, default
  10. from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
  11. Number, Punctuation, Generic, Literal
  12. __all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer']


class DylanLexer(RegexLexer):
    """
    For the `Dylan <http://www.opendylan.org/>`_ language.

    .. versionadded:: 0.7
    """

    name = 'Dylan'
    aliases = ['dylan']
    filenames = ['*.dylan', '*.dyl', '*.intr']
    mimetypes = ['text/x-dylan']

    flags = re.IGNORECASE

    builtins = {
        'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
        'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
        'each-subclass', 'exception', 'exclude', 'function', 'generic',
        'handler', 'inherited', 'inline', 'inline-only', 'instance',
        'interface', 'import', 'keyword', 'library', 'macro', 'method',
        'module', 'open', 'primary', 'required', 'sealed', 'sideways',
        'singleton', 'slot', 'thread', 'variable', 'virtual'}

    keywords = {
        'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
        'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
        'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
        'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
        'while'}

    operators = {
        '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
        '>', '>=', '&', '|'}

    functions = {
        'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
        'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
        'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
        'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
        'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
        'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
        'condition-format-arguments', 'condition-format-string', 'conjoin',
        'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
        'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
        'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
        'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
        'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
        'function-arguments', 'function-return-values',
        'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
        'generic-function-methods', 'head', 'head-setter', 'identity',
        'initialize', 'instance?', 'integral?', 'intersection',
        'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
        'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
        'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
        'min', 'modulo', 'negative', 'negative?', 'next-method',
        'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
        'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
        'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
        'remove-duplicates', 'remove-duplicates!', 'remove-key!',
        'remove-method', 'replace-elements!', 'replace-subsequence!',
        'restart-query', 'return-allowed?', 'return-description',
        'return-query', 'reverse', 'reverse!', 'round', 'round/',
        'row-major-index', 'second', 'second-setter', 'shallow-copy',
        'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
        'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
        'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
        'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
        'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
        'vector', 'zero?'}

    valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'

    def get_tokens_unprocessed(self, text):
        # Re-flag generic Name tokens whose (case-insensitive) value is a
        # known builtin, keyword, function or operator; everything else is
        # passed through unchanged.
        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
            if token is Name:
                lowercase_value = value.lower()
                if lowercase_value in self.builtins:
                    yield index, Name.Builtin, value
                    continue
                if lowercase_value in self.keywords:
                    yield index, Keyword, value
                    continue
                if lowercase_value in self.functions:
                    yield index, Name.Builtin, value
                    continue
                if lowercase_value in self.operators:
                    yield index, Operator, value
                    continue
            yield index, token, value

    tokens = {
        'root': [
            # Whitespace
            (r'\s+', Text),

            # single line comment
            (r'//.*?\n', Comment.Single),

            # lid header
            (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
                bygroups(Name.Attribute, Operator, Text, String)),

            default('code')  # no header match, switch to code
        ],
        'code': [
            # Whitespace
            (r'\s+', Text),

            # single line comment
            (r'//.*?\n', Comment.Single),

            # multi-line comment
            (r'/\*', Comment.Multiline, 'comment'),

            # strings and characters
            (r'"', String, 'string'),
            (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),

            # binary integer
            (r'#b[01]+', Number.Bin),

            # octal integer
            (r'#o[0-7]+', Number.Oct),

            # floating point
            (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),

            # decimal integer
            (r'[-+]?\d+', Number.Integer),

            # hex integer
            (r'#x[0-9a-f]+', Number.Hex),

            # Macro parameters
            (r'(\?' + valid_name + ')(:)'
             r'(token|name|variable|expression|body|case-body|\*)',
                bygroups(Name.Tag, Operator, Name.Builtin)),
            (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
                bygroups(Name.Tag, Operator, Name.Builtin)),
            (r'\?' + valid_name, Name.Tag),

            # Punctuation
            (r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])', Punctuation),

            # Most operators are picked up as names and then re-flagged.
            # This one isn't valid in a name though, so we pick it up now.
            (r':=', Operator),

            # Pick up #t / #f before we match other stuff with #.
            (r'#[tf]', Literal),

            # #"foo" style keywords
            (r'#"', String.Symbol, 'keyword'),

            # #rest, #key, #all-keys, etc.
            (r'#[a-z0-9-]+', Keyword),

            # required-init-keyword: style keywords.
            (valid_name + ':', Keyword),

            # class names
            ('<' + valid_name + '>', Name.Class),

            # define variable forms.
            (r'\*' + valid_name + r'\*', Name.Variable.Global),

            # define constant forms.
            (r'\$' + valid_name, Name.Constant),

            # everything else. We re-flag some of these in the method above.
            (valid_name, Name),
        ],
        'comment': [
            (r'[^*/]', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ],
        'keyword': [
            (r'"', String.Symbol, '#pop'),
            (r'[^\\"]+', String.Symbol),  # all other characters
        ],
        'string': [
            (r'"', String, '#pop'),
            (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
            (r'[^\\"\n]+', String),  # all other characters
            (r'\\\n', String),  # line continuation
            (r'\\', String),  # stray backslash
        ]
    }


class DylanLidLexer(RegexLexer):
    """
    For Dylan LID (Library Interchange Definition) files.

    .. versionadded:: 1.6
    """

    name = 'DylanLID'
    aliases = ['dylan-lid', 'lid']
    filenames = ['*.lid', '*.hdp']
    mimetypes = ['text/x-dylan-lid']

    flags = re.IGNORECASE

    tokens = {
        'root': [
            # Whitespace
            (r'\s+', Text),

            # single line comment
            (r'//.*?\n', Comment.Single),

            # lid header
            (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
                bygroups(Name.Attribute, Operator, Text, String)),
        ]
    }


class DylanConsoleLexer(Lexer):
    """
    For Dylan interactive console output like:

    .. sourcecode:: dylan-console

        ? let a = 1;
        => 1
        ? a
        => 1

    This is based on a copy of the RubyConsoleLexer.

    .. versionadded:: 1.6
    """
    name = 'Dylan session'
    aliases = ['dylan-console', 'dylan-repl']
    filenames = ['*.dylan-console']
    mimetypes = ['text/x-dylan-console']

    _line_re = re.compile('.*?\n')
    _prompt_re = re.compile(r'\?| ')

    def get_tokens_unprocessed(self, text):
        dylexer = DylanLexer(**self.options)

        curcode = ''
        insertions = []
        for match in self._line_re.finditer(text):
            line = match.group()
            m = self._prompt_re.match(line)
            if m is not None:
                # Prompt line: record the prompt for do_insertions and buffer
                # the remainder of the line as Dylan code.
                end = m.end()
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:end])]))
                curcode += line[end:]
            else:
                # Output line: flush any buffered code through DylanLexer
                # first, then emit the line as generic output.
                if curcode:
                    yield from do_insertions(insertions,
                                             dylexer.get_tokens_unprocessed(curcode))
                    curcode = ''
                    insertions = []
                yield match.start(), Generic.Output, line
        if curcode:
            yield from do_insertions(insertions,
                                     dylexer.get_tokens_unprocessed(curcode))
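

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the upstream module): running
# this file directly highlights a hand-written Dylan snippet with DylanLexer
# and a small console transcript with DylanConsoleLexer. The sample sources
# below are made up for demonstration purposes.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    sample_code = (
        'define method double (x :: <integer>) => (r :: <integer>)\n'
        '  x * 2;\n'
        'end method double;\n'
    )
    print(highlight(sample_code, DylanLexer(), TerminalFormatter()))

    sample_session = '? let a = 1;\n=> 1\n? a\n=> 1\n'
    print(highlight(sample_session, DylanConsoleLexer(), TerminalFormatter()))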