urbi.py

  1. """
  2. pygments.lexers.urbi
  3. ~~~~~~~~~~~~~~~~~~~~
  4. Lexers for UrbiScript language.
  5. :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
  6. :license: BSD, see LICENSE for details.
  7. """
  8. import re
  9. from pygments.lexer import ExtendedRegexLexer, words
  10. from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
  11. Number, Punctuation
  12. __all__ = ['UrbiscriptLexer']
  13. class UrbiscriptLexer(ExtendedRegexLexer):
  14. """
  15. For UrbiScript source code.
  16. .. versionadded:: 1.5
  17. """
  18. name = 'UrbiScript'
  19. aliases = ['urbiscript']
  20. filenames = ['*.u']
  21. mimetypes = ['application/x-urbiscript']
  22. flags = re.DOTALL
  23. # TODO
  24. # - handle Experimental and deprecated tags with specific tokens
  25. # - handle Angles and Durations with specific tokens
  26. def blob_callback(lexer, match, ctx):
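        # UrbiScript string literals can embed raw binary payloads written as
        # \B(<size>)(<data>); this callback checks the declared size against
        # the actual payload before deciding how to tokenize the blob.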
        text_before_blob = match.group(1)
        blob_start = match.group(2)
        blob_size_str = match.group(3)
        blob_size = int(blob_size_str)
        yield match.start(), String, text_before_blob
        ctx.pos += len(text_before_blob)

        # if the blob size doesn't match the blob format (example: "\B(2)(aaa)"),
        # yield the blob as a plain string
        if ctx.text[match.end() + blob_size] != ")":
            result = "\\B(" + blob_size_str + ")("
            yield match.start(), String, result
            ctx.pos += len(result)
            return

        # if the blob is well formatted, yield it as an Escape
        blob_text = blob_start + ctx.text[match.end():match.end() + blob_size] + ")"
        yield match.start(), String.Escape, blob_text
        ctx.pos = match.end() + blob_size + 1  # +1 is the ending ")"
    tokens = {
        'root': [
            (r'\s+', Text),
            # comments
            (r'//.*?\n', Comment),
            (r'/\*', Comment.Multiline, 'comment'),
            (r'(every|for|loop|while)(?:;|&|\||,)', Keyword),
            (words((
                'assert', 'at', 'break', 'case', 'catch', 'closure', 'compl',
                'continue', 'default', 'else', 'enum', 'every', 'external',
                'finally', 'for', 'freezeif', 'if', 'new', 'onleave', 'return',
                'stopif', 'switch', 'this', 'throw', 'timeout', 'try',
                'waituntil', 'whenever', 'while'), suffix=r'\b'),
             Keyword),
            (words((
                'asm', 'auto', 'bool', 'char', 'const_cast', 'delete', 'double',
                'dynamic_cast', 'explicit', 'export', 'extern', 'float', 'friend',
                'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'register',
                'reinterpret_cast', 'short', 'signed', 'sizeof', 'static_cast',
                'struct', 'template', 'typedef', 'typeid', 'typename', 'union',
                'unsigned', 'using', 'virtual', 'volatile', 'wchar_t'), suffix=r'\b'),
             Keyword.Reserved),
            # deprecated keywords, use a meaningful token when available
            (r'(emit|foreach|internal|loopn|static)\b', Keyword),
            # ignored keywords, use a meaningful token when available
            (r'(private|protected|public)\b', Keyword),
            (r'(var|do|const|function|class)\b', Keyword.Declaration),
            (r'(true|false|nil|void)\b', Keyword.Constant),
            (words((
                'Barrier', 'Binary', 'Boolean', 'CallMessage', 'Channel', 'Code',
                'Comparable', 'Container', 'Control', 'Date', 'Dictionary', 'Directory',
                'Duration', 'Enumeration', 'Event', 'Exception', 'Executable', 'File',
                'Finalizable', 'Float', 'FormatInfo', 'Formatter', 'Global', 'Group',
                'Hash', 'InputStream', 'IoService', 'Job', 'Kernel', 'Lazy', 'List',
                'Loadable', 'Lobby', 'Location', 'Logger', 'Math', 'Mutex', 'nil',
                'Object', 'Orderable', 'OutputStream', 'Pair', 'Path', 'Pattern',
                'Position', 'Primitive', 'Process', 'Profile', 'PseudoLazy', 'PubSub',
                'RangeIterable', 'Regexp', 'Semaphore', 'Server', 'Singleton', 'Socket',
                'StackFrame', 'Stream', 'String', 'System', 'Tag', 'Timeout',
                'Traceable', 'TrajectoryGenerator', 'Triplet', 'Tuple', 'UObject',
                'UValue', 'UVar'), suffix=r'\b'),
             Name.Builtin),
            (r'(?:this)\b', Name.Builtin.Pseudo),
            # don't match single | and &
            (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator),
            (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b',
             Operator.Word),
            (r'[{}\[\]()]+', Punctuation),
            (r'(?:;|\||,|&|\?|!)+', Punctuation),
            (r'[$a-zA-Z_]\w*', Name.Other),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            # Float, Integer, Angle and Duration
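            # e.g. plain numbers like 42 or 3.14, angles like 90deg or 1.5rad,
            # and durations like 500ms or 2h (the unit suffix is optional)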
            (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?'
             r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float),
            # handle binary blob in strings
            (r'"', String.Double, "string.double"),
            (r"'", String.Single, "string.single"),
        ],
        'string.double': [
            (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback),
            (r'(\\\\|\\[^\\]|[^"\\])*?"', String.Double, '#pop'),
        ],
        'string.single': [
            (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback),
            (r"(\\\\|\\[^\\]|[^'\\])*?'", String.Single, '#pop'),
        ],
        # from http://pygments.org/docs/lexerdevelopment/#changing-states
        'comment': [
            (r'[^*/]', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline),
        ]
    }

    def analyse_text(text):
        """This is fairly similar to C and others, but freezeif and
        waituntil are unique keywords."""
        result = 0

        if 'freezeif' in text:
            result += 0.05
        if 'waituntil' in text:
            result += 0.05

        return result
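

# Illustrative usage sketch, not part of the upstream module: it runs the lexer
# over a small UrbiScript snippet containing a binary blob and prints the
# resulting token stream via the standard Lexer.get_tokens() API.
if __name__ == "__main__":
    sample = 'var greeting = "hello \\B(3)(abc) world";'
    for token_type, value in UrbiscriptLexer().get_tokens(sample):
        print(token_type, repr(value))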