make.py

  1. """
  2. pygments.lexers.make
  3. ~~~~~~~~~~~~~~~~~~~~
  4. Lexers for Makefiles and similar.
  5. :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
  6. :license: BSD, see LICENSE for details.
  7. """
  8. import re
  9. from pygments.lexer import Lexer, RegexLexer, include, bygroups, \
  10. do_insertions, using
  11. from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
  12. Punctuation
  13. from pygments.lexers.shell import BashLexer
  14. __all__ = ['MakefileLexer', 'BaseMakefileLexer', 'CMakeLexer']


class MakefileLexer(Lexer):
    """
    Lexer for BSD and GNU make extensions (lenient enough to handle both in
    the same file even).

    *Rewritten in Pygments 0.10.*
    """

    name = 'Makefile'
    aliases = ['make', 'makefile', 'mf', 'bsdmake']
    filenames = [
        '*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile',
    ]
    mimetypes = ['text/x-makefile']

    r_special = re.compile(
        r'^(?:'
        # BSD Make
        r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|'
        # GNU Make
        r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:|vpath)|'
        # GNU Automake
        r'\s*(if|else|endif))(?=\s)')
    r_comment = re.compile(r'^\s*@?#')

    def get_tokens_unprocessed(self, text):
        ins = []
        lines = text.splitlines(True)
        done = ''
        lex = BaseMakefileLexer(**self.options)
        backslashflag = False
        for line in lines:
            if self.r_special.match(line) or backslashflag:
                ins.append((len(done), [(0, Comment.Preproc, line)]))
                backslashflag = line.strip().endswith('\\')
            elif self.r_comment.match(line):
                ins.append((len(done), [(0, Comment, line)]))
            else:
                done += line
        yield from do_insertions(ins, lex.get_tokens_unprocessed(done))

    def analyse_text(text):
        # Many makefiles have $(BIG_CAPS) style variables
        if re.search(r'\$\([A-Z_]+\)', text):
            return 0.1
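

# Usage sketch (illustrative; not part of the original module): the lexer keeps
# directive and comment lines out of the text handed to BaseMakefileLexer and
# re-inserts them as Comment.Preproc / Comment tokens via do_insertions, e.g.
#
#     from pygments.lexers.make import MakefileLexer
#     source = 'ifdef DEBUG\nCFLAGS += -g\nendif\n'
#     for index, token, value in MakefileLexer().get_tokens_unprocessed(source):
#         print(index, token, repr(value))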


class BaseMakefileLexer(RegexLexer):
    """
    Lexer for simple Makefiles (no preprocessing).

    .. versionadded:: 0.10
    """

    name = 'Base Makefile'
    aliases = ['basemake']
    filenames = []
    mimetypes = []

    tokens = {
        'root': [
            # recipes (need to allow spaces because of expandtabs)
            (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)),
            # special variables
            (r'\$[<@$+%?|*]', Keyword),
            (r'\s+', Text),
            (r'#.*?\n', Comment),
            (r'(export)(\s+)(?=[\w${}\t -]+\n)',
             bygroups(Keyword, Text), 'export'),
            (r'export\s+', Keyword),
            # assignment
            (r'([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)',
             bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))),
            # strings
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            # targets
            (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text),
             'block-header'),
            # expansions
            (r'\$\(', Keyword, 'expansion'),
        ],
        'expansion': [
            (r'[^\w$().-]+', Text),
            (r'[\w.-]+', Name.Variable),
            (r'\$', Keyword),
            (r'\(', Keyword, '#push'),
            (r'\)', Keyword, '#pop'),
        ],
        'export': [
            (r'[\w${}-]+', Name.Variable),
            (r'\n', Text, '#pop'),
            (r'\s+', Text),
        ],
        'block-header': [
            (r'[,|]', Punctuation),
            (r'#.*?\n', Comment, '#pop'),
            (r'\\\n', Text),  # line continuation
            (r'\$\(', Keyword, 'expansion'),
            (r'[a-zA-Z_]+', Name),
            (r'\n', Text, '#pop'),
            (r'.', Text),
        ],
    }
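

# Sketch (illustrative; not in the original source): a target line such as
# "all: $(OBJS)" enters the 'block-header' state, and "$(" pushes the nestable
# 'expansion' state, e.g.
#
#     from pygments.lexers.make import BaseMakefileLexer
#     for token, value in BaseMakefileLexer().get_tokens('all: $(OBJS)\n'):
#         print(token, repr(value))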


class CMakeLexer(RegexLexer):
    """
    Lexer for `CMake <http://cmake.org/Wiki/CMake>`_ files.

    .. versionadded:: 1.2
    """

    name = 'CMake'
    aliases = ['cmake']
    filenames = ['*.cmake', 'CMakeLists.txt']
    mimetypes = ['text/x-cmake']

    tokens = {
        'root': [
            # (r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|'
            #  r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|'
            #  r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|'
            #  r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|'
            #  r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|'
            #  r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|'
            #  r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|'
            #  r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|'
            #  r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|'
            #  r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|'
            #  r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|'
            #  r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|'
            #  r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|'
            #  r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|'
            #  r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|'
            #  r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|'
            #  r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|'
            #  r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|'
            #  r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|'
            #  r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|'
            #  r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|'
            #  r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|'
            #  r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|'
            #  r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|'
            #  r'VTK_WRAP_TCL|WHILE|WRITE_FILE|'
            #  r'COUNTARGS)\b', Name.Builtin, 'args'),
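            # The generic rule below highlights any identifier followed by "("
            # as a command name, so the explicit list above is left commented out.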
            (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text,
                                              Punctuation), 'args'),
            include('keywords'),
            include('ws')
        ],
        'args': [
            (r'\(', Punctuation, '#push'),
            (r'\)', Punctuation, '#pop'),
            (r'(\$\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
            (r'(\$ENV\{)(.+?)(\})', bygroups(Operator, Name.Variable, Operator)),
            (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)),
            (r'(?s)".*?"', String.Double),
            (r'\\\S+', String),
            (r'[^)$"# \t\n]+', String),
            (r'\n', Text),  # explicitly legal
            include('keywords'),
            include('ws')
        ],
        'string': [
        ],
        'keywords': [
            (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|'
             r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword),
        ],
        'ws': [
            (r'[ \t]+', Text),
            (r'#.*\n', Comment),
        ]
    }

    def analyse_text(text):
        exp = (
            r'^[ \t]*CMAKE_MINIMUM_REQUIRED[ \t]*'
            r'\([ \t]*VERSION[ \t]*\d+(\.\d+)*[ \t]*'
            r'([ \t]FATAL_ERROR)?[ \t]*\)[ \t]*'
            r'(#[^\n]*)?$'
        )
        if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE):
            return 0.8
        return 0.0
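

# Minimal self-test sketch (not part of the original module). It relies only on
# Pygments' public highlight() API and TerminalFormatter; the sample inputs are
# made up for illustration.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    sample_makefile = 'CC = gcc\n\nall: main.o\n\t$(CC) -o app main.o\n'
    sample_cmake = 'cmake_minimum_required(VERSION 3.5)\nproject(app C)\n'

    # Print ANSI-highlighted versions of both samples.
    print(highlight(sample_makefile, MakefileLexer(), TerminalFormatter()))
    print(highlight(sample_cmake, CMakeLexer(), TerminalFormatter()))

    # analyse_text() returns the confidence score that guess_lexer() consults.
    print('CMake confidence:', CMakeLexer.analyse_text(sample_cmake))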