templates.py 70 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
7177817791780178117821783178417851786178717881789179017911792179317941795179617971798179918001801180218031804180518061807180818091810181118121813181418151816181718181819182018211822182318241825182618271828182918301831183218331834183518361837183818391840184118421843184418451846184718481849185018511852185318541855185618571858185918601861186218631864186518661867186818691870187118721873187418751876187718781879188018811882188318841885188618871888188918901891189218931894189518961897189818991900190119021903190419051906190719081909191019111912191319141915191619171918191919201921192219231924192519261927192819291930193119321933193419351936193719381939194019411942194319441945194619471948194919501951195219531954195519561957195819591960196119621963196419651966196719681969197019711972197319741975197619771978197919801981198219831984198519861987198819891990199119921993199419951996199719981999200020012002200320042005200620072008200920102011201220132014201520162017201820192020202120222023202420252026202720282029203020312032203320342035203620372038203920402041204220432044204520462047204820492050205120522053205420552056205720582059206020612062206320642065206620672068206920702071207220732074207520762077207820792080208120822083208420852086208720882089209020912092209320942095209620972098209921002101210221032104210521062107210821092110211121122113211421152116211721182119212021212122212321242125212621272128212921302131213221332134213521362137213821392140214121422143214421452146214721482149215021512152215321542155215621572158215921602161216221632164216521662167216821692170217121722173217421752176217721782179218021812182218321842185218621872188218921902191219221932194219521962197219821992200220122022203220422052206220722082209221022112212221322142215221622172218221922202221222222232224222522262227222822292230223122322233223422352236223722382239224022412242224322442245224622472248224922502251225222532254225522562257225822592260226122622263
  1. """
  2. pygments.lexers.templates
  3. ~~~~~~~~~~~~~~~~~~~~~~~~~
  4. Lexers for various template engines' markup.
  5. :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
  6. :license: BSD, see LICENSE for details.
  7. """
  8. import re
  9. from pygments.lexers.html import HtmlLexer, XmlLexer
  10. from pygments.lexers.javascript import JavascriptLexer, LassoLexer
  11. from pygments.lexers.css import CssLexer
  12. from pygments.lexers.php import PhpLexer
  13. from pygments.lexers.python import PythonLexer
  14. from pygments.lexers.perl import PerlLexer
  15. from pygments.lexers.jvm import JavaLexer, TeaLangLexer
  16. from pygments.lexers.data import YamlLexer
  17. from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
  18. include, using, this, default, combined
  19. from pygments.token import Error, Punctuation, Whitespace, \
  20. Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
  21. from pygments.util import html_doctype_matches, looks_like_xml
# Public API of this module: every template-engine lexer exported for
# discovery by pygments' lexer registry.
__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
           'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
           'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
           'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
           'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
           'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
           'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
           'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
           'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
           'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
           'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
           'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
           'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
           'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
           'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
           'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
           'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
           'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
           'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
           'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer']
class ErbLexer(Lexer):
    """
    Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
    lexer.

    Just highlights ruby code between the preprocessor directives, other data
    is left untouched by the lexer.

    All options are also forwarded to the `RubyLexer`.
    """

    name = 'ERB'
    aliases = ['erb']
    mimetypes = ['application/x-ruby-templating']

    # Splits the input on every ERB delimiter (escaped literals, output,
    # comment and block tags) and on whole lines starting with a single '%'.
    _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)

    def __init__(self, **options):
        # Imported here (not at module level) — presumably to avoid an
        # import cycle with pygments.lexers.ruby.
        from pygments.lexers.ruby import RubyLexer
        self.ruby_lexer = RubyLexer(**options)
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        """
        Since ERB doesn't allow "<%" and other tags inside of ruby
        blocks we have to use a split approach here that fails for
        that too.
        """
        tokens = self._block_re.split(text)
        tokens.reverse()  # so pop() takes pieces in document order, O(1) each
        # state: 0 = plain text expected, 1 = opening tag expected,
        #        2 = closing tag expected
        state = idx = 0
        try:
            while True:
                # text
                if state == 0:
                    val = tokens.pop()
                    yield idx, Other, val
                    idx += len(val)
                    state = 1
                # block starts
                elif state == 1:
                    tag = tokens.pop()
                    # literals
                    if tag in ('<%%', '%%>'):
                        yield idx, Other, tag
                        idx += 3
                        state = 0
                    # comment
                    elif tag == '<%#':
                        yield idx, Comment.Preproc, tag
                        val = tokens.pop()
                        yield idx + 3, Comment, val
                        idx += 3 + len(val)
                        state = 2
                    # blocks or output
                    elif tag in ('<%', '<%=', '<%-'):
                        yield idx, Comment.Preproc, tag
                        idx += len(tag)
                        data = tokens.pop()
                        r_idx = 0
                        # delegate the tag body to the Ruby lexer, shifting
                        # its offsets into this document's coordinates
                        for r_idx, r_token, r_value in \
                                self.ruby_lexer.get_tokens_unprocessed(data):
                            yield r_idx + idx, r_token, r_value
                        idx += len(data)
                        state = 2
                    elif tag in ('%>', '-%>'):
                        # a closer with no matching opener is an error token
                        yield idx, Error, tag
                        idx += len(tag)
                        state = 0
                    # % raw ruby statements
                    else:
                        yield idx, Comment.Preproc, tag[0]
                        r_idx = 0
                        for r_idx, r_token, r_value in \
                                self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
                            yield idx + 1 + r_idx, r_token, r_value
                        idx += len(tag)
                        state = 0
                # block ends
                elif state == 2:
                    tag = tokens.pop()
                    if tag not in ('%>', '-%>'):
                        yield idx, Other, tag
                    else:
                        yield idx, Comment.Preproc, tag
                    idx += len(tag)
                    state = 0
        except IndexError:
            # the split list is exhausted — normal end of input
            return

    def analyse_text(text):
        # NOTE: returns None (no opinion) when the markers are absent.
        if '<%' in text and '%>' in text:
            return 0.4
  128. class SmartyLexer(RegexLexer):
  129. """
  130. Generic `Smarty <http://smarty.php.net/>`_ template lexer.
  131. Just highlights smarty code between the preprocessor directives, other
  132. data is left untouched by the lexer.
  133. """
  134. name = 'Smarty'
  135. aliases = ['smarty']
  136. filenames = ['*.tpl']
  137. mimetypes = ['application/x-smarty']
  138. flags = re.MULTILINE | re.DOTALL
  139. tokens = {
  140. 'root': [
  141. (r'[^{]+', Other),
  142. (r'(\{)(\*.*?\*)(\})',
  143. bygroups(Comment.Preproc, Comment, Comment.Preproc)),
  144. (r'(\{php\})(.*?)(\{/php\})',
  145. bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
  146. Comment.Preproc)),
  147. (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
  148. bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
  149. (r'\{', Comment.Preproc, 'smarty')
  150. ],
  151. 'smarty': [
  152. (r'\s+', Text),
  153. (r'\{', Comment.Preproc, '#push'),
  154. (r'\}', Comment.Preproc, '#pop'),
  155. (r'#[a-zA-Z_]\w*#', Name.Variable),
  156. (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
  157. (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
  158. (r'(true|false|null)\b', Keyword.Constant),
  159. (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
  160. r"0[xX][0-9a-fA-F]+[Ll]?", Number),
  161. (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
  162. (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
  163. (r'[a-zA-Z_]\w*', Name.Attribute)
  164. ]
  165. }
  166. def analyse_text(text):
  167. rv = 0.0
  168. if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
  169. rv += 0.15
  170. if re.search(r'\{include\s+file=.*?\}', text):
  171. rv += 0.15
  172. if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
  173. rv += 0.15
  174. if re.search(r'\{\$.*?\}', text):
  175. rv += 0.01
  176. return rv
class VelocityLexer(RegexLexer):
    """
    Generic `Velocity <http://velocity.apache.org/>`_ template lexer.

    Just highlights velocity directives and variable references, other
    data is left untouched by the lexer.
    """

    name = 'Velocity'
    aliases = ['velocity']
    filenames = ['*.vm', '*.fhtml']

    flags = re.MULTILINE | re.DOTALL

    # shared identifier fragment used by several rules below
    identifier = r'[a-zA-Z_]\w*'

    tokens = {
        'root': [
            (r'[^{#$]+', Other),
            # #* ... *# block comments
            (r'(#)(\*.*?\*)(#)',
             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
            # ## line comments
            (r'(##)(.*?$)',
             bygroups(Comment.Preproc, Comment)),
            # directives with parameters: #foo( ... ) or #{foo}( ... )
            (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
             bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
             'directiveparams'),
            # parameterless directives: #foo or #{foo}
            (r'(#\{?)(' + identifier + r')(\}|\b)',
             bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
            # $var, $!var, ${var}, $!{var}
            (r'\$!?\{?', Punctuation, 'variable')
        ],
        'variable': [
            (identifier, Name.Variable),
            (r'\(', Punctuation, 'funcparams'),
            # property/method chains: $a.b.c
            (r'(\.)(' + identifier + r')',
             bygroups(Punctuation, Name.Variable), '#push'),
            (r'\}', Punctuation, '#pop'),
            default('#pop')
        ],
        'directiveparams': [
            (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
             Operator),
            (r'\[', Operator, 'rangeoperator'),
            (r'\b' + identifier + r'\b', Name.Function),
            include('funcparams')
        ],
        # [a..b] range literals
        'rangeoperator': [
            (r'\.\.', Operator),
            include('funcparams'),
            (r'\]', Operator, '#pop')
        ],
        'funcparams': [
            (r'\$!?\{?', Punctuation, 'variable'),
            (r'\s+', Text),
            (r'[,:]', Punctuation),
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            (r"\b[0-9]+\b", Number),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'\(', Punctuation, '#push'),
            (r'\)', Punctuation, '#pop'),
            (r'\{', Punctuation, '#push'),
            (r'\}', Punctuation, '#pop'),
            (r'\[', Punctuation, '#push'),
            (r'\]', Punctuation, '#pop'),
        ]
    }

    def analyse_text(text):
        """Score the likelihood that *text* is a Velocity template."""
        rv = 0.0
        if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text, re.DOTALL):
            rv += 0.25
        if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
            rv += 0.15
        if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
            rv += 0.15
        if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
                     r'(\.\w+(\([^)]*\))?)*\}?', text):
            rv += 0.01
        return rv
  251. class VelocityHtmlLexer(DelegatingLexer):
  252. """
  253. Subclass of the `VelocityLexer` that highlights unlexed data
  254. with the `HtmlLexer`.
  255. """
  256. name = 'HTML+Velocity'
  257. aliases = ['html+velocity']
  258. alias_filenames = ['*.html', '*.fhtml']
  259. mimetypes = ['text/html+velocity']
  260. def __init__(self, **options):
  261. super().__init__(HtmlLexer, VelocityLexer, **options)
  262. class VelocityXmlLexer(DelegatingLexer):
  263. """
  264. Subclass of the `VelocityLexer` that highlights unlexed data
  265. with the `XmlLexer`.
  266. """
  267. name = 'XML+Velocity'
  268. aliases = ['xml+velocity']
  269. alias_filenames = ['*.xml', '*.vm']
  270. mimetypes = ['application/xml+velocity']
  271. def __init__(self, **options):
  272. super().__init__(XmlLexer, VelocityLexer, **options)
  273. def analyse_text(text):
  274. rv = VelocityLexer.analyse_text(text) - 0.01
  275. if looks_like_xml(text):
  276. rv += 0.4
  277. return rv
  278. class DjangoLexer(RegexLexer):
  279. """
  280. Generic `django <http://www.djangoproject.com/documentation/templates/>`_
  281. and `jinja <https://jinja.pocoo.org/jinja/>`_ template lexer.
  282. It just highlights django/jinja code between the preprocessor directives,
  283. other data is left untouched by the lexer.
  284. """
  285. name = 'Django/Jinja'
  286. aliases = ['django', 'jinja']
  287. mimetypes = ['application/x-django-templating', 'application/x-jinja']
  288. flags = re.M | re.S
  289. tokens = {
  290. 'root': [
  291. (r'[^{]+', Other),
  292. (r'\{\{', Comment.Preproc, 'var'),
  293. # jinja/django comments
  294. (r'\{#.*?#\}', Comment),
  295. # django comments
  296. (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
  297. r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
  298. bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
  299. Comment, Comment.Preproc, Text, Keyword, Text,
  300. Comment.Preproc)),
  301. # raw jinja blocks
  302. (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
  303. r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
  304. bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
  305. Text, Comment.Preproc, Text, Keyword, Text,
  306. Comment.Preproc)),
  307. # filter blocks
  308. (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
  309. bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
  310. 'block'),
  311. (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
  312. bygroups(Comment.Preproc, Text, Keyword), 'block'),
  313. (r'\{', Other)
  314. ],
  315. 'varnames': [
  316. (r'(\|)(\s*)([a-zA-Z_]\w*)',
  317. bygroups(Operator, Text, Name.Function)),
  318. (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
  319. bygroups(Keyword, Text, Keyword, Text, Name.Function)),
  320. (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
  321. (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
  322. r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
  323. Keyword),
  324. (r'(loop|block|super|forloop)\b', Name.Builtin),
  325. (r'[a-zA-Z_][\w-]*', Name.Variable),
  326. (r'\.\w+', Name.Variable),
  327. (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
  328. (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
  329. (r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
  330. (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
  331. r"0[xX][0-9a-fA-F]+[Ll]?", Number),
  332. ],
  333. 'var': [
  334. (r'\s+', Text),
  335. (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
  336. include('varnames')
  337. ],
  338. 'block': [
  339. (r'\s+', Text),
  340. (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
  341. include('varnames'),
  342. (r'.', Punctuation)
  343. ]
  344. }
  345. def analyse_text(text):
  346. rv = 0.0
  347. if re.search(r'\{%\s*(block|extends)', text) is not None:
  348. rv += 0.4
  349. if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
  350. rv += 0.1
  351. if re.search(r'\{\{.*?\}\}', text) is not None:
  352. rv += 0.1
  353. return rv
class MyghtyLexer(RegexLexer):
    """
    Generic `myghty templates`_ lexer. Code that isn't Myghty
    markup is yielded as `Token.Other`.

    .. versionadded:: 0.6

    .. _myghty templates: http://www.myghty.org/
    """

    name = 'Myghty'
    aliases = ['myghty']
    filenames = ['*.myt', 'autodelegate']
    mimetypes = ['application/x-myghty']

    tokens = {
        'root': [
            (r'\s+', Text),
            # <%def ...>...</%...> / <%method ...>...</%...> blocks, whose
            # bodies are lexed recursively with this lexer.
            # NOTE(review): because (?:def|method) is non-capturing, \2 here
            # refers to the (\s*) group, not the tag name — looks odd; verify
            # against upstream before changing.
            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
            # other <%word ...> blocks contain Python
            (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Name.Function, Name.Tag,
                      using(PythonLexer), Name.Tag)),
            # component calls: <& name, args &>
            (r'(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'</&>', Name.Tag),
            # <% ... %> and <%! ... %> substitutions contain Python
            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
            # full-line comments starting with '#'
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            # full-line Python eval lines starting with '%'
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
             bygroups(Name.Tag, using(PythonLexer), Other)),
            (r"""(?sx)
             (.+?)               # anything, followed by:
             (?:
              (?<=\n)(?=[%#]) |  # an eval or comment line
              (?=</?[%&]) |      # a substitution or block or
                                 # call start or end
                                 # - don't consume
              (\\\n) |           # an escaped newline
              \Z                 # end of string
             )""", bygroups(Other, Operator)),
        ]
    }
  396. class MyghtyHtmlLexer(DelegatingLexer):
  397. """
  398. Subclass of the `MyghtyLexer` that highlights unlexed data
  399. with the `HtmlLexer`.
  400. .. versionadded:: 0.6
  401. """
  402. name = 'HTML+Myghty'
  403. aliases = ['html+myghty']
  404. mimetypes = ['text/html+myghty']
  405. def __init__(self, **options):
  406. super().__init__(HtmlLexer, MyghtyLexer, **options)
  407. class MyghtyXmlLexer(DelegatingLexer):
  408. """
  409. Subclass of the `MyghtyLexer` that highlights unlexed data
  410. with the `XmlLexer`.
  411. .. versionadded:: 0.6
  412. """
  413. name = 'XML+Myghty'
  414. aliases = ['xml+myghty']
  415. mimetypes = ['application/xml+myghty']
  416. def __init__(self, **options):
  417. super().__init__(XmlLexer, MyghtyLexer, **options)
  418. class MyghtyJavascriptLexer(DelegatingLexer):
  419. """
  420. Subclass of the `MyghtyLexer` that highlights unlexed data
  421. with the `JavascriptLexer`.
  422. .. versionadded:: 0.6
  423. """
  424. name = 'JavaScript+Myghty'
  425. aliases = ['javascript+myghty', 'js+myghty']
  426. mimetypes = ['application/x-javascript+myghty',
  427. 'text/x-javascript+myghty',
  428. 'text/javascript+mygthy']
  429. def __init__(self, **options):
  430. super().__init__(JavascriptLexer, MyghtyLexer, **options)
  431. class MyghtyCssLexer(DelegatingLexer):
  432. """
  433. Subclass of the `MyghtyLexer` that highlights unlexed data
  434. with the `CssLexer`.
  435. .. versionadded:: 0.6
  436. """
  437. name = 'CSS+Myghty'
  438. aliases = ['css+myghty']
  439. mimetypes = ['text/css+myghty']
  440. def __init__(self, **options):
  441. super().__init__(CssLexer, MyghtyLexer, **options)
class MasonLexer(RegexLexer):
    """
    Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't
    Mason markup is HTML.

    .. _mason templates: http://www.masonhq.com/

    .. versionadded:: 1.4
    """

    name = 'Mason'
    aliases = ['mason']
    filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
    mimetypes = ['application/x-mason']

    tokens = {
        'root': [
            (r'\s+', Text),
            # <%doc> ... </%doc> documentation blocks
            (r'(?s)(<%doc>)(.*?)(</%doc>)',
             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
            # <%def ...> / <%method ...> blocks, lexed recursively.
            # NOTE(review): (?:def|method) is non-capturing, so \2 refers to
            # the (\s*) group — looks odd; verify against upstream.
            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
            # other named <%word ...> blocks contain Perl
            (r'(?s)(<%(\w+)(.*?)(>))(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, None, None, None, using(PerlLexer), Name.Tag)),
            # component calls: <& name, args &>
            (r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'</&>', Name.Tag),
            # <% ... %> and <%! ... %> substitutions contain Perl
            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
            # full-line comments and % eval lines
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
             bygroups(Name.Tag, using(PerlLexer), Other)),
            (r"""(?sx)
             (.+?)               # anything, followed by:
             (?:
              (?<=\n)(?=[%#]) |  # an eval or comment line
              (?=</?[%&]) |      # a substitution or block or
                                 # call start or end
                                 # - don't consume
              (\\\n) |           # an escaped newline
              \Z                 # end of string
             )""", bygroups(using(HtmlLexer), Operator)),
        ]
    }

    def analyse_text(text):
        # A closing Mason section tag is a near-certain signal; a component
        # call alone is only a weak one.
        result = 0.0
        if re.search(r'</%(class|doc|init)>', text) is not None:
            result = 1.0
        elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
            result = 0.11
        return result
class MakoLexer(RegexLexer):
    """
    Generic `mako templates`_ lexer. Code that isn't Mako
    markup is yielded as `Token.Other`.

    .. versionadded:: 0.7

    .. _mako templates: http://www.makotemplates.org/
    """

    name = 'Mako'
    aliases = ['mako']
    filenames = ['*.mao']
    mimetypes = ['application/x-mako']

    tokens = {
        'root': [
            # % endfor / % endif ... control-closing lines
            (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
             bygroups(Text, Comment.Preproc, Keyword, Other)),
            # % python control lines
            (r'(\s*)(%)([^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
            # ## comment lines
            (r'(\s*)(##[^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, Other)),
            # <%doc> ... </%doc> documentation blocks
            (r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
            # <%tag ...> opening tags — attributes handled in 'tag'
            (r'(<%)([\w.:]+)',
             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
            (r'(</%)([\w.:]+)(>)',
             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
            (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
            # <% ... %> and <%! ... %> python blocks
            (r'(?s)(<%(?:!?))(.*?)(%>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # ${ ... } substitutions
            (r'(\$\{)(.*?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'''(?sx)
                (.+?)                # anything, followed by:
                (?:
                 (?<=\n)(?=%|\#\#) | # an eval or comment line
                 (?=\#\*) |          # multiline comment
                 (?=</?%) |          # a python block
                                     # call start or end
                 (?=\$\{) |          # a substitution
                 (?<=\n)(?=\s*%) |
                                     # - don't consume
                 (\\\n) |            # an escaped newline
                 \Z                  # end of string
                )
            ''', bygroups(Other, Operator)),
            (r'\s+', Text),
        ],
        'ondeftags': [
            (r'<%', Comment.Preproc),
            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
            include('tag'),
        ],
        'tag': [
            (r'((?:\w+)\s*=)(\s*)(".*?")',
             bygroups(Name.Attribute, Text, String)),
            (r'/?\s*>', Comment.Preproc, '#pop'),
            (r'\s+', Text),
        ],
        # NOTE(review): 'attr' is not referenced by any rule above; kept
        # unchanged in case external code relies on it.
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
  554. class MakoHtmlLexer(DelegatingLexer):
  555. """
  556. Subclass of the `MakoLexer` that highlights unlexed data
  557. with the `HtmlLexer`.
  558. .. versionadded:: 0.7
  559. """
  560. name = 'HTML+Mako'
  561. aliases = ['html+mako']
  562. mimetypes = ['text/html+mako']
  563. def __init__(self, **options):
  564. super().__init__(HtmlLexer, MakoLexer, **options)
  565. class MakoXmlLexer(DelegatingLexer):
  566. """
  567. Subclass of the `MakoLexer` that highlights unlexed data
  568. with the `XmlLexer`.
  569. .. versionadded:: 0.7
  570. """
  571. name = 'XML+Mako'
  572. aliases = ['xml+mako']
  573. mimetypes = ['application/xml+mako']
  574. def __init__(self, **options):
  575. super().__init__(XmlLexer, MakoLexer, **options)
  576. class MakoJavascriptLexer(DelegatingLexer):
  577. """
  578. Subclass of the `MakoLexer` that highlights unlexed data
  579. with the `JavascriptLexer`.
  580. .. versionadded:: 0.7
  581. """
  582. name = 'JavaScript+Mako'
  583. aliases = ['javascript+mako', 'js+mako']
  584. mimetypes = ['application/x-javascript+mako',
  585. 'text/x-javascript+mako',
  586. 'text/javascript+mako']
  587. def __init__(self, **options):
  588. super().__init__(JavascriptLexer, MakoLexer, **options)
  589. class MakoCssLexer(DelegatingLexer):
  590. """
  591. Subclass of the `MakoLexer` that highlights unlexed data
  592. with the `CssLexer`.
  593. .. versionadded:: 0.7
  594. """
  595. name = 'CSS+Mako'
  596. aliases = ['css+mako']
  597. mimetypes = ['text/css+mako']
  598. def __init__(self, **options):
  599. super().__init__(CssLexer, MakoLexer, **options)
  600. # Genshi and Cheetah lexers courtesy of Matt Good.
  601. class CheetahPythonLexer(Lexer):
  602. """
  603. Lexer for handling Cheetah's special $ tokens in Python syntax.
  604. """
  605. def get_tokens_unprocessed(self, text):
  606. pylexer = PythonLexer(**self.options)
  607. for pos, type_, value in pylexer.get_tokens_unprocessed(text):
  608. if type_ == Token.Error and value == '$':
  609. type_ = Comment.Preproc
  610. yield pos, type_, value
class CheetahLexer(RegexLexer):
    """
    Generic `cheetah templates`_ lexer. Code that isn't Cheetah
    markup is yielded as `Token.Other`.  This also works for
    `spitfire templates`_ which use the same syntax.

    .. _cheetah templates: http://www.cheetahtemplate.org/
    .. _spitfire templates: http://code.google.com/p/spitfire/
    """

    name = 'Cheetah'
    aliases = ['cheetah', 'spitfire']
    filenames = ['*.tmpl', '*.spt']
    mimetypes = ['application/x-cheetah', 'application/x-spitfire']

    tokens = {
        'root': [
            # ## line comments
            (r'(##[^\n]*)$',
             (bygroups(Comment))),
            # #* ... *# block comments
            (r'#[*](.|\n)*?[*]#', Comment),
            (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
            (r'#slurp$', Comment.Preproc),
            # #directive ... — body handled by the $-aware Python lexer
            (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
             (bygroups(Comment.Preproc, using(CheetahPythonLexer),
                       Comment.Preproc))),
            # TODO support other Python syntax like $foo['bar']
            (r'(\$)([a-zA-Z_][\w.]*\w)',
             bygroups(Comment.Preproc, using(CheetahPythonLexer))),
            # ${...} and ${!...} substitutions
            (r'(?s)(\$\{!?)(.*?)(\})',
             bygroups(Comment.Preproc, using(CheetahPythonLexer),
                      Comment.Preproc)),
            (r'''(?sx)
                (.+?)               # anything, followed by:
                (?:
                 (?=\#[#a-zA-Z]*) | # an eval comment
                 (?=\$[a-zA-Z_{]) | # a substitution
                 \Z                 # end of string
                )
            ''', Other),
            (r'\s+', Text),
        ],
    }
  650. class CheetahHtmlLexer(DelegatingLexer):
  651. """
  652. Subclass of the `CheetahLexer` that highlights unlexed data
  653. with the `HtmlLexer`.
  654. """
  655. name = 'HTML+Cheetah'
  656. aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
  657. mimetypes = ['text/html+cheetah', 'text/html+spitfire']
  658. def __init__(self, **options):
  659. super().__init__(HtmlLexer, CheetahLexer, **options)
  660. class CheetahXmlLexer(DelegatingLexer):
  661. """
  662. Subclass of the `CheetahLexer` that highlights unlexed data
  663. with the `XmlLexer`.
  664. """
  665. name = 'XML+Cheetah'
  666. aliases = ['xml+cheetah', 'xml+spitfire']
  667. mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
  668. def __init__(self, **options):
  669. super().__init__(XmlLexer, CheetahLexer, **options)
  670. class CheetahJavascriptLexer(DelegatingLexer):
  671. """
  672. Subclass of the `CheetahLexer` that highlights unlexed data
  673. with the `JavascriptLexer`.
  674. """
  675. name = 'JavaScript+Cheetah'
  676. aliases = ['javascript+cheetah', 'js+cheetah',
  677. 'javascript+spitfire', 'js+spitfire']
  678. mimetypes = ['application/x-javascript+cheetah',
  679. 'text/x-javascript+cheetah',
  680. 'text/javascript+cheetah',
  681. 'application/x-javascript+spitfire',
  682. 'text/x-javascript+spitfire',
  683. 'text/javascript+spitfire']
  684. def __init__(self, **options):
  685. super().__init__(JavascriptLexer, CheetahLexer, **options)
class GenshiTextLexer(RegexLexer):
    """
    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
    templates.
    """

    name = 'Genshi Text'
    aliases = ['genshitext']
    mimetypes = ['application/x-genshi-text', 'text/x-genshi']

    tokens = {
        'root': [
            # plain text up to the next marker or whitespace
            (r'[^#$\s]+', Other),
            # ## line comment
            (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
            # '#' at line start opens a directive
            (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
            include('variable'),
            (r'[#$\s]', Other),
        ],
        'directive': [
            (r'\n', Text, '#pop'),
            # def/for/if directives carry a Python expression
            (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
            (r'(choose|when|with)([^\S\n]+)(.*)',
             bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
            (r'(choose|otherwise)\b', Keyword, '#pop'),
            # #end ... — trailing text treated as a comment
            (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
        ],
        'variable': [
            # ${...} expression; (?<!\$) skips the '$$' escape
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # bare $name substitution (dotted names allowed)
            (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
             Name.Variable),
        ]
    }
  717. class GenshiMarkupLexer(RegexLexer):
  718. """
  719. Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
  720. `GenshiLexer`.
  721. """
  722. flags = re.DOTALL
  723. tokens = {
  724. 'root': [
  725. (r'[^<$]+', Other),
  726. (r'(<\?python)(.*?)(\?>)',
  727. bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
  728. # yield style and script blocks as Other
  729. (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
  730. (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
  731. (r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
  732. include('variable'),
  733. (r'[<$]', Other),
  734. ],
  735. 'pytag': [
  736. (r'\s+', Text),
  737. (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
  738. (r'/?\s*>', Name.Tag, '#pop'),
  739. ],
  740. 'pyattr': [
  741. ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
  742. ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
  743. (r'[^\s>]+', String, '#pop'),
  744. ],
  745. 'tag': [
  746. (r'\s+', Text),
  747. (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
  748. (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
  749. (r'/?\s*>', Name.Tag, '#pop'),
  750. ],
  751. 'attr': [
  752. ('"', String, 'attr-dstring'),
  753. ("'", String, 'attr-sstring'),
  754. (r'[^\s>]*', String, '#pop')
  755. ],
  756. 'attr-dstring': [
  757. ('"', String, '#pop'),
  758. include('strings'),
  759. ("'", String)
  760. ],
  761. 'attr-sstring': [
  762. ("'", String, '#pop'),
  763. include('strings'),
  764. ("'", String)
  765. ],
  766. 'strings': [
  767. ('[^"\'$]+', String),
  768. include('variable')
  769. ],
  770. 'variable': [
  771. (r'(?<!\$)(\$\{)(.+?)(\})',
  772. bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
  773. (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
  774. Name.Variable),
  775. ]
  776. }
  777. class HtmlGenshiLexer(DelegatingLexer):
  778. """
  779. A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
  780. `kid <http://kid-templating.org/>`_ kid HTML templates.
  781. """
  782. name = 'HTML+Genshi'
  783. aliases = ['html+genshi', 'html+kid']
  784. alias_filenames = ['*.html', '*.htm', '*.xhtml']
  785. mimetypes = ['text/html+genshi']
  786. def __init__(self, **options):
  787. super().__init__(HtmlLexer, GenshiMarkupLexer, **options)
  788. def analyse_text(text):
  789. rv = 0.0
  790. if re.search(r'\$\{.*?\}', text) is not None:
  791. rv += 0.2
  792. if re.search(r'py:(.*?)=["\']', text) is not None:
  793. rv += 0.2
  794. return rv + HtmlLexer.analyse_text(text) - 0.01
  795. class GenshiLexer(DelegatingLexer):
  796. """
  797. A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
  798. `kid <http://kid-templating.org/>`_ kid XML templates.
  799. """
  800. name = 'Genshi'
  801. aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
  802. filenames = ['*.kid']
  803. alias_filenames = ['*.xml']
  804. mimetypes = ['application/x-genshi', 'application/x-kid']
  805. def __init__(self, **options):
  806. super().__init__(XmlLexer, GenshiMarkupLexer, **options)
  807. def analyse_text(text):
  808. rv = 0.0
  809. if re.search(r'\$\{.*?\}', text) is not None:
  810. rv += 0.2
  811. if re.search(r'py:(.*?)=["\']', text) is not None:
  812. rv += 0.2
  813. return rv + XmlLexer.analyse_text(text) - 0.01
  814. class JavascriptGenshiLexer(DelegatingLexer):
  815. """
  816. A lexer that highlights javascript code in genshi text templates.
  817. """
  818. name = 'JavaScript+Genshi Text'
  819. aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
  820. 'javascript+genshi']
  821. alias_filenames = ['*.js']
  822. mimetypes = ['application/x-javascript+genshi',
  823. 'text/x-javascript+genshi',
  824. 'text/javascript+genshi']
  825. def __init__(self, **options):
  826. super().__init__(JavascriptLexer, GenshiTextLexer, **options)
  827. def analyse_text(text):
  828. return GenshiLexer.analyse_text(text) - 0.05
  829. class CssGenshiLexer(DelegatingLexer):
  830. """
  831. A lexer that highlights CSS definitions in genshi text templates.
  832. """
  833. name = 'CSS+Genshi Text'
  834. aliases = ['css+genshitext', 'css+genshi']
  835. alias_filenames = ['*.css']
  836. mimetypes = ['text/css+genshi']
  837. def __init__(self, **options):
  838. super().__init__(CssLexer, GenshiTextLexer, **options)
  839. def analyse_text(text):
  840. return GenshiLexer.analyse_text(text) - 0.05
  841. class RhtmlLexer(DelegatingLexer):
  842. """
  843. Subclass of the ERB lexer that highlights the unlexed data with the
  844. html lexer.
  845. Nested Javascript and CSS is highlighted too.
  846. """
  847. name = 'RHTML'
  848. aliases = ['rhtml', 'html+erb', 'html+ruby']
  849. filenames = ['*.rhtml']
  850. alias_filenames = ['*.html', '*.htm', '*.xhtml']
  851. mimetypes = ['text/html+ruby']
  852. def __init__(self, **options):
  853. super().__init__(HtmlLexer, ErbLexer, **options)
  854. def analyse_text(text):
  855. rv = ErbLexer.analyse_text(text) - 0.01
  856. if html_doctype_matches(text):
  857. # one more than the XmlErbLexer returns
  858. rv += 0.5
  859. return rv
  860. class XmlErbLexer(DelegatingLexer):
  861. """
  862. Subclass of `ErbLexer` which highlights data outside preprocessor
  863. directives with the `XmlLexer`.
  864. """
  865. name = 'XML+Ruby'
  866. aliases = ['xml+ruby', 'xml+erb']
  867. alias_filenames = ['*.xml']
  868. mimetypes = ['application/xml+ruby']
  869. def __init__(self, **options):
  870. super().__init__(XmlLexer, ErbLexer, **options)
  871. def analyse_text(text):
  872. rv = ErbLexer.analyse_text(text) - 0.01
  873. if looks_like_xml(text):
  874. rv += 0.4
  875. return rv
  876. class CssErbLexer(DelegatingLexer):
  877. """
  878. Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
  879. """
  880. name = 'CSS+Ruby'
  881. aliases = ['css+ruby', 'css+erb']
  882. alias_filenames = ['*.css']
  883. mimetypes = ['text/css+ruby']
  884. def __init__(self, **options):
  885. super().__init__(CssLexer, ErbLexer, **options)
  886. def analyse_text(text):
  887. return ErbLexer.analyse_text(text) - 0.05
  888. class JavascriptErbLexer(DelegatingLexer):
  889. """
  890. Subclass of `ErbLexer` which highlights unlexed data with the
  891. `JavascriptLexer`.
  892. """
  893. name = 'JavaScript+Ruby'
  894. aliases = ['javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb']
  895. alias_filenames = ['*.js']
  896. mimetypes = ['application/x-javascript+ruby',
  897. 'text/x-javascript+ruby',
  898. 'text/javascript+ruby']
  899. def __init__(self, **options):
  900. super().__init__(JavascriptLexer, ErbLexer, **options)
  901. def analyse_text(text):
  902. return ErbLexer.analyse_text(text) - 0.05
  903. class HtmlPhpLexer(DelegatingLexer):
  904. """
  905. Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.
  906. Nested Javascript and CSS is highlighted too.
  907. """
  908. name = 'HTML+PHP'
  909. aliases = ['html+php']
  910. filenames = ['*.phtml']
  911. alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
  912. '*.php[345]']
  913. mimetypes = ['application/x-php',
  914. 'application/x-httpd-php', 'application/x-httpd-php3',
  915. 'application/x-httpd-php4', 'application/x-httpd-php5']
  916. def __init__(self, **options):
  917. super().__init__(HtmlLexer, PhpLexer, **options)
  918. def analyse_text(text):
  919. rv = PhpLexer.analyse_text(text) - 0.01
  920. if html_doctype_matches(text):
  921. rv += 0.5
  922. return rv
  923. class XmlPhpLexer(DelegatingLexer):
  924. """
  925. Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
  926. """
  927. name = 'XML+PHP'
  928. aliases = ['xml+php']
  929. alias_filenames = ['*.xml', '*.php', '*.php[345]']
  930. mimetypes = ['application/xml+php']
  931. def __init__(self, **options):
  932. super().__init__(XmlLexer, PhpLexer, **options)
  933. def analyse_text(text):
  934. rv = PhpLexer.analyse_text(text) - 0.01
  935. if looks_like_xml(text):
  936. rv += 0.4
  937. return rv
  938. class CssPhpLexer(DelegatingLexer):
  939. """
  940. Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
  941. """
  942. name = 'CSS+PHP'
  943. aliases = ['css+php']
  944. alias_filenames = ['*.css']
  945. mimetypes = ['text/css+php']
  946. def __init__(self, **options):
  947. super().__init__(CssLexer, PhpLexer, **options)
  948. def analyse_text(text):
  949. return PhpLexer.analyse_text(text) - 0.05
  950. class JavascriptPhpLexer(DelegatingLexer):
  951. """
  952. Subclass of `PhpLexer` which highlights unmatched data with the
  953. `JavascriptLexer`.
  954. """
  955. name = 'JavaScript+PHP'
  956. aliases = ['javascript+php', 'js+php']
  957. alias_filenames = ['*.js']
  958. mimetypes = ['application/x-javascript+php',
  959. 'text/x-javascript+php',
  960. 'text/javascript+php']
  961. def __init__(self, **options):
  962. super().__init__(JavascriptLexer, PhpLexer, **options)
  963. def analyse_text(text):
  964. return PhpLexer.analyse_text(text)
  965. class HtmlSmartyLexer(DelegatingLexer):
  966. """
  967. Subclass of the `SmartyLexer` that highlights unlexed data with the
  968. `HtmlLexer`.
  969. Nested Javascript and CSS is highlighted too.
  970. """
  971. name = 'HTML+Smarty'
  972. aliases = ['html+smarty']
  973. alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
  974. mimetypes = ['text/html+smarty']
  975. def __init__(self, **options):
  976. super().__init__(HtmlLexer, SmartyLexer, **options)
  977. def analyse_text(text):
  978. rv = SmartyLexer.analyse_text(text) - 0.01
  979. if html_doctype_matches(text):
  980. rv += 0.5
  981. return rv
  982. class XmlSmartyLexer(DelegatingLexer):
  983. """
  984. Subclass of the `SmartyLexer` that highlights unlexed data with the
  985. `XmlLexer`.
  986. """
  987. name = 'XML+Smarty'
  988. aliases = ['xml+smarty']
  989. alias_filenames = ['*.xml', '*.tpl']
  990. mimetypes = ['application/xml+smarty']
  991. def __init__(self, **options):
  992. super().__init__(XmlLexer, SmartyLexer, **options)
  993. def analyse_text(text):
  994. rv = SmartyLexer.analyse_text(text) - 0.01
  995. if looks_like_xml(text):
  996. rv += 0.4
  997. return rv
  998. class CssSmartyLexer(DelegatingLexer):
  999. """
  1000. Subclass of the `SmartyLexer` that highlights unlexed data with the
  1001. `CssLexer`.
  1002. """
  1003. name = 'CSS+Smarty'
  1004. aliases = ['css+smarty']
  1005. alias_filenames = ['*.css', '*.tpl']
  1006. mimetypes = ['text/css+smarty']
  1007. def __init__(self, **options):
  1008. super().__init__(CssLexer, SmartyLexer, **options)
  1009. def analyse_text(text):
  1010. return SmartyLexer.analyse_text(text) - 0.05
  1011. class JavascriptSmartyLexer(DelegatingLexer):
  1012. """
  1013. Subclass of the `SmartyLexer` that highlights unlexed data with the
  1014. `JavascriptLexer`.
  1015. """
  1016. name = 'JavaScript+Smarty'
  1017. aliases = ['javascript+smarty', 'js+smarty']
  1018. alias_filenames = ['*.js', '*.tpl']
  1019. mimetypes = ['application/x-javascript+smarty',
  1020. 'text/x-javascript+smarty',
  1021. 'text/javascript+smarty']
  1022. def __init__(self, **options):
  1023. super().__init__(JavascriptLexer, SmartyLexer, **options)
  1024. def analyse_text(text):
  1025. return SmartyLexer.analyse_text(text) - 0.05
  1026. class HtmlDjangoLexer(DelegatingLexer):
  1027. """
  1028. Subclass of the `DjangoLexer` that highlights unlexed data with the
  1029. `HtmlLexer`.
  1030. Nested Javascript and CSS is highlighted too.
  1031. """
  1032. name = 'HTML+Django/Jinja'
  1033. aliases = ['html+django', 'html+jinja', 'htmldjango']
  1034. alias_filenames = ['*.html', '*.htm', '*.xhtml']
  1035. mimetypes = ['text/html+django', 'text/html+jinja']
  1036. def __init__(self, **options):
  1037. super().__init__(HtmlLexer, DjangoLexer, **options)
  1038. def analyse_text(text):
  1039. rv = DjangoLexer.analyse_text(text) - 0.01
  1040. if html_doctype_matches(text):
  1041. rv += 0.5
  1042. return rv
  1043. class XmlDjangoLexer(DelegatingLexer):
  1044. """
  1045. Subclass of the `DjangoLexer` that highlights unlexed data with the
  1046. `XmlLexer`.
  1047. """
  1048. name = 'XML+Django/Jinja'
  1049. aliases = ['xml+django', 'xml+jinja']
  1050. alias_filenames = ['*.xml']
  1051. mimetypes = ['application/xml+django', 'application/xml+jinja']
  1052. def __init__(self, **options):
  1053. super().__init__(XmlLexer, DjangoLexer, **options)
  1054. def analyse_text(text):
  1055. rv = DjangoLexer.analyse_text(text) - 0.01
  1056. if looks_like_xml(text):
  1057. rv += 0.4
  1058. return rv
  1059. class CssDjangoLexer(DelegatingLexer):
  1060. """
  1061. Subclass of the `DjangoLexer` that highlights unlexed data with the
  1062. `CssLexer`.
  1063. """
  1064. name = 'CSS+Django/Jinja'
  1065. aliases = ['css+django', 'css+jinja']
  1066. alias_filenames = ['*.css']
  1067. mimetypes = ['text/css+django', 'text/css+jinja']
  1068. def __init__(self, **options):
  1069. super().__init__(CssLexer, DjangoLexer, **options)
  1070. def analyse_text(text):
  1071. return DjangoLexer.analyse_text(text) - 0.05
  1072. class JavascriptDjangoLexer(DelegatingLexer):
  1073. """
  1074. Subclass of the `DjangoLexer` that highlights unlexed data with the
  1075. `JavascriptLexer`.
  1076. """
  1077. name = 'JavaScript+Django/Jinja'
  1078. aliases = ['javascript+django', 'js+django',
  1079. 'javascript+jinja', 'js+jinja']
  1080. alias_filenames = ['*.js']
  1081. mimetypes = ['application/x-javascript+django',
  1082. 'application/x-javascript+jinja',
  1083. 'text/x-javascript+django',
  1084. 'text/x-javascript+jinja',
  1085. 'text/javascript+django',
  1086. 'text/javascript+jinja']
  1087. def __init__(self, **options):
  1088. super().__init__(JavascriptLexer, DjangoLexer, **options)
  1089. def analyse_text(text):
  1090. return DjangoLexer.analyse_text(text) - 0.05
class JspRootLexer(RegexLexer):
    """
    Base for the `JspLexer`. Yields `Token.Other` for area outside of
    JSP tags.

    .. versionadded:: 0.7
    """

    tokens = {
        'root': [
            # '<%', optionally followed by '=', '!', '@', etc., opens
            # a Java code section
            (r'<%\S?', Keyword, 'sec'),
            # FIXME: I want to make these keywords but still parse attributes.
            (r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
             Keyword),
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # note: '\w\W' != '.' without DOTALL.
            # everything up to the closing '%>' (or EOF) is Java
            (r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
        ],
    }
  1112. class JspLexer(DelegatingLexer):
  1113. """
  1114. Lexer for Java Server Pages.
  1115. .. versionadded:: 0.7
  1116. """
  1117. name = 'Java Server Page'
  1118. aliases = ['jsp']
  1119. filenames = ['*.jsp']
  1120. mimetypes = ['application/x-jsp']
  1121. def __init__(self, **options):
  1122. super().__init__(XmlLexer, JspRootLexer, **options)
  1123. def analyse_text(text):
  1124. rv = JavaLexer.analyse_text(text) - 0.01
  1125. if looks_like_xml(text):
  1126. rv += 0.4
  1127. if '<%' in text and '%>' in text:
  1128. rv += 0.1
  1129. return rv
class EvoqueLexer(RegexLexer):
    """
    For files using the Evoque templating system.

    .. versionadded:: 1.1
    """
    name = 'Evoque'
    aliases = ['evoque']
    filenames = ['*.evoque']
    mimetypes = ['application/x-evoque']

    flags = re.DOTALL

    tokens = {
        'root': [
            (r'[^#$]+', Other),
            (r'#\[', Comment.Multiline, 'comment'),
            # '$$' escapes a literal dollar sign
            (r'\$\$', Other),
            # svn keywords
            (r'\$\w+:[^$\n]*\$', Comment.Multiline),
            # directives: begin, end
            # group 4 captures an optional '%' after '{'; the (?(4)...)
            # conditional then requires '%}' as the closer only when it
            # was opened with '{%'.
            (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, Punctuation)),
            # directives: evoque, overlay
            # see doc for handling first name arg: /directives/evoque/
            # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
            # should be using(PythonLexer), not passed out as String
            (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+)?'
             r'(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, using(PythonLexer), Punctuation)),
            # directives: if, for, prefer, test
            (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      using(PythonLexer), Punctuation)),
            # directive clauses (no {} expression)
            (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
            # expressions
            (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
             bygroups(Punctuation, None, using(PythonLexer),
                      Name.Builtin, None, None, Punctuation)),
            (r'#', Other),
        ],
        'comment': [
            (r'[^\]#]', Comment.Multiline),
            # '#[' nests another comment level
            (r'#\[', Comment.Multiline, '#push'),
            (r'\]#', Comment.Multiline, '#pop'),
            (r'[\]#]', Comment.Multiline)
        ],
    }

    def analyse_text(text):
        """Evoque templates use $evoque, which is unique."""
        # NOTE(review): implicitly returns None when '$evoque' is absent —
        # presumably coerced to 0.0 by the analyse_text wrapper; confirm.
        if '$evoque' in text:
            return 1
  1182. class EvoqueHtmlLexer(DelegatingLexer):
  1183. """
  1184. Subclass of the `EvoqueLexer` that highlights unlexed data with the
  1185. `HtmlLexer`.
  1186. .. versionadded:: 1.1
  1187. """
  1188. name = 'HTML+Evoque'
  1189. aliases = ['html+evoque']
  1190. filenames = ['*.html']
  1191. mimetypes = ['text/html+evoque']
  1192. def __init__(self, **options):
  1193. super().__init__(HtmlLexer, EvoqueLexer, **options)
  1194. def analyse_text(text):
  1195. return EvoqueLexer.analyse_text(text)
  1196. class EvoqueXmlLexer(DelegatingLexer):
  1197. """
  1198. Subclass of the `EvoqueLexer` that highlights unlexed data with the
  1199. `XmlLexer`.
  1200. .. versionadded:: 1.1
  1201. """
  1202. name = 'XML+Evoque'
  1203. aliases = ['xml+evoque']
  1204. filenames = ['*.xml']
  1205. mimetypes = ['application/xml+evoque']
  1206. def __init__(self, **options):
  1207. super().__init__(XmlLexer, EvoqueLexer, **options)
  1208. def analyse_text(text):
  1209. return EvoqueLexer.analyse_text(text)
class ColdfusionLexer(RegexLexer):
    """
    Coldfusion statements
    """
    name = 'cfstatement'
    aliases = ['cfs']
    filenames = []
    mimetypes = []
    flags = re.IGNORECASE

    tokens = {
        'root': [
            (r'//.*?\n', Comment.Single),
            (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
            (r'\+\+|--', Operator),
            (r'[-+*/^&=!]', Operator),
            (r'<=|>=|<|>|==', Operator),
            # CFML word operators (case-insensitive via flags)
            (r'mod\b', Operator),
            (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
            (r'\|\||&&', Operator),
            (r'\?', Operator),
            (r'"', String.Double, 'string'),
            # There is a special rule for allowing html in single quoted
            # strings, evidently.
            (r"'.*?'", String.Single),
            (r'\d+', Number),
            (r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
             r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
             r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
            (r'(true|false|null)\b', Keyword.Constant),
            # built-in scopes
            (r'(application|session|client|cookie|super|this|variables|arguments)\b',
             Name.Constant),
            # name followed by '(' is a function call
            (r'([a-z_$][\w.]*)(\s*)(\()',
             bygroups(Name.Function, Text, Punctuation)),
            (r'[a-z_$][\w.]*', Name.Variable),
            (r'[()\[\]{};:,.\\]', Punctuation),
            (r'\s+', Text),
        ],
        'string': [
            # '""' is an escaped quote inside a double-quoted string
            (r'""', String.Double),
            # '#expr#' interpolation
            (r'#.+?#', String.Interp),
            (r'[^"#]+', String.Double),
            (r'#', String.Double),
            (r'"', String.Double, '#pop'),
        ],
    }
class ColdfusionMarkupLexer(RegexLexer):
    """
    Coldfusion markup only
    """
    name = 'Coldfusion'
    aliases = ['cf']
    filenames = []
    mimetypes = []

    tokens = {
        'root': [
            (r'[^<]+', Other),
            include('tags'),
            (r'<[^<>]*', Other),
        ],
        'tags': [
            # <!--- ... ---> CFML comment (may nest, see 'cfcomment')
            (r'<!---', Comment.Multiline, 'cfcomment'),
            (r'(?s)<!--.*?-->', Comment),
            (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
            # <cfscript> bodies are CFML statements
            (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
            # negative lookbehind is for strings with embedded >
            (r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
             r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
             r'mailpart|mail|header|content|zip|image|lock|argument|try|'
             r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
        ],
        'cfoutput': [
            (r'[^#<]+', Other),
            # '#expr#' interpolation inside cfoutput
            (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
                                      Punctuation)),
            # (r'<cfoutput.*?>', Name.Builtin, '#push'),
            (r'</cfoutput.*?>', Name.Builtin, '#pop'),
            include('tags'),
            (r'(?s)<[^<>]*', Other),
            (r'#', Other),
        ],
        'cfcomment': [
            # nested <!--- comments push another level
            (r'<!---', Comment.Multiline, '#push'),
            (r'--->', Comment.Multiline, '#pop'),
            (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
        ],
    }
  1298. class ColdfusionHtmlLexer(DelegatingLexer):
  1299. """
  1300. Coldfusion markup in html
  1301. """
  1302. name = 'Coldfusion HTML'
  1303. aliases = ['cfm']
  1304. filenames = ['*.cfm', '*.cfml']
  1305. mimetypes = ['application/x-coldfusion']
  1306. def __init__(self, **options):
  1307. super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options)
  1308. class ColdfusionCFCLexer(DelegatingLexer):
  1309. """
  1310. Coldfusion markup/script components
  1311. .. versionadded:: 2.0
  1312. """
  1313. name = 'Coldfusion CFC'
  1314. aliases = ['cfc']
  1315. filenames = ['*.cfc']
  1316. mimetypes = []
  1317. def __init__(self, **options):
  1318. super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options)
  1319. class SspLexer(DelegatingLexer):
  1320. """
  1321. Lexer for Scalate Server Pages.
  1322. .. versionadded:: 1.4
  1323. """
  1324. name = 'Scalate Server Page'
  1325. aliases = ['ssp']
  1326. filenames = ['*.ssp']
  1327. mimetypes = ['application/x-ssp']
  1328. def __init__(self, **options):
  1329. super().__init__(XmlLexer, JspRootLexer, **options)
  1330. def analyse_text(text):
  1331. rv = 0.0
  1332. if re.search(r'val \w+\s*:', text):
  1333. rv += 0.6
  1334. if looks_like_xml(text):
  1335. rv += 0.2
  1336. if '<%' in text and '%>' in text:
  1337. rv += 0.1
  1338. return rv
class TeaTemplateRootLexer(RegexLexer):
    """
    Base for the `TeaTemplateLexer`. Yields `Token.Other` for area outside of
    code blocks.

    .. versionadded:: 1.5
    """

    tokens = {
        'root': [
            # '<%' (optionally followed by one more char) opens a code section
            (r'<%\S?', Keyword, 'sec'),
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # note: '\w\W' != '.' without DOTALL.
            # everything up to the closing '%>' (or EOF) is Tea code
            (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
        ],
    }
  1357. class TeaTemplateLexer(DelegatingLexer):
  1358. """
  1359. Lexer for `Tea Templates <http://teatrove.org/>`_.
  1360. .. versionadded:: 1.5
  1361. """
  1362. name = 'Tea'
  1363. aliases = ['tea']
  1364. filenames = ['*.tea']
  1365. mimetypes = ['text/x-tea']
  1366. def __init__(self, **options):
  1367. super().__init__(XmlLexer, TeaTemplateRootLexer, **options)
  1368. def analyse_text(text):
  1369. rv = TeaLangLexer.analyse_text(text) - 0.01
  1370. if looks_like_xml(text):
  1371. rv += 0.4
  1372. if '<%' in text and '%>' in text:
  1373. rv += 0.1
  1374. return rv
  1375. class LassoHtmlLexer(DelegatingLexer):
  1376. """
  1377. Subclass of the `LassoLexer` which highlights unhandled data with the
  1378. `HtmlLexer`.
  1379. Nested JavaScript and CSS is also highlighted.
  1380. .. versionadded:: 1.6
  1381. """
  1382. name = 'HTML+Lasso'
  1383. aliases = ['html+lasso']
  1384. alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
  1385. '*.incl', '*.inc', '*.las']
  1386. mimetypes = ['text/html+lasso',
  1387. 'application/x-httpd-lasso',
  1388. 'application/x-httpd-lasso[89]']
  1389. def __init__(self, **options):
  1390. super().__init__(HtmlLexer, LassoLexer, **options)
  1391. def analyse_text(text):
  1392. rv = LassoLexer.analyse_text(text) - 0.01
  1393. if html_doctype_matches(text): # same as HTML lexer
  1394. rv += 0.5
  1395. return rv
  1396. class LassoXmlLexer(DelegatingLexer):
  1397. """
  1398. Subclass of the `LassoLexer` which highlights unhandled data with the
  1399. `XmlLexer`.
  1400. .. versionadded:: 1.6
  1401. """
  1402. name = 'XML+Lasso'
  1403. aliases = ['xml+lasso']
  1404. alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
  1405. '*.incl', '*.inc', '*.las']
  1406. mimetypes = ['application/xml+lasso']
  1407. def __init__(self, **options):
  1408. super().__init__(XmlLexer, LassoLexer, **options)
  1409. def analyse_text(text):
  1410. rv = LassoLexer.analyse_text(text) - 0.01
  1411. if looks_like_xml(text):
  1412. rv += 0.4
  1413. return rv
  1414. class LassoCssLexer(DelegatingLexer):
  1415. """
  1416. Subclass of the `LassoLexer` which highlights unhandled data with the
  1417. `CssLexer`.
  1418. .. versionadded:: 1.6
  1419. """
  1420. name = 'CSS+Lasso'
  1421. aliases = ['css+lasso']
  1422. alias_filenames = ['*.css']
  1423. mimetypes = ['text/css+lasso']
  1424. def __init__(self, **options):
  1425. options['requiredelimiters'] = True
  1426. super().__init__(CssLexer, LassoLexer, **options)
  1427. def analyse_text(text):
  1428. rv = LassoLexer.analyse_text(text) - 0.05
  1429. if re.search(r'\w+:[^;]+;', text):
  1430. rv += 0.1
  1431. if 'padding:' in text:
  1432. rv += 0.1
  1433. return rv
  1434. class LassoJavascriptLexer(DelegatingLexer):
  1435. """
  1436. Subclass of the `LassoLexer` which highlights unhandled data with the
  1437. `JavascriptLexer`.
  1438. .. versionadded:: 1.6
  1439. """
  1440. name = 'JavaScript+Lasso'
  1441. aliases = ['javascript+lasso', 'js+lasso']
  1442. alias_filenames = ['*.js']
  1443. mimetypes = ['application/x-javascript+lasso',
  1444. 'text/x-javascript+lasso',
  1445. 'text/javascript+lasso']
  1446. def __init__(self, **options):
  1447. options['requiredelimiters'] = True
  1448. super().__init__(JavascriptLexer, LassoLexer, **options)
  1449. def analyse_text(text):
  1450. rv = LassoLexer.analyse_text(text) - 0.05
  1451. return rv
class HandlebarsLexer(RegexLexer):
    """
    Generic `handlebars <http://handlebarsjs.com/>` template lexer.

    Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
    Everything else is left for a delegating lexer.

    .. versionadded:: 2.0
    """

    name = "Handlebars"
    aliases = ['handlebars']

    tokens = {
        'root': [
            (r'[^{]+', Other),

            # Comment start {{! }} or {{!--
            (r'\{\{!.*\}\}', Comment),

            # HTML Escaping open {{{expression
            (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),

            # {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
            (r'(\{\{)([#~/]+)([^\s}]*)',
             bygroups(Comment.Preproc, Number.Attribute, Number.Attribute), 'tag'),
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
        ],

        'tag': [
            (r'\s+', Text),
            # HTML Escaping close }}}
            (r'\}\}\}', Comment.Special, '#pop'),
            # blockClose}}, includes optional tilde ~
            (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),

            # {{opt=something}}
            (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),

            # Partials {{> ...}}
            (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
            (r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
            (r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
             'dynamic-partial'),

            include('generic'),
        ],
        'dynamic-partial': [
            (r'\s+', Text),
            (r'\)', Punctuation, '#pop'),

            # (lookup . "key") and (lookup <expr> ...) forms
            (r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
                                                      Name.Variable, Text)),
            (r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
                                             using(this, state='variable'))),
            (r'[\w-]+', Name.Function),

            include('generic'),
        ],
        'variable': [
            (r'[()/@a-zA-Z][\w-]*', Name.Variable),
            (r'\.[\w-]+', Name.Variable),
            # this/, ./ and ../ path prefixes
            (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
        ],
        'generic': [
            include('variable'),

            # borrowed from DjangoLexer
            # NOTE(review): 'eE' and '[+-]' here match literally/mandatorily,
            # so ordinary exponents like 1e5 are not covered — inherited
            # verbatim from DjangoLexer; confirm before changing.
            (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ]
    }
  1512. class HandlebarsHtmlLexer(DelegatingLexer):
  1513. """
  1514. Subclass of the `HandlebarsLexer` that highlights unlexed data with the
  1515. `HtmlLexer`.
  1516. .. versionadded:: 2.0
  1517. """
  1518. name = "HTML+Handlebars"
  1519. aliases = ["html+handlebars"]
  1520. filenames = ['*.handlebars', '*.hbs']
  1521. mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']
  1522. def __init__(self, **options):
  1523. super().__init__(HtmlLexer, HandlebarsLexer, **options)
  1524. class YamlJinjaLexer(DelegatingLexer):
  1525. """
  1526. Subclass of the `DjangoLexer` that highlights unlexed data with the
  1527. `YamlLexer`.
  1528. Commonly used in Saltstack salt states.
  1529. .. versionadded:: 2.0
  1530. """
  1531. name = 'YAML+Jinja'
  1532. aliases = ['yaml+jinja', 'salt', 'sls']
  1533. filenames = ['*.sls']
  1534. mimetypes = ['text/x-yaml+jinja', 'text/x-sls']
  1535. def __init__(self, **options):
  1536. super().__init__(YamlLexer, DjangoLexer, **options)
class LiquidLexer(RegexLexer):
    """
    Lexer for `Liquid templates
    <http://www.rubydoc.info/github/Shopify/liquid>`_.

    .. versionadded:: 2.0
    """
    name = 'liquid'
    aliases = ['liquid']
    filenames = ['*.liquid']

    # NOTE: rule order within each state is significant -- the first
    # matching pattern wins, so do not reorder entries.
    tokens = {
        'root': [
            (r'[^{]+', Text),
            # tags and block tags
            (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
            # output tags
            (r'(\{\{)(\s*)([^\s}]+)',
             bygroups(Punctuation, Whitespace, using(this, state = 'generic')),
             'output'),
            # lone '{' that did not start a tag
            (r'\{', Text)
        ],
        # decides, right after '{%', whether this is a builtin block,
        # a block end, a builtin tag, or an unknown tag
        'tag-or-block': [
            # builtin logic blocks
            (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
            (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
             combined('end-of-block', 'whitespace', 'generic')),
            (r'(else)(\s*)(%\})',
             bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),
            # other builtin blocks
            (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, using(this, state = 'variable'),
                      Whitespace, Punctuation), '#pop'),
            (r'(comment)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
            (r'(raw)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),
            # end of block
            (r'(end(case|unless|if))(\s*)(%\})',
             bygroups(Keyword.Reserved, None, Whitespace, Punctuation), '#pop'),
            (r'(end([^\s%]+))(\s*)(%\})',
             bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),
            # builtin tags (assign and include are handled together with usual tags)
            (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
             bygroups(Name.Tag, Whitespace,
                      using(this, state='generic'), Punctuation, Whitespace),
             'variable-tag-markup'),
            # other tags or blocks
            (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
        ],
        # inside '{{ ... }}' after the initial expression
        'output': [
            include('whitespace'),
            (r'\}\}', Punctuation, '#pop'),  # end of output
            (r'\|', Punctuation, 'filters')
        ],
        # a '|'-separated filter chain inside an output tag
        'filters': [
            include('whitespace'),
            (r'\}\}', Punctuation, ('#pop', '#pop')),  # end of filters and output
            (r'([^\s|:]+)(:?)(\s*)',
             bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
        ],
        # arguments of a single filter
        'filter-markup': [
            (r'\|', Punctuation, '#pop'),
            include('end-of-tag'),
            include('default-param-markup')
        ],
        # condition of if/unless/elsif/case blocks
        'condition': [
            include('end-of-block'),
            include('whitespace'),
            (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
             bygroups(using(this, state = 'generic'), Whitespace, Operator,
                      Whitespace, using(this, state = 'generic'), Whitespace,
                      Punctuation)),
            (r'\b!', Operator),
            (r'\bnot\b', Operator.Word),
            (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
             bygroups(using(this, state = 'generic'), Whitespace, Operator.Word,
                      Whitespace, using(this, state = 'generic'))),
            include('generic'),
            include('whitespace')
        ],
        'generic-value': [
            include('generic'),
            include('end-at-whitespace')
        ],
        'operator': [
            (r'(\s*)((=|!|>|<)=?)(\s*)',
             bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
            (r'(\s*)(\bcontains\b)(\s*)',
             bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
        ],
        # terminators shared by several states
        'end-of-tag': [
            (r'\}\}', Punctuation, '#pop')
        ],
        'end-of-block': [
            (r'%\}', Punctuation, ('#pop', '#pop'))
        ],
        'end-at-whitespace': [
            (r'\s+', Whitespace, '#pop')
        ],
        # states for unknown markup
        'param-markup': [
            include('whitespace'),
            # params with colons or equals
            (r'([^\s=:]+)(\s*)(=|:)',
             bygroups(Name.Attribute, Whitespace, Operator)),
            # explicit variables
            # NOTE(review): '([^\s}])' captures a single character only;
            # '[^\s}]+' may have been intended -- confirm before changing.
            (r'(\{\{)(\s*)([^\s}])(\s*)(\}\})',
             bygroups(Punctuation, Whitespace, using(this, state = 'variable'),
                      Whitespace, Punctuation)),
            include('string'),
            include('number'),
            include('keyword'),
            (r',', Punctuation)
        ],
        'default-param-markup': [
            include('param-markup'),
            (r'.', Text)  # fallback for switches / variables / un-quoted strings / ...
        ],
        'variable-param-markup': [
            include('param-markup'),
            include('variable'),
            (r'.', Text)  # fallback
        ],
        'tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('default-param-markup')
        ],
        'variable-tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('variable-param-markup')
        ],
        # states for different values types
        'keyword': [
            (r'\b(false|true)\b', Keyword.Constant)
        ],
        'variable': [
            (r'[a-zA-Z_]\w*', Name.Variable),
            (r'(?<=\w)\.(?=\w)', Punctuation)
        ],
        'string': [
            (r"'[^']*'", String.Single),
            (r'"[^"]*"', String.Double)
        ],
        'number': [
            (r'\d+\.\d+', Number.Float),
            (r'\d+', Number.Integer)
        ],
        'generic': [  # decides for variable, string, keyword or number
            include('keyword'),
            include('string'),
            include('number'),
            include('variable')
        ],
        'whitespace': [
            (r'[ \t]+', Whitespace)
        ],
        # states for builtin blocks
        # comment: everything until '{% endcomment %}' is a Comment token
        'comment': [
            (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), ('#pop', '#pop')),
            (r'.', Comment)
        ],
        # raw: pass text through untouched until '{% endraw %}'
        'raw': [
            (r'[^{]+', Text),
            (r'(\{%)(\s*)(endraw)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), '#pop'),
            (r'\{', Text)
        ],
    }
  1707. class TwigLexer(RegexLexer):
  1708. """
  1709. `Twig <http://twig.sensiolabs.org/>`_ template lexer.
  1710. It just highlights Twig code between the preprocessor directives,
  1711. other data is left untouched by the lexer.
  1712. .. versionadded:: 2.0
  1713. """
  1714. name = 'Twig'
  1715. aliases = ['twig']
  1716. mimetypes = ['application/x-twig']
  1717. flags = re.M | re.S
  1718. # Note that a backslash is included in the following two patterns
  1719. # PHP uses a backslash as a namespace separator
  1720. _ident_char = r'[\\\w-]|[^\x00-\x7f]'
  1721. _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
  1722. _ident_end = r'(?:' + _ident_char + ')*'
  1723. _ident_inner = _ident_begin + _ident_end
  1724. tokens = {
  1725. 'root': [
  1726. (r'[^{]+', Other),
  1727. (r'\{\{', Comment.Preproc, 'var'),
  1728. # twig comments
  1729. (r'\{\#.*?\#\}', Comment),
  1730. # raw twig blocks
  1731. (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
  1732. r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
  1733. bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
  1734. Other, Comment.Preproc, Text, Keyword, Text,
  1735. Comment.Preproc)),
  1736. (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
  1737. r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
  1738. bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
  1739. Other, Comment.Preproc, Text, Keyword, Text,
  1740. Comment.Preproc)),
  1741. # filter blocks
  1742. (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
  1743. bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
  1744. 'tag'),
  1745. (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
  1746. bygroups(Comment.Preproc, Text, Keyword), 'tag'),
  1747. (r'\{', Other),
  1748. ],
  1749. 'varnames': [
  1750. (r'(\|)(\s*)(%s)' % _ident_inner,
  1751. bygroups(Operator, Text, Name.Function)),
  1752. (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
  1753. bygroups(Keyword, Text, Keyword, Text, Name.Function)),
  1754. (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
  1755. (r'(in|not|and|b-and|or|b-or|b-xor|is'
  1756. r'if|elseif|else|import'
  1757. r'constant|defined|divisibleby|empty|even|iterable|odd|sameas'
  1758. r'matches|starts\s+with|ends\s+with)\b',
  1759. Keyword),
  1760. (r'(loop|block|parent)\b', Name.Builtin),
  1761. (_ident_inner, Name.Variable),
  1762. (r'\.' + _ident_inner, Name.Variable),
  1763. (r'\.[0-9]+', Number),
  1764. (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
  1765. (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
  1766. (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
  1767. (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
  1768. r"0[xX][0-9a-fA-F]+[Ll]?", Number),
  1769. ],
  1770. 'var': [
  1771. (r'\s+', Text),
  1772. (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
  1773. include('varnames')
  1774. ],
  1775. 'tag': [
  1776. (r'\s+', Text),
  1777. (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
  1778. include('varnames'),
  1779. (r'.', Punctuation),
  1780. ],
  1781. }
  1782. class TwigHtmlLexer(DelegatingLexer):
  1783. """
  1784. Subclass of the `TwigLexer` that highlights unlexed data with the
  1785. `HtmlLexer`.
  1786. .. versionadded:: 2.0
  1787. """
  1788. name = "HTML+Twig"
  1789. aliases = ["html+twig"]
  1790. filenames = ['*.twig']
  1791. mimetypes = ['text/html+twig']
  1792. def __init__(self, **options):
  1793. super().__init__(HtmlLexer, TwigLexer, **options)
class Angular2Lexer(RegexLexer):
    """
    Generic
    `angular2 <http://victorsavkin.com/post/119943127151/angular-2-template-syntax>`_
    template lexer.

    Highlights only the Angular template tags (stuff between `{{` and `}}` and
    special attributes: '(event)=', '[property]=', '[(twoWayBinding)]=').
    Everything else is left for a delegating lexer.

    .. versionadded:: 2.1
    """

    name = "Angular2"
    aliases = ['ng2']

    # NOTE: rule order within each state is significant -- first match wins.
    tokens = {
        'root': [
            # anything that cannot start an Angular construct passes through
            (r'[^{([*#]+', Other),
            # {{meal.name}}
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'ngExpression'),
            # (click)="deleteOrder()"; [value]="test"; [(twoWayTest)]="foo.bar"
            (r'([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)',
             bygroups(Punctuation, Name.Attribute, Punctuation, Text, Operator, Text),
             'attr'),
            # bracketed attribute without a value
            (r'([([]+)([\w:.-]+)([\])]+)(\s*)',
             bygroups(Punctuation, Name.Attribute, Punctuation, Text)),
            # *ngIf="..."; #f="ngForm"
            (r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
             bygroups(Punctuation, Name.Attribute, Text, Operator, Text), 'attr'),
            (r'([*#])([\w:.-]+)(\s*)',
             bygroups(Punctuation, Name.Attribute, Text)),
        ],
        # interior of a '{{ ... }}' interpolation
        'ngExpression': [
            (r'\s+(\|\s+)?', Text),
            (r'\}\}', Comment.Preproc, '#pop'),
            # Literals
            (r':?(true|false)', String.Boolean),
            (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            # NOTE(review): '(eE[+-][0-9])?' matches the literal text 'eE',
            # not an exponent; '[eE][+-]?[0-9]+' was presumably intended --
            # same pattern appears in the DjangoLexer this was borrowed from.
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            # Variabletext
            (r'[a-zA-Z][\w-]*(\(.*\))?', Name.Variable),
            (r'\.[\w-]+(\(.*\))?', Name.Variable),
            # inline If
            (r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)',
             bygroups(Operator, Text, String, Text, Operator, Text, String, Text)),
        ],
        # attribute value after '='; any of the three forms pops back out
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
  1845. class Angular2HtmlLexer(DelegatingLexer):
  1846. """
  1847. Subclass of the `Angular2Lexer` that highlights unlexed data with the
  1848. `HtmlLexer`.
  1849. .. versionadded:: 2.0
  1850. """
  1851. name = "HTML + Angular2"
  1852. aliases = ["html+ng2"]
  1853. filenames = ['*.ng2']
  1854. def __init__(self, **options):
  1855. super().__init__(HtmlLexer, Angular2Lexer, **options)