  1. """
  2. pygments.lexers.templates
  3. ~~~~~~~~~~~~~~~~~~~~~~~~~
  4. Lexers for various template engines' markup.
  5. :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
  6. :license: BSD, see LICENSE for details.
  7. """
  8. import re
  9. from pygments.lexers.html import HtmlLexer, XmlLexer
  10. from pygments.lexers.javascript import JavascriptLexer, LassoLexer
  11. from pygments.lexers.css import CssLexer
  12. from pygments.lexers.php import PhpLexer
  13. from pygments.lexers.python import PythonLexer
  14. from pygments.lexers.perl import PerlLexer
  15. from pygments.lexers.jvm import JavaLexer, TeaLangLexer
  16. from pygments.lexers.data import YamlLexer
  17. from pygments.lexers.sql import SqlLexer
  18. from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
  19. include, using, this, default, combined
  20. from pygments.token import Error, Punctuation, Whitespace, \
  21. Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
  22. from pygments.util import html_doctype_matches, looks_like_xml

__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
           'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
           'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
           'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
           'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
           'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
           'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
           'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
           'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
           'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
           'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
           'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
           'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
           'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
           'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
           'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
           'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
           'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
           'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
           'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer',
           'SqlJinjaLexer']


class ErbLexer(Lexer):
    """
    Generic ERB (Ruby Templating) lexer.

    Just highlights Ruby code between the preprocessor directives; other
    data is left untouched by the lexer.

    All options are also forwarded to the `RubyLexer`.
    """

    name = 'ERB'
    url = 'https://github.com/ruby/erb'
    aliases = ['erb']
    mimetypes = ['application/x-ruby-templating']

    _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)

    def __init__(self, **options):
        from pygments.lexers.ruby import RubyLexer
        self.ruby_lexer = RubyLexer(**options)
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        """
        Since ERB doesn't allow "<%" and other tags inside of Ruby
        blocks, we have to use a split approach here that fails for
        that too.
        """
        tokens = self._block_re.split(text)
        tokens.reverse()
        state = idx = 0
        try:
            while True:
                # text
                if state == 0:
                    val = tokens.pop()
                    yield idx, Other, val
                    idx += len(val)
                    state = 1
                # block starts
                elif state == 1:
                    tag = tokens.pop()
                    # literals
                    if tag in ('<%%', '%%>'):
                        yield idx, Other, tag
                        idx += 3
                        state = 0
                    # comment
                    elif tag == '<%#':
                        yield idx, Comment.Preproc, tag
                        val = tokens.pop()
                        yield idx + 3, Comment, val
                        idx += 3 + len(val)
                        state = 2
                    # blocks or output
                    elif tag in ('<%', '<%=', '<%-'):
                        yield idx, Comment.Preproc, tag
                        idx += len(tag)
                        data = tokens.pop()
                        r_idx = 0
                        for r_idx, r_token, r_value in \
                                self.ruby_lexer.get_tokens_unprocessed(data):
                            yield r_idx + idx, r_token, r_value
                        idx += len(data)
                        state = 2
                    elif tag in ('%>', '-%>'):
                        yield idx, Error, tag
                        idx += len(tag)
                        state = 0
                    # % raw ruby statements
                    else:
                        yield idx, Comment.Preproc, tag[0]
                        r_idx = 0
                        for r_idx, r_token, r_value in \
                                self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
                            yield idx + 1 + r_idx, r_token, r_value
                        idx += len(tag)
                        state = 0
                # block ends
                elif state == 2:
                    tag = tokens.pop()
                    if tag not in ('%>', '-%>'):
                        yield idx, Other, tag
                    else:
                        yield idx, Comment.Preproc, tag
                    idx += len(tag)
                    state = 0
        except IndexError:
            return

    def analyse_text(text):
        if '<%' in text and '%>' in text:
            return 0.4
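

# Illustrative usage sketch (added for this edit; the ERB snippet is an
# invented example). `highlight` and `TerminalFormatter` are standard
# Pygments APIs:
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     erb_source = '<% if @user %>Hello, <%= @user.name %>!<% end %>'
#     print(highlight(erb_source, ErbLexer(), TerminalFormatter()))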


class SmartyLexer(RegexLexer):
    """
    Generic Smarty template lexer.

    Just highlights Smarty code between the preprocessor directives; other
    data is left untouched by the lexer.
    """

    name = 'Smarty'
    url = 'https://www.smarty.net/'
    aliases = ['smarty']
    filenames = ['*.tpl']
    mimetypes = ['application/x-smarty']

    flags = re.MULTILINE | re.DOTALL

    tokens = {
        'root': [
            (r'[^{]+', Other),
            (r'(\{)(\*.*?\*)(\})',
             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
            (r'(\{php\})(.*?)(\{/php\})',
             bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
                      Comment.Preproc)),
            (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
             bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
            (r'\{', Comment.Preproc, 'smarty')
        ],
        'smarty': [
            (r'\s+', Text),
            (r'\{', Comment.Preproc, '#push'),
            (r'\}', Comment.Preproc, '#pop'),
            (r'#[a-zA-Z_]\w*#', Name.Variable),
            (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
            (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
            (r'(true|false|null)\b', Keyword.Constant),
            # int/float literals (with optional exponent) or hex literals
            (r"[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            (r'[a-zA-Z_]\w*', Name.Attribute)
        ]
    }

    def analyse_text(text):
        rv = 0.0
        if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
            rv += 0.15
        if re.search(r'\{include\s+file=.*?\}', text):
            rv += 0.15
        if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
            rv += 0.15
        if re.search(r'\{\$.*?\}', text):
            rv += 0.01
        return rv
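

# Rough illustration of the heuristic above (invented template string): the
# score is cumulative, so a template matching the `{if}`, `{include}` and
# `{$...}` patterns scores 0.15 + 0.15 + 0.01 = 0.31:
#
#     tpl = '{include file="header.tpl"}{if $user}Hi {$user.name}{/if}'
#     SmartyLexer.analyse_text(tpl)   # 0.31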


class VelocityLexer(RegexLexer):
    """
    Generic Velocity template lexer.

    Just highlights Velocity directives and variable references; other
    data is left untouched by the lexer.
    """

    name = 'Velocity'
    url = 'https://velocity.apache.org/'
    aliases = ['velocity']
    filenames = ['*.vm', '*.fhtml']

    flags = re.MULTILINE | re.DOTALL

    identifier = r'[a-zA-Z_]\w*'

    tokens = {
        'root': [
            (r'[^{#$]+', Other),
            (r'(#)(\*.*?\*)(#)',
             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
            (r'(##)(.*?$)',
             bygroups(Comment.Preproc, Comment)),
            (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
             bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
             'directiveparams'),
            (r'(#\{?)(' + identifier + r')(\}|\b)',
             bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
            (r'\$!?\{?', Punctuation, 'variable')
        ],
        'variable': [
            (identifier, Name.Variable),
            (r'\(', Punctuation, 'funcparams'),
            (r'(\.)(' + identifier + r')',
             bygroups(Punctuation, Name.Variable), '#push'),
            (r'\}', Punctuation, '#pop'),
            default('#pop')
        ],
        'directiveparams': [
            (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
             Operator),
            (r'\[', Operator, 'rangeoperator'),
            (r'\b' + identifier + r'\b', Name.Function),
            include('funcparams')
        ],
        'rangeoperator': [
            (r'\.\.', Operator),
            include('funcparams'),
            (r'\]', Operator, '#pop')
        ],
        'funcparams': [
            (r'\$!?\{?', Punctuation, 'variable'),
            (r'\s+', Text),
            (r'[,:]', Punctuation),
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            (r"\b[0-9]+\b", Number),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'\(', Punctuation, '#push'),
            (r'\)', Punctuation, '#pop'),
            (r'\{', Punctuation, '#push'),
            (r'\}', Punctuation, '#pop'),
            (r'\[', Punctuation, '#push'),
            (r'\]', Punctuation, '#pop'),
        ]
    }

    def analyse_text(text):
        rv = 0.0
        if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text, re.DOTALL):
            rv += 0.25
        if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
            rv += 0.15
        if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
            rv += 0.15
        if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
                     r'(\.\w+(\([^)]*\))?)*\}?', text):
            rv += 0.01
        return rv


class VelocityHtmlLexer(DelegatingLexer):
    """
    Subclass of the `VelocityLexer` that highlights unlexed data
    with the `HtmlLexer`.
    """

    name = 'HTML+Velocity'
    aliases = ['html+velocity']
    alias_filenames = ['*.html', '*.fhtml']
    mimetypes = ['text/html+velocity']

    def __init__(self, **options):
        super().__init__(HtmlLexer, VelocityLexer, **options)
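

# How the delegation works, roughly: the second lexer (VelocityLexer) runs
# over the whole input, everything it emits as `Other` is buffered and
# re-lexed by the first lexer (HtmlLexer), and the two token streams are
# spliced back together. Illustrative call with an invented snippet:
#
#     lexer = VelocityHtmlLexer()
#     for index, token, value in lexer.get_tokens_unprocessed(
#             '<p>#if($cond)$name#end</p>'):
#         print(index, token, repr(value))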


class VelocityXmlLexer(DelegatingLexer):
    """
    Subclass of the `VelocityLexer` that highlights unlexed data
    with the `XmlLexer`.
    """

    name = 'XML+Velocity'
    aliases = ['xml+velocity']
    alias_filenames = ['*.xml', '*.vm']
    mimetypes = ['application/xml+velocity']

    def __init__(self, **options):
        super().__init__(XmlLexer, VelocityLexer, **options)

    def analyse_text(text):
        rv = VelocityLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        return rv


class DjangoLexer(RegexLexer):
    """
    Generic `django <http://www.djangoproject.com/documentation/templates/>`_
    and `jinja <https://jinja.pocoo.org/jinja/>`_ template lexer.

    It just highlights django/jinja code between the preprocessor directives;
    other data is left untouched by the lexer.
    """

    name = 'Django/Jinja'
    aliases = ['django', 'jinja']
    mimetypes = ['application/x-django-templating', 'application/x-jinja']

    flags = re.M | re.S

    tokens = {
        'root': [
            (r'[^{]+', Other),
            (r'\{\{', Comment.Preproc, 'var'),
            # jinja/django comments
            (r'\{#.*?#\}', Comment),
            # django comments
            (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Comment, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # raw jinja blocks
            (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Text, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # filter blocks
            (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
             'block'),
            (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword), 'block'),
            (r'\{', Other)
        ],
        'varnames': [
            (r'(\|)(\s*)([a-zA-Z_]\w*)',
             bygroups(Operator, Text, Name.Function)),
            (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
            (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
            (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
             r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
             Keyword),
            (r'(loop|block|super|forloop)\b', Name.Builtin),
            (r'[a-zA-Z_][\w-]*', Name.Variable),
            (r'\.\w+', Name.Variable),
            (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            (r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
            # int/float literals (with optional exponent) or hex literals
            (r"[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ],
        'var': [
            (r'\s+', Text),
            (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames')
        ],
        'block': [
            (r'\s+', Text),
            (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames'),
            (r'.', Punctuation)
        ]
    }

    def analyse_text(text):
        rv = 0.0
        if re.search(r'\{%\s*(block|extends)', text) is not None:
            rv += 0.4
        if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
            rv += 0.1
        if re.search(r'\{\{.*?\}\}', text) is not None:
            rv += 0.1
        return rv
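

# Illustrative call (invented template string): `get_tokens` yields
# (token, value) pairs, with the `{% ... %}` and `{{ ... }}` delimiters lexed
# as Comment.Preproc and the plain text in between as Other:
#
#     src = '{% for item in items %}{{ item|title }}{% endfor %}'
#     for token, value in DjangoLexer().get_tokens(src):
#         print(token, repr(value))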


class MyghtyLexer(RegexLexer):
    """
    Generic myghty templates lexer. Code that isn't Myghty
    markup is yielded as `Token.Other`.

    .. versionadded:: 0.6
    """

    name = 'Myghty'
    url = 'http://www.myghty.org/'
    aliases = ['myghty']
    filenames = ['*.myt', 'autodelegate']
    mimetypes = ['application/x-myghty']

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
            (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Name.Function, Name.Tag,
                      using(PythonLexer), Name.Tag)),
            (r'(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'</&>', Name.Tag),
            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
             bygroups(Name.Tag, using(PythonLexer), Other)),
            (r"""(?sx)
                 (.+?)               # anything, followed by:
                 (?:
                  (?<=\n)(?=[%#]) |  # an eval or comment line
                  (?=</?[%&]) |      # a substitution or block or
                                     # call start or end
                                     # - don't consume
                  (\\\n) |           # an escaped newline
                  \Z                 # end of string
                 )""", bygroups(Other, Operator)),
        ]
    }


class MyghtyHtmlLexer(DelegatingLexer):
    """
    Subclass of the `MyghtyLexer` that highlights unlexed data
    with the `HtmlLexer`.

    .. versionadded:: 0.6
    """

    name = 'HTML+Myghty'
    aliases = ['html+myghty']
    mimetypes = ['text/html+myghty']

    def __init__(self, **options):
        super().__init__(HtmlLexer, MyghtyLexer, **options)


class MyghtyXmlLexer(DelegatingLexer):
    """
    Subclass of the `MyghtyLexer` that highlights unlexed data
    with the `XmlLexer`.

    .. versionadded:: 0.6
    """

    name = 'XML+Myghty'
    aliases = ['xml+myghty']
    mimetypes = ['application/xml+myghty']

    def __init__(self, **options):
        super().__init__(XmlLexer, MyghtyLexer, **options)


class MyghtyJavascriptLexer(DelegatingLexer):
    """
    Subclass of the `MyghtyLexer` that highlights unlexed data
    with the `JavascriptLexer`.

    .. versionadded:: 0.6
    """

    name = 'JavaScript+Myghty'
    aliases = ['javascript+myghty', 'js+myghty']
    mimetypes = ['application/x-javascript+myghty',
                 'text/x-javascript+myghty',
                 'text/javascript+myghty']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, MyghtyLexer, **options)


class MyghtyCssLexer(DelegatingLexer):
    """
    Subclass of the `MyghtyLexer` that highlights unlexed data
    with the `CssLexer`.

    .. versionadded:: 0.6
    """

    name = 'CSS+Myghty'
    aliases = ['css+myghty']
    mimetypes = ['text/css+myghty']

    def __init__(self, **options):
        super().__init__(CssLexer, MyghtyLexer, **options)


class MasonLexer(RegexLexer):
    """
    Generic mason templates lexer. Based on the Myghty lexer. Code that isn't
    Mason markup is HTML.

    .. versionadded:: 1.4
    """

    name = 'Mason'
    url = 'http://www.masonhq.com/'
    aliases = ['mason']
    filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
    mimetypes = ['application/x-mason']

    tokens = {
        'root': [
            (r'\s+', Whitespace),
            (r'(?s)(<%doc>)(.*?)(</%doc>)',
             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Whitespace, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
            (r'(?s)(<%(\w+)(.*?)(>))(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, None, None, None, using(PerlLexer), Name.Tag)),
            (r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'</&>', Name.Tag),
            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
             bygroups(Name.Tag, using(PerlLexer), Other)),
            (r"""(?sx)
                 (.+?)               # anything, followed by:
                 (?:
                  (?<=\n)(?=[%#]) |  # an eval or comment line
                  (?=</?[%&]) |      # a substitution or block or
                                     # call start or end
                                     # - don't consume
                  (\\\n) |           # an escaped newline
                  \Z                 # end of string
                 )""", bygroups(using(HtmlLexer), Operator)),
        ]
    }

    def analyse_text(text):
        result = 0.0
        if re.search(r'</%(class|doc|init)>', text) is not None:
            result = 1.0
        elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
            result = 0.11
        return result
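

# Sketch of the heuristic above (invented inputs): a closing block tag such
# as </%doc> is treated as conclusive, a component call only as weak evidence:
#
#     MasonLexer.analyse_text('<%doc>notes</%doc>')               # 1.0
#     MasonLexer.analyse_text('<& /shared/header.mi, k => 1 &>')  # 0.11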


class MakoLexer(RegexLexer):
    """
    Generic mako templates lexer. Code that isn't Mako
    markup is yielded as `Token.Other`.

    .. versionadded:: 0.7
    """

    name = 'Mako'
    url = 'http://www.makotemplates.org/'
    aliases = ['mako']
    filenames = ['*.mao']
    mimetypes = ['application/x-mako']

    tokens = {
        'root': [
            (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
             bygroups(Text.Whitespace, Comment.Preproc, Keyword, Other)),
            (r'(\s*)(%)([^\n]*)(\n|\Z)',
             bygroups(Text.Whitespace, Comment.Preproc, using(PythonLexer), Other)),
            (r'(\s*)(##[^\n]*)(\n|\Z)',
             bygroups(Text.Whitespace, Comment.Single, Text.Whitespace)),
            (r'(?s)<%doc>.*?</%doc>', Comment.Multiline),
            (r'(<%)([\w.:]+)',
             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
            (r'(</%)([\w.:]+)(>)',
             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
            (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
            (r'(?s)(<%(?:!?))(.*?)(%>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(\$\{)(.*?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'''(?sx)
                (.+?)                # anything, followed by:
                (?:
                 (?<=\n)(?=%|\#\#) | # an eval or comment line
                 (?=\#\*) |          # multiline comment
                 (?=</?%) |          # a python block
                                     # call start or end
                 (?=\$\{) |          # a substitution
                 (?<=\n)(?=\s*%) |
                                     # - don't consume
                 (\\\n) |            # an escaped newline
                 \Z                  # end of string
                )
            ''', bygroups(Other, Operator)),
            (r'\s+', Text),
        ],
        'ondeftags': [
            (r'<%', Comment.Preproc),
            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
            include('tag'),
        ],
        'tag': [
            (r'((?:\w+)\s*=)(\s*)(".*?")',
             bygroups(Name.Attribute, Text, String)),
            (r'/?\s*>', Comment.Preproc, '#pop'),
            (r'\s+', Text),
        ],
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }


class MakoHtmlLexer(DelegatingLexer):
    """
    Subclass of the `MakoLexer` that highlights unlexed data
    with the `HtmlLexer`.

    .. versionadded:: 0.7
    """

    name = 'HTML+Mako'
    aliases = ['html+mako']
    mimetypes = ['text/html+mako']

    def __init__(self, **options):
        super().__init__(HtmlLexer, MakoLexer, **options)


class MakoXmlLexer(DelegatingLexer):
    """
    Subclass of the `MakoLexer` that highlights unlexed data
    with the `XmlLexer`.

    .. versionadded:: 0.7
    """

    name = 'XML+Mako'
    aliases = ['xml+mako']
    mimetypes = ['application/xml+mako']

    def __init__(self, **options):
        super().__init__(XmlLexer, MakoLexer, **options)


class MakoJavascriptLexer(DelegatingLexer):
    """
    Subclass of the `MakoLexer` that highlights unlexed data
    with the `JavascriptLexer`.

    .. versionadded:: 0.7
    """

    name = 'JavaScript+Mako'
    aliases = ['javascript+mako', 'js+mako']
    mimetypes = ['application/x-javascript+mako',
                 'text/x-javascript+mako',
                 'text/javascript+mako']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, MakoLexer, **options)


class MakoCssLexer(DelegatingLexer):
    """
    Subclass of the `MakoLexer` that highlights unlexed data
    with the `CssLexer`.

    .. versionadded:: 0.7
    """

    name = 'CSS+Mako'
    aliases = ['css+mako']
    mimetypes = ['text/css+mako']

    def __init__(self, **options):
        super().__init__(CssLexer, MakoLexer, **options)


# Genshi and Cheetah lexers courtesy of Matt Good.

class CheetahPythonLexer(Lexer):
    """
    Lexer for handling Cheetah's special $ tokens in Python syntax.
    """

    def get_tokens_unprocessed(self, text):
        pylexer = PythonLexer(**self.options)
        for pos, type_, value in pylexer.get_tokens_unprocessed(text):
            if type_ == Token.Error and value == '$':
                type_ = Comment.Preproc
            yield pos, type_, value
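

# What the remapping above achieves, in isolation (illustrative): a bare `$`
# is an Error token for the plain PythonLexer, so it is rewritten to
# Comment.Preproc; every other token passes through unchanged. Roughly:
#
#     for pos, tok, val in CheetahPythonLexer().get_tokens_unprocessed('$name'):
#         print(pos, tok, repr(val))
#     # 0 Token.Comment.Preproc '$'
#     # 1 Token.Name 'name'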


class CheetahLexer(RegexLexer):
    """
    Generic cheetah templates lexer. Code that isn't Cheetah
    markup is yielded as `Token.Other`. This also works for
    `spitfire templates`_ which use the same syntax.

    .. _spitfire templates: http://code.google.com/p/spitfire/
    """

    name = 'Cheetah'
    url = 'http://www.cheetahtemplate.org/'
    aliases = ['cheetah', 'spitfire']
    filenames = ['*.tmpl', '*.spt']
    mimetypes = ['application/x-cheetah', 'application/x-spitfire']

    tokens = {
        'root': [
            (r'(##[^\n]*)$',
             bygroups(Comment)),
            (r'#[*](.|\n)*?[*]#', Comment),
            (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
            (r'#slurp$', Comment.Preproc),
            (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
             bygroups(Comment.Preproc, using(CheetahPythonLexer),
                      Comment.Preproc)),
            # TODO support other Python syntax like $foo['bar']
            (r'(\$)([a-zA-Z_][\w.]*\w)',
             bygroups(Comment.Preproc, using(CheetahPythonLexer))),
            (r'(?s)(\$\{!?)(.*?)(\})',
             bygroups(Comment.Preproc, using(CheetahPythonLexer),
                      Comment.Preproc)),
            (r'''(?sx)
                (.+?)               # anything, followed by:
                (?:
                 (?=\#[#a-zA-Z]*) | # an eval comment
                 (?=\$[a-zA-Z_{]) | # a substitution
                 \Z                 # end of string
                )
            ''', Other),
            (r'\s+', Text),
        ],
    }


class CheetahHtmlLexer(DelegatingLexer):
    """
    Subclass of the `CheetahLexer` that highlights unlexed data
    with the `HtmlLexer`.
    """

    name = 'HTML+Cheetah'
    aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
    mimetypes = ['text/html+cheetah', 'text/html+spitfire']

    def __init__(self, **options):
        super().__init__(HtmlLexer, CheetahLexer, **options)


class CheetahXmlLexer(DelegatingLexer):
    """
    Subclass of the `CheetahLexer` that highlights unlexed data
    with the `XmlLexer`.
    """

    name = 'XML+Cheetah'
    aliases = ['xml+cheetah', 'xml+spitfire']
    mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']

    def __init__(self, **options):
        super().__init__(XmlLexer, CheetahLexer, **options)


class CheetahJavascriptLexer(DelegatingLexer):
    """
    Subclass of the `CheetahLexer` that highlights unlexed data
    with the `JavascriptLexer`.
    """

    name = 'JavaScript+Cheetah'
    aliases = ['javascript+cheetah', 'js+cheetah',
               'javascript+spitfire', 'js+spitfire']
    mimetypes = ['application/x-javascript+cheetah',
                 'text/x-javascript+cheetah',
                 'text/javascript+cheetah',
                 'application/x-javascript+spitfire',
                 'text/x-javascript+spitfire',
                 'text/javascript+spitfire']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, CheetahLexer, **options)


class GenshiTextLexer(RegexLexer):
    """
    A lexer that highlights genshi text templates.
    """

    name = 'Genshi Text'
    url = 'http://genshi.edgewall.org/'
    aliases = ['genshitext']
    mimetypes = ['application/x-genshi-text', 'text/x-genshi']

    tokens = {
        'root': [
            (r'[^#$\s]+', Other),
            (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
            (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
            include('variable'),
            (r'[#$\s]', Other),
        ],
        'directive': [
            (r'\n', Text, '#pop'),
            (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
            (r'(choose|when|with)([^\S\n]+)(.*)',
             bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
            (r'(choose|otherwise)\b', Keyword, '#pop'),
            (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
        ],
        'variable': [
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
             Name.Variable),
        ]
    }
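

# Illustrative usage (invented template text): directives start with `#` at
# the beginning of a line and expressions use `$`/`${...}`:
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     src = '#if user\nHello ${user.name}\n#end\n'
#     print(highlight(src, GenshiTextLexer(), TerminalFormatter()))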


class GenshiMarkupLexer(RegexLexer):
    """
    Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
    `GenshiLexer`.
    """

    flags = re.DOTALL

    tokens = {
        'root': [
            (r'[^<$]+', Other),
            (r'(<\?python)(.*?)(\?>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # yield style and script blocks as Other
            (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
            (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
            (r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
            include('variable'),
            (r'[<$]', Other),
        ],
        'pytag': [
            (r'\s+', Text),
            (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'pyattr': [
            ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
            ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
        'tag': [
            (r'\s+', Text),
            (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
            (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'attr': [
            ('"', String, 'attr-dstring'),
            ("'", String, 'attr-sstring'),
            (r'[^\s>]*', String, '#pop')
        ],
        'attr-dstring': [
            ('"', String, '#pop'),
            include('strings'),
            ("'", String)
        ],
        'attr-sstring': [
            ("'", String, '#pop'),
            include('strings'),
            ("'", String)
        ],
        'strings': [
            ('[^"\'$]+', String),
            include('variable')
        ],
        'variable': [
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
             Name.Variable),
        ]
    }


class HtmlGenshiLexer(DelegatingLexer):
    """
    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
    `kid <http://kid-templating.org/>`_ HTML templates.
    """

    name = 'HTML+Genshi'
    aliases = ['html+genshi', 'html+kid']
    alias_filenames = ['*.html', '*.htm', '*.xhtml']
    mimetypes = ['text/html+genshi']

    def __init__(self, **options):
        super().__init__(HtmlLexer, GenshiMarkupLexer, **options)

    def analyse_text(text):
        rv = 0.0
        if re.search(r'\$\{.*?\}', text) is not None:
            rv += 0.2
        if re.search(r'py:(.*?)=["\']', text) is not None:
            rv += 0.2
        return rv + HtmlLexer.analyse_text(text) - 0.01


class GenshiLexer(DelegatingLexer):
    """
    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
    `kid <http://kid-templating.org/>`_ XML templates.
    """

    name = 'Genshi'
    aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
    filenames = ['*.kid']
    alias_filenames = ['*.xml']
    mimetypes = ['application/x-genshi', 'application/x-kid']

    def __init__(self, **options):
        super().__init__(XmlLexer, GenshiMarkupLexer, **options)

    def analyse_text(text):
        rv = 0.0
        if re.search(r'\$\{.*?\}', text) is not None:
            rv += 0.2
        if re.search(r'py:(.*?)=["\']', text) is not None:
            rv += 0.2
        return rv + XmlLexer.analyse_text(text) - 0.01


class JavascriptGenshiLexer(DelegatingLexer):
    """
    A lexer that highlights javascript code in genshi text templates.
    """

    name = 'JavaScript+Genshi Text'
    aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
               'javascript+genshi']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+genshi',
                 'text/x-javascript+genshi',
                 'text/javascript+genshi']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, GenshiTextLexer, **options)

    def analyse_text(text):
        return GenshiLexer.analyse_text(text) - 0.05


class CssGenshiLexer(DelegatingLexer):
    """
    A lexer that highlights CSS definitions in genshi text templates.
    """

    name = 'CSS+Genshi Text'
    aliases = ['css+genshitext', 'css+genshi']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+genshi']

    def __init__(self, **options):
        super().__init__(CssLexer, GenshiTextLexer, **options)

    def analyse_text(text):
        return GenshiLexer.analyse_text(text) - 0.05


class RhtmlLexer(DelegatingLexer):
    """
    Subclass of the ERB lexer that highlights the unlexed data with the
    `HtmlLexer`.

    Nested JavaScript and CSS are highlighted too.
    """

    name = 'RHTML'
    aliases = ['rhtml', 'html+erb', 'html+ruby']
    filenames = ['*.rhtml']
    alias_filenames = ['*.html', '*.htm', '*.xhtml']
    mimetypes = ['text/html+ruby']

    def __init__(self, **options):
        super().__init__(HtmlLexer, ErbLexer, **options)

    def analyse_text(text):
        rv = ErbLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):
            # one more than the XmlErbLexer returns
            rv += 0.5
        return rv


class XmlErbLexer(DelegatingLexer):
    """
    Subclass of `ErbLexer` which highlights data outside preprocessor
    directives with the `XmlLexer`.
    """

    name = 'XML+Ruby'
    aliases = ['xml+ruby', 'xml+erb']
    alias_filenames = ['*.xml']
    mimetypes = ['application/xml+ruby']

    def __init__(self, **options):
        super().__init__(XmlLexer, ErbLexer, **options)

    def analyse_text(text):
        rv = ErbLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        return rv


class CssErbLexer(DelegatingLexer):
    """
    Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
    """

    name = 'CSS+Ruby'
    aliases = ['css+ruby', 'css+erb']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+ruby']

    def __init__(self, **options):
        super().__init__(CssLexer, ErbLexer, **options)

    def analyse_text(text):
        return ErbLexer.analyse_text(text) - 0.05


class JavascriptErbLexer(DelegatingLexer):
    """
    Subclass of `ErbLexer` which highlights unlexed data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+Ruby'
    aliases = ['javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+ruby',
                 'text/x-javascript+ruby',
                 'text/javascript+ruby']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, ErbLexer, **options)

    def analyse_text(text):
        return ErbLexer.analyse_text(text) - 0.05


class HtmlPhpLexer(DelegatingLexer):
    """
    Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.

    Nested JavaScript and CSS are highlighted too.
    """

    name = 'HTML+PHP'
    aliases = ['html+php']
    filenames = ['*.phtml']
    alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
                       '*.php[345]']
    mimetypes = ['application/x-php',
                 'application/x-httpd-php', 'application/x-httpd-php3',
                 'application/x-httpd-php4', 'application/x-httpd-php5']

    def __init__(self, **options):
        super().__init__(HtmlLexer, PhpLexer, **options)

    def analyse_text(text):
        rv = PhpLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):
            rv += 0.5
        return rv


class XmlPhpLexer(DelegatingLexer):
    """
    Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
    """

    name = 'XML+PHP'
    aliases = ['xml+php']
    alias_filenames = ['*.xml', '*.php', '*.php[345]']
    mimetypes = ['application/xml+php']

    def __init__(self, **options):
        super().__init__(XmlLexer, PhpLexer, **options)

    def analyse_text(text):
        rv = PhpLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        return rv


class CssPhpLexer(DelegatingLexer):
    """
    Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
    """

    name = 'CSS+PHP'
    aliases = ['css+php']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+php']

    def __init__(self, **options):
        super().__init__(CssLexer, PhpLexer, **options)

    def analyse_text(text):
        return PhpLexer.analyse_text(text) - 0.05


class JavascriptPhpLexer(DelegatingLexer):
    """
    Subclass of `PhpLexer` which highlights unmatched data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+PHP'
    aliases = ['javascript+php', 'js+php']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+php',
                 'text/x-javascript+php',
                 'text/javascript+php']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, PhpLexer, **options)

    def analyse_text(text):
        return PhpLexer.analyse_text(text)


class HtmlSmartyLexer(DelegatingLexer):
    """
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `HtmlLexer`.

    Nested JavaScript and CSS are highlighted too.
    """

    name = 'HTML+Smarty'
    aliases = ['html+smarty']
    alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
    mimetypes = ['text/html+smarty']

    def __init__(self, **options):
        super().__init__(HtmlLexer, SmartyLexer, **options)

    def analyse_text(text):
        rv = SmartyLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):
            rv += 0.5
        return rv


class XmlSmartyLexer(DelegatingLexer):
    """
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `XmlLexer`.
    """

    name = 'XML+Smarty'
    aliases = ['xml+smarty']
    alias_filenames = ['*.xml', '*.tpl']
    mimetypes = ['application/xml+smarty']

    def __init__(self, **options):
        super().__init__(XmlLexer, SmartyLexer, **options)

    def analyse_text(text):
        rv = SmartyLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        return rv


class CssSmartyLexer(DelegatingLexer):
    """
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `CssLexer`.
    """

    name = 'CSS+Smarty'
    aliases = ['css+smarty']
    alias_filenames = ['*.css', '*.tpl']
    mimetypes = ['text/css+smarty']

    def __init__(self, **options):
        super().__init__(CssLexer, SmartyLexer, **options)

    def analyse_text(text):
        return SmartyLexer.analyse_text(text) - 0.05


class JavascriptSmartyLexer(DelegatingLexer):
    """
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+Smarty'
    aliases = ['javascript+smarty', 'js+smarty']
    alias_filenames = ['*.js', '*.tpl']
    mimetypes = ['application/x-javascript+smarty',
                 'text/x-javascript+smarty',
                 'text/javascript+smarty']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, SmartyLexer, **options)

    def analyse_text(text):
        return SmartyLexer.analyse_text(text) - 0.05


class HtmlDjangoLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `HtmlLexer`.

    Nested JavaScript and CSS are highlighted too.
    """

    name = 'HTML+Django/Jinja'
    aliases = ['html+django', 'html+jinja', 'htmldjango']
    filenames = ['*.html.j2', '*.htm.j2', '*.xhtml.j2',
                 '*.html.jinja2', '*.htm.jinja2', '*.xhtml.jinja2']
    alias_filenames = ['*.html', '*.htm', '*.xhtml']
    mimetypes = ['text/html+django', 'text/html+jinja']

    def __init__(self, **options):
        super().__init__(HtmlLexer, DjangoLexer, **options)

    def analyse_text(text):
        rv = DjangoLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):
            rv += 0.5
        return rv


class XmlDjangoLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `XmlLexer`.
    """

    name = 'XML+Django/Jinja'
    aliases = ['xml+django', 'xml+jinja']
    filenames = ['*.xml.j2', '*.xml.jinja2']
    alias_filenames = ['*.xml']
    mimetypes = ['application/xml+django', 'application/xml+jinja']

    def __init__(self, **options):
        super().__init__(XmlLexer, DjangoLexer, **options)

    def analyse_text(text):
        rv = DjangoLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        return rv


class CssDjangoLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `CssLexer`.
    """

    name = 'CSS+Django/Jinja'
    aliases = ['css+django', 'css+jinja']
    filenames = ['*.css.j2', '*.css.jinja2']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+django', 'text/css+jinja']

    def __init__(self, **options):
        super().__init__(CssLexer, DjangoLexer, **options)

    def analyse_text(text):
        return DjangoLexer.analyse_text(text) - 0.05


class JavascriptDjangoLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+Django/Jinja'
    aliases = ['javascript+django', 'js+django',
               'javascript+jinja', 'js+jinja']
    filenames = ['*.js.j2', '*.js.jinja2']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+django',
                 'application/x-javascript+jinja',
                 'text/x-javascript+django',
                 'text/x-javascript+jinja',
                 'text/javascript+django',
                 'text/javascript+jinja']

    def __init__(self, **options):
        super().__init__(JavascriptLexer, DjangoLexer, **options)

    def analyse_text(text):
        return DjangoLexer.analyse_text(text) - 0.05


class JspRootLexer(RegexLexer):
    """
    Base for the `JspLexer`. Yields `Token.Other` for area outside of
    JSP tags.

    .. versionadded:: 0.7
    """

    tokens = {
        'root': [
            (r'<%\S?', Keyword, 'sec'),
            # FIXME: I want to make these keywords but still parse attributes.
            (r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
             Keyword),
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # note: '\w\W' != '.' without DOTALL.
            (r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
        ],
    }


class JspLexer(DelegatingLexer):
    """
    Lexer for Java Server Pages.

    .. versionadded:: 0.7
    """

    name = 'Java Server Page'
    aliases = ['jsp']
    filenames = ['*.jsp']
    mimetypes = ['application/x-jsp']

    def __init__(self, **options):
        super().__init__(XmlLexer, JspRootLexer, **options)

    def analyse_text(text):
        rv = JavaLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        if '<%' in text and '%>' in text:
            rv += 0.1
        return rv
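

# Illustrative usage (invented page source): the `<% ... %>` section is
# delegated to JavaLexer, the surrounding markup to XmlLexer:
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     jsp = '<html><% int n = 3; out.println(n); %></html>'
#     print(highlight(jsp, JspLexer(), TerminalFormatter()))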


class EvoqueLexer(RegexLexer):
    """
    For files using the Evoque templating system.

    .. versionadded:: 1.1
    """

    name = 'Evoque'
    aliases = ['evoque']
    filenames = ['*.evoque']
    mimetypes = ['application/x-evoque']

    flags = re.DOTALL

    tokens = {
        'root': [
            (r'[^#$]+', Other),
            (r'#\[', Comment.Multiline, 'comment'),
            (r'\$\$', Other),
            # svn keywords
            (r'\$\w+:[^$\n]*\$', Comment.Multiline),
            # directives: begin, end
            (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, Punctuation)),
            # directives: evoque, overlay
            # see doc for handling first name arg: /directives/evoque/
            # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
            # should be using(PythonLexer), not passed out as String
            (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+)?'
             r'(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, using(PythonLexer), Punctuation)),
            # directives: if, for, prefer, test
            (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      using(PythonLexer), Punctuation)),
            # directive clauses (no {} expression)
            (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
            # expressions
            (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
             bygroups(Punctuation, None, using(PythonLexer),
                      Name.Builtin, None, None, Punctuation)),
            (r'#', Other),
        ],
        'comment': [
            (r'[^\]#]', Comment.Multiline),
            (r'#\[', Comment.Multiline, '#push'),
            (r'\]#', Comment.Multiline, '#pop'),
            (r'[\]#]', Comment.Multiline)
        ],
    }

    def analyse_text(text):
        """Evoque templates use $evoque, which is unique."""
        if '$evoque' in text:
            return 1
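

# Because the directive name is unique, lexer guessing should settle on
# Evoque for any text containing it (illustrative call with an invented
# snippet; `guess_lexer` is a standard Pygments API):
#
#     from pygments.lexers import guess_lexer
#     guess_lexer('$evoque{base.html}').name   # 'Evoque'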


class EvoqueHtmlLexer(DelegatingLexer):
    """
    Subclass of the `EvoqueLexer` that highlights unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 1.1
    """

    name = 'HTML+Evoque'
    aliases = ['html+evoque']
    filenames = ['*.html']
    mimetypes = ['text/html+evoque']

    def __init__(self, **options):
        super().__init__(HtmlLexer, EvoqueLexer, **options)

    def analyse_text(text):
        return EvoqueLexer.analyse_text(text)


class EvoqueXmlLexer(DelegatingLexer):
    """
    Subclass of the `EvoqueLexer` that highlights unlexed data with the
    `XmlLexer`.

    .. versionadded:: 1.1
    """

    name = 'XML+Evoque'
    aliases = ['xml+evoque']
    filenames = ['*.xml']
    mimetypes = ['application/xml+evoque']

    def __init__(self, **options):
        super().__init__(XmlLexer, EvoqueLexer, **options)

    def analyse_text(text):
        return EvoqueLexer.analyse_text(text)


class ColdfusionLexer(RegexLexer):
    """
    Coldfusion statements
    """

    name = 'cfstatement'
    aliases = ['cfs']
    filenames = []
    mimetypes = []

    flags = re.IGNORECASE

    tokens = {
        'root': [
            (r'//.*?\n', Comment.Single),
            (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
            (r'\+\+|--', Operator),
            (r'[-+*/^&=!]', Operator),
            (r'<=|>=|<|>|==', Operator),
            (r'mod\b', Operator),
            (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
            (r'\|\||&&', Operator),
            (r'\?', Operator),
            (r'"', String.Double, 'string'),
            # There is a special rule for allowing html in single quoted
            # strings, evidently.
            (r"'.*?'", String.Single),
            (r'\d+', Number),
            (r'(if|else|len|var|xml|default|break|switch|component|property|'
             r'function|do|try|catch|in|continue|for|return|while|required|'
             r'any|array|binary|boolean|component|date|guid|numeric|query|'
             r'string|struct|uuid|case)\b', Keyword),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'(application|session|client|cookie|super|this|variables|arguments)\b',
             Name.Constant),
            (r'([a-z_$][\w.]*)(\s*)(\()',
             bygroups(Name.Function, Text, Punctuation)),
            (r'[a-z_$][\w.]*', Name.Variable),
            (r'[()\[\]{};:,.\\]', Punctuation),
            (r'\s+', Text),
        ],
        'string': [
            (r'""', String.Double),
            (r'#.+?#', String.Interp),
            (r'[^"#]+', String.Double),
            (r'#', String.Double),
            (r'"', String.Double, '#pop'),
        ],
    }


class ColdfusionMarkupLexer(RegexLexer):
    """
    Coldfusion markup only
    """

    name = 'Coldfusion'
    aliases = ['cf']
    filenames = []
    mimetypes = []

    tokens = {
        'root': [
            (r'[^<]+', Other),
            include('tags'),
            (r'<[^<>]*', Other),
        ],
        'tags': [
            (r'<!---', Comment.Multiline, 'cfcomment'),
            (r'(?s)<!--.*?-->', Comment),
            (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
            (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
            # negative lookbehind is for strings with embedded >
            (r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
             r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
             r'mailpart|mail|header|content|zip|image|lock|argument|try|'
             r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
        ],
        'cfoutput': [
            (r'[^#<]+', Other),
            (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
                                      Punctuation)),
            # (r'<cfoutput.*?>', Name.Builtin, '#push'),
            (r'</cfoutput.*?>', Name.Builtin, '#pop'),
            include('tags'),
            (r'(?s)<[^<>]*', Other),
            (r'#', Other),
        ],
        'cfcomment': [
            (r'<!---', Comment.Multiline, '#push'),
            (r'--->', Comment.Multiline, '#pop'),
            (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
        ],
    }


class ColdfusionHtmlLexer(DelegatingLexer):
    """
    Coldfusion markup in HTML
    """

    name = 'Coldfusion HTML'
    aliases = ['cfm']
    filenames = ['*.cfm', '*.cfml']
    mimetypes = ['application/x-coldfusion']

    def __init__(self, **options):
        super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options)
class ColdfusionCFCLexer(DelegatingLexer):
    """
    Coldfusion markup/script components

    .. versionadded:: 2.0
    """
    name = 'Coldfusion CFC'
    aliases = ['cfc']
    filenames = ['*.cfc']
    mimetypes = []

    def __init__(self, **options):
        super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options)


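# Sketch of the delegation pattern used throughout this file (illustrative,
# not part of the module): a DelegatingLexer runs the second (language)
# lexer first, collects everything it left as Other, and re-lexes that text
# with the first (root) lexer. CFC files therefore get ColdFusion script
# highlighting with HTML and tag markup filled in around it. 'Order.cfc'
# below is a hypothetical file name:
#
#     from pygments import highlight
#     from pygments.formatters import HtmlFormatter
#     code = open('Order.cfc').read()
#     print(highlight(code, ColdfusionCFCLexer(), HtmlFormatter()))

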
class SspLexer(DelegatingLexer):
    """
    Lexer for Scalate Server Pages.

    .. versionadded:: 1.4
    """
    name = 'Scalate Server Page'
    aliases = ['ssp']
    filenames = ['*.ssp']
    mimetypes = ['application/x-ssp']

    def __init__(self, **options):
        super().__init__(XmlLexer, JspRootLexer, **options)

    def analyse_text(text):
        rv = 0.0
        if re.search(r'val \w+\s*:', text):
            rv += 0.6
        if looks_like_xml(text):
            rv += 0.2
        if '<%' in text and '%>' in text:
            rv += 0.1
        return rv


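# How these analyse_text() scores are consumed (illustrative, not part of
# the module): pygments.lexers.guess_lexer() calls analyse_text() on every
# registered lexer and picks the highest score; the wrapper installed by
# LexerMeta clamps results into [0.0, 1.0]. A Scala `val` declaration plus
# XML-ish markup plus <% ... %> sums to 0.9 here, which normally beats
# plain JSP or XML:
#
#     from pygments.lexers import guess_lexer
#     snippet = '<% val user: String = "x" %>\n<p><%= user %></p>'
#     print(guess_lexer(snippet).name)   # likely 'Scalate Server Page'

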
class TeaTemplateRootLexer(RegexLexer):
    """
    Base for the `TeaTemplateLexer`. Yields `Token.Other` for areas outside
    of code blocks.

    .. versionadded:: 1.5
    """

    tokens = {
        'root': [
            (r'<%\S?', Keyword, 'sec'),
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # note: '\w\W' != '.' without DOTALL.
            (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
        ],
    }


class TeaTemplateLexer(DelegatingLexer):
    """
    Lexer for `Tea Templates <http://teatrove.org/>`_.

    .. versionadded:: 1.5
    """
    name = 'Tea'
    aliases = ['tea']
    filenames = ['*.tea']
    mimetypes = ['text/x-tea']

    def __init__(self, **options):
        super().__init__(XmlLexer, TeaTemplateRootLexer, **options)

    def analyse_text(text):
        rv = TeaLangLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        if '<%' in text and '%>' in text:
            rv += 0.1
        return rv


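# Note on the `- 0.01` (illustrative commentary, not original code):
# several analyse_text() implementations in this file start from the
# embedded language's own score minus a small penalty. For plain Tea code
# the TeaLangLexer thus still wins the guess_lexer() contest, and the
# template lexer only overtakes it when template evidence (XML-ish markup,
# <% ... %>) adds to the score. analyse_text is a plain function, so:
#
#     TeaTemplateLexer.analyse_text('a = 1')           # TeaLang score - 0.01
#     TeaTemplateLexer.analyse_text('<p><% a %></p>')  # penalty recovered

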
class LassoHtmlLexer(DelegatingLexer):
    """
    Subclass of the `LassoLexer` which highlights unhandled data with the
    `HtmlLexer`.

    Nested JavaScript and CSS are also highlighted.

    .. versionadded:: 1.6
    """
    name = 'HTML+Lasso'
    aliases = ['html+lasso']
    alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
                       '*.incl', '*.inc', '*.las']
    mimetypes = ['text/html+lasso',
                 'application/x-httpd-lasso',
                 'application/x-httpd-lasso[89]']

    def __init__(self, **options):
        super().__init__(HtmlLexer, LassoLexer, **options)

    def analyse_text(text):
        rv = LassoLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):  # same as HTML lexer
            rv += 0.5
        return rv


class LassoXmlLexer(DelegatingLexer):
    """
    Subclass of the `LassoLexer` which highlights unhandled data with the
    `XmlLexer`.

    .. versionadded:: 1.6
    """
    name = 'XML+Lasso'
    aliases = ['xml+lasso']
    alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
                       '*.incl', '*.inc', '*.las']
    mimetypes = ['application/xml+lasso']

    def __init__(self, **options):
        super().__init__(XmlLexer, LassoLexer, **options)

    def analyse_text(text):
        rv = LassoLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        return rv


class LassoCssLexer(DelegatingLexer):
    """
    Subclass of the `LassoLexer` which highlights unhandled data with the
    `CssLexer`.

    .. versionadded:: 1.6
    """
    name = 'CSS+Lasso'
    aliases = ['css+lasso']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+lasso']

    def __init__(self, **options):
        options['requiredelimiters'] = True
        super().__init__(CssLexer, LassoLexer, **options)

    def analyse_text(text):
        rv = LassoLexer.analyse_text(text) - 0.05
        if re.search(r'\w+:[^;]+;', text):
            rv += 0.1
        if 'padding:' in text:
            rv += 0.1
        return rv


class LassoJavascriptLexer(DelegatingLexer):
    """
    Subclass of the `LassoLexer` which highlights unhandled data with the
    `JavascriptLexer`.

    .. versionadded:: 1.6
    """
    name = 'JavaScript+Lasso'
    aliases = ['javascript+lasso', 'js+lasso']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+lasso',
                 'text/x-javascript+lasso',
                 'text/javascript+lasso']

    def __init__(self, **options):
        options['requiredelimiters'] = True
        super().__init__(JavascriptLexer, LassoLexer, **options)

    def analyse_text(text):
        rv = LassoLexer.analyse_text(text) - 0.05
        return rv


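# Why the CSS and JavaScript variants force requiredelimiters=True
# (illustrative note, not original code): without delimiters LassoLexer can
# treat an entire file as Lasso, which would swallow ordinary CSS or JS.
# With the option set, only code between Lasso delimiters such as
# <?lasso ... ?> or [ ... ] is lexed as Lasso; everything else falls
# through as Other for the CSS/JS root lexer. Roughly:
#
#     lx = LassoCssLexer()  # CssLexer + LassoLexer(requiredelimiters=True)
#     css = 'body { color: [$color]; }'
#     # '[$color]' comes back as Lasso tokens, the rest as CSS tokens.

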
class HandlebarsLexer(RegexLexer):
    """
    Generic handlebars template lexer.

    Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
    Everything else is left for a delegating lexer.

    .. versionadded:: 2.0
    """

    name = "Handlebars"
    url = 'https://handlebarsjs.com/'
    aliases = ['handlebars']

    tokens = {
        'root': [
            (r'[^{]+', Other),

            # Comment start {{! }} or {{!--
            (r'\{\{!.*\}\}', Comment),

            # HTML Escaping open {{{expression
            (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),

            # {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
            (r'(\{\{)([#~/]+)([^\s}]*)',
             bygroups(Comment.Preproc, Number.Attribute, Number.Attribute),
             'tag'),
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
        ],

        'tag': [
            (r'\s+', Text),
            # HTML Escaping close }}}
            (r'\}\}\}', Comment.Special, '#pop'),
            # blockClose}}, includes optional tilde ~
            (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),

            # {{opt=something}}
            (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),

            # Partials {{> ...}}
            (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
            (r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
            (r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
             'dynamic-partial'),

            include('generic'),
        ],
        'dynamic-partial': [
            (r'\s+', Text),
            (r'\)', Punctuation, '#pop'),

            (r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
                                                      Name.Variable, Text)),
            (r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
                                             using(this, state='variable'))),
            (r'[\w-]+', Name.Function),

            include('generic'),
        ],
        'variable': [
            (r'[()/@a-zA-Z][\w-]*', Name.Variable),
            (r'\.[\w-]+', Name.Variable),
            (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
        ],
        'generic': [
            include('variable'),

            # borrowed from DjangoLexer
            (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
  1517. (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
  1518. r"0[xX][0-9a-fA-F]+[Ll]?", Number),
  1519. ]
  1520. }
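# Illustrative sketch (not part of the module): HandlebarsLexer tokenizes
# only the mustache tags; everything else is emitted as Other for a
# delegating lexer (see HandlebarsHtmlLexer below). Assumes the standard
# get_tokens() API:
#
#     lx = HandlebarsLexer()
#     tpl = '<li>{{#each items}}{{name}}{{/each}}</li>'
#     for tok, val in lx.get_tokens(tpl):
#         print(tok, repr(val))
#
# '<li>' comes out as Token.Other, '{{' as Comment.Preproc, '#' and 'each'
# as Number.Attribute, and 'name' as Name.Variable.

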
class HandlebarsHtmlLexer(DelegatingLexer):
    """
    Subclass of the `HandlebarsLexer` that highlights unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 2.0
    """

    name = "HTML+Handlebars"
    aliases = ["html+handlebars"]
    filenames = ['*.handlebars', '*.hbs']
    mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']

    def __init__(self, **options):
        super().__init__(HtmlLexer, HandlebarsLexer, **options)


class YamlJinjaLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `YamlLexer`.

    Commonly used in Saltstack salt states.

    .. versionadded:: 2.0
    """

    name = 'YAML+Jinja'
    aliases = ['yaml+jinja', 'salt', 'sls']
    filenames = ['*.sls', '*.yaml.j2', '*.yml.j2', '*.yaml.jinja2',
                 '*.yml.jinja2']
    mimetypes = ['text/x-yaml+jinja', 'text/x-sls']

    def __init__(self, **options):
        super().__init__(YamlLexer, DjangoLexer, **options)


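# Usage sketch (illustrative, not original code): a typical Salt state file
# mixes YAML structure with Jinja control flow, which is exactly the split
# YamlJinjaLexer makes; Jinja tags go to DjangoLexer, the rest to YamlLexer:
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     sls = ("{% for pkg in ['git', 'vim'] %}\n"
#            "{{ pkg }}:\n  pkg.installed\n{% endfor %}\n")
#     print(highlight(sls, YamlJinjaLexer(), TerminalFormatter()))

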
class LiquidLexer(RegexLexer):
    """
    Lexer for Liquid templates.

    .. versionadded:: 2.0
    """
    name = 'liquid'
    url = 'https://www.rubydoc.info/github/Shopify/liquid'
    aliases = ['liquid']
    filenames = ['*.liquid']

    tokens = {
        'root': [
            (r'[^{]+', Text),
            # tags and block tags
            (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
            # output tags
            (r'(\{\{)(\s*)([^\s}]+)',
             bygroups(Punctuation, Whitespace, using(this, state='generic')),
             'output'),
            (r'\{', Text)
        ],

        'tag-or-block': [
            # builtin logic blocks
            (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
            (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
             combined('end-of-block', 'whitespace', 'generic')),
            (r'(else)(\s*)(%\})',
             bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),

            # other builtin blocks
            (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, using(this, state='variable'),
                      Whitespace, Punctuation), '#pop'),
            (r'(comment)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
            (r'(raw)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),

            # end of block
            (r'(end(case|unless|if))(\s*)(%\})',
             bygroups(Keyword.Reserved, None, Whitespace, Punctuation),
             '#pop'),
            (r'(end([^\s%]+))(\s*)(%\})',
             bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),

            # builtin tags (assign and include are handled together with usual tags)
            (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
             bygroups(Name.Tag, Whitespace,
                      using(this, state='generic'), Punctuation, Whitespace),
             'variable-tag-markup'),

            # other tags or blocks
            (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
        ],

        'output': [
            include('whitespace'),
            (r'\}\}', Punctuation, '#pop'),  # end of output
            (r'\|', Punctuation, 'filters')
        ],

        'filters': [
            include('whitespace'),
            (r'\}\}', Punctuation, ('#pop', '#pop')),  # end of filters and output
            (r'([^\s|:]+)(:?)(\s*)',
             bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
        ],

        'filter-markup': [
            (r'\|', Punctuation, '#pop'),
            include('end-of-tag'),
            include('default-param-markup')
        ],

        'condition': [
            include('end-of-block'),
            include('whitespace'),

            (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
             bygroups(using(this, state='generic'), Whitespace, Operator,
                      Whitespace, using(this, state='generic'), Whitespace,
                      Punctuation)),
            (r'\b!', Operator),
            (r'\bnot\b', Operator.Word),
            (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
             bygroups(using(this, state='generic'), Whitespace, Operator.Word,
                      Whitespace, using(this, state='generic'))),

            include('generic'),
            include('whitespace')
        ],

        'generic-value': [
            include('generic'),
            include('end-at-whitespace')
        ],

        'operator': [
            (r'(\s*)((=|!|>|<)=?)(\s*)',
             bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
            (r'(\s*)(\bcontains\b)(\s*)',
             bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
        ],

        'end-of-tag': [
            (r'\}\}', Punctuation, '#pop')
        ],

        'end-of-block': [
            (r'%\}', Punctuation, ('#pop', '#pop'))
        ],

        'end-at-whitespace': [
            (r'\s+', Whitespace, '#pop')
        ],

        # states for unknown markup
        'param-markup': [
            include('whitespace'),
            # params with colons or equals
            (r'([^\s=:]+)(\s*)(=|:)',
             bygroups(Name.Attribute, Whitespace, Operator)),
            # explicit variables
            (r'(\{\{)(\s*)([^\s}]+)(\s*)(\}\})',
             bygroups(Punctuation, Whitespace, using(this, state='variable'),
                      Whitespace, Punctuation)),
            include('string'),
            include('number'),
            include('keyword'),
            (r',', Punctuation)
        ],

        'default-param-markup': [
            include('param-markup'),
            (r'.', Text)  # fallback for switches / variables / un-quoted strings / ...
        ],

        'variable-param-markup': [
            include('param-markup'),
            include('variable'),
            (r'.', Text)  # fallback
        ],

        'tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('default-param-markup')
        ],

        'variable-tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('variable-param-markup')
        ],

        # states for different value types
        'keyword': [
            (r'\b(false|true)\b', Keyword.Constant)
        ],

        'variable': [
            (r'[a-zA-Z_]\w*', Name.Variable),
            (r'(?<=\w)\.(?=\w)', Punctuation)
        ],

        'string': [
            (r"'[^']*'", String.Single),
            (r'"[^"]*"', String.Double)
        ],

        'number': [
            (r'\d+\.\d+', Number.Float),
            (r'\d+', Number.Integer)
        ],

        'generic': [  # decides for variable, string, keyword or number
            include('keyword'),
            include('string'),
            include('number'),
            include('variable')
        ],

        'whitespace': [
            (r'[ \t]+', Whitespace)
        ],

        # states for builtin blocks
        'comment': [
            (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), ('#pop', '#pop')),
            (r'.', Comment)
        ],

        'raw': [
            (r'[^{]+', Text),
            (r'(\{%)(\s*)(endraw)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), '#pop'),
            (r'\{', Text)
        ],
    }


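# Illustrative sketch (not part of the module): output tags with filter
# chains walk LiquidLexer's root -> 'output' -> 'filters' -> 'filter-markup'
# states above:
#
#     lx = LiquidLexer()
#     for tok, val in lx.get_tokens('{{ product.title | upcase | truncate: 20 }}'):
#         print(tok, repr(val))
#
# 'product' and 'title' come out as Name.Variable, 'upcase' and 'truncate'
# as Name.Function, and 20 as Number.Integer.

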
class TwigLexer(RegexLexer):
    """
    Twig template lexer.

    It just highlights Twig code between the preprocessor directives;
    other data is left untouched by the lexer.

    .. versionadded:: 2.0
    """

    name = 'Twig'
    aliases = ['twig']
    mimetypes = ['application/x-twig']

    flags = re.M | re.S

    # Note that a backslash is included in the following two patterns;
    # PHP uses a backslash as a namespace separator.
    _ident_char = r'[\\\w-]|[^\x00-\x7f]'
    _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
    _ident_end = r'(?:' + _ident_char + ')*'
    _ident_inner = _ident_begin + _ident_end

    tokens = {
        'root': [
            (r'[^{]+', Other),
            (r'\{\{', Comment.Preproc, 'var'),
            # twig comments
            (r'\{\#.*?\#\}', Comment),
            # raw twig blocks
            (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Other, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Other, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # filter blocks
            (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
             bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
             'tag'),
            (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword), 'tag'),
            (r'\{', Other),
        ],
        'varnames': [
            (r'(\|)(\s*)(%s)' % _ident_inner,
             bygroups(Operator, Text, Name.Function)),
            (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
            (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
            (r'(in|not|and|b-and|or|b-or|b-xor|is'
             r'|if|elseif|else|import'
             r'|constant|defined|divisibleby|empty|even|iterable|odd|sameas'
             r'|matches|starts\s+with|ends\s+with)\b',
             Keyword),
            (r'(loop|block|parent)\b', Name.Builtin),
            (_ident_inner, Name.Variable),
            (r'\.' + _ident_inner, Name.Variable),
            (r'\.[0-9]+', Number),
            (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
  1776. (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
  1777. r"0[xX][0-9a-fA-F]+[Ll]?", Number),
  1778. ],
        'var': [
            (r'\s+', Text),
            (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames')
        ],
        'tag': [
            (r'\s+', Text),
            (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames'),
            (r'.', Punctuation),
        ],
    }


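# Side note on the filter-block rule above (illustrative, not original
# code): because that pattern is built with %-formatting to splice in
# _ident_inner, the literal '{%' has to be written as '{%%'. A quick check
# of the resulting tokens:
#
#     lx = TwigLexer()
#     for tok, val in lx.get_tokens('{% filter upper %}hi{% endfilter %}'):
#         print(tok, repr(val))
#
# 'filter' is a Keyword, 'upper' a Name.Function, and 'hi' is left as Other
# for a delegating lexer such as TwigHtmlLexer below.

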
class TwigHtmlLexer(DelegatingLexer):
    """
    Subclass of the `TwigLexer` that highlights unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 2.0
    """

    name = "HTML+Twig"
    aliases = ["html+twig"]
    filenames = ['*.twig']
    mimetypes = ['text/html+twig']

    def __init__(self, **options):
        super().__init__(HtmlLexer, TwigLexer, **options)


class Angular2Lexer(RegexLexer):
    """
    Generic angular2 template lexer.

    Highlights only the Angular template tags (stuff between `{{` and `}}`)
    and special attributes: '(event)=', '[property]=', '[(twoWayBinding)]='.
    Everything else is left for a delegating lexer.

    .. versionadded:: 2.1
    """

    name = "Angular2"
    url = 'https://angular.io/guide/template-syntax'
    aliases = ['ng2']

    tokens = {
        'root': [
            (r'[^{([*#]+', Other),

            # {{meal.name}}
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'ngExpression'),

            # (click)="deleteOrder()"; [value]="test"; [(twoWayTest)]="foo.bar"
            (r'([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)',
             bygroups(Punctuation, Name.Attribute, Punctuation, Text,
                      Operator, Text),
             'attr'),
            (r'([([]+)([\w:.-]+)([\])]+)(\s*)',
             bygroups(Punctuation, Name.Attribute, Punctuation, Text)),

            # *ngIf="..."; #f="ngForm"
            (r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
             bygroups(Punctuation, Name.Attribute, Text, Operator, Text),
             'attr'),
            (r'([*#])([\w:.-]+)(\s*)',
             bygroups(Punctuation, Name.Attribute, Text)),
        ],

        'ngExpression': [
            (r'\s+(\|\s+)?', Text),
            (r'\}\}', Comment.Preproc, '#pop'),

            # Literals
            (r':?(true|false)', String.Boolean),
            (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
  1838. (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
  1839. r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            # Variable text
            (r'[a-zA-Z][\w-]*(\(.*\))?', Name.Variable),
            (r'\.[\w-]+(\(.*\))?', Name.Variable),

            # inline If
            (r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)',
             bygroups(Operator, Text, String, Text, Operator, Text, String,
                      Text)),
        ],
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }


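# Illustrative sketch (not part of the module): in Angular2Lexer, binding
# attributes push the 'attr' state and interpolations push 'ngExpression':
#
#     lx = Angular2Lexer()
#     tpl = '<button (click)="save()">{{ order.total }}</button>'
#     for tok, val in lx.get_tokens(tpl):
#         print(tok, repr(val))
#
# '(click)' is Punctuation + Name.Attribute + Punctuation, the "save()"
# attribute value is String, and order.total inside {{ }} is Name.Variable.

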
class Angular2HtmlLexer(DelegatingLexer):
    """
    Subclass of the `Angular2Lexer` that highlights unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 2.1
    """

    name = "HTML + Angular2"
    aliases = ["html+ng2"]
    filenames = ['*.ng2']

    def __init__(self, **options):
        super().__init__(HtmlLexer, Angular2Lexer, **options)


class SqlJinjaLexer(DelegatingLexer):
    """
    Templated SQL lexer.

    .. versionadded:: 2.13
    """

    name = 'SQL+Jinja'
    aliases = ['sql+jinja']
    filenames = ['*.sql', '*.sql.j2', '*.sql.jinja2']

    def __init__(self, **options):
        super().__init__(SqlLexer, DjangoLexer, **options)

    def analyse_text(text):
        rv = 0.0
        # dbt's ref function
        if re.search(r'\{\{\s*ref\(.*\)\s*\}\}', text):
            rv += 0.4
        # dbt's source function
        if re.search(r'\{\{\s*source\(.*\)\s*\}\}', text):
            rv += 0.25
        # Jinja macro
        if re.search(r'\{%-?\s*macro \w+\(.*\)\s*-?%\}', text):
            rv += 0.15
        return rv
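# Illustrative check of the heuristics above (not part of the module): a
# typical dbt model hits both the ref() and source() patterns, scoring
# 0.65 and usually winning guess_lexer() over plain SQL:
#
#     from pygments.lexers import guess_lexer
#     model = ('select * from {{ ref("orders") }} '
#              'join {{ source("raw", "users") }}')
#     print(guess_lexer(model).name)   # likely 'SQL+Jinja'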