# -*- coding: utf-8 -*-
"""
    pygments.lexers.templates
    ~~~~~~~~~~~~~~~~~~~~~~~~~

    Lexers for various template engines' markup.

    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

from pygments.lexers.html import HtmlLexer, XmlLexer
from pygments.lexers.javascript import JavascriptLexer, LassoLexer
from pygments.lexers.css import CssLexer
from pygments.lexers.php import PhpLexer
from pygments.lexers.python import PythonLexer
from pygments.lexers.perl import PerlLexer
from pygments.lexers.jvm import JavaLexer, TeaLangLexer
from pygments.lexers.data import YamlLexer
from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
    include, using, this, default, combined
from pygments.token import Error, Punctuation, Whitespace, \
    Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
from pygments.util import html_doctype_matches, looks_like_xml

__all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
           'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
           'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
           'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
           'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
           'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
           'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
           'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
           'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
           'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
           'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
           'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
           'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
           'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
           'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
           'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
           'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
           'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
           'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
           'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer']

class ErbLexer(Lexer):
    """
    Generic `ERB <http://ruby-doc.org/core/classes/ERB.html>`_ (Ruby Templating)
    lexer.

    Just highlights ruby code between the preprocessor directives, other data
    is left untouched by the lexer.

    All options are also forwarded to the `RubyLexer`.
    """

    name = 'ERB'
    aliases = ['erb']
    mimetypes = ['application/x-ruby-templating']

    _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)

    def __init__(self, **options):
        from pygments.lexers.ruby import RubyLexer
        self.ruby_lexer = RubyLexer(**options)
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        """
        Since ERB doesn't allow "<%" and other tags inside of ruby
        blocks we have to use a split approach here that fails for
        that too.
        """
        tokens = self._block_re.split(text)
        tokens.reverse()
        state = idx = 0
        try:
            while True:
                # text
                if state == 0:
                    val = tokens.pop()
                    yield idx, Other, val
                    idx += len(val)
                    state = 1
                # block starts
                elif state == 1:
                    tag = tokens.pop()
                    # literals
                    if tag in ('<%%', '%%>'):
                        yield idx, Other, tag
                        idx += 3
                        state = 0
                    # comment
                    elif tag == '<%#':
                        yield idx, Comment.Preproc, tag
                        val = tokens.pop()
                        yield idx + 3, Comment, val
                        idx += 3 + len(val)
                        state = 2
                    # blocks or output
                    elif tag in ('<%', '<%=', '<%-'):
                        yield idx, Comment.Preproc, tag
                        idx += len(tag)
                        data = tokens.pop()
                        r_idx = 0
                        for r_idx, r_token, r_value in \
                                self.ruby_lexer.get_tokens_unprocessed(data):
                            yield r_idx + idx, r_token, r_value
                        idx += len(data)
                        state = 2
                    elif tag in ('%>', '-%>'):
                        yield idx, Error, tag
                        idx += len(tag)
                        state = 0
                    # % raw ruby statements
                    else:
                        yield idx, Comment.Preproc, tag[0]
                        r_idx = 0
                        for r_idx, r_token, r_value in \
                                self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
                            yield idx + 1 + r_idx, r_token, r_value
                        idx += len(tag)
                        state = 0
                # block ends
                elif state == 2:
                    tag = tokens.pop()
                    if tag not in ('%>', '-%>'):
                        yield idx, Other, tag
                    else:
                        yield idx, Comment.Preproc, tag
                    idx += len(tag)
                    state = 0
        except IndexError:
            return

    def analyse_text(text):
        if '<%' in text and '%>' in text:
            return 0.4
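

# Illustrative sketch (not part of the original module): how the split-based
# state machine above tokenizes a small template. Plain text alternates with
# delimiter/code pairs, so Token.Other covers the markup and RubyLexer covers
# the embedded code.
def _erb_usage_sketch():
    text = '<b><%= @user.name %></b>'
    # '<b>' -> Token.Other, '<%=' / '%>' -> Comment.Preproc,
    # '@user.name' -> tokens from RubyLexer
    return list(ErbLexer().get_tokens_unprocessed(text))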


class SmartyLexer(RegexLexer):
    """
    Generic `Smarty <http://smarty.php.net/>`_ template lexer.

    Just highlights smarty code between the preprocessor directives, other
    data is left untouched by the lexer.
    """

    name = 'Smarty'
    aliases = ['smarty']
    filenames = ['*.tpl']
    mimetypes = ['application/x-smarty']

    flags = re.MULTILINE | re.DOTALL
    tokens = {
        'root': [
            (r'[^{]+', Other),
            (r'(\{)(\*.*?\*)(\})',
             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
            (r'(\{php\})(.*?)(\{/php\})',
             bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
                      Comment.Preproc)),
            (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
             bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
            (r'\{', Comment.Preproc, 'smarty')
        ],
        'smarty': [
            (r'\s+', Text),
            (r'\{', Comment.Preproc, '#push'),
            (r'\}', Comment.Preproc, '#pop'),
            (r'#[a-zA-Z_]\w*#', Name.Variable),
            (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
            (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
            (r'(true|false|null)\b', Keyword.Constant),
            (r"[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
            (r'[a-zA-Z_]\w*', Name.Attribute)
        ]
    }

    def analyse_text(text):
        rv = 0.0
        if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
            rv += 0.15
        if re.search(r'\{include\s+file=.*?\}', text):
            rv += 0.15
        if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
            rv += 0.15
        if re.search(r'\{\$.*?\}', text):
            rv += 0.01
        return rv
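

# Illustrative sketch: the analyse_text hooks in this module feed Pygments'
# lexer guessing. The {foreach}...{/foreach} block and the {$...} reference
# below score 0.15 + 0.01 in SmartyLexer.analyse_text, so guess_lexer should
# prefer the Smarty lexer unless another lexer reports higher confidence.
def _smarty_guess_sketch():
    from pygments.lexers import guess_lexer  # local import, avoids a cycle
    return guess_lexer('{foreach $items as $i}{$i}{/foreach}')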


class VelocityLexer(RegexLexer):
    """
    Generic `Velocity <http://velocity.apache.org/>`_ template lexer.

    Just highlights velocity directives and variable references, other
    data is left untouched by the lexer.
    """

    name = 'Velocity'
    aliases = ['velocity']
    filenames = ['*.vm', '*.fhtml']

    flags = re.MULTILINE | re.DOTALL

    identifier = r'[a-zA-Z_]\w*'

    tokens = {
        'root': [
            (r'[^{#$]+', Other),
            (r'(#)(\*.*?\*)(#)',
             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
            (r'(##)(.*?$)',
             bygroups(Comment.Preproc, Comment)),
            (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
             bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
             'directiveparams'),
            (r'(#\{?)(' + identifier + r')(\}|\b)',
             bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
            (r'\$!?\{?', Punctuation, 'variable')
        ],
        'variable': [
            (identifier, Name.Variable),
            (r'\(', Punctuation, 'funcparams'),
            (r'(\.)(' + identifier + r')',
             bygroups(Punctuation, Name.Variable), '#push'),
            (r'\}', Punctuation, '#pop'),
            default('#pop')
        ],
        'directiveparams': [
            (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
             Operator),
            (r'\[', Operator, 'rangeoperator'),
            (r'\b' + identifier + r'\b', Name.Function),
            include('funcparams')
        ],
        'rangeoperator': [
            (r'\.\.', Operator),
            include('funcparams'),
            (r'\]', Operator, '#pop')
        ],
        'funcparams': [
            (r'\$!?\{?', Punctuation, 'variable'),
            (r'\s+', Text),
            (r'[,:]', Punctuation),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
            (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            (r"\b[0-9]+\b", Number),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'\(', Punctuation, '#push'),
            (r'\)', Punctuation, '#pop'),
            (r'\{', Punctuation, '#push'),
            (r'\}', Punctuation, '#pop'),
            (r'\[', Punctuation, '#push'),
            (r'\]', Punctuation, '#pop'),
        ]
    }

    def analyse_text(text):
        rv = 0.0
        if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text):
            rv += 0.25
        if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text):
            rv += 0.15
        if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text):
            rv += 0.15
        if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
                     r'(\.\w+(\([^)]*\))?)*\}?', text):
            rv += 0.01
        return rv


class VelocityHtmlLexer(DelegatingLexer):
    """
    Subclass of the `VelocityLexer` that highlights unlexed data
    with the `HtmlLexer`.
    """

    name = 'HTML+Velocity'
    aliases = ['html+velocity']
    alias_filenames = ['*.html', '*.fhtml']
    mimetypes = ['text/html+velocity']

    def __init__(self, **options):
        super(VelocityHtmlLexer, self).__init__(HtmlLexer, VelocityLexer,
                                                **options)


class VelocityXmlLexer(DelegatingLexer):
    """
    Subclass of the `VelocityLexer` that highlights unlexed data
    with the `XmlLexer`.
    """

    name = 'XML+Velocity'
    aliases = ['xml+velocity']
    alias_filenames = ['*.xml', '*.vm']
    mimetypes = ['application/xml+velocity']

    def __init__(self, **options):
        super(VelocityXmlLexer, self).__init__(XmlLexer, VelocityLexer,
                                               **options)

    def analyse_text(text):
        rv = VelocityLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        return rv
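

# Illustrative sketch of how the DelegatingLexer pairs in this module fit
# together: the second argument lexes the input first, everything it yields
# as Token.Other is stitched back together and re-lexed by the first ("root")
# lexer, and the two token streams are merged. HTML+Velocity is therefore
# Velocity directives layered over ordinary HTML.
def _delegating_sketch():
    lexer = VelocityHtmlLexer()
    return list(lexer.get_tokens('<p>#if($cond)hi#end</p>'))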


class DjangoLexer(RegexLexer):
    """
    Generic `django <http://www.djangoproject.com/documentation/templates/>`_
    and `jinja <http://wsgiarea.pocoo.org/jinja/>`_ template lexer.

    It just highlights django/jinja code between the preprocessor directives,
    other data is left untouched by the lexer.
    """

    name = 'Django/Jinja'
    aliases = ['django', 'jinja']
    mimetypes = ['application/x-django-templating', 'application/x-jinja']

    flags = re.M | re.S

    tokens = {
        'root': [
            (r'[^{]+', Other),
            (r'\{\{', Comment.Preproc, 'var'),
            # jinja/django comments
            (r'\{[*#].*?[*#]\}', Comment),
            # django comments
            (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Comment, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # raw jinja blocks
            (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Text, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # filter blocks
            (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
             'block'),
            (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword), 'block'),
            (r'\{', Other)
        ],
        'varnames': [
            (r'(\|)(\s*)([a-zA-Z_]\w*)',
             bygroups(Operator, Text, Name.Function)),
            (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
            (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
            (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
             r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
             Keyword),
            (r'(loop|block|super|forloop)\b', Name.Builtin),
            (r'[a-zA-Z_][\w-]*', Name.Variable),
            (r'\.\w+', Name.Variable),
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
            (r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
            (r"[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ],
        'var': [
            (r'\s+', Text),
            (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames')
        ],
        'block': [
            (r'\s+', Text),
            (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames'),
            (r'.', Punctuation)
        ]
    }

    def analyse_text(text):
        rv = 0.0
        if re.search(r'\{%\s*(block|extends)', text) is not None:
            rv += 0.4
        if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
            rv += 0.1
        if re.search(r'\{\{.*?\}\}', text) is not None:
            rv += 0.1
        return rv


class MyghtyLexer(RegexLexer):
    """
    Generic `myghty templates`_ lexer. Code that isn't Myghty
    markup is yielded as `Token.Other`.

    .. versionadded:: 0.6

    .. _myghty templates: http://www.myghty.org/
    """

    name = 'Myghty'
    aliases = ['myghty']
    filenames = ['*.myt', 'autodelegate']
    mimetypes = ['application/x-myghty']

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
            (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Name.Function, Name.Tag,
                      using(PythonLexer), Name.Tag)),
            (r'(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'</&>', Name.Tag),
            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
             bygroups(Name.Tag, using(PythonLexer), Other)),
            (r"""(?sx)
                 (.+?)               # anything, followed by:
                 (?:
                  (?<=\n)(?=[%#]) |  # an eval or comment line
                  (?=</?[%&]) |      # a substitution or block or
                                     # call start or end
                                     # - don't consume
                  (\\\n) |           # an escaped newline
                  \Z                 # end of string
                 )""", bygroups(Other, Operator)),
        ]
    }


class MyghtyHtmlLexer(DelegatingLexer):
    """
    Subclass of the `MyghtyLexer` that highlights unlexed data
    with the `HtmlLexer`.

    .. versionadded:: 0.6
    """

    name = 'HTML+Myghty'
    aliases = ['html+myghty']
    mimetypes = ['text/html+myghty']

    def __init__(self, **options):
        super(MyghtyHtmlLexer, self).__init__(HtmlLexer, MyghtyLexer,
                                              **options)


class MyghtyXmlLexer(DelegatingLexer):
    """
    Subclass of the `MyghtyLexer` that highlights unlexed data
    with the `XmlLexer`.

    .. versionadded:: 0.6
    """

    name = 'XML+Myghty'
    aliases = ['xml+myghty']
    mimetypes = ['application/xml+myghty']

    def __init__(self, **options):
        super(MyghtyXmlLexer, self).__init__(XmlLexer, MyghtyLexer,
                                             **options)


class MyghtyJavascriptLexer(DelegatingLexer):
    """
    Subclass of the `MyghtyLexer` that highlights unlexed data
    with the `JavascriptLexer`.

    .. versionadded:: 0.6
    """

    name = 'JavaScript+Myghty'
    aliases = ['js+myghty', 'javascript+myghty']
    mimetypes = ['application/x-javascript+myghty',
                 'text/x-javascript+myghty',
                 'text/javascript+myghty']

    def __init__(self, **options):
        super(MyghtyJavascriptLexer, self).__init__(JavascriptLexer,
                                                    MyghtyLexer, **options)


class MyghtyCssLexer(DelegatingLexer):
    """
    Subclass of the `MyghtyLexer` that highlights unlexed data
    with the `CssLexer`.

    .. versionadded:: 0.6
    """

    name = 'CSS+Myghty'
    aliases = ['css+myghty']
    mimetypes = ['text/css+myghty']

    def __init__(self, **options):
        super(MyghtyCssLexer, self).__init__(CssLexer, MyghtyLexer,
                                             **options)


class MasonLexer(RegexLexer):
    """
    Generic `mason templates`_ lexer. Stolen from Myghty lexer. Code that isn't
    Mason markup is HTML.

    .. _mason templates: http://www.masonhq.com/

    .. versionadded:: 1.4
    """

    name = 'Mason'
    aliases = ['mason']
    filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
    mimetypes = ['application/x-mason']

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'(?s)(<%doc>)(.*?)(</%doc>)',
             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
            (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Name.Function, Name.Tag,
                      using(PerlLexer), Name.Tag)),
            (r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'</&>', Name.Tag),
            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
             bygroups(Name.Tag, using(PerlLexer), Other)),
            (r"""(?sx)
                 (.+?)               # anything, followed by:
                 (?:
                  (?<=\n)(?=[%#]) |  # an eval or comment line
                  (?=</?[%&]) |      # a substitution or block or
                                     # call start or end
                                     # - don't consume
                  (\\\n) |           # an escaped newline
                  \Z                 # end of string
                 )""", bygroups(using(HtmlLexer), Operator)),
        ]
    }

    def analyse_text(text):
        result = 0.0
        if re.search(r'</%(class|doc|init)>', text) is not None:
            result = 1.0
        elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
            result = 0.11
        return result


class MakoLexer(RegexLexer):
    """
    Generic `mako templates`_ lexer. Code that isn't Mako
    markup is yielded as `Token.Other`.

    .. versionadded:: 0.7

    .. _mako templates: http://www.makotemplates.org/
    """

    name = 'Mako'
    aliases = ['mako']
    filenames = ['*.mao']
    mimetypes = ['application/x-mako']

    tokens = {
        'root': [
            (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
             bygroups(Text, Comment.Preproc, Keyword, Other)),
            (r'(\s*)(%)([^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, using(PythonLexer), Other)),
            (r'(\s*)(##[^\n]*)(\n|\Z)',
             bygroups(Text, Comment.Preproc, Other)),
            (r'(?s)<%doc>.*?</%doc>', Comment.Preproc),
            (r'(<%)([\w.:]+)',
             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
            (r'(</%)([\w.:]+)(>)',
             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
            (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
            (r'(?s)(<%(?:!?))(.*?)(%>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(\$\{)(.*?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'''(?sx)
                (.+?)                # anything, followed by:
                (?:
                 (?<=\n)(?=%|\#\#) | # an eval or comment line
                 (?=\#\*) |          # multiline comment
                 (?=</?%) |          # a python block
                                     # call start or end
                 (?=\$\{) |          # a substitution
                 (?<=\n)(?=\s*%) |
                                     # - don't consume
                 (\\\n) |            # an escaped newline
                 \Z                  # end of string
                )
            ''', bygroups(Other, Operator)),
            (r'\s+', Text),
        ],
        'ondeftags': [
            (r'<%', Comment.Preproc),
            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
            include('tag'),
        ],
        'tag': [
            (r'((?:\w+)\s*=)(\s*)(".*?")',
             bygroups(Name.Attribute, Text, String)),
            (r'/?\s*>', Comment.Preproc, '#pop'),
            (r'\s+', Text),
        ],
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }


class MakoHtmlLexer(DelegatingLexer):
    """
    Subclass of the `MakoLexer` that highlights unlexed data
    with the `HtmlLexer`.

    .. versionadded:: 0.7
    """

    name = 'HTML+Mako'
    aliases = ['html+mako']
    mimetypes = ['text/html+mako']

    def __init__(self, **options):
        super(MakoHtmlLexer, self).__init__(HtmlLexer, MakoLexer,
                                            **options)


class MakoXmlLexer(DelegatingLexer):
    """
    Subclass of the `MakoLexer` that highlights unlexed data
    with the `XmlLexer`.

    .. versionadded:: 0.7
    """

    name = 'XML+Mako'
    aliases = ['xml+mako']
    mimetypes = ['application/xml+mako']

    def __init__(self, **options):
        super(MakoXmlLexer, self).__init__(XmlLexer, MakoLexer,
                                           **options)


class MakoJavascriptLexer(DelegatingLexer):
    """
    Subclass of the `MakoLexer` that highlights unlexed data
    with the `JavascriptLexer`.

    .. versionadded:: 0.7
    """

    name = 'JavaScript+Mako'
    aliases = ['js+mako', 'javascript+mako']
    mimetypes = ['application/x-javascript+mako',
                 'text/x-javascript+mako',
                 'text/javascript+mako']

    def __init__(self, **options):
        super(MakoJavascriptLexer, self).__init__(JavascriptLexer,
                                                  MakoLexer, **options)


class MakoCssLexer(DelegatingLexer):
    """
    Subclass of the `MakoLexer` that highlights unlexed data
    with the `CssLexer`.

    .. versionadded:: 0.7
    """

    name = 'CSS+Mako'
    aliases = ['css+mako']
    mimetypes = ['text/css+mako']

    def __init__(self, **options):
        super(MakoCssLexer, self).__init__(CssLexer, MakoLexer,
                                           **options)


# Genshi and Cheetah lexers courtesy of Matt Good.

class CheetahPythonLexer(Lexer):
    """
    Lexer for handling Cheetah's special $ tokens in Python syntax.
    """

    def get_tokens_unprocessed(self, text):
        pylexer = PythonLexer(**self.options)
        for pos, type_, value in pylexer.get_tokens_unprocessed(text):
            if type_ == Token.Error and value == '$':
                type_ = Comment.Preproc
            yield pos, type_, value
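

# Illustrative sketch of why the remap above works: PythonLexer has no rule
# for a bare '$', so it surfaces as a one-character Token.Error, which
# CheetahPythonLexer then re-labels as the placeholder marker.
def _cheetah_dollar_sketch():
    plain = [tok for _, tok, val
             in PythonLexer().get_tokens_unprocessed('$x')
             if val == '$']          # -> [Token.Error]
    remapped = [tok for _, tok, val
                in CheetahPythonLexer().get_tokens_unprocessed('$x')
                if val == '$']       # -> [Token.Comment.Preproc]
    return plain, remapped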


class CheetahLexer(RegexLexer):
    """
    Generic `cheetah templates`_ lexer. Code that isn't Cheetah
    markup is yielded as `Token.Other`. This also works for
    `spitfire templates`_ which use the same syntax.

    .. _cheetah templates: http://www.cheetahtemplate.org/
    .. _spitfire templates: http://code.google.com/p/spitfire/
    """

    name = 'Cheetah'
    aliases = ['cheetah', 'spitfire']
    filenames = ['*.tmpl', '*.spt']
    mimetypes = ['application/x-cheetah', 'application/x-spitfire']

    tokens = {
        'root': [
            (r'(##[^\n]*)$',
             bygroups(Comment)),
            (r'#[*](.|\n)*?[*]#', Comment),
            (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
            (r'#slurp$', Comment.Preproc),
            (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
             bygroups(Comment.Preproc, using(CheetahPythonLexer),
                      Comment.Preproc)),
            # TODO support other Python syntax like $foo['bar']
            (r'(\$)([a-zA-Z_][\w.]*\w)',
             bygroups(Comment.Preproc, using(CheetahPythonLexer))),
            (r'(?s)(\$\{!?)(.*?)(\})',
             bygroups(Comment.Preproc, using(CheetahPythonLexer),
                      Comment.Preproc)),
            (r'''(?sx)
                (.+?)               # anything, followed by:
                (?:
                 (?=\#[#a-zA-Z]*) | # an eval comment
                 (?=\$[a-zA-Z_{]) | # a substitution
                 \Z                 # end of string
                )
            ''', Other),
            (r'\s+', Text),
        ],
    }


class CheetahHtmlLexer(DelegatingLexer):
    """
    Subclass of the `CheetahLexer` that highlights unlexed data
    with the `HtmlLexer`.
    """

    name = 'HTML+Cheetah'
    aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
    mimetypes = ['text/html+cheetah', 'text/html+spitfire']

    def __init__(self, **options):
        super(CheetahHtmlLexer, self).__init__(HtmlLexer, CheetahLexer,
                                               **options)


class CheetahXmlLexer(DelegatingLexer):
    """
    Subclass of the `CheetahLexer` that highlights unlexed data
    with the `XmlLexer`.
    """

    name = 'XML+Cheetah'
    aliases = ['xml+cheetah', 'xml+spitfire']
    mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']

    def __init__(self, **options):
        super(CheetahXmlLexer, self).__init__(XmlLexer, CheetahLexer,
                                              **options)


class CheetahJavascriptLexer(DelegatingLexer):
    """
    Subclass of the `CheetahLexer` that highlights unlexed data
    with the `JavascriptLexer`.
    """

    name = 'JavaScript+Cheetah'
    aliases = ['js+cheetah', 'javascript+cheetah',
               'js+spitfire', 'javascript+spitfire']
    mimetypes = ['application/x-javascript+cheetah',
                 'text/x-javascript+cheetah',
                 'text/javascript+cheetah',
                 'application/x-javascript+spitfire',
                 'text/x-javascript+spitfire',
                 'text/javascript+spitfire']

    def __init__(self, **options):
        super(CheetahJavascriptLexer, self).__init__(JavascriptLexer,
                                                     CheetahLexer, **options)


class GenshiTextLexer(RegexLexer):
    """
    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ text
    templates.
    """

    name = 'Genshi Text'
    aliases = ['genshitext']
    mimetypes = ['application/x-genshi-text', 'text/x-genshi']

    tokens = {
        'root': [
            (r'[^#$\s]+', Other),
            (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
            (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
            include('variable'),
            (r'[#$\s]', Other),
        ],
        'directive': [
            (r'\n', Text, '#pop'),
            (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
            (r'(choose|when|with)([^\S\n]+)(.*)',
             bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
            (r'(choose|otherwise)\b', Keyword, '#pop'),
            (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
        ],
        'variable': [
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
             Name.Variable),
        ]
    }


class GenshiMarkupLexer(RegexLexer):
    """
    Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
    `GenshiLexer`.
    """

    flags = re.DOTALL
    tokens = {
        'root': [
            (r'[^<$]+', Other),
            (r'(<\?python)(.*?)(\?>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # yield style and script blocks as Other
            (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
            (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
            (r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
            include('variable'),
            (r'[<$]', Other),
        ],
        'pytag': [
            (r'\s+', Text),
            (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'pyattr': [
            ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
            ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
        'tag': [
            (r'\s+', Text),
            (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
            (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'attr': [
            ('"', String, 'attr-dstring'),
            ("'", String, 'attr-sstring'),
            (r'[^\s>]*', String, '#pop')
        ],
        'attr-dstring': [
            ('"', String, '#pop'),
            include('strings'),
            ("'", String)
        ],
        'attr-sstring': [
            ("'", String, '#pop'),
            include('strings'),
            ('"', String)
        ],
        'strings': [
            ('[^"\'$]+', String),
            include('variable')
        ],
        'variable': [
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
             Name.Variable),
        ]
    }


class HtmlGenshiLexer(DelegatingLexer):
    """
    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
    `kid <http://kid-templating.org/>`_ HTML templates.
    """

    name = 'HTML+Genshi'
    aliases = ['html+genshi', 'html+kid']
    alias_filenames = ['*.html', '*.htm', '*.xhtml']
    mimetypes = ['text/html+genshi']

    def __init__(self, **options):
        super(HtmlGenshiLexer, self).__init__(HtmlLexer, GenshiMarkupLexer,
                                              **options)

    def analyse_text(text):
        rv = 0.0
        if re.search(r'\$\{.*?\}', text) is not None:
            rv += 0.2
        if re.search(r'py:(.*?)=["\']', text) is not None:
            rv += 0.2
        return rv + HtmlLexer.analyse_text(text) - 0.01


class GenshiLexer(DelegatingLexer):
    """
    A lexer that highlights `genshi <http://genshi.edgewall.org/>`_ and
    `kid <http://kid-templating.org/>`_ XML templates.
    """

    name = 'Genshi'
    aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
    filenames = ['*.kid']
    alias_filenames = ['*.xml']
    mimetypes = ['application/x-genshi', 'application/x-kid']

    def __init__(self, **options):
        super(GenshiLexer, self).__init__(XmlLexer, GenshiMarkupLexer,
                                          **options)

    def analyse_text(text):
        rv = 0.0
        if re.search(r'\$\{.*?\}', text) is not None:
            rv += 0.2
        if re.search(r'py:(.*?)=["\']', text) is not None:
            rv += 0.2
        return rv + XmlLexer.analyse_text(text) - 0.01


class JavascriptGenshiLexer(DelegatingLexer):
    """
    A lexer that highlights javascript code in genshi text templates.
    """

    name = 'JavaScript+Genshi Text'
    aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
               'javascript+genshi']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+genshi',
                 'text/x-javascript+genshi',
                 'text/javascript+genshi']

    def __init__(self, **options):
        super(JavascriptGenshiLexer, self).__init__(JavascriptLexer,
                                                    GenshiTextLexer,
                                                    **options)

    def analyse_text(text):
        return GenshiLexer.analyse_text(text) - 0.05


class CssGenshiLexer(DelegatingLexer):
    """
    A lexer that highlights CSS definitions in genshi text templates.
    """

    name = 'CSS+Genshi Text'
    aliases = ['css+genshitext', 'css+genshi']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+genshi']

    def __init__(self, **options):
        super(CssGenshiLexer, self).__init__(CssLexer, GenshiTextLexer,
                                             **options)

    def analyse_text(text):
        return GenshiLexer.analyse_text(text) - 0.05


class RhtmlLexer(DelegatingLexer):
    """
    Subclass of the ERB lexer that highlights the unlexed data with the
    HTML lexer.

    Nested Javascript and CSS are highlighted too.
    """

    name = 'RHTML'
    aliases = ['rhtml', 'html+erb', 'html+ruby']
    filenames = ['*.rhtml']
    alias_filenames = ['*.html', '*.htm', '*.xhtml']
    mimetypes = ['text/html+ruby']

    def __init__(self, **options):
        super(RhtmlLexer, self).__init__(HtmlLexer, ErbLexer, **options)

    def analyse_text(text):
        rv = ErbLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):
            # one more than the XmlErbLexer returns
            rv += 0.5
        return rv


class XmlErbLexer(DelegatingLexer):
    """
    Subclass of `ErbLexer` which highlights data outside preprocessor
    directives with the `XmlLexer`.
    """

    name = 'XML+Ruby'
    aliases = ['xml+erb', 'xml+ruby']
    alias_filenames = ['*.xml']
    mimetypes = ['application/xml+ruby']

    def __init__(self, **options):
        super(XmlErbLexer, self).__init__(XmlLexer, ErbLexer, **options)

    def analyse_text(text):
        rv = ErbLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        return rv


class CssErbLexer(DelegatingLexer):
    """
    Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
    """

    name = 'CSS+Ruby'
    aliases = ['css+erb', 'css+ruby']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+ruby']

    def __init__(self, **options):
        super(CssErbLexer, self).__init__(CssLexer, ErbLexer, **options)

    def analyse_text(text):
        return ErbLexer.analyse_text(text) - 0.05


class JavascriptErbLexer(DelegatingLexer):
    """
    Subclass of `ErbLexer` which highlights unlexed data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+Ruby'
    aliases = ['js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+ruby',
                 'text/x-javascript+ruby',
                 'text/javascript+ruby']

    def __init__(self, **options):
        super(JavascriptErbLexer, self).__init__(JavascriptLexer, ErbLexer,
                                                 **options)

    def analyse_text(text):
        return ErbLexer.analyse_text(text) - 0.05


class HtmlPhpLexer(DelegatingLexer):
    """
    Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.

    Nested Javascript and CSS are highlighted too.
    """

    name = 'HTML+PHP'
    aliases = ['html+php']
    filenames = ['*.phtml']
    alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
                       '*.php[345]']
    mimetypes = ['application/x-php',
                 'application/x-httpd-php', 'application/x-httpd-php3',
                 'application/x-httpd-php4', 'application/x-httpd-php5']

    def __init__(self, **options):
        super(HtmlPhpLexer, self).__init__(HtmlLexer, PhpLexer, **options)

    def analyse_text(text):
        rv = PhpLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):
            rv += 0.5
        return rv


class XmlPhpLexer(DelegatingLexer):
    """
    Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
    """

    name = 'XML+PHP'
    aliases = ['xml+php']
    alias_filenames = ['*.xml', '*.php', '*.php[345]']
    mimetypes = ['application/xml+php']

    def __init__(self, **options):
        super(XmlPhpLexer, self).__init__(XmlLexer, PhpLexer, **options)

    def analyse_text(text):
        rv = PhpLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        return rv


class CssPhpLexer(DelegatingLexer):
    """
    Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
    """

    name = 'CSS+PHP'
    aliases = ['css+php']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+php']

    def __init__(self, **options):
        super(CssPhpLexer, self).__init__(CssLexer, PhpLexer, **options)

    def analyse_text(text):
        return PhpLexer.analyse_text(text) - 0.05


class JavascriptPhpLexer(DelegatingLexer):
    """
    Subclass of `PhpLexer` which highlights unmatched data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+PHP'
    aliases = ['js+php', 'javascript+php']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+php',
                 'text/x-javascript+php',
                 'text/javascript+php']

    def __init__(self, **options):
        super(JavascriptPhpLexer, self).__init__(JavascriptLexer, PhpLexer,
                                                 **options)

    def analyse_text(text):
        return PhpLexer.analyse_text(text)


class HtmlSmartyLexer(DelegatingLexer):
    """
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `HtmlLexer`.

    Nested Javascript and CSS are highlighted too.
    """

    name = 'HTML+Smarty'
    aliases = ['html+smarty']
    alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
    mimetypes = ['text/html+smarty']

    def __init__(self, **options):
        super(HtmlSmartyLexer, self).__init__(HtmlLexer, SmartyLexer, **options)

    def analyse_text(text):
        rv = SmartyLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):
            rv += 0.5
        return rv


class XmlSmartyLexer(DelegatingLexer):
    """
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `XmlLexer`.
    """

    name = 'XML+Smarty'
    aliases = ['xml+smarty']
    alias_filenames = ['*.xml', '*.tpl']
    mimetypes = ['application/xml+smarty']

    def __init__(self, **options):
        super(XmlSmartyLexer, self).__init__(XmlLexer, SmartyLexer, **options)

    def analyse_text(text):
        rv = SmartyLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        return rv


class CssSmartyLexer(DelegatingLexer):
    """
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `CssLexer`.
    """

    name = 'CSS+Smarty'
    aliases = ['css+smarty']
    alias_filenames = ['*.css', '*.tpl']
    mimetypes = ['text/css+smarty']

    def __init__(self, **options):
        super(CssSmartyLexer, self).__init__(CssLexer, SmartyLexer, **options)

    def analyse_text(text):
        return SmartyLexer.analyse_text(text) - 0.05


class JavascriptSmartyLexer(DelegatingLexer):
    """
    Subclass of the `SmartyLexer` that highlights unlexed data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+Smarty'
    aliases = ['js+smarty', 'javascript+smarty']
    alias_filenames = ['*.js', '*.tpl']
    mimetypes = ['application/x-javascript+smarty',
                 'text/x-javascript+smarty',
                 'text/javascript+smarty']

    def __init__(self, **options):
        super(JavascriptSmartyLexer, self).__init__(JavascriptLexer, SmartyLexer,
                                                    **options)

    def analyse_text(text):
        return SmartyLexer.analyse_text(text) - 0.05


class HtmlDjangoLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `HtmlLexer`.

    Nested Javascript and CSS are highlighted too.
    """

    name = 'HTML+Django/Jinja'
    aliases = ['html+django', 'html+jinja', 'htmldjango']
    alias_filenames = ['*.html', '*.htm', '*.xhtml']
    mimetypes = ['text/html+django', 'text/html+jinja']

    def __init__(self, **options):
        super(HtmlDjangoLexer, self).__init__(HtmlLexer, DjangoLexer, **options)

    def analyse_text(text):
        rv = DjangoLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):
            rv += 0.5
        return rv


class XmlDjangoLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `XmlLexer`.
    """

    name = 'XML+Django/Jinja'
    aliases = ['xml+django', 'xml+jinja']
    alias_filenames = ['*.xml']
    mimetypes = ['application/xml+django', 'application/xml+jinja']

    def __init__(self, **options):
        super(XmlDjangoLexer, self).__init__(XmlLexer, DjangoLexer, **options)

    def analyse_text(text):
        rv = DjangoLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        return rv


class CssDjangoLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `CssLexer`.
    """

    name = 'CSS+Django/Jinja'
    aliases = ['css+django', 'css+jinja']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+django', 'text/css+jinja']

    def __init__(self, **options):
        super(CssDjangoLexer, self).__init__(CssLexer, DjangoLexer, **options)

    def analyse_text(text):
        return DjangoLexer.analyse_text(text) - 0.05


class JavascriptDjangoLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `JavascriptLexer`.
    """

    name = 'JavaScript+Django/Jinja'
    aliases = ['js+django', 'javascript+django',
               'js+jinja', 'javascript+jinja']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+django',
                 'application/x-javascript+jinja',
                 'text/x-javascript+django',
                 'text/x-javascript+jinja',
                 'text/javascript+django',
                 'text/javascript+jinja']

    def __init__(self, **options):
        super(JavascriptDjangoLexer, self).__init__(JavascriptLexer, DjangoLexer,
                                                    **options)

    def analyse_text(text):
        return DjangoLexer.analyse_text(text) - 0.05


class JspRootLexer(RegexLexer):
    """
    Base for the `JspLexer`. Yields `Token.Other` for areas outside of
    JSP tags.

    .. versionadded:: 0.7
    """

    tokens = {
        'root': [
            (r'<%\S?', Keyword, 'sec'),
            # FIXME: I want to make these keywords but still parse attributes.
            (r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
             Keyword),
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # note: '\w\W' != '.' without DOTALL.
            (r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
        ],
    }


class JspLexer(DelegatingLexer):
    """
    Lexer for Java Server Pages.

    .. versionadded:: 0.7
    """

    name = 'Java Server Page'
    aliases = ['jsp']
    filenames = ['*.jsp']
    mimetypes = ['application/x-jsp']

    def __init__(self, **options):
        super(JspLexer, self).__init__(XmlLexer, JspRootLexer, **options)

    def analyse_text(text):
        rv = JavaLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        if '<%' in text and '%>' in text:
            rv += 0.1
        return rv


class EvoqueLexer(RegexLexer):
    """
    For files using the Evoque templating system.

    .. versionadded:: 1.1
    """

    name = 'Evoque'
    aliases = ['evoque']
    filenames = ['*.evoque']
    mimetypes = ['application/x-evoque']

    flags = re.DOTALL

    tokens = {
        'root': [
            (r'[^#$]+', Other),
            (r'#\[', Comment.Multiline, 'comment'),
            (r'\$\$', Other),
            # svn keywords
            (r'\$\w+:[^$\n]*\$', Comment.Multiline),
            # directives: begin, end
            (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, Punctuation)),
            # directives: evoque, overlay
            # see doc for handling first name arg: /directives/evoque/
            # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
            # should be using(PythonLexer), not passed out as String
            (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+[^=,%}]+?)?'
             r'(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, using(PythonLexer), Punctuation)),
            # directives: if, for, prefer, test
            (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      using(PythonLexer), Punctuation)),
            # directive clauses (no {} expression)
            (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
            # expressions
            (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
             bygroups(Punctuation, None, using(PythonLexer),
                      Name.Builtin, None, None, Punctuation)),
            (r'#', Other),
        ],
        'comment': [
            (r'[^\]#]', Comment.Multiline),
            (r'#\[', Comment.Multiline, '#push'),
            (r'\]#', Comment.Multiline, '#pop'),
            (r'[\]#]', Comment.Multiline)
        ],
    }


class EvoqueHtmlLexer(DelegatingLexer):
    """
    Subclass of the `EvoqueLexer` that highlights unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 1.1
    """

    name = 'HTML+Evoque'
    aliases = ['html+evoque']
    filenames = ['*.html']
    mimetypes = ['text/html+evoque']

    def __init__(self, **options):
        super(EvoqueHtmlLexer, self).__init__(HtmlLexer, EvoqueLexer,
                                              **options)


class EvoqueXmlLexer(DelegatingLexer):
    """
    Subclass of the `EvoqueLexer` that highlights unlexed data with the
    `XmlLexer`.

    .. versionadded:: 1.1
    """

    name = 'XML+Evoque'
    aliases = ['xml+evoque']
    filenames = ['*.xml']
    mimetypes = ['application/xml+evoque']

    def __init__(self, **options):
        super(EvoqueXmlLexer, self).__init__(XmlLexer, EvoqueLexer,
                                             **options)


class ColdfusionLexer(RegexLexer):
    """
    Coldfusion statements
    """

    name = 'cfstatement'
    aliases = ['cfs']
    filenames = []
    mimetypes = []
    flags = re.IGNORECASE

    tokens = {
        'root': [
            (r'//.*?\n', Comment.Single),
            (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
            (r'\+\+|--', Operator),
            (r'[-+*/^&=!]', Operator),
            (r'<=|>=|<|>|==', Operator),
            (r'mod\b', Operator),
            (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
            (r'\|\||&&', Operator),
            (r'\?', Operator),
            (r'"', String.Double, 'string'),
            # There is a special rule for allowing html in single quoted
            # strings, evidently.
            (r"'.*?'", String.Single),
            (r'\d+', Number),
            (r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
             r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
             r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'(application|session|client|cookie|super|this|variables|arguments)\b',
             Name.Constant),
            (r'([a-z_$][\w.]*)(\s*)(\()',
             bygroups(Name.Function, Text, Punctuation)),
            (r'[a-z_$][\w.]*', Name.Variable),
            (r'[()\[\]{};:,.\\]', Punctuation),
            (r'\s+', Text),
        ],
        'string': [
            (r'""', String.Double),
            (r'#.+?#', String.Interp),
            (r'[^"#]+', String.Double),
            (r'#', String.Double),
            (r'"', String.Double, '#pop'),
        ],
    }


class ColdfusionMarkupLexer(RegexLexer):
    """
    Coldfusion markup only
    """

    name = 'Coldfusion'
    aliases = ['cf']
    filenames = []
    mimetypes = []

    tokens = {
        'root': [
            (r'[^<]+', Other),
            include('tags'),
            (r'<[^<>]*', Other),
        ],
        'tags': [
            (r'<!---', Comment.Multiline, 'cfcomment'),
            (r'(?s)<!--.*?-->', Comment),
            (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
            (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
            # negative lookbehind is for strings with embedded >
            (r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
             r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
             r'mailpart|mail|header|content|zip|image|lock|argument|try|'
             r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
        ],
        'cfoutput': [
            (r'[^#<]+', Other),
            (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
                                      Punctuation)),
            # (r'<cfoutput.*?>', Name.Builtin, '#push'),
            (r'</cfoutput.*?>', Name.Builtin, '#pop'),
            include('tags'),
            (r'(?s)<[^<>]*', Other),
            (r'#', Other),
        ],
        'cfcomment': [
            (r'<!---', Comment.Multiline, '#push'),
            (r'--->', Comment.Multiline, '#pop'),
            (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
        ],
    }


class ColdfusionHtmlLexer(DelegatingLexer):
    """
    Coldfusion markup in html
    """
    name = 'Coldfusion HTML'
    aliases = ['cfm']
    filenames = ['*.cfm', '*.cfml']
    mimetypes = ['application/x-coldfusion']

    def __init__(self, **options):
        super(ColdfusionHtmlLexer, self).__init__(HtmlLexer,
                                                  ColdfusionMarkupLexer,
                                                  **options)
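

# Sketch of the DelegatingLexer composition used throughout this module
# (``_demo_cfm_delegation`` is illustrative, not part of Pygments):
# ColdfusionMarkupLexer runs first and emits Token.Other for everything it
# does not recognize; DelegatingLexer then re-lexes exactly those Other
# spans with HtmlLexer and splices the two token streams back together.
def _demo_cfm_delegation():
    lexer = ColdfusionHtmlLexer()
    code = '<b><cfset x = 1></b>'
    # '<b>'/'</b>' come from HtmlLexer; the cfset tag from the CF lexers.
    return list(lexer.get_tokens(code))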


class ColdfusionCFCLexer(DelegatingLexer):
    """
    Coldfusion markup/script components

    .. versionadded:: 2.0
    """
    name = 'Coldfusion CFC'
    aliases = ['cfc']
    filenames = ['*.cfc']
    mimetypes = []

    def __init__(self, **options):
        super(ColdfusionCFCLexer, self).__init__(ColdfusionHtmlLexer,
                                                 ColdfusionLexer,
                                                 **options)


class SspLexer(DelegatingLexer):
    """
    Lexer for Scalate Server Pages.

    .. versionadded:: 1.4
    """
    name = 'Scalate Server Page'
    aliases = ['ssp']
    filenames = ['*.ssp']
    mimetypes = ['application/x-ssp']

    def __init__(self, **options):
        super(SspLexer, self).__init__(XmlLexer, JspRootLexer, **options)

    def analyse_text(text):
        rv = 0.0
        if re.search(r'val \w+\s*:', text):
            rv += 0.6
        if looks_like_xml(text):
            rv += 0.2
        if '<%' in text and '%>' in text:
            rv += 0.1
        return rv
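

# Sketch of how the analyse_text() score above is consumed (the helper is
# illustrative, not part of Pygments): guess_lexer() asks every registered
# lexer to rate the text and instantiates the best match, so the
# ``val foo:`` heuristic is what should tip mixed XML/Scala input towards
# SspLexer.  The import is local to avoid a circular import at load time.
def _demo_ssp_guess():
    from pygments.lexers import guess_lexer
    text = '<p><%= val name: String = greet() %></p>'
    return guess_lexer(text)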


class TeaTemplateRootLexer(RegexLexer):
    """
    Base for the `TeaTemplateLexer`. Yields `Token.Other` for the area
    outside of code blocks.

    .. versionadded:: 1.5
    """

    tokens = {
        'root': [
            (r'<%\S?', Keyword, 'sec'),
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # note: '\w\W' != '.' without DOTALL.
            (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
        ],
    }


class TeaTemplateLexer(DelegatingLexer):
    """
    Lexer for `Tea Templates <http://teatrove.org/>`_.

    .. versionadded:: 1.5
    """
    name = 'Tea'
    aliases = ['tea']
    filenames = ['*.tea']
    mimetypes = ['text/x-tea']

    def __init__(self, **options):
        super(TeaTemplateLexer, self).__init__(XmlLexer,
                                               TeaTemplateRootLexer, **options)

    def analyse_text(text):
        rv = TeaLangLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        if '<%' in text and '%>' in text:
            rv += 0.1
        return rv


class LassoHtmlLexer(DelegatingLexer):
    """
    Subclass of the `LassoLexer` which highlights unhandled data with the
    `HtmlLexer`.

    Nested JavaScript and CSS are also highlighted.

    .. versionadded:: 1.6
    """
    name = 'HTML+Lasso'
    aliases = ['html+lasso']
    alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
                       '*.incl', '*.inc', '*.las']
    mimetypes = ['text/html+lasso',
                 'application/x-httpd-lasso',
                 'application/x-httpd-lasso[89]']

    def __init__(self, **options):
        super(LassoHtmlLexer, self).__init__(HtmlLexer, LassoLexer, **options)

    def analyse_text(text):
        rv = LassoLexer.analyse_text(text) - 0.01
        if html_doctype_matches(text):  # same as HTML lexer
            rv += 0.5
        return rv


class LassoXmlLexer(DelegatingLexer):
    """
    Subclass of the `LassoLexer` which highlights unhandled data with the
    `XmlLexer`.

    .. versionadded:: 1.6
    """
    name = 'XML+Lasso'
    aliases = ['xml+lasso']
    alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
                       '*.incl', '*.inc', '*.las']
    mimetypes = ['application/xml+lasso']

    def __init__(self, **options):
        super(LassoXmlLexer, self).__init__(XmlLexer, LassoLexer, **options)

    def analyse_text(text):
        rv = LassoLexer.analyse_text(text) - 0.01
        if looks_like_xml(text):
            rv += 0.4
        return rv


class LassoCssLexer(DelegatingLexer):
    """
    Subclass of the `LassoLexer` which highlights unhandled data with the
    `CssLexer`.

    .. versionadded:: 1.6
    """
    name = 'CSS+Lasso'
    aliases = ['css+lasso']
    alias_filenames = ['*.css']
    mimetypes = ['text/css+lasso']

    def __init__(self, **options):
        options['requiredelimiters'] = True
        super(LassoCssLexer, self).__init__(CssLexer, LassoLexer, **options)

    def analyse_text(text):
        rv = LassoLexer.analyse_text(text) - 0.05
        if re.search(r'\w+:.+?;', text):
            rv += 0.1
        if 'padding:' in text:
            rv += 0.1
        return rv


class LassoJavascriptLexer(DelegatingLexer):
    """
    Subclass of the `LassoLexer` which highlights unhandled data with the
    `JavascriptLexer`.

    .. versionadded:: 1.6
    """
    name = 'JavaScript+Lasso'
    aliases = ['js+lasso', 'javascript+lasso']
    alias_filenames = ['*.js']
    mimetypes = ['application/x-javascript+lasso',
                 'text/x-javascript+lasso',
                 'text/javascript+lasso']

    def __init__(self, **options):
        options['requiredelimiters'] = True
        super(LassoJavascriptLexer, self).__init__(JavascriptLexer, LassoLexer,
                                                   **options)

    def analyse_text(text):
        return LassoLexer.analyse_text(text) - 0.05


class HandlebarsLexer(RegexLexer):
    """
    Generic `handlebars <http://handlebarsjs.com/>`_ template lexer.

    Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
    Everything else is left for a delegating lexer.

    .. versionadded:: 2.0
    """

    name = "Handlebars"
    aliases = ['handlebars']

    tokens = {
        'root': [
            (r'[^{]+', Other),

            # Comment start {{! }} or {{!--
            (r'\{\{!.*\}\}', Comment),

            # HTML escaping open {{{expression
            (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),

            # {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
            (r'(\{\{)([#~/]+)([^\s}]*)',
             bygroups(Comment.Preproc, Number.Attribute, Number.Attribute),
             'tag'),
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
        ],

        'tag': [
            (r'\s+', Text),
            # HTML escaping close }}}
            (r'\}\}\}', Comment.Special, '#pop'),
            # blockClose}}, includes optional tilde ~
            (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),

            # {{opt=something}}
            (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),

            # Partials {{> ...}}
            (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
            (r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
            (r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
             'dynamic-partial'),

            include('generic'),
        ],
        'dynamic-partial': [
            (r'\s+', Text),
            (r'\)', Punctuation, '#pop'),

            (r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
                                                      Name.Variable, Text)),
            (r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
                                             using(this, state='variable'))),
            (r'[\w-]+', Name.Function),

            include('generic'),
        ],
        'variable': [
            (r'[()/@a-zA-Z][\w-]*', Name.Variable),
            (r'\.[\w-]+', Name.Variable),
            (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
        ],
        'generic': [
            include('variable'),

            # borrowed from DjangoLexer; exponent part fixed so that
            # e.g. 1e-10 actually matches
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
            (r"[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ]
    }
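

# Minimal sketch of the "tags only" behaviour described in the docstring
# (``_demo_handlebars_tags_only`` is illustrative, not part of Pygments):
# the surrounding HTML is emitted as Token.Other, which is exactly what
# HandlebarsHtmlLexer below hands off to HtmlLexer.
def _demo_handlebars_tags_only():
    lexer = HandlebarsLexer()
    code = '<li>{{#each items}}{{name}}{{/each}}</li>'
    return list(lexer.get_tokens(code))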


class HandlebarsHtmlLexer(DelegatingLexer):
    """
    Subclass of the `HandlebarsLexer` that highlights unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 2.0
    """

    name = "HTML+Handlebars"
    aliases = ["html+handlebars"]
    filenames = ['*.handlebars', '*.hbs']
    mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']

    def __init__(self, **options):
        super(HandlebarsHtmlLexer, self).__init__(HtmlLexer, HandlebarsLexer,
                                                  **options)


class YamlJinjaLexer(DelegatingLexer):
    """
    Subclass of the `DjangoLexer` that highlights unlexed data with the
    `YamlLexer`.

    Commonly used in SaltStack salt states.

    .. versionadded:: 2.0
    """

    name = 'YAML+Jinja'
    aliases = ['yaml+jinja', 'salt', 'sls']
    filenames = ['*.sls']
    mimetypes = ['text/x-yaml+jinja', 'text/x-sls']

    def __init__(self, **options):
        super(YamlJinjaLexer, self).__init__(YamlLexer, DjangoLexer, **options)


class LiquidLexer(RegexLexer):
    """
    Lexer for `Liquid templates
    <http://www.rubydoc.info/github/Shopify/liquid>`_.

    .. versionadded:: 2.0
    """
    name = 'liquid'
    aliases = ['liquid']
    filenames = ['*.liquid']

    tokens = {
        'root': [
            (r'[^{]+', Text),
            # tags and block tags
            (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
            # output tags
            (r'(\{\{)(\s*)([^\s}]+)',
             bygroups(Punctuation, Whitespace, using(this, state='generic')),
             'output'),
            (r'\{', Text),
        ],

        'tag-or-block': [
            # builtin logic blocks
            (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
            (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
             combined('end-of-block', 'whitespace', 'generic')),
            (r'(else)(\s*)(%\})',
             bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),

            # other builtin blocks
            (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, using(this, state='variable'),
                      Whitespace, Punctuation), '#pop'),
            (r'(comment)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
            (r'(raw)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),

            # end of block
            (r'(end(case|unless|if))(\s*)(%\})',
             bygroups(Keyword.Reserved, None, Whitespace, Punctuation),
             '#pop'),
            (r'(end([^\s%]+))(\s*)(%\})',
             bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),

            # builtin tags (assign and include are handled together with
            # usual tags)
            (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
             bygroups(Name.Tag, Whitespace,
                      using(this, state='generic'), Punctuation, Whitespace),
             'variable-tag-markup'),

            # other tags or blocks
            (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup'),
        ],

        'output': [
            include('whitespace'),
            (r'\}\}', Punctuation, '#pop'),  # end of output
            (r'\|', Punctuation, 'filters'),
        ],

        'filters': [
            include('whitespace'),
            (r'\}\}', Punctuation, ('#pop', '#pop')),  # end of filters and output
            (r'([^\s|:]+)(:?)(\s*)',
             bygroups(Name.Function, Punctuation, Whitespace),
             'filter-markup'),
        ],

        'filter-markup': [
            (r'\|', Punctuation, '#pop'),
            include('end-of-tag'),
            include('default-param-markup'),
        ],

        'condition': [
            include('end-of-block'),
            include('whitespace'),
            (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
             bygroups(using(this, state='generic'), Whitespace, Operator,
                      Whitespace, using(this, state='generic'), Whitespace,
                      Punctuation)),
            (r'\b!', Operator),
            (r'\bnot\b', Operator.Word),
            (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
             bygroups(using(this, state='generic'), Whitespace, Operator.Word,
                      Whitespace, using(this, state='generic'))),
            include('generic'),
            include('whitespace'),
        ],

        'generic-value': [
            include('generic'),
            include('end-at-whitespace'),
        ],

        'operator': [
            (r'(\s*)((=|!|>|<)=?)(\s*)',
             bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
            (r'(\s*)(\bcontains\b)(\s*)',
             bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
        ],

        'end-of-tag': [
            (r'\}\}', Punctuation, '#pop'),
        ],

        'end-of-block': [
            (r'%\}', Punctuation, ('#pop', '#pop')),
        ],

        'end-at-whitespace': [
            (r'\s+', Whitespace, '#pop'),
        ],

        # states for unknown markup
        'param-markup': [
            include('whitespace'),
            # params with colons or equals
            (r'([^\s=:]+)(\s*)(=|:)',
             bygroups(Name.Attribute, Whitespace, Operator)),
            # explicit variables
            (r'(\{\{)(\s*)([^\s}]+)(\s*)(\}\})',
             bygroups(Punctuation, Whitespace, using(this, state='variable'),
                      Whitespace, Punctuation)),

            include('string'),
            include('number'),
            include('keyword'),
            (r',', Punctuation),
        ],

        'default-param-markup': [
            include('param-markup'),
            (r'.', Text),  # fallback for switches / variables / un-quoted strings / ...
        ],

        'variable-param-markup': [
            include('param-markup'),
            include('variable'),
            (r'.', Text),  # fallback
        ],

        'tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('default-param-markup'),
        ],

        'variable-tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('variable-param-markup'),
        ],

        # states for different value types
        'keyword': [
            (r'\b(false|true)\b', Keyword.Constant),
        ],

        'variable': [
            (r'[a-zA-Z_]\w*', Name.Variable),
            (r'(?<=\w)\.(?=\w)', Punctuation),
        ],

        'string': [
            (r"'[^']*'", String.Single),
            (r'"[^"]*"', String.Double),
        ],

        'number': [
            (r'\d+\.\d+', Number.Float),
            (r'\d+', Number.Integer),
        ],

        'generic': [  # decides for variable, string, keyword or number
            include('keyword'),
            include('string'),
            include('number'),
            include('variable'),
        ],

        'whitespace': [
            (r'[ \t]+', Whitespace),
        ],

        # states for builtin blocks
        'comment': [
            (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), ('#pop', '#pop')),
            (r'.', Comment),
        ],

        'raw': [
            (r'[^{]+', Text),
            (r'(\{%)(\s*)(endraw)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), '#pop'),
            (r'\{', Text),
        ],
    }
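

# Sketch of the output-tag/filter pipeline above (illustrative helper, not
# part of Pygments): '{{' pushes the 'output' state, the leading expression
# is lexed through the 'generic' state, and each '|' enters 'filters',
# where the filter name should come back as Name.Function.
def _demo_liquid_filters():
    lexer = LiquidLexer()
    code = '{{ product.title | upcase }}'
    return list(lexer.get_tokens(code))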


class TwigLexer(RegexLexer):
    """
    `Twig <http://twig.sensiolabs.org/>`_ template lexer.

    It highlights only the Twig code between the preprocessor directives;
    all other data is left untouched for a delegating lexer.

    .. versionadded:: 2.0
    """

    name = 'Twig'
    aliases = ['twig']
    mimetypes = ['application/x-twig']
    flags = re.M | re.S

    # Note that a backslash is included in the following two patterns;
    # PHP uses a backslash as a namespace separator.
    _ident_char = r'[\\\w-]|[^\x00-\x7f]'
    _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
    _ident_end = r'(?:' + _ident_char + ')*'
    _ident_inner = _ident_begin + _ident_end

    tokens = {
        'root': [
            (r'[^{]+', Other),
            (r'\{\{', Comment.Preproc, 'var'),
            # twig comments
            (r'\{\#.*?\#\}', Comment),
            # raw twig blocks
            (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Other, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
             r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
             bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
                      Other, Comment.Preproc, Text, Keyword, Text,
                      Comment.Preproc)),
            # filter blocks
            (r'(\{%%)(-?\s*)(filter)(\s+)(%s)' % _ident_inner,
             bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
             'tag'),
            (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
             bygroups(Comment.Preproc, Text, Keyword), 'tag'),
            (r'\{', Other),
        ],
        'varnames': [
            (r'(\|)(\s*)(%s)' % _ident_inner,
             bygroups(Operator, Text, Name.Function)),
            (r'(is)(\s+)(not)?(\s*)(%s)' % _ident_inner,
             bygroups(Keyword, Text, Keyword, Text, Name.Function)),
            (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
            (r'(in|not|and|b-and|or|b-or|b-xor|is'
             r'|if|elseif|else|import'
             r'|constant|defined|divisibleby|empty|even|iterable|odd|sameas'
             r'|matches|starts\s+with|ends\s+with)\b',
             Keyword),
            (r'(loop|block|parent)\b', Name.Builtin),
            (_ident_inner, Name.Variable),
            (r'\.' + _ident_inner, Name.Variable),
            (r'\.[0-9]+', Number),
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
            (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)',
             Operator),
            (r"[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ],
        'var': [
            (r'\s+', Text),
            (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames'),
        ],
        'tag': [
            (r'\s+', Text),
            (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
            include('varnames'),
            (r'.', Punctuation),
        ],
    }
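

# Sketch of the raw/verbatim escape hatch above (illustrative helper, not
# part of Pygments): the body of a {% raw %} block is captured as
# Token.Other in a single match, so TwigHtmlLexer below renders it as
# plain markup instead of Twig code, while the trailing interpolation is
# lexed normally.
def _demo_twig_raw():
    lexer = TwigLexer()
    code = '{% raw %}{{ not twig }}{% endraw %}{{ user.name|e }}'
    return list(lexer.get_tokens(code))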


class TwigHtmlLexer(DelegatingLexer):
    """
    Subclass of the `TwigLexer` that highlights unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 2.0
    """

    name = "HTML+Twig"
    aliases = ["html+twig"]
    filenames = ['*.twig']
    mimetypes = ['text/html+twig']

    def __init__(self, **options):
        super(TwigHtmlLexer, self).__init__(HtmlLexer, TwigLexer, **options)


class Angular2Lexer(RegexLexer):
    """
    Generic
    `angular2 <http://victorsavkin.com/post/119943127151/angular-2-template-syntax>`_
    template lexer.

    Highlights only the Angular template tags (stuff between `{{` and `}}` and
    special attributes: '(event)=', '[property]=', '[(twoWayBinding)]=').
    Everything else is left for a delegating lexer.

    .. versionadded:: 2.1
    """

    name = "Angular2"
    aliases = ['ng2']

    tokens = {
        'root': [
            (r'[^{([*#]+', Other),

            # {{meal.name}}
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'ngExpression'),

            # (click)="deleteOrder()"; [value]="test"; [(twoWayTest)]="foo.bar"
            (r'([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)',
             bygroups(Punctuation, Name.Attribute, Punctuation, Text,
                      Operator, Text),
             'attr'),
            (r'([([]+)([\w:.-]+)([\])]+)(\s*)',
             bygroups(Punctuation, Name.Attribute, Punctuation, Text)),

            # *ngIf="..."; #f="ngForm"
            # (token types now match the five groups; whitespace is Text)
            (r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
             bygroups(Punctuation, Name.Attribute, Text, Operator, Text),
             'attr'),
            (r'([*#])([\w:.-]+)(\s*)',
             bygroups(Punctuation, Name.Attribute, Text)),
        ],

        'ngExpression': [
            (r'\s+(\|\s+)?', Text),
            (r'\}\}', Comment.Preproc, '#pop'),

            # Literals
            (r':?(true|false)', String.Boolean),
            (r':?"(\\\\|\\"|[^"])*"', String.Double),
            (r":?'(\\\\|\\'|[^'])*'", String.Single),
            (r"[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),

            # Variables
            (r'[a-zA-Z][\w-]*(\(.*\))?', Name.Variable),
            (r'\.[\w-]+(\(.*\))?', Name.Variable),

            # inline If
            (r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)',
             bygroups(Operator, Text, String, Text, Operator, Text, String,
                      Text)),
        ],
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
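

# Sketch of the binding shapes this lexer singles out (illustrative helper,
# not part of Pygments): a '*directive=', an '(event)=' binding and a
# '{{...}}' interpolation; all remaining markup stays Token.Other for
# Angular2HtmlLexer below to re-lex as HTML.
def _demo_ng2_bindings():
    lexer = Angular2Lexer()
    code = '<button *ngIf="ok" (click)="save()">{{ order.name }}</button>'
    return list(lexer.get_tokens(code))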


class Angular2HtmlLexer(DelegatingLexer):
    """
    Subclass of the `Angular2Lexer` that highlights unlexed data with the
    `HtmlLexer`.

    .. versionadded:: 2.1
    """

    name = "HTML + Angular2"
    aliases = ["html+ng2"]
    filenames = ['*.ng2']

    def __init__(self, **options):
        super(Angular2HtmlLexer, self).__init__(HtmlLexer, Angular2Lexer,
                                                **options)