templates.py 74 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
7227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355
  1. """
  2. pygments.lexers.templates
  3. ~~~~~~~~~~~~~~~~~~~~~~~~~
  4. Lexers for various template engines' markup.
  5. :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
  6. :license: BSD, see LICENSE for details.
  7. """
  8. import re
  9. from pygments.lexers.html import HtmlLexer, XmlLexer
  10. from pygments.lexers.javascript import JavascriptLexer, LassoLexer
  11. from pygments.lexers.css import CssLexer
  12. from pygments.lexers.php import PhpLexer
  13. from pygments.lexers.python import PythonLexer
  14. from pygments.lexers.perl import PerlLexer
  15. from pygments.lexers.jvm import JavaLexer, TeaLangLexer
  16. from pygments.lexers.data import YamlLexer
  17. from pygments.lexers.sql import SqlLexer
  18. from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \
  19. include, using, this, default, combined
  20. from pygments.token import Error, Punctuation, Whitespace, \
  21. Text, Comment, Operator, Keyword, Name, String, Number, Other, Token
  22. from pygments.util import html_doctype_matches, looks_like_xml
  23. __all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer',
  24. 'JavascriptPhpLexer', 'ErbLexer', 'RhtmlLexer',
  25. 'XmlErbLexer', 'CssErbLexer', 'JavascriptErbLexer',
  26. 'SmartyLexer', 'HtmlSmartyLexer', 'XmlSmartyLexer',
  27. 'CssSmartyLexer', 'JavascriptSmartyLexer', 'DjangoLexer',
  28. 'HtmlDjangoLexer', 'CssDjangoLexer', 'XmlDjangoLexer',
  29. 'JavascriptDjangoLexer', 'GenshiLexer', 'HtmlGenshiLexer',
  30. 'GenshiTextLexer', 'CssGenshiLexer', 'JavascriptGenshiLexer',
  31. 'MyghtyLexer', 'MyghtyHtmlLexer', 'MyghtyXmlLexer',
  32. 'MyghtyCssLexer', 'MyghtyJavascriptLexer', 'MasonLexer', 'MakoLexer',
  33. 'MakoHtmlLexer', 'MakoXmlLexer', 'MakoJavascriptLexer',
  34. 'MakoCssLexer', 'JspLexer', 'CheetahLexer', 'CheetahHtmlLexer',
  35. 'CheetahXmlLexer', 'CheetahJavascriptLexer', 'EvoqueLexer',
  36. 'EvoqueHtmlLexer', 'EvoqueXmlLexer', 'ColdfusionLexer',
  37. 'ColdfusionHtmlLexer', 'ColdfusionCFCLexer', 'VelocityLexer',
  38. 'VelocityHtmlLexer', 'VelocityXmlLexer', 'SspLexer',
  39. 'TeaTemplateLexer', 'LassoHtmlLexer', 'LassoXmlLexer',
  40. 'LassoCssLexer', 'LassoJavascriptLexer', 'HandlebarsLexer',
  41. 'HandlebarsHtmlLexer', 'YamlJinjaLexer', 'LiquidLexer',
  42. 'TwigLexer', 'TwigHtmlLexer', 'Angular2Lexer', 'Angular2HtmlLexer',
  43. 'SqlJinjaLexer']
class ErbLexer(Lexer):
    """
    Generic ERB (Ruby Templating) lexer.

    Just highlights ruby code between the preprocessor directives, other data
    is left untouched by the lexer.

    All options are also forwarded to the `RubyLexer`.
    """

    name = 'ERB'
    url = 'https://github.com/ruby/erb'
    aliases = ['erb']
    mimetypes = ['application/x-ruby-templating']
    version_added = ''

    # Matches every ERB delimiter (including the <%% / %%> literal escapes
    # and leading-'%' statement lines); split() on this pattern keeps the
    # delimiters themselves because the whole pattern is one group.
    _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M)

    def __init__(self, **options):
        # Imported lazily here rather than at module level (keeps the
        # ruby lexer import out of this module's import time).
        from pygments.lexers.ruby import RubyLexer
        self.ruby_lexer = RubyLexer(**options)
        Lexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        """
        Since ERB doesn't allow "<%" and other tags inside of ruby
        blocks we have to use a split approach here that fails for
        that too.
        """
        tokens = self._block_re.split(text)
        # Reversed so pop() (O(1), from the end of the list) yields the
        # pieces in document order.
        tokens.reverse()
        # state 0: expecting plain text; state 1: expecting a start tag;
        # state 2: inside a block, expecting its end tag.
        # idx is the running character offset into the original text.
        state = idx = 0
        try:
            while True:
                # text
                if state == 0:
                    val = tokens.pop()
                    yield idx, Other, val
                    idx += len(val)
                    state = 1
                # block starts
                elif state == 1:
                    tag = tokens.pop()
                    # literals
                    if tag in ('<%%', '%%>'):
                        yield idx, Other, tag
                        idx += 3
                        state = 0
                    # comment
                    elif tag == '<%#':
                        yield idx, Comment.Preproc, tag
                        val = tokens.pop()
                        yield idx + 3, Comment, val
                        idx += 3 + len(val)
                        state = 2
                    # blocks or output
                    elif tag in ('<%', '<%=', '<%-'):
                        yield idx, Comment.Preproc, tag
                        idx += len(tag)
                        data = tokens.pop()
                        r_idx = 0
                        # Delegate the block body to the ruby lexer,
                        # shifting its offsets by our position.
                        for r_idx, r_token, r_value in \
                                self.ruby_lexer.get_tokens_unprocessed(data):
                            yield r_idx + idx, r_token, r_value
                        idx += len(data)
                        state = 2
                    elif tag in ('%>', '-%>'):
                        # Close delimiter with no matching open tag.
                        yield idx, Error, tag
                        idx += len(tag)
                        state = 0
                    # % raw ruby statements
                    else:
                        yield idx, Comment.Preproc, tag[0]
                        r_idx = 0
                        for r_idx, r_token, r_value in \
                                self.ruby_lexer.get_tokens_unprocessed(tag[1:]):
                            yield idx + 1 + r_idx, r_token, r_value
                        idx += len(tag)
                        state = 0
                # block ends
                elif state == 2:
                    tag = tokens.pop()
                    if tag not in ('%>', '-%>'):
                        yield idx, Other, tag
                    else:
                        yield idx, Comment.Preproc, tag
                    idx += len(tag)
                    state = 0
        except IndexError:
            # Token list exhausted -- end of input.
            return

    def analyse_text(text):
        # Returns None (falsy) when no ERB delimiters are present.
        if '<%' in text and '%>' in text:
            return 0.4
  131. class SmartyLexer(RegexLexer):
  132. """
  133. Generic Smarty template lexer.
  134. Just highlights smarty code between the preprocessor directives, other
  135. data is left untouched by the lexer.
  136. """
  137. name = 'Smarty'
  138. url = 'https://www.smarty.net/'
  139. aliases = ['smarty']
  140. filenames = ['*.tpl']
  141. mimetypes = ['application/x-smarty']
  142. version_added = ''
  143. flags = re.MULTILINE | re.DOTALL
  144. tokens = {
  145. 'root': [
  146. (r'[^{]+', Other),
  147. (r'(\{)(\*.*?\*)(\})',
  148. bygroups(Comment.Preproc, Comment, Comment.Preproc)),
  149. (r'(\{php\})(.*?)(\{/php\})',
  150. bygroups(Comment.Preproc, using(PhpLexer, startinline=True),
  151. Comment.Preproc)),
  152. (r'(\{)(/?[a-zA-Z_]\w*)(\s*)',
  153. bygroups(Comment.Preproc, Name.Function, Text), 'smarty'),
  154. (r'\{', Comment.Preproc, 'smarty')
  155. ],
  156. 'smarty': [
  157. (r'\s+', Text),
  158. (r'\{', Comment.Preproc, '#push'),
  159. (r'\}', Comment.Preproc, '#pop'),
  160. (r'#[a-zA-Z_]\w*#', Name.Variable),
  161. (r'\$[a-zA-Z_]\w*(\.\w+)*', Name.Variable),
  162. (r'[~!%^&*()+=|\[\]:;,.<>/?@-]', Operator),
  163. (r'(true|false|null)\b', Keyword.Constant),
  164. (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
  165. r"0[xX][0-9a-fA-F]+[Ll]?", Number),
  166. (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
  167. (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
  168. (r'[a-zA-Z_]\w*', Name.Attribute)
  169. ]
  170. }
  171. def analyse_text(text):
  172. rv = 0.0
  173. if re.search(r'\{if\s+.*?\}.*?\{/if\}', text):
  174. rv += 0.15
  175. if re.search(r'\{include\s+file=.*?\}', text):
  176. rv += 0.15
  177. if re.search(r'\{foreach\s+.*?\}.*?\{/foreach\}', text):
  178. rv += 0.15
  179. if re.search(r'\{\$.*?\}', text):
  180. rv += 0.01
  181. return rv
class VelocityLexer(RegexLexer):
    """
    Generic Velocity template lexer.

    Just highlights velocity directives and variable references, other
    data is left untouched by the lexer.
    """

    name = 'Velocity'
    url = 'https://velocity.apache.org/'
    aliases = ['velocity']
    filenames = ['*.vm', '*.fhtml']
    version_added = ''

    flags = re.MULTILINE | re.DOTALL

    # Shared identifier fragment reused throughout the token table.
    identifier = r'[a-zA-Z_]\w*'

    tokens = {
        'root': [
            (r'[^{#$]+', Other),
            # #* ... *# block comment
            (r'(#)(\*.*?\*)(#)',
             bygroups(Comment.Preproc, Comment, Comment.Preproc)),
            # ## line comment
            (r'(##)(.*?$)',
             bygroups(Comment.Preproc, Comment)),
            # directive with an argument list, e.g. #if( ... ) / #{if}( ... )
            (r'(#\{?)(' + identifier + r')(\}?)(\s?\()',
             bygroups(Comment.Preproc, Name.Function, Comment.Preproc, Punctuation),
             'directiveparams'),
            # bare directive, e.g. #end or #{end}
            (r'(#\{?)(' + identifier + r')(\}|\b)',
             bygroups(Comment.Preproc, Name.Function, Comment.Preproc)),
            # variable reference: $var, $!var, ${var}, $!{var}
            (r'\$!?\{?', Punctuation, 'variable')
        ],
        'variable': [
            (identifier, Name.Variable),
            (r'\(', Punctuation, 'funcparams'),
            # property access chains like $a.b.c push this state again
            (r'(\.)(' + identifier + r')',
             bygroups(Punctuation, Name.Variable), '#push'),
            (r'\}', Punctuation, '#pop'),
            default('#pop')
        ],
        'directiveparams': [
            (r'(&&|\|\||==?|!=?|[-<>+*%&|^/])|\b(eq|ne|gt|lt|ge|le|not|in)\b',
             Operator),
            (r'\[', Operator, 'rangeoperator'),
            (r'\b' + identifier + r'\b', Name.Function),
            include('funcparams')
        ],
        'rangeoperator': [
            # [a..b] range syntax
            (r'\.\.', Operator),
            include('funcparams'),
            (r'\]', Operator, '#pop')
        ],
        'funcparams': [
            (r'\$!?\{?', Punctuation, 'variable'),
            (r'\s+', Text),
            (r'[,:]', Punctuation),
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            (r"0[xX][0-9a-fA-F]+[Ll]?", Number),
            (r"\b[0-9]+\b", Number),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'\(', Punctuation, '#push'),
            (r'\)', Punctuation, '#pop'),
            (r'\{', Punctuation, '#push'),
            (r'\}', Punctuation, '#pop'),
            (r'\[', Punctuation, '#push'),
            (r'\]', Punctuation, '#pop'),
        ]
    }

    def analyse_text(text):
        # Each Velocity construct found raises confidence a little;
        # a bare variable reference is only weak evidence (+0.01).
        rv = 0.0
        if re.search(r'#\{?macro\}?\(.*?\).*?#\{?end\}?', text, re.DOTALL):
            rv += 0.25
        if re.search(r'#\{?if\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
            rv += 0.15
        if re.search(r'#\{?foreach\}?\(.+?\).*?#\{?end\}?', text, re.DOTALL):
            rv += 0.15
        if re.search(r'\$!?\{?[a-zA-Z_]\w*(\([^)]*\))?'
                     r'(\.\w+(\([^)]*\))?)*\}?', text):
            rv += 0.01
        return rv
  258. class VelocityHtmlLexer(DelegatingLexer):
  259. """
  260. Subclass of the `VelocityLexer` that highlights unlexed data
  261. with the `HtmlLexer`.
  262. """
  263. name = 'HTML+Velocity'
  264. aliases = ['html+velocity']
  265. version_added = ''
  266. alias_filenames = ['*.html', '*.fhtml']
  267. mimetypes = ['text/html+velocity']
  268. url = 'https://velocity.apache.org/'
  269. def __init__(self, **options):
  270. super().__init__(HtmlLexer, VelocityLexer, **options)
  271. class VelocityXmlLexer(DelegatingLexer):
  272. """
  273. Subclass of the `VelocityLexer` that highlights unlexed data
  274. with the `XmlLexer`.
  275. """
  276. name = 'XML+Velocity'
  277. aliases = ['xml+velocity']
  278. version_added = ''
  279. alias_filenames = ['*.xml', '*.vm']
  280. mimetypes = ['application/xml+velocity']
  281. url = 'https://velocity.apache.org/'
  282. def __init__(self, **options):
  283. super().__init__(XmlLexer, VelocityLexer, **options)
  284. def analyse_text(text):
  285. rv = VelocityLexer.analyse_text(text) - 0.01
  286. if looks_like_xml(text):
  287. rv += 0.4
  288. return rv
  289. class DjangoLexer(RegexLexer):
  290. """
  291. Generic `Django <https://www.djangoproject.com/documentation/templates/>`_
  292. and `Jinja <https://jinja.palletsprojects.com>`_ template lexer.
  293. It just highlights django/jinja code between the preprocessor directives,
  294. other data is left untouched by the lexer.
  295. """
  296. name = 'Django/Jinja'
  297. aliases = ['django', 'jinja']
  298. mimetypes = ['application/x-django-templating', 'application/x-jinja']
  299. url = 'https://www.djangoproject.com/documentation/templates'
  300. version_added = ''
  301. flags = re.M | re.S
  302. tokens = {
  303. 'root': [
  304. (r'[^{]+', Other),
  305. (r'\{\{', Comment.Preproc, 'var'),
  306. # jinja/django comments
  307. (r'\{#.*?#\}', Comment),
  308. # django comments
  309. (r'(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)'
  310. r'(\{%)(-?\s*)(endcomment)(\s*-?)(%\})',
  311. bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
  312. Comment, Comment.Preproc, Text, Keyword, Text,
  313. Comment.Preproc)),
  314. # raw jinja blocks
  315. (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
  316. r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
  317. bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
  318. Text, Comment.Preproc, Text, Keyword, Text,
  319. Comment.Preproc)),
  320. # filter blocks
  321. (r'(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)',
  322. bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
  323. 'block'),
  324. (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
  325. bygroups(Comment.Preproc, Text, Keyword), 'block'),
  326. (r'\{', Other)
  327. ],
  328. 'varnames': [
  329. (r'(\|)(\s*)([a-zA-Z_]\w*)',
  330. bygroups(Operator, Text, Name.Function)),
  331. (r'(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)',
  332. bygroups(Keyword, Text, Keyword, Text, Name.Function)),
  333. (r'(_|true|false|none|True|False|None)\b', Keyword.Pseudo),
  334. (r'(in|as|reversed|recursive|not|and|or|is|if|else|import|'
  335. r'with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b',
  336. Keyword),
  337. (r'(loop|block|super|forloop)\b', Name.Builtin),
  338. (r'[a-zA-Z_][\w-]*', Name.Variable),
  339. (r'\.\w+', Name.Variable),
  340. (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
  341. (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
  342. (r'([{}()\[\]+\-*/%,:~]|[><=]=?|!=)', Operator),
  343. (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
  344. r"0[xX][0-9a-fA-F]+[Ll]?", Number),
  345. ],
  346. 'var': [
  347. (r'\s+', Text),
  348. (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
  349. include('varnames')
  350. ],
  351. 'block': [
  352. (r'\s+', Text),
  353. (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
  354. include('varnames'),
  355. (r'.', Punctuation)
  356. ]
  357. }
  358. def analyse_text(text):
  359. rv = 0.0
  360. if re.search(r'\{%\s*(block|extends)', text) is not None:
  361. rv += 0.4
  362. if re.search(r'\{%\s*if\s*.*?%\}', text) is not None:
  363. rv += 0.1
  364. if re.search(r'\{\{.*?\}\}', text) is not None:
  365. rv += 0.1
  366. return rv
class MyghtyLexer(RegexLexer):
    """
    Generic myghty templates lexer. Code that isn't Myghty
    markup is yielded as `Token.Other`.
    """

    name = 'Myghty'
    url = 'http://www.myghty.org/'
    aliases = ['myghty']
    filenames = ['*.myt', 'autodelegate']
    mimetypes = ['application/x-myghty']
    version_added = '0.6'

    tokens = {
        'root': [
            (r'\s+', Text),
            # <%def ...> / <%method ...> blocks, body lexed recursively.
            # NOTE(review): ``\2`` refers to the ``(\s*)`` group here,
            # because ``(?:def|method)`` is non-capturing -- confirm the
            # closing tag is matched as intended.
            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Text, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
            # other <%name ...> blocks, body lexed as Python
            (r'(?s)(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Name.Function, Name.Tag,
                      using(PythonLexer), Name.Tag)),
            # component calls: <& name, args &> and <&| ... &>
            (r'(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PythonLexer), Name.Tag)),
            (r'</&>', Name.Tag),
            # <% ... %> / <%! ... %> python blocks
            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PythonLexer), Name.Tag)),
            # '#' comment lines (only at start of line)
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            # '%' lines hold raw python statements
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
             bygroups(Name.Tag, using(PythonLexer), Other)),
            (r"""(?sx)
             (.+?)               # anything, followed by:
             (?:
              (?<=\n)(?=[%#]) |  # an eval or comment line
              (?=</?[%&]) |      # a substitution or block or
                                 # call start or end
                                 # - don't consume
              (\\\n) |           # an escaped newline
              \Z                 # end of string
             )""", bygroups(Other, Operator)),
        ]
    }
  409. class MyghtyHtmlLexer(DelegatingLexer):
  410. """
  411. Subclass of the `MyghtyLexer` that highlights unlexed data
  412. with the `HtmlLexer`.
  413. """
  414. name = 'HTML+Myghty'
  415. aliases = ['html+myghty']
  416. mimetypes = ['text/html+myghty']
  417. url = 'http://www.myghty.org/'
  418. version_added = '0.6'
  419. def __init__(self, **options):
  420. super().__init__(HtmlLexer, MyghtyLexer, **options)
  421. class MyghtyXmlLexer(DelegatingLexer):
  422. """
  423. Subclass of the `MyghtyLexer` that highlights unlexed data
  424. with the `XmlLexer`.
  425. """
  426. name = 'XML+Myghty'
  427. aliases = ['xml+myghty']
  428. mimetypes = ['application/xml+myghty']
  429. url = 'http://www.myghty.org/'
  430. version_added = '0.6'
  431. def __init__(self, **options):
  432. super().__init__(XmlLexer, MyghtyLexer, **options)
  433. class MyghtyJavascriptLexer(DelegatingLexer):
  434. """
  435. Subclass of the `MyghtyLexer` that highlights unlexed data
  436. with the `JavascriptLexer`.
  437. """
  438. name = 'JavaScript+Myghty'
  439. aliases = ['javascript+myghty', 'js+myghty']
  440. mimetypes = ['application/x-javascript+myghty',
  441. 'text/x-javascript+myghty',
  442. 'text/javascript+mygthy']
  443. url = 'http://www.myghty.org/'
  444. version_added = '0.6'
  445. def __init__(self, **options):
  446. super().__init__(JavascriptLexer, MyghtyLexer, **options)
  447. class MyghtyCssLexer(DelegatingLexer):
  448. """
  449. Subclass of the `MyghtyLexer` that highlights unlexed data
  450. with the `CssLexer`.
  451. """
  452. name = 'CSS+Myghty'
  453. aliases = ['css+myghty']
  454. mimetypes = ['text/css+myghty']
  455. url = 'http://www.myghty.org/'
  456. version_added = '0.6'
  457. def __init__(self, **options):
  458. super().__init__(CssLexer, MyghtyLexer, **options)
class MasonLexer(RegexLexer):
    """
    Generic mason templates lexer. Stolen from Myghty lexer. Code that isn't
    Mason markup is HTML.
    """

    name = 'Mason'
    url = 'http://www.masonhq.com/'
    aliases = ['mason']
    filenames = ['*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler']
    mimetypes = ['application/x-mason']
    version_added = '1.4'

    tokens = {
        'root': [
            (r'\s+', Whitespace),
            # <%doc> ... </%doc> documentation block
            (r'(?s)(<%doc>)(.*?)(</%doc>)',
             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
            # <%def ...> / <%method ...> blocks, body lexed recursively.
            # NOTE(review): ``\2`` refers to the ``(\s*)`` group here,
            # because ``(?:def|method)`` is non-capturing -- confirm the
            # closing tag is matched as intended.
            (r'(?s)(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, Whitespace, Name.Function, Name.Tag,
                      using(this), Name.Tag)),
            # generic <%name ...> block; here ``\2`` does reference the
            # captured tag name.
            (r'(?s)(<%(\w+)(.*?)(>))(.*?)(</%\2\s*>)',
             bygroups(Name.Tag, None, None, None, using(PerlLexer), Name.Tag)),
            # component calls: <& name, args &> and <&| ... &>
            (r'(?s)(<&[^|])(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'(?s)(<&\|)(.*?)(,.*?)?(&>)',
             bygroups(Name.Tag, Name.Function, using(PerlLexer), Name.Tag)),
            (r'</&>', Name.Tag),
            # <% ... %> / <%! ... %> perl blocks
            (r'(?s)(<%!?)(.*?)(%>)',
             bygroups(Name.Tag, using(PerlLexer), Name.Tag)),
            # '#' comment lines (only at start of line)
            (r'(?<=^)#[^\n]*(\n|\Z)', Comment),
            # '%' lines hold raw perl statements
            (r'(?<=^)(%)([^\n]*)(\n|\Z)',
             bygroups(Name.Tag, using(PerlLexer), Other)),
            (r"""(?sx)
             (.+?)               # anything, followed by:
             (?:
              (?<=\n)(?=[%#]) |  # an eval or comment line
              (?=</?[%&]) |      # a substitution or block or
                                 # call start or end
                                 # - don't consume
              (\\\n) |           # an escaped newline
              \Z                 # end of string
             )""", bygroups(using(HtmlLexer), Operator)),
        ]
    }

    def analyse_text(text):
        # Mason-specific closing tags are near-definitive evidence;
        # component-call syntax alone is only weak evidence.
        result = 0.0
        if re.search(r'</%(class|doc|init)>', text) is not None:
            result = 1.0
        elif re.search(r'<&.+&>', text, re.DOTALL) is not None:
            result = 0.11
        return result
class MakoLexer(RegexLexer):
    """
    Generic mako templates lexer. Code that isn't Mako
    markup is yielded as `Token.Other`.
    """

    name = 'Mako'
    url = 'http://www.makotemplates.org/'
    aliases = ['mako']
    filenames = ['*.mao']
    mimetypes = ['application/x-mako']
    version_added = '0.7'

    tokens = {
        'root': [
            # % endif / % endfor ... control-line terminators
            (r'(\s*)(%)(\s*end(?:\w+))(\n|\Z)',
             bygroups(Text.Whitespace, Comment.Preproc, Keyword, Other)),
            # % <python statement> control lines
            (r'(\s*)(%)([^\n]*)(\n|\Z)',
             bygroups(Text.Whitespace, Comment.Preproc, using(PythonLexer), Other)),
            # ## comment lines
            (r'(\s*)(##[^\n]*)(\n|\Z)',
             bygroups(Text.Whitespace, Comment.Single, Text.Whitespace)),
            (r'(?s)<%doc>.*?</%doc>', Comment.Multiline),
            # <%name ...> tags and their closing counterparts
            (r'(<%)([\w.:]+)',
             bygroups(Comment.Preproc, Name.Builtin), 'tag'),
            (r'(</%)([\w.:]+)(>)',
             bygroups(Comment.Preproc, Name.Builtin, Comment.Preproc)),
            (r'<%(?=([\w.:]+))', Comment.Preproc, 'ondeftags'),
            # <% ... %> and <%! ... %> python blocks
            (r'(?s)(<%(?:!?))(.*?)(%>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # ${ ... } substitutions
            (r'(\$\{)(.*?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'''(?sx)
                (.+?)                # anything, followed by:
                (?:
                 (?<=\n)(?=%|\#\#) | # an eval or comment line
                 (?=\#\*) |          # multiline comment
                 (?=</?%) |          # a python block
                                     # call start or end
                 (?=\$\{) |          # a substitution
                 (?<=\n)(?=\s*%) |
                                     # - don't consume
                 (\\\n) |            # an escaped newline
                 \Z                  # end of string
                )
            ''', bygroups(Other, Operator)),
            (r'\s+', Text),
        ],
        'ondeftags': [
            (r'<%', Comment.Preproc),
            (r'(?<=<%)(include|inherit|namespace|page)', Name.Builtin),
            include('tag'),
        ],
        'tag': [
            # name="value" attributes inside a tag
            (r'((?:\w+)\s*=)(\s*)(".*?")',
             bygroups(Name.Attribute, Text, String)),
            (r'/?\s*>', Comment.Preproc, '#pop'),
            (r'\s+', Text),
        ],
        # NOTE(review): 'attr' is not entered from any state visible in this
        # file -- confirm whether it is still reachable before removing it.
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
  571. class MakoHtmlLexer(DelegatingLexer):
  572. """
  573. Subclass of the `MakoLexer` that highlights unlexed data
  574. with the `HtmlLexer`.
  575. """
  576. name = 'HTML+Mako'
  577. aliases = ['html+mako']
  578. mimetypes = ['text/html+mako']
  579. url = 'http://www.makotemplates.org/'
  580. version_added = '0.7'
  581. def __init__(self, **options):
  582. super().__init__(HtmlLexer, MakoLexer, **options)
  583. class MakoXmlLexer(DelegatingLexer):
  584. """
  585. Subclass of the `MakoLexer` that highlights unlexed data
  586. with the `XmlLexer`.
  587. """
  588. name = 'XML+Mako'
  589. aliases = ['xml+mako']
  590. mimetypes = ['application/xml+mako']
  591. url = 'http://www.makotemplates.org/'
  592. version_added = '0.7'
  593. def __init__(self, **options):
  594. super().__init__(XmlLexer, MakoLexer, **options)
  595. class MakoJavascriptLexer(DelegatingLexer):
  596. """
  597. Subclass of the `MakoLexer` that highlights unlexed data
  598. with the `JavascriptLexer`.
  599. """
  600. name = 'JavaScript+Mako'
  601. aliases = ['javascript+mako', 'js+mako']
  602. mimetypes = ['application/x-javascript+mako',
  603. 'text/x-javascript+mako',
  604. 'text/javascript+mako']
  605. url = 'http://www.makotemplates.org/'
  606. version_added = '0.7'
  607. def __init__(self, **options):
  608. super().__init__(JavascriptLexer, MakoLexer, **options)
  609. class MakoCssLexer(DelegatingLexer):
  610. """
  611. Subclass of the `MakoLexer` that highlights unlexed data
  612. with the `CssLexer`.
  613. """
  614. name = 'CSS+Mako'
  615. aliases = ['css+mako']
  616. mimetypes = ['text/css+mako']
  617. url = 'http://www.makotemplates.org/'
  618. version_added = '0.7'
  619. def __init__(self, **options):
  620. super().__init__(CssLexer, MakoLexer, **options)
  621. # Genshi and Cheetah lexers courtesy of Matt Good.
  622. class CheetahPythonLexer(Lexer):
  623. """
  624. Lexer for handling Cheetah's special $ tokens in Python syntax.
  625. """
  626. def get_tokens_unprocessed(self, text):
  627. pylexer = PythonLexer(**self.options)
  628. for pos, type_, value in pylexer.get_tokens_unprocessed(text):
  629. if type_ == Token.Error and value == '$':
  630. type_ = Comment.Preproc
  631. yield pos, type_, value
class CheetahLexer(RegexLexer):
    """
    Generic cheetah templates lexer. Code that isn't Cheetah
    markup is yielded as `Token.Other`. This also works for
    `spitfire templates`_ which use the same syntax.

    .. _spitfire templates: http://code.google.com/p/spitfire/
    """

    name = 'Cheetah'
    url = 'http://www.cheetahtemplate.org/'
    aliases = ['cheetah', 'spitfire']
    filenames = ['*.tmpl', '*.spt']
    mimetypes = ['application/x-cheetah', 'application/x-spitfire']
    version_added = ''

    tokens = {
        'root': [
            # ## line comment
            (r'(##[^\n]*)$',
             (bygroups(Comment))),
            # #* ... *# block comment
            (r'#[*](.|\n)*?[*]#', Comment),
            (r'#end[^#\n]*(?:#|$)', Comment.Preproc),
            (r'#slurp$', Comment.Preproc),
            # #directive <expr># -- expression lexed as Cheetah-flavoured
            # Python (see CheetahPythonLexer)
            (r'(#[a-zA-Z]+)([^#\n]*)(#|$)',
             (bygroups(Comment.Preproc, using(CheetahPythonLexer),
                       Comment.Preproc))),
            # $name.attr placeholders
            # TODO support other Python syntax like $foo['bar']
            (r'(\$)([a-zA-Z_][\w.]*\w)',
             bygroups(Comment.Preproc, using(CheetahPythonLexer))),
            # ${...} / ${!...} placeholders
            (r'(?s)(\$\{!?)(.*?)(\})',
             bygroups(Comment.Preproc, using(CheetahPythonLexer),
                      Comment.Preproc)),
            (r'''(?sx)
                (.+?)               # anything, followed by:
                (?:
                 (?=\#[#a-zA-Z]*) | # an eval comment
                 (?=\$[a-zA-Z_{]) | # a substitution
                 \Z                 # end of string
                )
            ''', Other),
            (r'\s+', Text),
        ],
    }
  672. class CheetahHtmlLexer(DelegatingLexer):
  673. """
  674. Subclass of the `CheetahLexer` that highlights unlexed data
  675. with the `HtmlLexer`.
  676. """
  677. name = 'HTML+Cheetah'
  678. aliases = ['html+cheetah', 'html+spitfire', 'htmlcheetah']
  679. mimetypes = ['text/html+cheetah', 'text/html+spitfire']
  680. url = 'http://www.cheetahtemplate.org/'
  681. version_added = ''
  682. def __init__(self, **options):
  683. super().__init__(HtmlLexer, CheetahLexer, **options)
  684. class CheetahXmlLexer(DelegatingLexer):
  685. """
  686. Subclass of the `CheetahLexer` that highlights unlexed data
  687. with the `XmlLexer`.
  688. """
  689. name = 'XML+Cheetah'
  690. aliases = ['xml+cheetah', 'xml+spitfire']
  691. mimetypes = ['application/xml+cheetah', 'application/xml+spitfire']
  692. url = 'http://www.cheetahtemplate.org/'
  693. version_added = ''
  694. def __init__(self, **options):
  695. super().__init__(XmlLexer, CheetahLexer, **options)
  696. class CheetahJavascriptLexer(DelegatingLexer):
  697. """
  698. Subclass of the `CheetahLexer` that highlights unlexed data
  699. with the `JavascriptLexer`.
  700. """
  701. name = 'JavaScript+Cheetah'
  702. aliases = ['javascript+cheetah', 'js+cheetah',
  703. 'javascript+spitfire', 'js+spitfire']
  704. mimetypes = ['application/x-javascript+cheetah',
  705. 'text/x-javascript+cheetah',
  706. 'text/javascript+cheetah',
  707. 'application/x-javascript+spitfire',
  708. 'text/x-javascript+spitfire',
  709. 'text/javascript+spitfire']
  710. url = 'http://www.cheetahtemplate.org/'
  711. version_added = ''
  712. def __init__(self, **options):
  713. super().__init__(JavascriptLexer, CheetahLexer, **options)
class GenshiTextLexer(RegexLexer):
    """
    A lexer that highlights genshi text templates.
    """

    name = 'Genshi Text'
    url = 'https://genshi.edgewall.org/'
    aliases = ['genshitext']
    mimetypes = ['application/x-genshi-text', 'text/x-genshi']
    version_added = ''

    tokens = {
        'root': [
            # plain text up to a possible '#'/'$' marker
            (r'[^#$\s]+', Other),
            # "##" line comment
            (r'^(\s*)(##.*)$', bygroups(Text, Comment)),
            # "#" directive at start of line
            (r'^(\s*)(#)', bygroups(Text, Comment.Preproc), 'directive'),
            include('variable'),
            # lone markers that started no construct
            (r'[#$\s]', Other),
        ],
        'directive': [
            (r'\n', Text, '#pop'),
            # def/for/if take a full Python expression
            (r'(?:def|for|if)\s+.*', using(PythonLexer), '#pop'),
            (r'(choose|when|with)([^\S\n]+)(.*)',
             bygroups(Keyword, Text, using(PythonLexer)), '#pop'),
            (r'(choose|otherwise)\b', Keyword, '#pop'),
            # "#end ..." — trailing text treated as a comment
            (r'(end\w*)([^\S\n]*)(.*)', bygroups(Keyword, Text, Comment), '#pop'),
        ],
        'variable': [
            # ${expr} — negative lookbehind skips the '$$' escape
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # bare $name
            (r'(?<!\$)(\$)([a-zA-Z_][\w.]*)',
             Name.Variable),
        ]
    }
class GenshiMarkupLexer(RegexLexer):
    """
    Base lexer for Genshi markup, used by `HtmlGenshiLexer` and
    `GenshiLexer`.
    """

    flags = re.DOTALL
    tokens = {
        'root': [
            (r'[^<$]+', Other),
            # <?python ... ?> processing instruction
            (r'(<\?python)(.*?)(\?>)',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            # yield style and script blocks as Other
            (r'<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>', Other),
            # py:* directive elements get their own state
            (r'<\s*py:[a-zA-Z0-9]+', Name.Tag, 'pytag'),
            (r'<\s*[a-zA-Z0-9:.]+', Name.Tag, 'tag'),
            include('variable'),
            (r'[<$]', Other),
        ],
        'pytag': [
            (r'\s+', Text),
            # every attribute of a py: element holds a Python expression
            (r'[\w:-]+\s*=', Name.Attribute, 'pyattr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'pyattr': [
            ('(")(.*?)(")', bygroups(String, using(PythonLexer), String), '#pop'),
            ("(')(.*?)(')", bygroups(String, using(PythonLexer), String), '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
        'tag': [
            (r'\s+', Text),
            # py:* attributes on ordinary tags are Python expressions
            (r'py:[\w-]+\s*=', Name.Attribute, 'pyattr'),
            (r'[\w:-]+\s*=', Name.Attribute, 'attr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'attr': [
            ('"', String, 'attr-dstring'),
            ("'", String, 'attr-sstring'),
            (r'[^\s>]*', String, '#pop')
        ],
        'attr-dstring': [
            ('"', String, '#pop'),
            include('strings'),
            ("'", String)
        ],
        'attr-sstring': [
            ("'", String, '#pop'),
            include('strings'),
            # NOTE(review): unreachable — the pop rule above already
            # consumes every single quote in this state.
            ("'", String)
        ],
        'strings': [
            ('[^"\'$]+', String),
            include('variable')
        ],
        'variable': [
            # ${expr}; lookbehind skips the '$$' escape
            (r'(?<!\$)(\$\{)(.+?)(\})',
             bygroups(Comment.Preproc, using(PythonLexer), Comment.Preproc)),
            (r'(?<!\$)(\$)([a-zA-Z_][\w\.]*)',
             Name.Variable),
        ]
    }
  806. class HtmlGenshiLexer(DelegatingLexer):
  807. """
  808. A lexer that highlights `genshi <https://genshi.edgewall.org/>`_ and
  809. `kid <http://kid-templating.org/>`_ kid HTML templates.
  810. """
  811. name = 'HTML+Genshi'
  812. aliases = ['html+genshi', 'html+kid']
  813. version_added = ''
  814. alias_filenames = ['*.html', '*.htm', '*.xhtml']
  815. mimetypes = ['text/html+genshi']
  816. url = 'https://genshi.edgewall.org/'
  817. def __init__(self, **options):
  818. super().__init__(HtmlLexer, GenshiMarkupLexer, **options)
  819. def analyse_text(text):
  820. rv = 0.0
  821. if re.search(r'\$\{.*?\}', text) is not None:
  822. rv += 0.2
  823. if re.search(r'py:(.*?)=["\']', text) is not None:
  824. rv += 0.2
  825. return rv + HtmlLexer.analyse_text(text) - 0.01
  826. class GenshiLexer(DelegatingLexer):
  827. """
  828. A lexer that highlights `genshi <https://genshi.edgewall.org/>`_ and
  829. `kid <http://kid-templating.org/>`_ kid XML templates.
  830. """
  831. name = 'Genshi'
  832. aliases = ['genshi', 'kid', 'xml+genshi', 'xml+kid']
  833. filenames = ['*.kid']
  834. version_added = ''
  835. alias_filenames = ['*.xml']
  836. mimetypes = ['application/x-genshi', 'application/x-kid']
  837. url = 'https://genshi.edgewall.org/'
  838. def __init__(self, **options):
  839. super().__init__(XmlLexer, GenshiMarkupLexer, **options)
  840. def analyse_text(text):
  841. rv = 0.0
  842. if re.search(r'\$\{.*?\}', text) is not None:
  843. rv += 0.2
  844. if re.search(r'py:(.*?)=["\']', text) is not None:
  845. rv += 0.2
  846. return rv + XmlLexer.analyse_text(text) - 0.01
  847. class JavascriptGenshiLexer(DelegatingLexer):
  848. """
  849. A lexer that highlights javascript code in genshi text templates.
  850. """
  851. name = 'JavaScript+Genshi Text'
  852. aliases = ['js+genshitext', 'js+genshi', 'javascript+genshitext',
  853. 'javascript+genshi']
  854. version_added = ''
  855. alias_filenames = ['*.js']
  856. mimetypes = ['application/x-javascript+genshi',
  857. 'text/x-javascript+genshi',
  858. 'text/javascript+genshi']
  859. url = 'https://genshi.edgewall.org'
  860. def __init__(self, **options):
  861. super().__init__(JavascriptLexer, GenshiTextLexer, **options)
  862. def analyse_text(text):
  863. return GenshiLexer.analyse_text(text) - 0.05
  864. class CssGenshiLexer(DelegatingLexer):
  865. """
  866. A lexer that highlights CSS definitions in genshi text templates.
  867. """
  868. name = 'CSS+Genshi Text'
  869. aliases = ['css+genshitext', 'css+genshi']
  870. version_added = ''
  871. alias_filenames = ['*.css']
  872. mimetypes = ['text/css+genshi']
  873. url = 'https://genshi.edgewall.org'
  874. def __init__(self, **options):
  875. super().__init__(CssLexer, GenshiTextLexer, **options)
  876. def analyse_text(text):
  877. return GenshiLexer.analyse_text(text) - 0.05
  878. class RhtmlLexer(DelegatingLexer):
  879. """
  880. Subclass of the ERB lexer that highlights the unlexed data with the
  881. html lexer.
  882. Nested Javascript and CSS is highlighted too.
  883. """
  884. name = 'RHTML'
  885. aliases = ['rhtml', 'html+erb', 'html+ruby']
  886. filenames = ['*.rhtml']
  887. version_added = ''
  888. alias_filenames = ['*.html', '*.htm', '*.xhtml']
  889. mimetypes = ['text/html+ruby']
  890. url = 'https://github.com/ruby/erb'
  891. def __init__(self, **options):
  892. super().__init__(HtmlLexer, ErbLexer, **options)
  893. def analyse_text(text):
  894. rv = ErbLexer.analyse_text(text) - 0.01
  895. if html_doctype_matches(text):
  896. # one more than the XmlErbLexer returns
  897. rv += 0.5
  898. return rv
  899. class XmlErbLexer(DelegatingLexer):
  900. """
  901. Subclass of `ErbLexer` which highlights data outside preprocessor
  902. directives with the `XmlLexer`.
  903. """
  904. name = 'XML+Ruby'
  905. aliases = ['xml+ruby', 'xml+erb']
  906. version_added = ''
  907. alias_filenames = ['*.xml']
  908. mimetypes = ['application/xml+ruby']
  909. url = 'https://github.com/ruby/erb'
  910. def __init__(self, **options):
  911. super().__init__(XmlLexer, ErbLexer, **options)
  912. def analyse_text(text):
  913. rv = ErbLexer.analyse_text(text) - 0.01
  914. if looks_like_xml(text):
  915. rv += 0.4
  916. return rv
  917. class CssErbLexer(DelegatingLexer):
  918. """
  919. Subclass of `ErbLexer` which highlights unlexed data with the `CssLexer`.
  920. """
  921. name = 'CSS+Ruby'
  922. aliases = ['css+ruby', 'css+erb']
  923. version_added = ''
  924. alias_filenames = ['*.css']
  925. mimetypes = ['text/css+ruby']
  926. url = 'https://github.com/ruby/erb'
  927. def __init__(self, **options):
  928. super().__init__(CssLexer, ErbLexer, **options)
  929. def analyse_text(text):
  930. return ErbLexer.analyse_text(text) - 0.05
  931. class JavascriptErbLexer(DelegatingLexer):
  932. """
  933. Subclass of `ErbLexer` which highlights unlexed data with the
  934. `JavascriptLexer`.
  935. """
  936. name = 'JavaScript+Ruby'
  937. aliases = ['javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb']
  938. version_added = ''
  939. alias_filenames = ['*.js']
  940. mimetypes = ['application/x-javascript+ruby',
  941. 'text/x-javascript+ruby',
  942. 'text/javascript+ruby']
  943. url = 'https://github.com/ruby/erb'
  944. def __init__(self, **options):
  945. super().__init__(JavascriptLexer, ErbLexer, **options)
  946. def analyse_text(text):
  947. return ErbLexer.analyse_text(text) - 0.05
  948. class HtmlPhpLexer(DelegatingLexer):
  949. """
  950. Subclass of `PhpLexer` that highlights unhandled data with the `HtmlLexer`.
  951. Nested Javascript and CSS is highlighted too.
  952. """
  953. name = 'HTML+PHP'
  954. aliases = ['html+php']
  955. filenames = ['*.phtml']
  956. version_added = ''
  957. alias_filenames = ['*.php', '*.html', '*.htm', '*.xhtml',
  958. '*.php[345]']
  959. mimetypes = ['application/x-php',
  960. 'application/x-httpd-php', 'application/x-httpd-php3',
  961. 'application/x-httpd-php4', 'application/x-httpd-php5']
  962. url = 'https://www.php.net'
  963. def __init__(self, **options):
  964. super().__init__(HtmlLexer, PhpLexer, **options)
  965. def analyse_text(text):
  966. rv = PhpLexer.analyse_text(text) - 0.01
  967. if html_doctype_matches(text):
  968. rv += 0.5
  969. return rv
  970. class XmlPhpLexer(DelegatingLexer):
  971. """
  972. Subclass of `PhpLexer` that highlights unhandled data with the `XmlLexer`.
  973. """
  974. name = 'XML+PHP'
  975. aliases = ['xml+php']
  976. version_added = ''
  977. alias_filenames = ['*.xml', '*.php', '*.php[345]']
  978. mimetypes = ['application/xml+php']
  979. url = 'https://www.php.net'
  980. def __init__(self, **options):
  981. super().__init__(XmlLexer, PhpLexer, **options)
  982. def analyse_text(text):
  983. rv = PhpLexer.analyse_text(text) - 0.01
  984. if looks_like_xml(text):
  985. rv += 0.4
  986. return rv
  987. class CssPhpLexer(DelegatingLexer):
  988. """
  989. Subclass of `PhpLexer` which highlights unmatched data with the `CssLexer`.
  990. """
  991. name = 'CSS+PHP'
  992. aliases = ['css+php']
  993. version_added = ''
  994. alias_filenames = ['*.css']
  995. mimetypes = ['text/css+php']
  996. url = 'https://www.php.net'
  997. def __init__(self, **options):
  998. super().__init__(CssLexer, PhpLexer, **options)
  999. def analyse_text(text):
  1000. return PhpLexer.analyse_text(text) - 0.05
  1001. class JavascriptPhpLexer(DelegatingLexer):
  1002. """
  1003. Subclass of `PhpLexer` which highlights unmatched data with the
  1004. `JavascriptLexer`.
  1005. """
  1006. name = 'JavaScript+PHP'
  1007. aliases = ['javascript+php', 'js+php']
  1008. version_added = ''
  1009. alias_filenames = ['*.js']
  1010. mimetypes = ['application/x-javascript+php',
  1011. 'text/x-javascript+php',
  1012. 'text/javascript+php']
  1013. url = 'https://www.php.net'
  1014. def __init__(self, **options):
  1015. super().__init__(JavascriptLexer, PhpLexer, **options)
  1016. def analyse_text(text):
  1017. return PhpLexer.analyse_text(text)
  1018. class HtmlSmartyLexer(DelegatingLexer):
  1019. """
  1020. Subclass of the `SmartyLexer` that highlights unlexed data with the
  1021. `HtmlLexer`.
  1022. Nested Javascript and CSS is highlighted too.
  1023. """
  1024. name = 'HTML+Smarty'
  1025. aliases = ['html+smarty']
  1026. version_added = ''
  1027. alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.tpl']
  1028. mimetypes = ['text/html+smarty']
  1029. url = 'https://www.smarty.net/'
  1030. def __init__(self, **options):
  1031. super().__init__(HtmlLexer, SmartyLexer, **options)
  1032. def analyse_text(text):
  1033. rv = SmartyLexer.analyse_text(text) - 0.01
  1034. if html_doctype_matches(text):
  1035. rv += 0.5
  1036. return rv
  1037. class XmlSmartyLexer(DelegatingLexer):
  1038. """
  1039. Subclass of the `SmartyLexer` that highlights unlexed data with the
  1040. `XmlLexer`.
  1041. """
  1042. name = 'XML+Smarty'
  1043. aliases = ['xml+smarty']
  1044. version_added = ''
  1045. alias_filenames = ['*.xml', '*.tpl']
  1046. mimetypes = ['application/xml+smarty']
  1047. url = 'https://www.smarty.net/'
  1048. def __init__(self, **options):
  1049. super().__init__(XmlLexer, SmartyLexer, **options)
  1050. def analyse_text(text):
  1051. rv = SmartyLexer.analyse_text(text) - 0.01
  1052. if looks_like_xml(text):
  1053. rv += 0.4
  1054. return rv
  1055. class CssSmartyLexer(DelegatingLexer):
  1056. """
  1057. Subclass of the `SmartyLexer` that highlights unlexed data with the
  1058. `CssLexer`.
  1059. """
  1060. name = 'CSS+Smarty'
  1061. aliases = ['css+smarty']
  1062. version_added = ''
  1063. alias_filenames = ['*.css', '*.tpl']
  1064. mimetypes = ['text/css+smarty']
  1065. url = 'https://www.smarty.net/'
  1066. def __init__(self, **options):
  1067. super().__init__(CssLexer, SmartyLexer, **options)
  1068. def analyse_text(text):
  1069. return SmartyLexer.analyse_text(text) - 0.05
  1070. class JavascriptSmartyLexer(DelegatingLexer):
  1071. """
  1072. Subclass of the `SmartyLexer` that highlights unlexed data with the
  1073. `JavascriptLexer`.
  1074. """
  1075. name = 'JavaScript+Smarty'
  1076. aliases = ['javascript+smarty', 'js+smarty']
  1077. version_added = ''
  1078. alias_filenames = ['*.js', '*.tpl']
  1079. mimetypes = ['application/x-javascript+smarty',
  1080. 'text/x-javascript+smarty',
  1081. 'text/javascript+smarty']
  1082. url = 'https://www.smarty.net/'
  1083. def __init__(self, **options):
  1084. super().__init__(JavascriptLexer, SmartyLexer, **options)
  1085. def analyse_text(text):
  1086. return SmartyLexer.analyse_text(text) - 0.05
  1087. class HtmlDjangoLexer(DelegatingLexer):
  1088. """
  1089. Subclass of the `DjangoLexer` that highlights unlexed data with the
  1090. `HtmlLexer`.
  1091. Nested Javascript and CSS is highlighted too.
  1092. """
  1093. name = 'HTML+Django/Jinja'
  1094. aliases = ['html+django', 'html+jinja', 'htmldjango']
  1095. filenames = ['*.html.j2', '*.htm.j2', '*.xhtml.j2', '*.html.jinja2', '*.htm.jinja2', '*.xhtml.jinja2']
  1096. version_added = ''
  1097. alias_filenames = ['*.html', '*.htm', '*.xhtml']
  1098. mimetypes = ['text/html+django', 'text/html+jinja']
  1099. url = 'https://www.djangoproject.com/documentation/templates'
  1100. def __init__(self, **options):
  1101. super().__init__(HtmlLexer, DjangoLexer, **options)
  1102. def analyse_text(text):
  1103. rv = DjangoLexer.analyse_text(text) - 0.01
  1104. if html_doctype_matches(text):
  1105. rv += 0.5
  1106. return rv
  1107. class XmlDjangoLexer(DelegatingLexer):
  1108. """
  1109. Subclass of the `DjangoLexer` that highlights unlexed data with the
  1110. `XmlLexer`.
  1111. """
  1112. name = 'XML+Django/Jinja'
  1113. aliases = ['xml+django', 'xml+jinja']
  1114. filenames = ['*.xml.j2', '*.xml.jinja2']
  1115. version_added = ''
  1116. alias_filenames = ['*.xml']
  1117. mimetypes = ['application/xml+django', 'application/xml+jinja']
  1118. url = 'https://www.djangoproject.com/documentation/templates'
  1119. def __init__(self, **options):
  1120. super().__init__(XmlLexer, DjangoLexer, **options)
  1121. def analyse_text(text):
  1122. rv = DjangoLexer.analyse_text(text) - 0.01
  1123. if looks_like_xml(text):
  1124. rv += 0.4
  1125. return rv
  1126. class CssDjangoLexer(DelegatingLexer):
  1127. """
  1128. Subclass of the `DjangoLexer` that highlights unlexed data with the
  1129. `CssLexer`.
  1130. """
  1131. name = 'CSS+Django/Jinja'
  1132. aliases = ['css+django', 'css+jinja']
  1133. filenames = ['*.css.j2', '*.css.jinja2']
  1134. version_added = ''
  1135. alias_filenames = ['*.css']
  1136. mimetypes = ['text/css+django', 'text/css+jinja']
  1137. url = 'https://www.djangoproject.com/documentation/templates'
  1138. def __init__(self, **options):
  1139. super().__init__(CssLexer, DjangoLexer, **options)
  1140. def analyse_text(text):
  1141. return DjangoLexer.analyse_text(text) - 0.05
  1142. class JavascriptDjangoLexer(DelegatingLexer):
  1143. """
  1144. Subclass of the `DjangoLexer` that highlights unlexed data with the
  1145. `JavascriptLexer`.
  1146. """
  1147. name = 'JavaScript+Django/Jinja'
  1148. aliases = ['javascript+django', 'js+django',
  1149. 'javascript+jinja', 'js+jinja']
  1150. filenames = ['*.js.j2', '*.js.jinja2']
  1151. version_added = ''
  1152. alias_filenames = ['*.js']
  1153. mimetypes = ['application/x-javascript+django',
  1154. 'application/x-javascript+jinja',
  1155. 'text/x-javascript+django',
  1156. 'text/x-javascript+jinja',
  1157. 'text/javascript+django',
  1158. 'text/javascript+jinja']
  1159. url = 'https://www.djangoproject.com/documentation/templates'
  1160. def __init__(self, **options):
  1161. super().__init__(JavascriptLexer, DjangoLexer, **options)
  1162. def analyse_text(text):
  1163. return DjangoLexer.analyse_text(text) - 0.05
class JspRootLexer(RegexLexer):
    """
    Base for the `JspLexer`. Yields `Token.Other` for area outside of
    JSP tags.

    .. versionadded:: 0.7
    """

    tokens = {
        'root': [
            # <% / <%= / <%! etc. open a Java scriptlet section
            (r'<%\S?', Keyword, 'sec'),
            # FIXME: I want to make these keywords but still parse attributes.
            (r'</?jsp:(forward|getProperty|include|plugin|setProperty|useBean).*?>',
             Keyword),
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # note: '\w\W' != '.' without DOTALL.
            # everything up to the closing %> (or EOF) is Java code
            (r'[\w\W]+?(?=%>|\Z)', using(JavaLexer)),
        ],
    }
  1185. class JspLexer(DelegatingLexer):
  1186. """
  1187. Lexer for Java Server Pages.
  1188. """
  1189. name = 'Java Server Page'
  1190. aliases = ['jsp']
  1191. filenames = ['*.jsp']
  1192. mimetypes = ['application/x-jsp']
  1193. url = 'https://projects.eclipse.org/projects/ee4j.jsp'
  1194. version_added = '0.7'
  1195. def __init__(self, **options):
  1196. super().__init__(XmlLexer, JspRootLexer, **options)
  1197. def analyse_text(text):
  1198. rv = JavaLexer.analyse_text(text) - 0.01
  1199. if looks_like_xml(text):
  1200. rv += 0.4
  1201. if '<%' in text and '%>' in text:
  1202. rv += 0.1
  1203. return rv
class EvoqueLexer(RegexLexer):
    """
    For files using the Evoque templating system.
    """

    name = 'Evoque'
    aliases = ['evoque']
    filenames = ['*.evoque']
    mimetypes = ['application/x-evoque']
    url = 'https://gizmojo.org/templating'
    version_added = '1.1'

    flags = re.DOTALL
    tokens = {
        'root': [
            (r'[^#$]+', Other),
            (r'#\[', Comment.Multiline, 'comment'),
            # escaped dollar sign
            (r'\$\$', Other),
            # svn keywords
            (r'\$\w+:[^$\n]*\$', Comment.Multiline),
            # directives: begin, end
            # group 4 captures an optional '%'; the conditional (?(4)...)
            # requires a matching '%' before the closing brace
            (r'(\$)(begin|end)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, Punctuation)),
            # directives: evoque, overlay
            # see doc for handling first name arg: /directives/evoque/
            # + minor inconsistency: the "name" in e.g. $overlay{name=site_base}
            # should be using(PythonLexer), not passed out as String
            (r'(\$)(evoque|overlay)(\{(%)?)(\s*[#\w\-"\'.]+)?'
             r'(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      String, using(PythonLexer), Punctuation)),
            # directives: if, for, prefer, test
            (r'(\$)(\w+)(\{(%)?)(.*?)((?(4)%)\})',
             bygroups(Punctuation, Name.Builtin, Punctuation, None,
                      using(PythonLexer), Punctuation)),
            # directive clauses (no {} expression)
            (r'(\$)(else|rof|fi)', bygroups(Punctuation, Name.Builtin)),
            # expressions
            (r'(\$\{(%)?)(.*?)((!)(.*?))?((?(2)%)\})',
             bygroups(Punctuation, None, using(PythonLexer),
                      Name.Builtin, None, None, Punctuation)),
            (r'#', Other),
        ],
        'comment': [
            (r'[^\]#]', Comment.Multiline),
            # comments nest via #push / #pop
            (r'#\[', Comment.Multiline, '#push'),
            (r'\]#', Comment.Multiline, '#pop'),
            (r'[\]#]', Comment.Multiline)
        ],
    }

    def analyse_text(text):
        """Evoque templates use $evoque, which is unique."""
        # Implicitly returns None (falsy) when the marker is absent.
        if '$evoque' in text:
            return 1
  1257. class EvoqueHtmlLexer(DelegatingLexer):
  1258. """
  1259. Subclass of the `EvoqueLexer` that highlights unlexed data with the
  1260. `HtmlLexer`.
  1261. """
  1262. name = 'HTML+Evoque'
  1263. aliases = ['html+evoque']
  1264. filenames = ['*.html']
  1265. mimetypes = ['text/html+evoque']
  1266. url = 'https://gizmojo.org/templating'
  1267. version_added = '1.1'
  1268. def __init__(self, **options):
  1269. super().__init__(HtmlLexer, EvoqueLexer, **options)
  1270. def analyse_text(text):
  1271. return EvoqueLexer.analyse_text(text)
  1272. class EvoqueXmlLexer(DelegatingLexer):
  1273. """
  1274. Subclass of the `EvoqueLexer` that highlights unlexed data with the
  1275. `XmlLexer`.
  1276. """
  1277. name = 'XML+Evoque'
  1278. aliases = ['xml+evoque']
  1279. filenames = ['*.xml']
  1280. mimetypes = ['application/xml+evoque']
  1281. url = 'https://gizmojo.org/templating'
  1282. version_added = '1.1'
  1283. def __init__(self, **options):
  1284. super().__init__(XmlLexer, EvoqueLexer, **options)
  1285. def analyse_text(text):
  1286. return EvoqueLexer.analyse_text(text)
class ColdfusionLexer(RegexLexer):
    """
    Coldfusion statements
    """

    name = 'cfstatement'
    aliases = ['cfs']
    filenames = []
    mimetypes = []
    url = 'https://www.adobe.com/products/coldfusion-family.html'
    version_added = ''

    flags = re.IGNORECASE
    tokens = {
        'root': [
            (r'//.*?\n', Comment.Single),
            (r'/\*(?:.|\n)*?\*/', Comment.Multiline),
            (r'\+\+|--', Operator),
            (r'[-+*/^&=!]', Operator),
            (r'<=|>=|<|>|==', Operator),
            (r'mod\b', Operator),
            # CFML word operators
            (r'(eq|lt|gt|lte|gte|not|is|and|or)\b', Operator),
            (r'\|\||&&', Operator),
            (r'\?', Operator),
            (r'"', String.Double, 'string'),
            # There is a special rule for allowing html in single quoted
            # strings, evidently.
            (r"'.*?'", String.Single),
            (r'\d+', Number),
            # NOTE(review): 'component' appears twice in this alternation
            # (harmless, but redundant).
            (r'(if|else|len|var|xml|default|break|switch|component|property|function|do|'
             r'try|catch|in|continue|for|return|while|required|any|array|binary|boolean|'
             r'component|date|guid|numeric|query|string|struct|uuid|case)\b', Keyword),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'(application|session|client|cookie|super|this|variables|arguments)\b',
             Name.Constant),
            # identifier immediately followed by '(' is a function call
            (r'([a-z_$][\w.]*)(\s*)(\()',
             bygroups(Name.Function, Text, Punctuation)),
            (r'[a-z_$][\w.]*', Name.Variable),
            (r'[()\[\]{};:,.\\]', Punctuation),
            (r'\s+', Text),
        ],
        'string': [
            # doubled quote is the escape for '"' inside a string
            (r'""', String.Double),
            # #expr# interpolation
            (r'#.+?#', String.Interp),
            (r'[^"#]+', String.Double),
            (r'#', String.Double),
            (r'"', String.Double, '#pop'),
        ],
    }
class ColdfusionMarkupLexer(RegexLexer):
    """
    Coldfusion markup only
    """

    name = 'Coldfusion'
    aliases = ['cf']
    filenames = []
    mimetypes = []
    url = 'https://www.adobe.com/products/coldfusion-family.html'

    tokens = {
        'root': [
            (r'[^<]+', Other),
            include('tags'),
            (r'<[^<>]*', Other),
        ],
        'tags': [
            # CFML comment <!--- ... ---> (nests; see 'cfcomment')
            (r'<!---', Comment.Multiline, 'cfcomment'),
            # plain HTML comment
            (r'(?s)<!--.*?-->', Comment),
            (r'<cfoutput.*?>', Name.Builtin, 'cfoutput'),
            # <cfscript> bodies are CFScript statements
            (r'(?s)(<cfscript.*?>)(.+?)(</cfscript.*?>)',
             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
            # negative lookbehind is for strings with embedded >
            (r'(?s)(</?cf(?:component|include|if|else|elseif|loop|return|'
             r'dbinfo|dump|abort|location|invoke|throw|file|savecontent|'
             r'mailpart|mail|header|content|zip|image|lock|argument|try|'
             r'catch|break|directory|http|set|function|param)\b)(.*?)((?<!\\)>)',
             bygroups(Name.Builtin, using(ColdfusionLexer), Name.Builtin)),
        ],
        'cfoutput': [
            (r'[^#<]+', Other),
            # #expr# interpolation inside cfoutput
            (r'(#)(.*?)(#)', bygroups(Punctuation, using(ColdfusionLexer),
                                      Punctuation)),
            # (r'<cfoutput.*?>', Name.Builtin, '#push'),
            (r'</cfoutput.*?>', Name.Builtin, '#pop'),
            include('tags'),
            (r'(?s)<[^<>]*', Other),
            (r'#', Other),
        ],
        'cfcomment': [
            # CFML comments nest
            (r'<!---', Comment.Multiline, '#push'),
            (r'--->', Comment.Multiline, '#pop'),
            (r'([^<-]|<(?!!---)|-(?!-->))+', Comment.Multiline),
        ],
    }
  1378. class ColdfusionHtmlLexer(DelegatingLexer):
  1379. """
  1380. Coldfusion markup in html
  1381. """
  1382. name = 'Coldfusion HTML'
  1383. aliases = ['cfm']
  1384. filenames = ['*.cfm', '*.cfml']
  1385. mimetypes = ['application/x-coldfusion']
  1386. url = 'https://www.adobe.com/products/coldfusion-family.html'
  1387. version_added = ''
  1388. def __init__(self, **options):
  1389. super().__init__(HtmlLexer, ColdfusionMarkupLexer, **options)
  1390. class ColdfusionCFCLexer(DelegatingLexer):
  1391. """
  1392. Coldfusion markup/script components
  1393. """
  1394. name = 'Coldfusion CFC'
  1395. aliases = ['cfc']
  1396. filenames = ['*.cfc']
  1397. mimetypes = []
  1398. url = 'https://www.adobe.com/products/coldfusion-family.html'
  1399. version_added = '2.0'
  1400. def __init__(self, **options):
  1401. super().__init__(ColdfusionHtmlLexer, ColdfusionLexer, **options)
  1402. class SspLexer(DelegatingLexer):
  1403. """
  1404. Lexer for Scalate Server Pages.
  1405. """
  1406. name = 'Scalate Server Page'
  1407. aliases = ['ssp']
  1408. filenames = ['*.ssp']
  1409. mimetypes = ['application/x-ssp']
  1410. url = 'https://scalate.github.io/scalate/'
  1411. version_added = '1.4'
  1412. def __init__(self, **options):
  1413. super().__init__(XmlLexer, JspRootLexer, **options)
  1414. def analyse_text(text):
  1415. rv = 0.0
  1416. if re.search(r'val \w+\s*:', text):
  1417. rv += 0.6
  1418. if looks_like_xml(text):
  1419. rv += 0.2
  1420. if '<%' in text and '%>' in text:
  1421. rv += 0.1
  1422. return rv
class TeaTemplateRootLexer(RegexLexer):
    """
    Base for the `TeaTemplateLexer`. Yields `Token.Other` for area outside of
    code blocks.

    .. versionadded:: 1.5
    """

    tokens = {
        'root': [
            # <% opens a Tea code section
            (r'<%\S?', Keyword, 'sec'),
            (r'[^<]+', Other),
            (r'<', Other),
        ],
        'sec': [
            (r'%>', Keyword, '#pop'),
            # note: '\w\W' != '.' without DOTALL.
            # everything up to the closing %> (or EOF) is Tea code
            (r'[\w\W]+?(?=%>|\Z)', using(TeaLangLexer)),
        ],
    }
  1441. class TeaTemplateLexer(DelegatingLexer):
  1442. """
  1443. Lexer for Tea Templates.
  1444. """
  1445. name = 'Tea'
  1446. aliases = ['tea']
  1447. filenames = ['*.tea']
  1448. mimetypes = ['text/x-tea']
  1449. url = 'https://github.com/teatrove/teatrove'
  1450. version_added = '1.5'
  1451. def __init__(self, **options):
  1452. super().__init__(XmlLexer, TeaTemplateRootLexer, **options)
  1453. def analyse_text(text):
  1454. rv = TeaLangLexer.analyse_text(text) - 0.01
  1455. if looks_like_xml(text):
  1456. rv += 0.4
  1457. if '<%' in text and '%>' in text:
  1458. rv += 0.1
  1459. return rv
  1460. class LassoHtmlLexer(DelegatingLexer):
  1461. """
  1462. Subclass of the `LassoLexer` which highlights unhandled data with the
  1463. `HtmlLexer`.
  1464. Nested JavaScript and CSS is also highlighted.
  1465. """
  1466. name = 'HTML+Lasso'
  1467. aliases = ['html+lasso']
  1468. version_added = '1.6'
  1469. alias_filenames = ['*.html', '*.htm', '*.xhtml', '*.lasso', '*.lasso[89]',
  1470. '*.incl', '*.inc', '*.las']
  1471. mimetypes = ['text/html+lasso',
  1472. 'application/x-httpd-lasso',
  1473. 'application/x-httpd-lasso[89]']
  1474. url = 'https://www.lassosoft.com'
  1475. def __init__(self, **options):
  1476. super().__init__(HtmlLexer, LassoLexer, **options)
  1477. def analyse_text(text):
  1478. rv = LassoLexer.analyse_text(text) - 0.01
  1479. if html_doctype_matches(text): # same as HTML lexer
  1480. rv += 0.5
  1481. return rv
  1482. class LassoXmlLexer(DelegatingLexer):
  1483. """
  1484. Subclass of the `LassoLexer` which highlights unhandled data with the
  1485. `XmlLexer`.
  1486. """
  1487. name = 'XML+Lasso'
  1488. aliases = ['xml+lasso']
  1489. version_added = '1.6'
  1490. alias_filenames = ['*.xml', '*.lasso', '*.lasso[89]',
  1491. '*.incl', '*.inc', '*.las']
  1492. mimetypes = ['application/xml+lasso']
  1493. url = 'https://www.lassosoft.com'
  1494. def __init__(self, **options):
  1495. super().__init__(XmlLexer, LassoLexer, **options)
  1496. def analyse_text(text):
  1497. rv = LassoLexer.analyse_text(text) - 0.01
  1498. if looks_like_xml(text):
  1499. rv += 0.4
  1500. return rv
  1501. class LassoCssLexer(DelegatingLexer):
  1502. """
  1503. Subclass of the `LassoLexer` which highlights unhandled data with the
  1504. `CssLexer`.
  1505. """
  1506. name = 'CSS+Lasso'
  1507. aliases = ['css+lasso']
  1508. version_added = '1.6'
  1509. alias_filenames = ['*.css']
  1510. mimetypes = ['text/css+lasso']
  1511. url = 'https://www.lassosoft.com'
  1512. def __init__(self, **options):
  1513. options['requiredelimiters'] = True
  1514. super().__init__(CssLexer, LassoLexer, **options)
  1515. def analyse_text(text):
  1516. rv = LassoLexer.analyse_text(text) - 0.05
  1517. if re.search(r'\w+:[^;]+;', text):
  1518. rv += 0.1
  1519. if 'padding:' in text:
  1520. rv += 0.1
  1521. return rv
  1522. class LassoJavascriptLexer(DelegatingLexer):
  1523. """
  1524. Subclass of the `LassoLexer` which highlights unhandled data with the
  1525. `JavascriptLexer`.
  1526. """
  1527. name = 'JavaScript+Lasso'
  1528. aliases = ['javascript+lasso', 'js+lasso']
  1529. version_added = '1.6'
  1530. alias_filenames = ['*.js']
  1531. mimetypes = ['application/x-javascript+lasso',
  1532. 'text/x-javascript+lasso',
  1533. 'text/javascript+lasso']
  1534. url = 'https://www.lassosoft.com'
  1535. def __init__(self, **options):
  1536. options['requiredelimiters'] = True
  1537. super().__init__(JavascriptLexer, LassoLexer, **options)
  1538. def analyse_text(text):
  1539. rv = LassoLexer.analyse_text(text) - 0.05
  1540. return rv
class HandlebarsLexer(RegexLexer):
    """
    Generic handlebars template lexer.

    Highlights only the Handlebars template tags (stuff between `{{` and `}}`).
    Everything else is left for a delegating lexer.
    """

    name = "Handlebars"
    url = 'https://handlebarsjs.com/'
    aliases = ['handlebars']
    version_added = '2.0'

    tokens = {
        'root': [
            # Anything up to the next brace is left for the delegating lexer.
            (r'[^{]+', Other),

            # Comment start {{! }} or {{!--
            (r'\{\{!.*\}\}', Comment),

            # HTML Escaping open {{{expression
            (r'(\{\{\{)(\s*)', bygroups(Comment.Special, Text), 'tag'),

            # {{blockOpen {{#blockOpen {{/blockClose with optional tilde ~
            (r'(\{\{)([#~/]+)([^\s}]*)',
             bygroups(Comment.Preproc, Number.Attribute, Number.Attribute), 'tag'),
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'tag'),
        ],

        'tag': [
            (r'\s+', Text),
            # HTML Escaping close }}}
            (r'\}\}\}', Comment.Special, '#pop'),
            # blockClose}}, includes optional tilde ~
            (r'(~?)(\}\})', bygroups(Number, Comment.Preproc), '#pop'),

            # {{opt=something}}
            (r'([^\s}]+)(=)', bygroups(Name.Attribute, Operator)),

            # Partials {{> ...}}
            (r'(>)(\s*)(@partial-block)', bygroups(Keyword, Text, Keyword)),
            (r'(#?>)(\s*)([\w-]+)', bygroups(Keyword, Text, Name.Variable)),
            # Dynamic partials {{> (...) }} — subexpression picks the name.
            (r'(>)(\s*)(\()', bygroups(Keyword, Text, Punctuation),
             'dynamic-partial'),

            include('generic'),
        ],
        'dynamic-partial': [
            (r'\s+', Text),
            (r'\)', Punctuation, '#pop'),

            (r'(lookup)(\s+)(\.|this)(\s+)', bygroups(Keyword, Text,
                                                      Name.Variable, Text)),
            (r'(lookup)(\s+)(\S+)', bygroups(Keyword, Text,
                                             using(this, state='variable'))),
            (r'[\w-]+', Name.Function),

            include('generic'),
        ],
        'variable': [
            (r'[()/@a-zA-Z][\w-]*', Name.Variable),
            (r'\.[\w-]+', Name.Variable),
            # path references: this/, ./ and ../ chains
            (r'(this\/|\.\/|(\.\.\/)+)[\w-]+', Name.Variable),
        ],

        'generic': [
            include('variable'),

            # borrowed from DjangoLexer
            (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            # NOTE(review): '(eE[+-][0-9])?' only matches a literal "eE";
            # presumably '([eE][+-]?[0-9]+)?' was intended — same quirk exists
            # in the other lexers that borrowed this pattern. Confirm upstream
            # before changing.
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),
        ]
    }
  1602. class HandlebarsHtmlLexer(DelegatingLexer):
  1603. """
  1604. Subclass of the `HandlebarsLexer` that highlights unlexed data with the
  1605. `HtmlLexer`.
  1606. """
  1607. name = "HTML+Handlebars"
  1608. aliases = ["html+handlebars"]
  1609. filenames = ['*.handlebars', '*.hbs']
  1610. mimetypes = ['text/html+handlebars', 'text/x-handlebars-template']
  1611. url = 'https://handlebarsjs.com/'
  1612. version_added = '2.0'
  1613. def __init__(self, **options):
  1614. super().__init__(HtmlLexer, HandlebarsLexer, **options)
  1615. class YamlJinjaLexer(DelegatingLexer):
  1616. """
  1617. Subclass of the `DjangoLexer` that highlights unlexed data with the
  1618. `YamlLexer`.
  1619. Commonly used in Saltstack salt states.
  1620. """
  1621. name = 'YAML+Jinja'
  1622. aliases = ['yaml+jinja', 'salt', 'sls']
  1623. filenames = ['*.sls', '*.yaml.j2', '*.yml.j2', '*.yaml.jinja2', '*.yml.jinja2']
  1624. mimetypes = ['text/x-yaml+jinja', 'text/x-sls']
  1625. url = 'https://jinja.palletsprojects.com'
  1626. version_added = '2.0'
  1627. def __init__(self, **options):
  1628. super().__init__(YamlLexer, DjangoLexer, **options)
class LiquidLexer(RegexLexer):
    """
    Lexer for Liquid templates.

    Only the Liquid tag/output markup is tokenized; plain template text is
    emitted as `Text`.  The state machine distinguishes `{% ... %}` tags
    (blocks, builtin tags, unknown tags) from `{{ ... }}` output with
    optional filter pipelines.
    """

    name = 'liquid'
    url = 'https://www.rubydoc.info/github/Shopify/liquid'
    aliases = ['liquid']
    filenames = ['*.liquid']
    version_added = '2.0'

    tokens = {
        'root': [
            (r'[^{]+', Text),
            # tags and block tags
            (r'(\{%)(\s*)', bygroups(Punctuation, Whitespace), 'tag-or-block'),
            # output tags
            (r'(\{\{)(\s*)([^\s}]+)',
             bygroups(Punctuation, Whitespace, using(this, state = 'generic')),
             'output'),
            # a lone '{' that opens no tag is plain text
            (r'\{', Text)
        ],

        'tag-or-block': [
            # builtin logic blocks
            (r'(if|unless|elsif|case)(?=\s+)', Keyword.Reserved, 'condition'),
            (r'(when)(\s+)', bygroups(Keyword.Reserved, Whitespace),
             combined('end-of-block', 'whitespace', 'generic')),
            (r'(else)(\s*)(%\})',
             bygroups(Keyword.Reserved, Whitespace, Punctuation), '#pop'),

            # other builtin blocks
            (r'(capture)(\s+)([^\s%]+)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, using(this, state = 'variable'),
                      Whitespace, Punctuation), '#pop'),
            (r'(comment)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'comment'),
            (r'(raw)(\s*)(%\})',
             bygroups(Name.Tag, Whitespace, Punctuation), 'raw'),

            # end of block
            (r'(end(case|unless|if))(\s*)(%\})',
             bygroups(Keyword.Reserved, None, Whitespace, Punctuation), '#pop'),
            (r'(end([^\s%]+))(\s*)(%\})',
             bygroups(Name.Tag, None, Whitespace, Punctuation), '#pop'),

            # builtin tags (assign and include are handled together with usual tags)
            (r'(cycle)(\s+)(?:([^\s:]*)(:))?(\s*)',
             bygroups(Name.Tag, Whitespace,
                      using(this, state='generic'), Punctuation, Whitespace),
             'variable-tag-markup'),

            # other tags or blocks
            (r'([^\s%]+)(\s*)', bygroups(Name.Tag, Whitespace), 'tag-markup')
        ],

        'output': [
            include('whitespace'),
            (r'\}\}', Punctuation, '#pop'),  # end of output

            (r'\|', Punctuation, 'filters')
        ],

        'filters': [
            include('whitespace'),
            (r'\}\}', Punctuation, ('#pop', '#pop')),  # end of filters and output

            (r'([^\s|:]+)(:?)(\s*)',
             bygroups(Name.Function, Punctuation, Whitespace), 'filter-markup')
        ],

        'filter-markup': [
            (r'\|', Punctuation, '#pop'),  # next filter in the pipeline
            include('end-of-tag'),
            include('default-param-markup')
        ],

        'condition': [
            include('end-of-block'),
            include('whitespace'),

            # binary comparison: lhs OP rhs %}
            (r'([^\s=!><]+)(\s*)([=!><]=?)(\s*)(\S+)(\s*)(%\})',
             bygroups(using(this, state = 'generic'), Whitespace, Operator,
                      Whitespace, using(this, state = 'generic'), Whitespace,
                      Punctuation)),
            (r'\b!', Operator),
            (r'\bnot\b', Operator.Word),
            (r'([\w.\'"]+)(\s+)(contains)(\s+)([\w.\'"]+)',
             bygroups(using(this, state = 'generic'), Whitespace, Operator.Word,
                      Whitespace, using(this, state = 'generic'))),

            include('generic'),
            include('whitespace')
        ],

        'generic-value': [
            include('generic'),
            include('end-at-whitespace')
        ],

        'operator': [
            (r'(\s*)((=|!|>|<)=?)(\s*)',
             bygroups(Whitespace, Operator, None, Whitespace), '#pop'),
            (r'(\s*)(\bcontains\b)(\s*)',
             bygroups(Whitespace, Operator.Word, Whitespace), '#pop'),
        ],

        'end-of-tag': [
            (r'\}\}', Punctuation, '#pop')
        ],

        'end-of-block': [
            # pops twice: out of the current state and its parent
            (r'%\}', Punctuation, ('#pop', '#pop'))
        ],

        'end-at-whitespace': [
            (r'\s+', Whitespace, '#pop')
        ],

        # states for unknown markup
        'param-markup': [
            include('whitespace'),
            # params with colons or equals
            (r'([^\s=:]+)(\s*)(=|:)',
             bygroups(Name.Attribute, Whitespace, Operator)),
            # explicit variables
            (r'(\{\{)(\s*)([^\s}])(\s*)(\}\})',
             bygroups(Punctuation, Whitespace, using(this, state = 'variable'),
                      Whitespace, Punctuation)),

            include('string'),
            include('number'),
            include('keyword'),
            (r',', Punctuation)
        ],

        'default-param-markup': [
            include('param-markup'),
            (r'.', Text)  # fallback for switches / variables / un-quoted strings / ...
        ],

        'variable-param-markup': [
            include('param-markup'),
            include('variable'),
            (r'.', Text)  # fallback
        ],

        'tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('default-param-markup')
        ],

        'variable-tag-markup': [
            (r'%\}', Punctuation, ('#pop', '#pop')),  # end of tag
            include('variable-param-markup')
        ],

        # states for different values types
        'keyword': [
            (r'\b(false|true)\b', Keyword.Constant)
        ],

        'variable': [
            (r'[a-zA-Z_]\w*', Name.Variable),
            (r'(?<=\w)\.(?=\w)', Punctuation)  # dotted lookup separator
        ],

        'string': [
            (r"'[^']*'", String.Single),
            (r'"[^"]*"', String.Double)
        ],

        'number': [
            (r'\d+\.\d+', Number.Float),
            (r'\d+', Number.Integer)
        ],

        'generic': [  # decides for variable, string, keyword or number
            include('keyword'),
            include('string'),
            include('number'),
            include('variable')
        ],

        'whitespace': [
            (r'[ \t]+', Whitespace)
        ],

        # states for builtin blocks
        'comment': [
            (r'(\{%)(\s*)(endcomment)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), ('#pop', '#pop')),

            (r'.', Comment)
        ],

        'raw': [
            (r'[^{]+', Text),
            (r'(\{%)(\s*)(endraw)(\s*)(%\})',
             bygroups(Punctuation, Whitespace, Name.Tag, Whitespace,
                      Punctuation), '#pop'),
            (r'\{', Text)
        ],
    }
  1799. class TwigLexer(RegexLexer):
  1800. """
  1801. Twig template lexer.
  1802. It just highlights Twig code between the preprocessor directives,
  1803. other data is left untouched by the lexer.
  1804. """
  1805. name = 'Twig'
  1806. aliases = ['twig']
  1807. mimetypes = ['application/x-twig']
  1808. url = 'https://twig.symfony.com'
  1809. version_added = '2.0'
  1810. flags = re.M | re.S
  1811. # Note that a backslash is included in the following two patterns
  1812. # PHP uses a backslash as a namespace separator
  1813. _ident_char = r'[\\\w-]|[^\x00-\x7f]'
  1814. _ident_begin = r'(?:[\\_a-z]|[^\x00-\x7f])'
  1815. _ident_end = r'(?:' + _ident_char + ')*'
  1816. _ident_inner = _ident_begin + _ident_end
  1817. tokens = {
  1818. 'root': [
  1819. (r'[^{]+', Other),
  1820. (r'\{\{', Comment.Preproc, 'var'),
  1821. # twig comments
  1822. (r'\{\#.*?\#\}', Comment),
  1823. # raw twig blocks
  1824. (r'(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)'
  1825. r'(\{%)(-?\s*)(endraw)(\s*-?)(%\})',
  1826. bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
  1827. Other, Comment.Preproc, Text, Keyword, Text,
  1828. Comment.Preproc)),
  1829. (r'(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)'
  1830. r'(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})',
  1831. bygroups(Comment.Preproc, Text, Keyword, Text, Comment.Preproc,
  1832. Other, Comment.Preproc, Text, Keyword, Text,
  1833. Comment.Preproc)),
  1834. # filter blocks
  1835. (rf'(\{{%)(-?\s*)(filter)(\s+)({_ident_inner})',
  1836. bygroups(Comment.Preproc, Text, Keyword, Text, Name.Function),
  1837. 'tag'),
  1838. (r'(\{%)(-?\s*)([a-zA-Z_]\w*)',
  1839. bygroups(Comment.Preproc, Text, Keyword), 'tag'),
  1840. (r'\{', Other),
  1841. ],
  1842. 'varnames': [
  1843. (rf'(\|)(\s*)({_ident_inner})',
  1844. bygroups(Operator, Text, Name.Function)),
  1845. (rf'(is)(\s+)(not)?(\s*)({_ident_inner})',
  1846. bygroups(Keyword, Text, Keyword, Text, Name.Function)),
  1847. (r'(?i)(true|false|none|null)\b', Keyword.Pseudo),
  1848. (r'(in|not|and|b-and|or|b-or|b-xor|is'
  1849. r'if|elseif|else|import'
  1850. r'constant|defined|divisibleby|empty|even|iterable|odd|sameas'
  1851. r'matches|starts\s+with|ends\s+with)\b',
  1852. Keyword),
  1853. (r'(loop|block|parent)\b', Name.Builtin),
  1854. (_ident_inner, Name.Variable),
  1855. (r'\.' + _ident_inner, Name.Variable),
  1856. (r'\.[0-9]+', Number),
  1857. (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
  1858. (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
  1859. (r'([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)', Operator),
  1860. (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
  1861. r"0[xX][0-9a-fA-F]+[Ll]?", Number),
  1862. ],
  1863. 'var': [
  1864. (r'\s+', Text),
  1865. (r'(-?)(\}\})', bygroups(Text, Comment.Preproc), '#pop'),
  1866. include('varnames')
  1867. ],
  1868. 'tag': [
  1869. (r'\s+', Text),
  1870. (r'(-?)(%\})', bygroups(Text, Comment.Preproc), '#pop'),
  1871. include('varnames'),
  1872. (r'.', Punctuation),
  1873. ],
  1874. }
  1875. class TwigHtmlLexer(DelegatingLexer):
  1876. """
  1877. Subclass of the `TwigLexer` that highlights unlexed data with the
  1878. `HtmlLexer`.
  1879. """
  1880. name = "HTML+Twig"
  1881. aliases = ["html+twig"]
  1882. filenames = ['*.twig']
  1883. mimetypes = ['text/html+twig']
  1884. url = 'https://twig.symfony.com'
  1885. version_added = '2.0'
  1886. def __init__(self, **options):
  1887. super().__init__(HtmlLexer, TwigLexer, **options)
class Angular2Lexer(RegexLexer):
    """
    Generic angular2 template lexer.

    Highlights only the Angular template tags (stuff between `{{` and `}}` and
    special attributes: '(event)=', '[property]=', '[(twoWayBinding)]=').
    Everything else is left for a delegating lexer.
    """

    name = "Angular2"
    url = 'https://angular.io/guide/template-syntax'
    aliases = ['ng2']
    version_added = '2.1'

    tokens = {
        'root': [
            # Anything that cannot start Angular markup goes to the delegate.
            (r'[^{([*#]+', Other),

            # {{meal.name}}
            (r'(\{\{)(\s*)', bygroups(Comment.Preproc, Text), 'ngExpression'),

            # (click)="deleteOrder()"; [value]="test"; [(twoWayTest)]="foo.bar"
            (r'([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)',
             bygroups(Punctuation, Name.Attribute, Punctuation, Text, Operator, Text),
             'attr'),
            # bracketed attribute without a value
            (r'([([]+)([\w:.-]+)([\])]+)(\s*)',
             bygroups(Punctuation, Name.Attribute, Punctuation, Text)),

            # *ngIf="..."; #f="ngForm"
            (r'([*#])([\w:.-]+)(\s*)(=)(\s*)',
             bygroups(Punctuation, Name.Attribute, Text, Operator, Text), 'attr'),
            (r'([*#])([\w:.-]+)(\s*)',
             bygroups(Punctuation, Name.Attribute, Text)),
        ],

        'ngExpression': [
            (r'\s+(\|\s+)?', Text),
            (r'\}\}', Comment.Preproc, '#pop'),

            # Literals
            (r':?(true|false)', String.Boolean),
            (r':?"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r":?'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            # NOTE(review): '(eE[+-][0-9])?' only matches a literal "eE";
            # shared quirk of the number pattern borrowed from DjangoLexer.
            (r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
             r"0[xX][0-9a-fA-F]+[Ll]?", Number),

            # Variabletext
            (r'[a-zA-Z][\w-]*(\(.*\))?', Name.Variable),
            (r'\.[\w-]+(\(.*\))?', Name.Variable),

            # inline If
            (r'(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)',
             bygroups(Operator, Text, String, Text, Operator, Text, String, Text)),
        ],
        'attr': [
            # attribute value: quoted or bare, then back to root
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
  1938. class Angular2HtmlLexer(DelegatingLexer):
  1939. """
  1940. Subclass of the `Angular2Lexer` that highlights unlexed data with the
  1941. `HtmlLexer`.
  1942. """
  1943. name = "HTML + Angular2"
  1944. aliases = ["html+ng2"]
  1945. filenames = ['*.ng2']
  1946. url = 'https://angular.io/guide/template-syntax'
  1947. version_added = '2.0'
  1948. def __init__(self, **options):
  1949. super().__init__(HtmlLexer, Angular2Lexer, **options)
  1950. class SqlJinjaLexer(DelegatingLexer):
  1951. """
  1952. Templated SQL lexer.
  1953. """
  1954. name = 'SQL+Jinja'
  1955. aliases = ['sql+jinja']
  1956. filenames = ['*.sql', '*.sql.j2', '*.sql.jinja2']
  1957. url = 'https://jinja.palletsprojects.com'
  1958. version_added = '2.13'
  1959. def __init__(self, **options):
  1960. super().__init__(SqlLexer, DjangoLexer, **options)
  1961. def analyse_text(text):
  1962. rv = 0.0
  1963. # dbt's ref function
  1964. if re.search(r'\{\{\s*ref\(.*\)\s*\}\}', text):
  1965. rv += 0.4
  1966. # dbt's source function
  1967. if re.search(r'\{\{\s*source\(.*\)\s*\}\}', text):
  1968. rv += 0.25
  1969. # Jinja macro
  1970. if re.search(r'\{%-?\s*macro \w+\(.*\)\s*-?%\}', text):
  1971. rv += 0.15
  1972. return rv