"""
    pygments.lexers.shell
    ~~~~~~~~~~~~~~~~~~~~~

    Lexers for various shells.

    :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \
    include, default, this, using, words, line_re
from pygments.token import Punctuation, Whitespace, \
    Text, Comment, Operator, Keyword, Name, String, Number, Generic
from pygments.util import shebang_matches

__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
           'SlurmBashLexer', 'MSDOSSessionLexer', 'PowerShellLexer',
           'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer',
           'ExeclineLexer']


class BashLexer(RegexLexer):
    """
    Lexer for (ba|k|z|)sh shell scripts.
    """

    name = 'Bash'
    aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell', 'openrc']
    filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
                 '*.exheres-0', '*.exlib', '*.zsh',
                 '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc',
                 '.kshrc', 'kshrc',
                 'PKGBUILD']
    mimetypes = ['application/x-sh', 'application/x-shellscript', 'text/x-shellscript']
    url = 'https://en.wikipedia.org/wiki/Unix_shell'
    version_added = '0.6'

    tokens = {
        'root': [
            include('basic'),
            (r'`', String.Backtick, 'backticks'),
            include('data'),
            include('interp'),
        ],
        'interp': [
            (r'\$\(\(', Keyword, 'math'),
            (r'\$\(', Keyword, 'paren'),
            (r'\$\{#?', String.Interpol, 'curly'),
            (r'\$[a-zA-Z_]\w*', Name.Variable),  # user variable
            (r'\$(?:\d+|[#$?!_*@-])', Name.Variable),  # builtin
            (r'\$', Text),
        ],
        'basic': [
            (r'\b(if|fi|else|while|in|do|done|for|then|return|function|case|'
             r'select|break|continue|until|esac|elif)(\s*)\b',
             bygroups(Keyword, Whitespace)),
            (r'\b(alias|bg|bind|builtin|caller|cd|command|compgen|'
             r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
             r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
             r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
             r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
             r'ulimit|umask|unalias|unset|wait)(?=[\s)`])',
             Name.Builtin),
            (r'\A#!.+\n', Comment.Hashbang),
            (r'#.*\n', Comment.Single),
            (r'\\[\w\W]', String.Escape),
            (r'(\b\w+)(\s*)(\+?=)', bygroups(Name.Variable, Whitespace, Operator)),
            (r'[\[\]{}()=]', Operator),
            (r'<<<', Operator),  # here-string
            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
            (r'&&|\|\|', Operator),
        ],
        'data': [
            (r'(?s)\$?"(\\.|[^"\\$])*"', String.Double),
            (r'"', String.Double, 'string'),
            (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
            (r"(?s)'.*?'", String.Single),
            (r';', Punctuation),
            (r'&', Punctuation),
            (r'\|', Punctuation),
            (r'\s+', Whitespace),
            (r'\d+\b', Number),
            (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
            (r'<', Text),
        ],
        'string': [
            (r'"', String.Double, '#pop'),
            (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
            include('interp'),
        ],
        'curly': [
            (r'\}', String.Interpol, '#pop'),
            (r':-', Keyword),
            (r'\w+', Name.Variable),
            (r'[^}:"\'`$\\]+', Punctuation),
            (r':', Punctuation),
            include('root'),
        ],
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('root'),
        ],
        'math': [
            (r'\)\)', Keyword, '#pop'),
            (r'\*\*|\|\||<<|>>|[-+*/%^|&<>]', Operator),
            (r'\d+#[\da-zA-Z]+', Number),
            (r'\d+#(?! )', Number),
            (r'0[xX][\da-fA-F]+', Number),
            (r'\d+', Number),
            (r'[a-zA-Z_]\w*', Name.Variable),  # user variable
            include('root'),
        ],
        'backticks': [
            (r'`', String.Backtick, '#pop'),
            include('root'),
        ],
    }

    def analyse_text(text):
        if shebang_matches(text, r'(ba|z|)sh'):
            return 1
        if text.startswith('$ '):
            return 0.2
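

# --- Illustrative usage sketch (not part of the original module) ---
# Highlighting a small Bash snippet with BashLexer through the standard
# pygments.highlight() API; the sample script text here is invented.
def _demo_bash_highlight():
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    script = '#!/bin/bash\nfor f in *.log; do\n    echo "$f"\ndone\n'
    # Returns the snippet rendered with ANSI colour codes.
    return highlight(script, BashLexer(), TerminalFormatter())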


class SlurmBashLexer(BashLexer):
    """
    Lexer for (ba|k|z|)sh Slurm scripts.
    """

    name = 'Slurm'
    aliases = ['slurm', 'sbatch']
    filenames = ['*.sl']
    mimetypes = []
    version_added = '2.4'
    EXTRA_KEYWORDS = {'srun'}

    def get_tokens_unprocessed(self, text):
        for index, token, value in BashLexer.get_tokens_unprocessed(self, text):
            if token is Text and value in self.EXTRA_KEYWORDS:
                yield index, Name.Builtin, value
            elif token is Comment.Single and 'SBATCH' in value:
                yield index, Keyword.Pseudo, value
            else:
                yield index, token, value
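

# Sketch (not part of the original module) of the token override above: with
# SlurmBashLexer, 'srun' should come out as Name.Builtin and '#SBATCH'
# comment lines as Keyword.Pseudo. The sample script is invented.
def _demo_slurm_tokens():
    script = '#SBATCH --ntasks=4\nsrun hostname\n'
    return list(SlurmBashLexer().get_tokens(script))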


class ShellSessionBaseLexer(Lexer):
    """
    Base lexer for shell sessions.

    .. versionadded:: 2.1
    """

    _bare_continuation = False
    _venv = re.compile(r'^(\([^)]*\))(\s*)')

    def get_tokens_unprocessed(self, text):
        innerlexer = self._innerLexerCls(**self.options)

        pos = 0
        curcode = ''
        insertions = []
        backslash_continuation = False

        for match in line_re.finditer(text):
            line = match.group()

            venv_match = self._venv.match(line)
            if venv_match:
                venv = venv_match.group(1)
                venv_whitespace = venv_match.group(2)
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt.VirtualEnv, venv)]))
                if venv_whitespace:
                    insertions.append((len(curcode),
                                       [(0, Text, venv_whitespace)]))
                line = line[venv_match.end():]

            m = self._ps1rgx.match(line)
            if m:
                # To support output lexers (say diff output), the output
                # needs to be broken by prompts whenever the output lexer
                # changes.
                if not insertions:
                    pos = match.start()
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, m.group(1))]))
                curcode += m.group(2)
                backslash_continuation = curcode.endswith('\\\n')
            elif backslash_continuation:
                if line.startswith(self._ps2):
                    insertions.append((len(curcode),
                                       [(0, Generic.Prompt,
                                         line[:len(self._ps2)])]))
                    curcode += line[len(self._ps2):]
                else:
                    curcode += line
                backslash_continuation = curcode.endswith('\\\n')
            elif self._bare_continuation and line.startswith(self._ps2):
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt,
                                     line[:len(self._ps2)])]))
                curcode += line[len(self._ps2):]
            else:
                if insertions:
                    toks = innerlexer.get_tokens_unprocessed(curcode)
                    for i, t, v in do_insertions(insertions, toks):
                        yield pos+i, t, v
                yield match.start(), Generic.Output, line
                insertions = []
                curcode = ''
        if insertions:
            for i, t, v in do_insertions(insertions,
                                         innerlexer.get_tokens_unprocessed(curcode)):
                yield pos+i, t, v


class BashSessionLexer(ShellSessionBaseLexer):
    """
    Lexer for Bash shell sessions, i.e. command lines, including a
    prompt, interspersed with output.
    """

    name = 'Bash Session'
    aliases = ['console', 'shell-session']
    filenames = ['*.sh-session', '*.shell-session']
    mimetypes = ['application/x-shell-session', 'application/x-sh-session']
    url = 'https://en.wikipedia.org/wiki/Unix_shell'
    version_added = '1.1'

    _innerLexerCls = BashLexer
    _ps1rgx = re.compile(
        r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
        r'?|\[\S+[@:][^\n]+\].+))\s*[$#%]\s*)(.*\n?)')
    _ps2 = '> '
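

# Sketch (not part of the original module) of the session mechanism above:
# prompt text matched by _ps1rgx becomes a Generic.Prompt insertion, the
# command text is lexed with the inner BashLexer, and non-prompt lines are
# emitted as Generic.Output. The transcript is invented.
def _demo_console_session():
    transcript = '$ echo hello\nhello\n'
    return list(BashSessionLexer().get_tokens(transcript))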


class BatchLexer(RegexLexer):
    """
    Lexer for the DOS/Windows Batch file format.
    """
    name = 'Batchfile'
    aliases = ['batch', 'bat', 'dosbatch', 'winbatch']
    filenames = ['*.bat', '*.cmd']
    mimetypes = ['application/x-dos-batch']
    url = 'https://en.wikipedia.org/wiki/Batch_file'
    version_added = '0.7'

    flags = re.MULTILINE | re.IGNORECASE

    _nl = r'\n\x1a'
    _punct = r'&<>|'
    _ws = r'\t\v\f\r ,;=\xa0'
    _nlws = r'\s\x1a\xa0,;='
    _space = rf'(?:(?:(?:\^[{_nl}])?[{_ws}])+)'
    _keyword_terminator = (rf'(?=(?:\^[{_nl}]?)?[{_ws}+./:[\\\]]|[{_nl}{_punct}(])')
    _token_terminator = rf'(?=\^?[{_ws}]|[{_punct}{_nl}])'
    _start_label = rf'((?:(?<=^[^:])|^[^:]?)[{_ws}]*)(:)'
    _label = rf'(?:(?:[^{_nlws}{_punct}+:^]|\^[{_nl}]?[\w\W])*)'
    _label_compound = rf'(?:(?:[^{_nlws}{_punct}+:^)]|\^[{_nl}]?[^)])*)'
    _number = rf'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+){_token_terminator})'
    _opword = r'(?:equ|geq|gtr|leq|lss|neq)'
    _string = rf'(?:"[^{_nl}"]*(?:"|(?=[{_nl}])))'
    _variable = (r'(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
                 rf'[^%:{_nl}]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%{_nl}^]|'
                 rf'\^[^%{_nl}])[^={_nl}]*=(?:[^%{_nl}^]|\^[^%{_nl}])*)?)?%))|'
                 rf'(?:\^?![^!:{_nl}]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
                 rf'[^!{_nl}^]|\^[^!{_nl}])[^={_nl}]*=(?:[^!{_nl}^]|\^[^!{_nl}])*)?)?\^?!))')
    _core_token = rf'(?:(?:(?:\^[{_nl}]?)?[^"{_nlws}{_punct}])+)'
    _core_token_compound = rf'(?:(?:(?:\^[{_nl}]?)?[^"{_nlws}{_punct})])+)'
    _token = rf'(?:[{_punct}]+|{_core_token})'
    _token_compound = rf'(?:[{_punct}]+|{_core_token_compound})'
    _stoken = (rf'(?:[{_punct}]+|(?:{_string}|{_variable}|{_core_token})+)')

    def _make_begin_state(compound, _core_token=_core_token,
                          _core_token_compound=_core_token_compound,
                          _keyword_terminator=_keyword_terminator,
                          _nl=_nl, _punct=_punct, _string=_string,
                          _space=_space, _start_label=_start_label,
                          _stoken=_stoken, _token_terminator=_token_terminator,
                          _variable=_variable, _ws=_ws):
        rest = '(?:{}|{}|[^"%{}{}{}])*'.format(_string, _variable, _nl, _punct,
                                               ')' if compound else '')
        rest_of_line = rf'(?:(?:[^{_nl}^]|\^[{_nl}]?[\w\W])*)'
        rest_of_line_compound = rf'(?:(?:[^{_nl}^)]|\^[{_nl}]?[^)])*)'
        set_space = rf'((?:(?:\^[{_nl}]?)?[^\S\n])*)'
        suffix = ''
        if compound:
            _keyword_terminator = rf'(?:(?=\))|{_keyword_terminator})'
            _token_terminator = rf'(?:(?=\))|{_token_terminator})'
            suffix = '/compound'
        return [
            ((r'\)', Punctuation, '#pop') if compound else
             (rf'\)((?=\()|{_token_terminator}){rest_of_line}',
              Comment.Single)),
            (rf'(?={_start_label})', Text, f'follow{suffix}'),
            (_space, using(this, state='text')),
            include(f'redirect{suffix}'),
            (rf'[{_nl}]+', Text),
            (r'\(', Punctuation, 'root/compound'),
            (r'@+', Punctuation),
            (rf'((?:for|if|rem)(?:(?=(?:\^[{_nl}]?)?/)|(?:(?!\^)|'
             rf'(?<=m))(?:(?=\()|{_token_terminator})))({_space}?{_core_token_compound if compound else _core_token}?(?:\^[{_nl}]?)?/(?:\^[{_nl}]?)?\?)',
             bygroups(Keyword, using(this, state='text')),
             f'follow{suffix}'),
            (rf'(goto{_keyword_terminator})({rest}(?:\^[{_nl}]?)?/(?:\^[{_nl}]?)?\?{rest})',
             bygroups(Keyword, using(this, state='text')),
             f'follow{suffix}'),
            (words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
                    'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
                    'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
                    'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
                    'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
                    'title', 'type', 'ver', 'verify', 'vol'),
                   suffix=_keyword_terminator), Keyword, f'follow{suffix}'),
            (rf'(call)({_space}?)(:)',
             bygroups(Keyword, using(this, state='text'), Punctuation),
             f'call{suffix}'),
            (rf'call{_keyword_terminator}', Keyword),
            (rf'(for{_token_terminator}(?!\^))({_space})(/f{_token_terminator})',
             bygroups(Keyword, using(this, state='text'), Keyword),
             ('for/f', 'for')),
            (rf'(for{_token_terminator}(?!\^))({_space})(/l{_token_terminator})',
             bygroups(Keyword, using(this, state='text'), Keyword),
             ('for/l', 'for')),
            (rf'for{_token_terminator}(?!\^)', Keyword, ('for2', 'for')),
            (rf'(goto{_keyword_terminator})({_space}?)(:?)',
             bygroups(Keyword, using(this, state='text'), Punctuation),
             f'label{suffix}'),
            (rf'(if(?:(?=\()|{_token_terminator})(?!\^))({_space}?)((?:/i{_token_terminator})?)({_space}?)((?:not{_token_terminator})?)({_space}?)',
             bygroups(Keyword, using(this, state='text'), Keyword,
                      using(this, state='text'), Keyword,
                      using(this, state='text')), ('(?', 'if')),
            (rf'rem(((?=\()|{_token_terminator}){_space}?{_stoken}?.*|{_keyword_terminator}{rest_of_line_compound if compound else rest_of_line})',
             Comment.Single, f'follow{suffix}'),
            (rf'(set{_keyword_terminator}){set_space}(/a)',
             bygroups(Keyword, using(this, state='text'), Keyword),
             f'arithmetic{suffix}'),
            (r'(set{}){}((?:/p)?){}((?:(?:(?:\^[{}]?)?[^"{}{}^={}]|'
             r'\^[{}]?[^"=])+)?)((?:(?:\^[{}]?)?=)?)'.format(_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
                                                             ')' if compound else '', _nl, _nl),
             bygroups(Keyword, using(this, state='text'), Keyword,
                      using(this, state='text'), using(this, state='variable'),
                      Punctuation),
             f'follow{suffix}'),
            default(f'follow{suffix}')
        ]

    def _make_follow_state(compound, _label=_label,
                           _label_compound=_label_compound, _nl=_nl,
                           _space=_space, _start_label=_start_label,
                           _token=_token, _token_compound=_token_compound,
                           _ws=_ws):
        suffix = '/compound' if compound else ''
        state = []
        if compound:
            state.append((r'(?=\))', Text, '#pop'))
        state += [
            (rf'{_start_label}([{_ws}]*)({_label_compound if compound else _label})(.*)',
             bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)),
            include(f'redirect{suffix}'),
            (rf'(?=[{_nl}])', Text, '#pop'),
            (r'\|\|?|&&?', Punctuation, '#pop'),
            include('text')
        ]
        return state

    def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
                               _string=_string, _variable=_variable,
                               _ws=_ws, _nlws=_nlws):
        op = r'=+\-*/!~'
        state = []
        if compound:
            state.append((r'(?=\))', Text, '#pop'))
        state += [
            (r'0[0-7]+', Number.Oct),
            (r'0x[\da-f]+', Number.Hex),
            (r'\d+', Number.Integer),
            (r'[(),]+', Punctuation),
            (rf'([{op}]|%|\^\^)+', Operator),
            (r'({}|{}|(\^[{}]?)?[^(){}%\^"{}{}]|\^[{}]?{})+'.format(_string, _variable, _nl, op, _nlws, _punct, _nlws,
                                                                    r'[^)]' if compound else r'[\w\W]'),
             using(this, state='variable')),
            (r'(?=[\x00|&])', Text, '#pop'),
            include('follow')
        ]
        return state

    def _make_call_state(compound, _label=_label,
                         _label_compound=_label_compound):
        state = []
        if compound:
            state.append((r'(?=\))', Text, '#pop'))
        state.append((r'(:?)(%s)' % (_label_compound if compound else _label),
                      bygroups(Punctuation, Name.Label), '#pop'))
        return state

    def _make_label_state(compound, _label=_label,
                          _label_compound=_label_compound, _nl=_nl,
                          _punct=_punct, _string=_string, _variable=_variable):
        state = []
        if compound:
            state.append((r'(?=\))', Text, '#pop'))
        state.append((r'({}?)((?:{}|{}|\^[{}]?{}|[^"%^{}{}{}])*)'.format(_label_compound if compound else _label, _string,
                                                                         _variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl,
                                                                         _punct, r')' if compound else ''),
                      bygroups(Name.Label, Comment.Single), '#pop'))
        return state

    def _make_redirect_state(compound,
                             _core_token_compound=_core_token_compound,
                             _nl=_nl, _punct=_punct, _stoken=_stoken,
                             _string=_string, _space=_space,
                             _variable=_variable, _nlws=_nlws):
        stoken_compound = (rf'(?:[{_punct}]+|(?:{_string}|{_variable}|{_core_token_compound})+)')
        return [
            (rf'((?:(?<=[{_nlws}])\d)?)(>>?&|<&)([{_nlws}]*)(\d)',
             bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
            (rf'((?:(?<=[{_nlws}])(?<!\^[{_nl}])\d)?)(>>?|<)({_space}?{stoken_compound if compound else _stoken})',
             bygroups(Number.Integer, Punctuation, using(this, state='text')))
        ]

    tokens = {
        'root': _make_begin_state(False),
        'follow': _make_follow_state(False),
        'arithmetic': _make_arithmetic_state(False),
        'call': _make_call_state(False),
        'label': _make_label_state(False),
        'redirect': _make_redirect_state(False),
        'root/compound': _make_begin_state(True),
        'follow/compound': _make_follow_state(True),
        'arithmetic/compound': _make_arithmetic_state(True),
        'call/compound': _make_call_state(True),
        'label/compound': _make_label_state(True),
        'redirect/compound': _make_redirect_state(True),
        'variable-or-escape': [
            (_variable, Name.Variable),
            (rf'%%|\^[{_nl}]?(\^!|[\w\W])', String.Escape)
        ],
        'string': [
            (r'"', String.Double, '#pop'),
            (_variable, Name.Variable),
            (r'\^!|%%', String.Escape),
            (rf'[^"%^{_nl}]+|[%^]', String.Double),
            default('#pop')
        ],
        'sqstring': [
            include('variable-or-escape'),
            (r'[^%]+|%', String.Single)
        ],
        'bqstring': [
            include('variable-or-escape'),
            (r'[^%]+|%', String.Backtick)
        ],
        'text': [
            (r'"', String.Double, 'string'),
            include('variable-or-escape'),
            (rf'[^"%^{_nlws}{_punct}\d)]+|.', Text)
        ],
        'variable': [
            (r'"', String.Double, 'string'),
            include('variable-or-escape'),
            (rf'[^"%^{_nl}]+|.', Name.Variable)
        ],
        'for': [
            (rf'({_space})(in)({_space})(\()',
             bygroups(using(this, state='text'), Keyword,
                      using(this, state='text'), Punctuation), '#pop'),
            include('follow')
        ],
        'for2': [
            (r'\)', Punctuation),
            (rf'({_space})(do{_token_terminator})',
             bygroups(using(this, state='text'), Keyword), '#pop'),
            (rf'[{_nl}]+', Text),
            include('follow')
        ],
        'for/f': [
            (rf'(")((?:{_variable}|[^"])*?")([{_nlws}]*)(\))',
             bygroups(String.Double, using(this, state='string'), Text,
                      Punctuation)),
            (r'"', String.Double, ('#pop', 'for2', 'string')),
            (rf"('(?:%%|{_variable}|[\w\W])*?')([{_nlws}]*)(\))",
             bygroups(using(this, state='sqstring'), Text, Punctuation)),
            (rf'(`(?:%%|{_variable}|[\w\W])*?`)([{_nlws}]*)(\))',
             bygroups(using(this, state='bqstring'), Text, Punctuation)),
            include('for2')
        ],
        'for/l': [
            (r'-?\d+', Number.Integer),
            include('for2')
        ],
        'if': [
            (rf'((?:cmdextversion|errorlevel){_token_terminator})({_space})(\d+)',
             bygroups(Keyword, using(this, state='text'),
                      Number.Integer), '#pop'),
            (rf'(defined{_token_terminator})({_space})({_stoken})',
             bygroups(Keyword, using(this, state='text'),
                      using(this, state='variable')), '#pop'),
            (rf'(exist{_token_terminator})({_space}{_stoken})',
             bygroups(Keyword, using(this, state='text')), '#pop'),
            (rf'({_number}{_space})({_opword})({_space}{_number})',
             bygroups(using(this, state='arithmetic'), Operator.Word,
                      using(this, state='arithmetic')), '#pop'),
            (_stoken, using(this, state='text'), ('#pop', 'if2')),
        ],
        'if2': [
            (rf'({_space}?)(==)({_space}?{_stoken})',
             bygroups(using(this, state='text'), Operator,
                      using(this, state='text')), '#pop'),
            (rf'({_space})({_opword})({_space}{_stoken})',
             bygroups(using(this, state='text'), Operator.Word,
                      using(this, state='text')), '#pop')
        ],
        '(?': [
            (_space, using(this, state='text')),
            (r'\(', Punctuation, ('#pop', 'else?', 'root/compound')),
            default('#pop')
        ],
        'else?': [
            (_space, using(this, state='text')),
            (rf'else{_token_terminator}', Keyword, '#pop'),
            default('#pop')
        ]
    }
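

# Minimal usage sketch for BatchLexer (not part of the original module); the
# command text is invented and HtmlFormatter is just one of the standard
# pygments formatters.
def _demo_batch_highlight():
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    code = '@echo off\r\nif exist out.txt del out.txt\r\n'
    return highlight(code, BatchLexer(), HtmlFormatter())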


class MSDOSSessionLexer(ShellSessionBaseLexer):
    """
    Lexer for MS DOS shell sessions, i.e. command lines, including a
    prompt, interspersed with output.
    """

    name = 'MSDOS Session'
    aliases = ['doscon']
    filenames = []
    mimetypes = []
    url = 'https://en.wikipedia.org/wiki/MS-DOS'
    version_added = '2.1'

    _innerLexerCls = BatchLexer
    _ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
    _ps2 = 'More? '


class TcshLexer(RegexLexer):
    """
    Lexer for tcsh scripts.
    """

    name = 'Tcsh'
    aliases = ['tcsh', 'csh']
    filenames = ['*.tcsh', '*.csh']
    mimetypes = ['application/x-csh']
    url = 'https://www.tcsh.org'
    version_added = '0.10'

    tokens = {
        'root': [
            include('basic'),
            (r'\$\(', Keyword, 'paren'),
            (r'\$\{#?', Keyword, 'curly'),
            (r'`', String.Backtick, 'backticks'),
            include('data'),
        ],
        'basic': [
            (r'\b(if|endif|else|while|then|foreach|case|default|'
             r'break|continue|goto|breaksw|end|switch|endsw)\s*\b',
             Keyword),
            (r'\b(alias|alloc|bg|bindkey|builtins|bye|caller|cd|chdir|'
             r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
             r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
             r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
             r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
             r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
             r'source|stop|suspend|source|suspend|telltc|time|'
             r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
             r'ver|wait|warp|watchlog|where|which)\s*\b',
             Name.Builtin),
            (r'#.*', Comment),
            (r'\\[\w\W]', String.Escape),
            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
            (r'[\[\]{}()=]+', Operator),
            (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
            (r';', Punctuation),
        ],
        'data': [
            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
            (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
            (r'\s+', Text),
            (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
            (r'\d+(?= |\Z)', Number),
            (r'\$#?(\w+|.)', Name.Variable),
        ],
        'curly': [
            (r'\}', Keyword, '#pop'),
            (r':-', Keyword),
            (r'\w+', Name.Variable),
            (r'[^}:"\'`$]+', Punctuation),
            (r':', Punctuation),
            include('root'),
        ],
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('root'),
        ],
        'backticks': [
            (r'`', String.Backtick, '#pop'),
            include('root'),
        ],
    }


class TcshSessionLexer(ShellSessionBaseLexer):
    """
    Lexer for Tcsh sessions, i.e. command lines, including a
    prompt, interspersed with output.
    """

    name = 'Tcsh Session'
    aliases = ['tcshcon']
    filenames = []
    mimetypes = []
    url = 'https://www.tcsh.org'
    version_added = '2.1'

    _innerLexerCls = TcshLexer
    _ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
    _ps2 = '? '


class PowerShellLexer(RegexLexer):
    """
    For Windows PowerShell code.
    """
    name = 'PowerShell'
    aliases = ['powershell', 'pwsh', 'posh', 'ps1', 'psm1']
    filenames = ['*.ps1', '*.psm1']
    mimetypes = ['text/x-powershell']
    url = 'https://learn.microsoft.com/en-us/powershell'
    version_added = '1.5'

    flags = re.DOTALL | re.IGNORECASE | re.MULTILINE

    keywords = (
        'while validateset validaterange validatepattern validatelength '
        'validatecount until trap switch return ref process param parameter in '
        'if global: local: function foreach for finally filter end elseif else '
        'dynamicparam do default continue cmdletbinding break begin alias \\? '
        '% #script #private #local #global mandatory parametersetname position '
        'valuefrompipeline valuefrompipelinebypropertyname '
        'valuefromremainingarguments helpmessage try catch throw').split()

    operators = (
        'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
        'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
        'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
        'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
        'lt match ne not notcontains notlike notmatch or regex replace '
        'wildcard').split()

    verbs = (
        'write where watch wait use update unregister unpublish unprotect '
        'unlock uninstall undo unblock trace test tee take sync switch '
        'suspend submit stop step start split sort skip show set send select '
        'search scroll save revoke resume restore restart resolve resize '
        'reset request repair rename remove register redo receive read push '
        'publish protect pop ping out optimize open new move mount merge '
        'measure lock limit join invoke install initialize import hide group '
        'grant get format foreach find export expand exit enter enable edit '
        'dismount disconnect disable deny debug cxnew copy convertto '
        'convertfrom convert connect confirm compress complete compare close '
        'clear checkpoint block backup assert approve aggregate add').split()

    aliases_ = (
        'ac asnp cat cd cfs chdir clc clear clhy cli clp cls clv cnsn '
        'compare copy cp cpi cpp curl cvpa dbp del diff dir dnsn ebp echo epal '
        'epcsv epsn erase etsn exsn fc fhx fl foreach ft fw gal gbp gc gci gcm '
        'gcs gdr ghy gi gjb gl gm gmo gp gps gpv group gsn gsnp gsv gu gv gwmi '
        'h history icm iex ihy ii ipal ipcsv ipmo ipsn irm ise iwmi iwr kill lp '
        'ls man md measure mi mount move mp mv nal ndr ni nmo npssc nsn nv ogv '
        'oh popd ps pushd pwd r rbp rcjb rcsn rd rdr ren ri rjb rm rmdir rmo '
        'rni rnp rp rsn rsnp rujb rv rvpa rwmi sajb sal saps sasv sbp sc select '
        'set shcm si sl sleep sls sort sp spjb spps spsv start sujb sv swmi tee '
        'trcm type wget where wjb write').split()

    commenthelp = (
        'component description example externalhelp forwardhelpcategory '
        'forwardhelptargetname functionality inputs link '
        'notes outputs parameter remotehelprunspace role synopsis').split()

    tokens = {
        'root': [
            # we need to count pairs of parentheses for correct highlight
            # of '$(...)' blocks in strings
            (r'\(', Punctuation, 'child'),
            (r'\s+', Text),
            (r'^(\s*#[#\s]*)(\.(?:{}))([^\n]*$)'.format('|'.join(commenthelp)),
             bygroups(Comment, String.Doc, Comment)),
            (r'#[^\n]*?$', Comment),
            (r'(&lt;|<)#', Comment.Multiline, 'multline'),
            (r'@"\n', String.Heredoc, 'heredoc-double'),
            (r"@'\n.*?\n'@", String.Heredoc),
            # escaped syntax
            (r'`[\'"$@-]', Punctuation),
            (r'"', String.Double, 'string'),
            (r"'([^']|'')*'", String.Single),
            (r'(\$|@@|@)((global|script|private|env):)?\w+',
             Name.Variable),
            (r'({})\b'.format('|'.join(keywords)), Keyword),
            (r'-({})\b'.format('|'.join(operators)), Operator),
            (r'({})-[a-z_]\w*\b'.format('|'.join(verbs)), Name.Builtin),
            (r'({})\s'.format('|'.join(aliases_)), Name.Builtin),
            (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant),  # .net [type]s
            (r'-[a-z_]\w*', Name),
            (r'\w+', Name),
            (r'[.,;:@{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
        ],
        'child': [
            (r'\)', Punctuation, '#pop'),
            include('root'),
        ],
        'multline': [
            (r'[^#&.]+', Comment.Multiline),
            (r'#(>|&gt;)', Comment.Multiline, '#pop'),
            (r'\.({})'.format('|'.join(commenthelp)), String.Doc),
            (r'[#&.]', Comment.Multiline),
        ],
        'string': [
            (r"`[0abfnrtv'\"$`]", String.Escape),
            (r'[^$`"]+', String.Double),
            (r'\$\(', Punctuation, 'child'),
            (r'""', String.Double),
            (r'[`$]', String.Double),
            (r'"', String.Double, '#pop'),
        ],
        'heredoc-double': [
            (r'\n"@', String.Heredoc, '#pop'),
            (r'\$\(', Punctuation, 'child'),
            (r'[^@\n]+"]', String.Heredoc),
            (r".", String.Heredoc),
        ]
    }
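

# Sketch (not part of the original module): tokenizing a couple of
# PowerShell lines; 'Get-ChildItem' should hit the verb rule above and be
# reported as Name.Builtin. The sample code is invented.
def _demo_powershell_tokens():
    code = '$files = Get-ChildItem -Path "C:\\Temp"\nWrite-Output $files\n'
    return list(PowerShellLexer().get_tokens(code))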


class PowerShellSessionLexer(ShellSessionBaseLexer):
    """
    Lexer for PowerShell sessions, i.e. command lines, including a
    prompt, interspersed with output.
    """

    name = 'PowerShell Session'
    aliases = ['pwsh-session', 'ps1con']
    filenames = []
    mimetypes = []
    url = 'https://learn.microsoft.com/en-us/powershell'
    version_added = '2.1'

    _innerLexerCls = PowerShellLexer
    _bare_continuation = True
    _ps1rgx = re.compile(r'^((?:\[[^]]+\]: )?PS[^>]*> ?)(.*\n?)')
    _ps2 = '> '
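

# Sketch (not part of the original module) of _bare_continuation: with it
# enabled, lines starting with the '> ' secondary prompt should be treated
# as further input even without a trailing backslash. Transcript invented.
def _demo_pwsh_session():
    transcript = ('PS C:\\Temp> Get-ChildItem |\n'
                  '> Measure-Object\n'
                  'Count    : 3\n')
    return list(PowerShellSessionLexer().get_tokens(transcript))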


class FishShellLexer(RegexLexer):
    """
    Lexer for Fish shell scripts.
    """

    name = 'Fish'
    aliases = ['fish', 'fishshell']
    filenames = ['*.fish', '*.load']
    mimetypes = ['application/x-fish']
    url = 'https://fishshell.com'
    version_added = '2.1'

    tokens = {
        'root': [
            include('basic'),
            include('data'),
            include('interp'),
        ],
        'interp': [
            (r'\$\(\(', Keyword, 'math'),
            (r'\(', Keyword, 'paren'),
            (r'\$#?(\w+|.)', Name.Variable),
        ],
        'basic': [
            (r'\b(begin|end|if|else|while|break|for|in|return|function|block|'
             r'case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|'
             r'cd|count|test)(\s*)\b',
             bygroups(Keyword, Text)),
            (r'\b(alias|bg|bind|breakpoint|builtin|command|commandline|'
             r'complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|'
             r'fish_indent|fish_pager|fish_prompt|fish_right_prompt|'
             r'fish_update_completions|fishd|funced|funcsave|functions|help|'
             r'history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|'
             r'pushd|random|read|set_color|source|status|trap|type|ulimit|'
             r'umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)',
             Name.Builtin),
            (r'#.*\n', Comment),
            (r'\\[\w\W]', String.Escape),
            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Whitespace, Operator)),
            (r'[\[\]()=]', Operator),
            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
        ],
        'data': [
            (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
            (r'"', String.Double, 'string'),
            (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
            (r"(?s)'.*?'", String.Single),
            (r';', Punctuation),
            (r'&|\||\^|<|>', Operator),
            (r'\s+', Text),
            (r'\d+(?= |\Z)', Number),
            (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
        ],
        'string': [
            (r'"', String.Double, '#pop'),
            (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
            include('interp'),
        ],
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('root'),
        ],
        'math': [
            (r'\)\)', Keyword, '#pop'),
            (r'[-+*/%^|&]|\*\*|\|\|', Operator),
            (r'\d+#\d+', Number),
            (r'\d+#(?! )', Number),
            (r'\d+', Number),
            include('root'),
        ],
    }


class ExeclineLexer(RegexLexer):
    """
    Lexer for Laurent Bercot's execline language.
    """

    name = 'execline'
    aliases = ['execline']
    filenames = ['*.exec']
    url = 'https://skarnet.org/software/execline'
    version_added = '2.7'

    tokens = {
        'root': [
            include('basic'),
            include('data'),
            include('interp')
        ],
        'interp': [
            (r'\$\{', String.Interpol, 'curly'),
            (r'\$[\w@#]+', Name.Variable),  # user variable
            (r'\$', Text),
        ],
        'basic': [
            (r'\b(background|backtick|cd|define|dollarat|elgetopt|'
             r'elgetpositionals|elglob|emptyenv|envfile|exec|execlineb|'
             r'exit|export|fdblock|fdclose|fdmove|fdreserve|fdswap|'
             r'forbacktickx|foreground|forstdin|forx|getcwd|getpid|heredoc|'
             r'homeof|if|ifelse|ifte|ifthenelse|importas|loopwhilex|'
             r'multidefine|multisubstitute|pipeline|piperw|posix-cd|'
             r'redirfd|runblock|shift|trap|tryexec|umask|unexport|wait|'
             r'withstdinas)\b', Name.Builtin),
            (r'\A#!.+\n', Comment.Hashbang),
            (r'#.*\n', Comment.Single),
            (r'[{}]', Operator)
        ],
        'data': [
            (r'(?s)"(\\.|[^"\\$])*"', String.Double),
            (r'"', String.Double, 'string'),
            (r'\s+', Text),
            (r'[^\s{}$"\\]+', Text)
        ],
        'string': [
            (r'"', String.Double, '#pop'),
            (r'(?s)(\\\\|\\.|[^"\\$])+', String.Double),
            include('interp'),
        ],
        'curly': [
            (r'\}', String.Interpol, '#pop'),
            (r'[\w#@]+', Name.Variable),
            include('root')
        ]
    }

    def analyse_text(text):
        if shebang_matches(text, r'execlineb'):
            return 1
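

# Sketch (not part of the original module): analyse_text above scores an
# execlineb shebang at 1.0, so pygments.lexers.guess_lexer should select
# ExeclineLexer for this invented snippet.
def _demo_guess_execline():
    from pygments.lexers import guess_lexer
    return guess_lexer('#!/command/execlineb -P\nforeground { echo hi }\n')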