# -*- coding: utf-8 -*-
"""
    pygments.lexers.theorem
    ~~~~~~~~~~~~~~~~~~~~~~~

    Lexers for theorem-proving languages.

    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

from pygments.lexer import RegexLexer, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
    Number, Punctuation, Generic

__all__ = ['CoqLexer', 'IsabelleLexer', 'LeanLexer']
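
# The lexers below all follow the same pattern: RegexLexer drives a small state
# machine whose states are the keys of each lexer's `tokens` dict, and
# words(...) turns a tuple of literal keywords into one optimized alternation,
# wrapped in the given `prefix`/`suffix` (word boundaries here).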


class CoqLexer(RegexLexer):
    """
    For the `Coq <http://coq.inria.fr/>`_ theorem prover.

    .. versionadded:: 1.5
    """

    name = 'Coq'
    aliases = ['coq']
    filenames = ['*.v']
    mimetypes = ['text/x-coq']

    keywords1 = (
        # Vernacular commands
        'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable',
        'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis',
        'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope',
        'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac',
        'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit',
        'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex',
        'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure',
        'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary',
        'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save',
        'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search',
        'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside',
        'outside', 'Check', 'Global', 'Instance', 'Class', 'Existing',
        'Universe', 'Polymorphic', 'Monomorphic', 'Context'
    )
    keywords2 = (
        # Gallina
        'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct',
        'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else',
        'for', 'of', 'nosimpl', 'with', 'as',
    )
    keywords3 = (
        # Sorts
        'Type', 'Prop',
    )
    keywords4 = (
        # Tactics
        'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro',
        'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct',
        'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite',
        'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold',
        'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog',
        'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial',
        'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto',
        'split', 'left', 'right', 'autorewrite', 'tauto', 'setoid_rewrite',
        'intuition', 'eauto', 'eapply', 'econstructor', 'etransitivity',
        'constructor', 'erewrite', 'red', 'cbv', 'lazy', 'vm_compute',
        'native_compute', 'subst',
    )
    keywords5 = (
        # Terminators
        'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega',
        'assumption', 'solve', 'contradiction', 'discriminate',
        'congruence',
    )
    keywords6 = (
        # Control
        'do', 'last', 'first', 'try', 'idtac', 'repeat',
    )
    # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done',
    # 'downto', 'else', 'end', 'exception', 'external', 'false',
    # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include',
    # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method',
    # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private',
    # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
    # 'type', 'val', 'virtual', 'when', 'while', 'with'
    keyopts = (
        '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', r'-\.',
        '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', '<-',
        '<->', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
        r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|]', r'\}', '~', '=>',
        r'/\\', r'\\/', r'\{\|', r'\|\}',
        u'Π', u'λ',
    )
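
    # keyopts above holds pre-escaped regex fragments (hence r'\(' rather than
    # '('); they are joined into a single alternation in the 'root' state.  The
    # operators/prefix_syms/infix_syms strings below are likewise raw regex
    # character classes rather than literal keyword tuples.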
    operators = r'[!$%&*+\./:<=>?@^|~-]'
    prefix_syms = r'[!?~]'
    infix_syms = r'[=<>@^|&+\*/$%-]'

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
            (r'\(\*', Comment, 'comment'),
            (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
            (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved),
            # (r'\b([A-Z][\w\']*)(\.)', Name.Namespace, 'dotted'),
            (r'\b([A-Z][\w\']*)', Name),
            (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
            (r"[^\W\d][\w']*", Name),
            # Specific number forms (hex/oct/bin, floats) must precede the
            # plain integer rule, or the integer rule would swallow their
            # leading digits and they could never match.
            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
            (r'0[oO][0-7][0-7_]*', Number.Oct),
            (r'0[bB][01][01_]*', Number.Bin),
            (r'-?\d[\d_]*(\.[\d_]*([eE][+\-]?\d[\d_]*)?|[eE][+\-]?\d[\d_]*)',
             Number.Float),
            (r'\d[\d_]*', Number.Integer),
  112. (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
  113. String.Char),
  114. (r"'.'", String.Char),
  115. (r"'", Keyword), # a stray quote is another syntax element
  116. (r'"', String.Double, 'string'),
  117. (r'[~?][a-z][\w\']*:', Name),
  118. ],
        'comment': [
            (r'[^(*)]+', Comment),
            (r'\(\*', Comment, '#push'),
            (r'\*\)', Comment, '#pop'),
            (r'[(*)]', Comment),
        ],
        'string': [
            (r'[^"]+', String.Double),
            (r'""', String.Double),
            (r'"', String.Double, '#pop'),
        ],
        'dotted': [
            (r'\s+', Text),
            (r'\.', Punctuation),
            (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
            (r'[A-Z][\w\']*', Name.Class, '#pop'),
            (r'[a-z][a-z0-9_\']*', Name, '#pop'),
            default('#pop')
        ],
    }
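
    # Rough tokenisation sketch for a one-line Coq statement (illustrative):
    #
    #   Lemma plus_O_n : forall n, 0 + n = n.
    #
    #   'Lemma'       -> Keyword.Namespace  (keywords1, vernacular commands)
    #   'forall'      -> Keyword            (keywords2, Gallina)
    #   ':', '=', '+' -> Operator           (keyopts)
    #   'plus_O_n'    -> Name, '0' -> Number.Integer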

    def analyse_text(text):
        if text.startswith('(*'):
            return True
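
    # analyse_text() is the hook consulted by pygments.lexers.guess_lexer():
    # input that opens with a "(*" comment is scored as a likely Coq file.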


class IsabelleLexer(RegexLexer):
    """
    For the `Isabelle <http://isabelle.in.tum.de/>`_ proof assistant.

    .. versionadded:: 2.0
    """

    name = 'Isabelle'
    aliases = ['isabelle']
    filenames = ['*.thy']
    mimetypes = ['text/x-isabelle']

    keyword_minor = (
        'and', 'assumes', 'attach', 'avoids', 'binder', 'checking',
        'class_instance', 'class_relation', 'code_module', 'congs',
        'constant', 'constrains', 'datatypes', 'defines', 'file', 'fixes',
        'for', 'functions', 'hints', 'identifier', 'if', 'imports', 'in',
        'includes', 'infix', 'infixl', 'infixr', 'is', 'keywords', 'lazy',
        'module_name', 'monos', 'morphisms', 'no_discs_sels', 'notes',
        'obtains', 'open', 'output', 'overloaded', 'parametric', 'permissive',
        'pervasive', 'rep_compat', 'shows', 'structure', 'type_class',
        'type_constructor', 'unchecked', 'unsafe', 'where',
    )
    keyword_diag = (
        'ML_command', 'ML_val', 'class_deps', 'code_deps', 'code_thms',
        'display_drafts', 'find_consts', 'find_theorems', 'find_unused_assms',
        'full_prf', 'help', 'locale_deps', 'nitpick', 'pr', 'prf',
        'print_abbrevs', 'print_antiquotations', 'print_attributes',
        'print_binds', 'print_bnfs', 'print_bundles',
        'print_case_translations', 'print_cases', 'print_claset',
        'print_classes', 'print_codeproc', 'print_codesetup',
        'print_coercions', 'print_commands', 'print_context',
        'print_defn_rules', 'print_dependencies', 'print_facts',
        'print_induct_rules', 'print_inductives', 'print_interps',
        'print_locale', 'print_locales', 'print_methods', 'print_options',
        'print_orders', 'print_quot_maps', 'print_quotconsts',
        'print_quotients', 'print_quotientsQ3', 'print_quotmapsQ3',
        'print_rules', 'print_simpset', 'print_state', 'print_statement',
        'print_syntax', 'print_theorems', 'print_theory', 'print_trans_rules',
        'prop', 'pwd', 'quickcheck', 'refute', 'sledgehammer', 'smt_status',
        'solve_direct', 'spark_status', 'term', 'thm', 'thm_deps', 'thy_deps',
        'try', 'try0', 'typ', 'unused_thms', 'value', 'values', 'welcome',
        'print_ML_antiquotations', 'print_term_bindings', 'values_prolog',
    )
    keyword_thy = ('theory', 'begin', 'end')
    keyword_section = ('header', 'chapter')
    keyword_subsection = (
        'section', 'subsection', 'subsubsection', 'sect', 'subsect',
        'subsubsect',
    )
    keyword_theory_decl = (
        'ML', 'ML_file', 'abbreviation', 'adhoc_overloading', 'arities',
        'atom_decl', 'attribute_setup', 'axiomatization', 'bundle',
        'case_of_simps', 'class', 'classes', 'classrel', 'codatatype',
        'code_abort', 'code_class', 'code_const', 'code_datatype',
        'code_identifier', 'code_include', 'code_instance', 'code_modulename',
        'code_monad', 'code_printing', 'code_reflect', 'code_reserved',
        'code_type', 'coinductive', 'coinductive_set', 'consts', 'context',
        'datatype', 'datatype_new', 'datatype_new_compat', 'declaration',
        'declare', 'default_sort', 'defer_recdef', 'definition', 'defs',
        'domain', 'domain_isomorphism', 'domaindef', 'equivariance',
        'export_code', 'extract', 'extract_type', 'fixrec', 'fun',
        'fun_cases', 'hide_class', 'hide_const', 'hide_fact', 'hide_type',
        'import_const_map', 'import_file', 'import_tptp', 'import_type_map',
        'inductive', 'inductive_set', 'instantiation', 'judgment', 'lemmas',
        'lifting_forget', 'lifting_update', 'local_setup', 'locale',
        'method_setup', 'nitpick_params', 'no_adhoc_overloading',
        'no_notation', 'no_syntax', 'no_translations', 'no_type_notation',
        'nominal_datatype', 'nonterminal', 'notation', 'notepad', 'oracle',
        'overloading', 'parse_ast_translation', 'parse_translation',
        'partial_function', 'primcorec', 'primrec', 'primrec_new',
        'print_ast_translation', 'print_translation', 'quickcheck_generator',
        'quickcheck_params', 'realizability', 'realizers', 'recdef', 'record',
        'refute_params', 'setup', 'setup_lifting', 'simproc_setup',
        'simps_of_case', 'sledgehammer_params', 'spark_end', 'spark_open',
        'spark_open_siv', 'spark_open_vcg', 'spark_proof_functions',
        'spark_types', 'statespace', 'syntax', 'syntax_declaration', 'text',
        'text_raw', 'theorems', 'translations', 'type_notation',
        'type_synonym', 'typed_print_translation', 'typedecl', 'hoarestate',
        'install_C_file', 'install_C_types', 'wpc_setup', 'c_defs', 'c_types',
        'memsafe', 'SML_export', 'SML_file', 'SML_import', 'approximate',
        'bnf_axiomatization', 'cartouche', 'datatype_compat',
        'free_constructors', 'functor', 'nominal_function',
        'nominal_termination', 'permanent_interpretation',
        'binds', 'defining', 'smt2_status', 'term_cartouche',
        'boogie_file', 'text_cartouche',
    )
    keyword_theory_script = ('inductive_cases', 'inductive_simps')
    keyword_theory_goal = (
        'ax_specification', 'bnf', 'code_pred', 'corollary', 'cpodef',
        'crunch', 'crunch_ignore',
        'enriched_type', 'function', 'instance', 'interpretation', 'lemma',
        'lift_definition', 'nominal_inductive', 'nominal_inductive2',
        'nominal_primrec', 'pcpodef', 'primcorecursive',
        'quotient_definition', 'quotient_type', 'recdef_tc', 'rep_datatype',
        'schematic_corollary', 'schematic_lemma', 'schematic_theorem',
        'spark_vc', 'specification', 'subclass', 'sublocale', 'termination',
        'theorem', 'typedef', 'wrap_free_constructors',
    )
    keyword_qed = ('by', 'done', 'qed')
    keyword_abandon_proof = ('sorry', 'oops')
    keyword_proof_goal = ('have', 'hence', 'interpret')
    keyword_proof_block = ('next', 'proof')
    keyword_proof_chain = (
        'finally', 'from', 'then', 'ultimately', 'with',
    )
    keyword_proof_decl = (
        'ML_prf', 'also', 'include', 'including', 'let', 'moreover', 'note',
        'txt', 'txt_raw', 'unfolding', 'using', 'write',
    )
    keyword_proof_asm = ('assume', 'case', 'def', 'fix', 'presume')
    keyword_proof_asm_goal = ('guess', 'obtain', 'show', 'thus')
    keyword_proof_script = (
        'apply', 'apply_end', 'apply_trace', 'back', 'defer', 'prefer',
    )
    operators = (
        '::', ':', '(', ')', '[', ']', '_', '=', ',', '|',
        '+', '-', '!', '?',
    )
    proof_operators = ('{', '}', '.', '..')
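
    # These operator tuples are literal strings, not regex fragments: words()
    # escapes them when it builds the alternations used in 'root' below,
    # unlike CoqLexer.keyopts, which stores pre-escaped regex pieces.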

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'\(\*', Comment, 'comment'),
            (r'\{\*', Comment, 'text'),
            (words(operators), Operator),
            (words(proof_operators), Operator.Word),
            (words(keyword_minor, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
            (words(keyword_diag, prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (words(keyword_thy, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_theory_decl, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_section, prefix=r'\b', suffix=r'\b'), Generic.Heading),
            (words(keyword_subsection, prefix=r'\b', suffix=r'\b'), Generic.Subheading),
            (words(keyword_theory_goal, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keyword_theory_script, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keyword_abandon_proof, prefix=r'\b', suffix=r'\b'), Generic.Error),
            (words(keyword_qed, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_goal, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_block, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_decl, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_chain, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_asm, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_asm_goal, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keyword_proof_script, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo),
            (r'\\<\w*>', Text.Symbol),
            (r"[^\W\d][.\w']*", Name),
            (r"\?[^\W\d][.\w']*", Name),
            (r"'[^\W\d][.\w']*", Name.Type),
            # Hex/oct/bin literals must precede the plain digit rule, or the
            # digit rule would swallow their leading zero.
            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
            (r'0[oO][0-7][0-7_]*', Number.Oct),
            (r'0[bB][01][01_]*', Number.Bin),
            (r'\d[\d_]*', Name),  # display numbers as name
  291. (r'"', String, 'string'),
  292. (r'`', String.Other, 'fact'),
  293. ],
        'comment': [
            (r'[^(*)]+', Comment),
            (r'\(\*', Comment, '#push'),
            (r'\*\)', Comment, '#pop'),
            (r'[(*)]', Comment),
        ],
        'text': [
            (r'[^*}]+', Comment),
            (r'\*\}', Comment, '#pop'),
            (r'\*', Comment),
            (r'\}', Comment),
        ],
        'string': [
            (r'[^"\\]+', String),
            (r'\\<\w*>', String.Symbol),
            (r'\\"', String),
            (r'\\', String),
            (r'"', String, '#pop'),
        ],
        'fact': [
            (r'[^`\\]+', String.Other),
            (r'\\<\w*>', String.Symbol),
            (r'\\`', String.Other),
            (r'\\', String.Other),
            (r'`', String.Other, '#pop'),
        ],
    }
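
    # Rough tokenisation sketch for a tiny theory (illustrative):
    #
    #   theory Scratch        'theory'    -> Keyword            (keyword_thy)
    #     imports Main        'imports'   -> Keyword.Pseudo     (keyword_minor)
    #   begin
    #   lemma "A --> A"       'lemma'     -> Keyword.Namespace  (keyword_theory_goal)
    #     by simp             'by'        -> Keyword            (keyword_qed)
    #   end                   '"A --> A"' -> String, 'simp' -> Name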


class LeanLexer(RegexLexer):
    """
    For the `Lean <https://github.com/leanprover/lean>`_
    theorem prover.

    .. versionadded:: 2.0
    """

    name = 'Lean'
    aliases = ['lean']
    filenames = ['*.lean']
    mimetypes = ['text/x-lean']

    flags = re.MULTILINE | re.UNICODE

    keywords1 = (
        'import', 'abbreviation', 'opaque_hint', 'tactic_hint', 'definition',
        'renaming', 'inline', 'hiding', 'exposing', 'parameter', 'parameters',
        'conjecture', 'hypothesis', 'lemma', 'corollary', 'variable', 'variables',
        'theorem', 'axiom', 'inductive', 'structure', 'universe', 'alias',
        'help', 'options', 'precedence', 'postfix', 'prefix', 'calc_trans',
        'calc_subst', 'calc_refl', 'infix', 'infixl', 'infixr', 'notation', 'eval',
        'check', 'exit', 'coercion', 'end', 'private', 'using', 'namespace',
        'including', 'instance', 'section', 'context', 'protected', 'expose',
        'export', 'set_option', 'add_rewrite', 'extends', 'open', 'example',
        'constant', 'constants', 'print', 'opaque', 'reducible', 'irreducible',
    )
    keywords2 = (
        'forall', 'fun', 'Pi', 'obtain', 'from', 'have', 'show', 'assume',
        'take', 'let', 'if', 'else', 'then', 'by', 'in', 'with', 'begin',
        'proof', 'qed', 'calc', 'match',
    )
    keywords3 = (
        # Sorts
        'Type', 'Prop',
    )
    operators = (
        u'!=', u'#', u'&', u'&&', u'*', u'+', u'-', u'/', u'@', u'!', u'`',
        u'-.', u'->', u'.', u'..', u'...', u'::', u':>', u';', u';;', u'<',
        u'<-', u'=', u'==', u'>', u'_', u'|', u'||', u'~', u'=>', u'<=', u'>=',
        u'/\\', u'\\/', u'∀', u'Π', u'λ', u'↔', u'∧', u'∨', u'≠', u'≤', u'≥',
        u'¬', u'⁻¹', u'⬝', u'▸', u'→', u'∃', u'ℕ', u'ℤ', u'≈', u'×', u'⌞',
        u'⌟', u'≡', u'⟨', u'⟩',
    )
    punctuation = (u'(', u')', u':', u'{', u'}', u'[', u']', u'⦃', u'⦄',
                   u':=', u',')
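
    # Rough tokenisation sketch for a short declaration (illustrative):
    #
    #   theorem id_eq : ∀ a : Prop, a → a := λ a H, H
    #
    #   'theorem'      -> Keyword.Namespace   (keywords1)
    #   'Prop'         -> Keyword.Type        (keywords3)
    #   '∀', 'λ', '→'  -> Name.Builtin.Pseudo (operators)
    #   ':', ':=', ',' -> Operator            (punctuation)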

    tokens = {
        'root': [
            (r'\s+', Text),
            (r'/-', Comment, 'comment'),
            (r'--.*?$', Comment.Single),
            (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
            (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
            (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
            (words(operators), Name.Builtin.Pseudo),
            (words(punctuation), Operator),
            (u"[A-Za-z_\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2100-\u214f]"
             u"[A-Za-z_'\u03b1-\u03ba\u03bc-\u03fb\u1f00-\u1ffe\u2070-\u2079"
             u"\u207f-\u2089\u2090-\u209c\u2100-\u214f0-9]*", Name),
            (r'\d+', Number.Integer),
            (r'"', String.Double, 'string'),
            (r'[~?][a-z][\w\']*:', Name.Variable),
        ],
        'comment': [
            # Multiline comments
            (r'[^/-]', Comment.Multiline),
            (r'/-', Comment.Multiline, '#push'),
            (r'-/', Comment.Multiline, '#pop'),
            (r'[/-]', Comment.Multiline),
        ],
        'string': [
            (r'[^\\"]+', String.Double),
            (r'\\[n"\\]', String.Escape),
            ('"', String.Double, '#pop'),
        ],
    }
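

if __name__ == '__main__':
    # Minimal usage sketch: these lexers plug into the standard Pygments
    # pipeline, so the module can be run directly to highlight a small Coq
    # snippet on stdout (TerminalFormatter is just one formatter choice).
    from pygments import highlight
    from pygments.formatters import TerminalFormatter

    sample = (
        "Lemma plus_O_n : forall n, 0 + n = n.\n"
        "Proof. intros n. reflexivity. Qed.\n"
    )
    print(highlight(sample, CoqLexer(), TerminalFormatter()))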