  1. """
  2. pygments.lexers.lean
  3. ~~~~~~~~~~~~~~~~~~~~
  4. Lexers for the Lean theorem prover.
  5. :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
  6. :license: BSD, see LICENSE for details.
  7. """
  8. import re
  9. from pygments.lexer import RegexLexer, words, include
  10. from pygments.token import Comment, Operator, Keyword, Name, String, \
  11. Number, Generic, Whitespace
  12. __all__ = ['Lean3Lexer', 'Lean4Lexer']
  13. class Lean3Lexer(RegexLexer):
  14. """
  15. For the Lean 3 theorem prover.
  16. """
  17. name = 'Lean'
  18. url = 'https://leanprover-community.github.io/lean3'
  19. aliases = ['lean', 'lean3']
  20. filenames = ['*.lean']
  21. mimetypes = ['text/x-lean', 'text/x-lean3']
  22. version_added = '2.0'
  23. # from https://github.com/leanprover/vscode-lean/blob/1589ca3a65e394b3789409707febbd2d166c9344/syntaxes/lean.json#L186C20-L186C217
  24. _name_segment = (
  25. "(?![λΠΣ])[_a-zA-Zα-ωΑ-Ωϊ-ϻἀ-῾℀-⅏𝒜-𝖟]"
  26. "(?:(?![λΠΣ])[_a-zA-Zα-ωΑ-Ωϊ-ϻἀ-῾℀-⅏𝒜-𝖟0-9'ⁿ-₉ₐ-ₜᵢ-ᵪ])*")
  27. _name = _name_segment + r"(\." + _name_segment + r")*"
  28. tokens = {
  29. 'expression': [
  30. (r'\s+', Whitespace),
  31. (r'/--', String.Doc, 'docstring'),
  32. (r'/-', Comment, 'comment'),
  33. (r'--.*?$', Comment.Single),
  34. (words((
  35. 'forall', 'fun', 'Pi', 'from', 'have', 'show', 'assume', 'suffices',
  36. 'let', 'if', 'else', 'then', 'in', 'with', 'calc', 'match',
  37. 'do'
  38. ), prefix=r'\b', suffix=r'\b'), Keyword),
  39. (words(('sorry', 'admit'), prefix=r'\b', suffix=r'\b'), Generic.Error),
  40. (words(('Sort', 'Prop', 'Type'), prefix=r'\b', suffix=r'\b'), Keyword.Type),
  41. (words((
  42. '(', ')', ':', '{', '}', '[', ']', '⟨', '⟩', '‹', '›', '⦃', '⦄', ':=', ',',
  43. )), Operator),
  44. (_name, Name),
  45. (r'``?' + _name, String.Symbol),
  46. (r'0x[A-Za-z0-9]+', Number.Integer),
  47. (r'0b[01]+', Number.Integer),
  48. (r'\d+', Number.Integer),
  49. (r'"', String.Double, 'string'),
  50. (r"'(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4})|.)'", String.Char),
  51. (r'[~?][a-z][\w\']*:', Name.Variable),
  52. (r'\S', Name.Builtin.Pseudo),
  53. ],
  54. 'root': [
  55. (words((
  56. 'import', 'renaming', 'hiding',
  57. 'namespace',
  58. 'local',
  59. 'private', 'protected', 'section',
  60. 'include', 'omit', 'section',
  61. 'protected', 'export',
  62. 'open',
  63. 'attribute',
  64. ), prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
  65. (words((
  66. 'lemma', 'theorem', 'def', 'definition', 'example',
  67. 'axiom', 'axioms', 'constant', 'constants',
  68. 'universe', 'universes',
  69. 'inductive', 'coinductive', 'structure', 'extends',
  70. 'class', 'instance',
  71. 'abbreviation',
  72. 'noncomputable theory',
  73. 'noncomputable', 'mutual', 'meta',
  74. 'attribute',
  75. 'parameter', 'parameters',
  76. 'variable', 'variables',
  77. 'reserve', 'precedence',
  78. 'postfix', 'prefix', 'notation', 'infix', 'infixl', 'infixr',
  79. 'begin', 'by', 'end',
  80. 'set_option',
  81. 'run_cmd',
  82. ), prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
  83. (r'@\[', Keyword.Declaration, 'attribute'),
  84. (words((
  85. '#eval', '#check', '#reduce', '#exit',
  86. '#print', '#help',
  87. ), suffix=r'\b'), Keyword),
  88. include('expression')
  89. ],
  90. 'attribute': [
  91. (r'\]', Keyword.Declaration, '#pop'),
  92. include('expression'),
  93. ],
  94. 'comment': [
  95. (r'[^/-]+', Comment.Multiline),
  96. (r'/-', Comment.Multiline, '#push'),
  97. (r'-/', Comment.Multiline, '#pop'),
  98. (r'[/-]', Comment.Multiline)
  99. ],
  100. 'docstring': [
  101. (r'[^/-]+', String.Doc),
  102. (r'-/', String.Doc, '#pop'),
  103. (r'[/-]', String.Doc)
  104. ],
  105. 'string': [
  106. (r'[^\\"]+', String.Double),
  107. (r"(?:(\\[\\\"'nt])|(\\x[0-9a-fA-F]{2})|(\\u[0-9a-fA-F]{4}))", String.Escape),
  108. ('"', String.Double, '#pop'),
  109. ],
  110. }
  111. def analyse_text(text):
  112. if re.search(r'^import [a-z]', text, re.MULTILINE):
  113. return 0.1
  114. LeanLexer = Lean3Lexer
  115. class Lean4Lexer(RegexLexer):
  116. """
  117. For the Lean 4 theorem prover.
  118. """
  119. name = 'Lean4'
  120. url = 'https://github.com/leanprover/lean4'
  121. aliases = ['lean4']
  122. filenames = ['*.lean']
  123. mimetypes = ['text/x-lean4']
  124. version_added = '2.18'
  125. # same as Lean3Lexer, with `!` and `?` allowed
  126. _name_segment = (
  127. "(?![λΠΣ])[_a-zA-Zα-ωΑ-Ωϊ-ϻἀ-῾℀-⅏𝒜-𝖟]"
  128. "(?:(?![λΠΣ])[_a-zA-Zα-ωΑ-Ωϊ-ϻἀ-῾℀-⅏𝒜-𝖟0-9'ⁿ-₉ₐ-ₜᵢ-ᵪ!?])*")
  129. _name = _name_segment + r"(\." + _name_segment + r")*"
  130. keywords1 = (
  131. 'import', 'unif_hint',
  132. 'renaming', 'inline', 'hiding', 'lemma', 'variable',
  133. 'theorem', 'axiom', 'inductive', 'structure', 'universe', 'alias',
  134. '#help', 'precedence', 'postfix', 'prefix',
  135. 'infix', 'infixl', 'infixr', 'notation', '#eval',
  136. '#check', '#reduce', '#exit', 'end', 'private', 'using', 'namespace',
  137. 'instance', 'section', 'protected',
  138. 'export', 'set_option', 'extends', 'open', 'example',
  139. '#print', 'opaque',
  140. 'def', 'macro', 'elab', 'syntax', 'macro_rules', '#reduce', 'where',
  141. 'abbrev', 'noncomputable', 'class', 'attribute', '#synth', 'mutual',
  142. 'scoped', 'local',
  143. )
  144. keywords2 = (
  145. 'forall', 'fun', 'obtain', 'from', 'have', 'show', 'assume',
  146. 'let', 'if', 'else', 'then', 'by', 'in', 'with',
  147. 'calc', 'match', 'nomatch', 'do', 'at',
  148. )
  149. keywords3 = (
  150. # Sorts
  151. 'Type', 'Prop', 'Sort',
  152. )
  153. operators = (
  154. '!=', '#', '&', '&&', '*', '+', '-', '/', '@', '!',
  155. '-.', '->', '.', '..', '...', '::', ':>', ';', ';;', '<',
  156. '<-', '=', '==', '>', '_', '|', '||', '~', '=>', '<=', '>=',
  157. '/\\', '\\/', '∀', 'Π', 'λ', '↔', '∧', '∨', '≠', '≤', '≥',
  158. '¬', '⁻¹', '⬝', '▸', '→', '∃', '≈', '×', '⌞',
  159. '⌟', '≡', '⟨', '⟩', "↦",
  160. )
  161. punctuation = ('(', ')', ':', '{', '}', '[', ']', '⦃', '⦄',
  162. ':=', ',')
  163. tokens = {
  164. 'expression': [
  165. (r'\s+', Whitespace),
  166. (r'/--', String.Doc, 'docstring'),
  167. (r'/-', Comment, 'comment'),
  168. (r'--.*$', Comment.Single),
  169. (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type),
  170. (words(('sorry', 'admit'), prefix=r'\b', suffix=r'\b'), Generic.Error),
  171. (words(operators), Name.Builtin.Pseudo),
  172. (words(punctuation), Operator),
  173. (_name_segment, Name),
  174. (r'``?' + _name, String.Symbol),
  175. (r'(?<=\.)\d+', Number),
  176. (r'(\d+\.\d*)([eE][+-]?[0-9]+)?', Number.Float),
  177. (r'\d+', Number.Integer),
  178. (r'"', String.Double, 'string'),
  179. (r'[~?][a-z][\w\']*:', Name.Variable),
  180. (r'\S', Name.Builtin.Pseudo),
  181. ],
  182. 'root': [
  183. (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace),
  184. (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword),
  185. (r'@\[', Keyword.Declaration, 'attribute'),
  186. include('expression')
  187. ],
  188. 'attribute': [
  189. (r'\]', Keyword.Declaration, '#pop'),
  190. include('expression'),
  191. ],
  192. 'comment': [
  193. # Multiline Comments
  194. (r'[^/-]+', Comment.Multiline),
  195. (r'/-', Comment.Multiline, '#push'),
  196. (r'-/', Comment.Multiline, '#pop'),
  197. (r'[/-]', Comment.Multiline)
  198. ],
  199. 'docstring': [
  200. (r'[^/-]+', String.Doc),
  201. (r'-/', String.Doc, '#pop'),
  202. (r'[/-]', String.Doc)
  203. ],
  204. 'string': [
  205. (r'[^\\"]+', String.Double),
  206. (r'\\[n"\\\n]', String.Escape),
  207. ('"', String.Double, '#pop'),
  208. ],
  209. }
  210. def analyse_text(text):
  211. if re.search(r'^import [A-Z]', text, re.MULTILINE):
  212. return 0.1