dylan.py 10 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281
  1. """
  2. pygments.lexers.dylan
  3. ~~~~~~~~~~~~~~~~~~~~~
  4. Lexers for the Dylan language.
  5. :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
  6. :license: BSD, see LICENSE for details.
  7. """
  8. import re
  9. from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \
  10. default, line_re
  11. from pygments.token import Comment, Operator, Keyword, Name, String, \
  12. Number, Punctuation, Generic, Literal, Whitespace
  13. __all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer']
  14. class DylanLexer(RegexLexer):
  15. """
  16. For the Dylan language.
  17. .. versionadded:: 0.7
  18. """
  19. name = 'Dylan'
  20. url = 'http://www.opendylan.org/'
  21. aliases = ['dylan']
  22. filenames = ['*.dylan', '*.dyl', '*.intr']
  23. mimetypes = ['text/x-dylan']
  24. flags = re.IGNORECASE
  25. builtins = {
  26. 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class',
  27. 'compiler-open', 'compiler-sideways', 'domain', 'dynamic',
  28. 'each-subclass', 'exception', 'exclude', 'function', 'generic',
  29. 'handler', 'inherited', 'inline', 'inline-only', 'instance',
  30. 'interface', 'import', 'keyword', 'library', 'macro', 'method',
  31. 'module', 'open', 'primary', 'required', 'sealed', 'sideways',
  32. 'singleton', 'slot', 'thread', 'variable', 'virtual'}
  33. keywords = {
  34. 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup',
  35. 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally',
  36. 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename',
  37. 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when',
  38. 'while'}
  39. operators = {
  40. '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=',
  41. '>', '>=', '&', '|'}
  42. functions = {
  43. 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!',
  44. 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply',
  45. 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!',
  46. 'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol',
  47. 'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose',
  48. 'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as',
  49. 'condition-format-arguments', 'condition-format-string', 'conjoin',
  50. 'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions',
  51. 'direct-subclasses', 'direct-superclasses', 'disjoin', 'do',
  52. 'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?',
  53. 'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first',
  54. 'first-setter', 'floor', 'floor/', 'forward-iteration-protocol',
  55. 'function-arguments', 'function-return-values',
  56. 'function-specializers', 'gcd', 'generic-function-mandatory-keywords',
  57. 'generic-function-methods', 'head', 'head-setter', 'identity',
  58. 'initialize', 'instance?', 'integral?', 'intersection',
  59. 'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited',
  60. 'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make',
  61. 'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes',
  62. 'min', 'modulo', 'negative', 'negative?', 'next-method',
  63. 'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop',
  64. 'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank',
  65. 'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!',
  66. 'remove-duplicates', 'remove-duplicates!', 'remove-key!',
  67. 'remove-method', 'replace-elements!', 'replace-subsequence!',
  68. 'restart-query', 'return-allowed?', 'return-description',
  69. 'return-query', 'reverse', 'reverse!', 'round', 'round/',
  70. 'row-major-index', 'second', 'second-setter', 'shallow-copy',
  71. 'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?',
  72. 'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position',
  73. 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third',
  74. 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type',
  75. 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values',
  76. 'vector', 'zero?'}
  77. valid_name = '\\\\?[\\w!&*<>|^$%@\\-+~?/=]+'
  78. def get_tokens_unprocessed(self, text):
  79. for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
  80. if token is Name:
  81. lowercase_value = value.lower()
  82. if lowercase_value in self.builtins:
  83. yield index, Name.Builtin, value
  84. continue
  85. if lowercase_value in self.keywords:
  86. yield index, Keyword, value
  87. continue
  88. if lowercase_value in self.functions:
  89. yield index, Name.Builtin, value
  90. continue
  91. if lowercase_value in self.operators:
  92. yield index, Operator, value
  93. continue
  94. yield index, token, value
  95. tokens = {
  96. 'root': [
  97. # Whitespace
  98. (r'\s+', Whitespace),
  99. # single line comment
  100. (r'//.*?\n', Comment.Single),
  101. # lid header
  102. (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
  103. bygroups(Name.Attribute, Operator, Whitespace, String)),
  104. default('code') # no header match, switch to code
  105. ],
  106. 'code': [
  107. # Whitespace
  108. (r'\s+', Whitespace),
  109. # single line comment
  110. (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),
  111. # multi-line comment
  112. (r'/\*', Comment.Multiline, 'comment'),
  113. # strings and characters
  114. (r'"', String, 'string'),
  115. (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char),
  116. # binary integer
  117. (r'#b[01]+', Number.Bin),
  118. # octal integer
  119. (r'#o[0-7]+', Number.Oct),
  120. # floating point
  121. (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float),
  122. # decimal integer
  123. (r'[-+]?\d+', Number.Integer),
  124. # hex integer
  125. (r'#x[0-9a-f]+', Number.Hex),
  126. # Macro parameters
  127. (r'(\?' + valid_name + ')(:)'
  128. r'(token|name|variable|expression|body|case-body|\*)',
  129. bygroups(Name.Tag, Operator, Name.Builtin)),
  130. (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)',
  131. bygroups(Name.Tag, Operator, Name.Builtin)),
  132. (r'\?' + valid_name, Name.Tag),
  133. # Punctuation
  134. (r'(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])', Punctuation),
  135. # Most operators are picked up as names and then re-flagged.
  136. # This one isn't valid in a name though, so we pick it up now.
  137. (r':=', Operator),
  138. # Pick up #t / #f before we match other stuff with #.
  139. (r'#[tf]', Literal),
  140. # #"foo" style keywords
  141. (r'#"', String.Symbol, 'keyword'),
  142. # #rest, #key, #all-keys, etc.
  143. (r'#[a-z0-9-]+', Keyword),
  144. # required-init-keyword: style keywords.
  145. (valid_name + ':', Keyword),
  146. # class names
  147. ('<' + valid_name + '>', Name.Class),
  148. # define variable forms.
  149. (r'\*' + valid_name + r'\*', Name.Variable.Global),
  150. # define constant forms.
  151. (r'\$' + valid_name, Name.Constant),
  152. # everything else. We re-flag some of these in the method above.
  153. (valid_name, Name),
  154. ],
  155. 'comment': [
  156. (r'[^*/]+', Comment.Multiline),
  157. (r'/\*', Comment.Multiline, '#push'),
  158. (r'\*/', Comment.Multiline, '#pop'),
  159. (r'[*/]', Comment.Multiline)
  160. ],
  161. 'keyword': [
  162. (r'"', String.Symbol, '#pop'),
  163. (r'[^\\"]+', String.Symbol), # all other characters
  164. ],
  165. 'string': [
  166. (r'"', String, '#pop'),
  167. (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape),
  168. (r'[^\\"\n]+', String), # all other characters
  169. (r'\\\n', String), # line continuation
  170. (r'\\', String), # stray backslash
  171. ]
  172. }
class DylanLidLexer(RegexLexer):
    """
    For Dylan LID (Library Interchange Definition) files.

    .. versionadded:: 1.6
    """

    name = 'DylanLID'
    aliases = ['dylan-lid', 'lid']
    filenames = ['*.lid', '*.hdp']
    mimetypes = ['text/x-dylan-lid']
    # Header keywords are matched case-insensitively.
    flags = re.IGNORECASE

    tokens = {
        'root': [
            # Whitespace
            (r'\s+', Whitespace),

            # single line comment
            (r'(//.*?)(\n)', bygroups(Comment.Single, Whitespace)),

            # lid header: "keyword: value", where the value may continue
            # over subsequent lines that begin with whitespace.
            (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)',
             bygroups(Name.Attribute, Operator, Whitespace, String)),
        ]
    }
  194. class DylanConsoleLexer(Lexer):
  195. """
  196. For Dylan interactive console output.
  197. This is based on a copy of the RubyConsoleLexer.
  198. .. versionadded:: 1.6
  199. """
  200. name = 'Dylan session'
  201. aliases = ['dylan-console', 'dylan-repl']
  202. filenames = ['*.dylan-console']
  203. mimetypes = ['text/x-dylan-console']
  204. _example = 'dylan-console/console'
  205. _prompt_re = re.compile(r'\?| ')
  206. def get_tokens_unprocessed(self, text):
  207. dylexer = DylanLexer(**self.options)
  208. curcode = ''
  209. insertions = []
  210. for match in line_re.finditer(text):
  211. line = match.group()
  212. m = self._prompt_re.match(line)
  213. if m is not None:
  214. end = m.end()
  215. insertions.append((len(curcode),
  216. [(0, Generic.Prompt, line[:end])]))
  217. curcode += line[end:]
  218. else:
  219. if curcode:
  220. yield from do_insertions(insertions,
  221. dylexer.get_tokens_unprocessed(curcode))
  222. curcode = ''
  223. insertions = []
  224. yield match.start(), Generic.Output, line
  225. if curcode:
  226. yield from do_insertions(insertions,
  227. dylexer.get_tokens_unprocessed(curcode))