# -*- coding: utf-8 -*-
"""
    pygments.lexers.esoteric
    ~~~~~~~~~~~~~~~~~~~~~~~~

    Lexers for esoteric languages.

    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

from pygments.lexer import RegexLexer, include, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
    Number, Punctuation, Error

__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'CAmkESLexer',
           'CapDLLexer', 'AheuiLexer']


class BrainfuckLexer(RegexLexer):
    """
    Lexer for the esoteric `BrainFuck <http://www.muppetlabs.com/~breadbox/bf/>`_
    language.
    """
    name = 'Brainfuck'
    aliases = ['brainfuck', 'bf']
    filenames = ['*.bf', '*.b']
    mimetypes = ['application/x-brainfuck']

    tokens = {
        'common': [
            # use different colors for different instruction types
            (r'[.,]+', Name.Tag),
            (r'[+-]+', Name.Builtin),
            (r'[<>]+', Name.Variable),
            (r'[^.,+\-<>\[\]]+', Comment),
        ],
        'root': [
            (r'\[', Keyword, 'loop'),
            (r'\]', Error),
            include('common'),
        ],
        'loop': [
            (r'\[', Keyword, '#push'),
            (r'\]', Keyword, '#pop'),
            include('common'),
        ]
    }
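

# A minimal usage sketch (an addition for illustration, not part of the
# original module): lexing a small Brainfuck program shows the state handling
# above in action; '[' pushes the 'loop' state, ']' pops it, and an unmatched
# ']' at the root is emitted as Error.  The helper name and the program text
# are invented for this example.
def _demo_brainfuck():
    from pygments import lex
    for token, value in lex('+[>+.<-]', BrainfuckLexer()):
        print(token, repr(value))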


class BefungeLexer(RegexLexer):
    """
    Lexer for the esoteric `Befunge <http://en.wikipedia.org/wiki/Befunge>`_
    language.

    .. versionadded:: 0.7
    """
    name = 'Befunge'
    aliases = ['befunge']
    filenames = ['*.befunge']
    mimetypes = ['application/x-befunge']

    tokens = {
        'root': [
            (r'[0-9a-f]', Number),
            (r'[+*/%!`-]', Operator),             # Traditional math
            (r'[<>^v?\[\]rxjk]', Name.Variable),  # Move, imperatives
            (r'[:\\$.,n]', Name.Builtin),         # Stack ops, imperatives
            (r'[|_mw]', Keyword),
            (r'[{}]', Name.Tag),                  # Befunge-98 stack ops
            (r'".*?"', String.Double),            # Strings don't appear to allow escapes
            (r'\'.', String.Single),              # Single character
            (r'[#;]', Comment),                   # Trampoline... depends on direction hit
            (r'[pg&~=@iotsy]', Keyword),          # Misc
            (r'[()A-Z]', Comment),                # Fingerprints
            (r'\s+', Text),                       # Whitespace doesn't matter
        ],
    }


class CAmkESLexer(RegexLexer):
    """
    Basic lexer for the input language for the
    `CAmkES <https://sel4.systems/CAmkES/>`_ component platform.

    .. versionadded:: 2.1
    """
    name = 'CAmkES'
    aliases = ['camkes', 'idl4']
    filenames = ['*.camkes', '*.idl4']

    tokens = {
        'root': [
            # C pre-processor directive
            (r'^\s*#.*\n', Comment.Preproc),

            # Whitespace, comments
            (r'\s+', Text),
            (r'/\*(.|\n)*?\*/', Comment),
            (r'//.*\n', Comment),

            (r'[\[(){},.;\]]', Punctuation),
            (r'[~!%^&*+=|?:<>/-]', Operator),

            (words(('assembly', 'attribute', 'component', 'composition',
                    'configuration', 'connection', 'connector', 'consumes',
                    'control', 'dataport', 'Dataport', 'Dataports', 'emits',
                    'event', 'Event', 'Events', 'export', 'from', 'group',
                    'hardware', 'has', 'interface', 'Interface', 'maybe',
                    'procedure', 'Procedure', 'Procedures', 'provides',
                    'template', 'thread', 'threads', 'to', 'uses', 'with'),
                   suffix=r'\b'), Keyword),

            (words(('bool', 'boolean', 'Buf', 'char', 'character', 'double',
                    'float', 'in', 'inout', 'int', 'int16_t', 'int32_t',
                    'int64_t', 'int8_t', 'integer', 'mutex', 'out', 'real',
                    'refin', 'semaphore', 'signed', 'string', 'struct',
                    'uint16_t', 'uint32_t', 'uint64_t', 'uint8_t', 'uintptr_t',
                    'unsigned', 'void'),
                   suffix=r'\b'), Keyword.Type),

            # Recognised attributes
            (r'[a-zA-Z_]\w*_(priority|domain|buffer)', Keyword.Reserved),
            (words(('dma_pool', 'from_access', 'to_access'), suffix=r'\b'),
             Keyword.Reserved),

            # CAmkES-level include
            (r'import\s+(<[^>]*>|"[^"]*");', Comment.Preproc),

            # C-level include
            (r'include\s+(<[^>]*>|"[^"]*");', Comment.Preproc),

            # Literals (the float rule must precede the integer rule so that
            # the fractional part is not split off)
            (r'0[xX][\da-fA-F]+', Number.Hex),
            (r'-?[\d]+\.[\d]+', Number.Float),
            (r'-?[\d]+', Number),
            (r'"[^"]*"', String),
            (r'[Tt]rue|[Ff]alse', Name.Builtin),

            # Identifiers
            (r'[a-zA-Z_]\w*', Name),
        ],
    }
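

# A brief usage sketch (an addition for illustration, not part of the
# original module): the lexer can be exercised directly through the standard
# Lexer.get_tokens() API.  The helper name and the CAmkES snippet are
# invented for this example.
def _demo_camkes():
    camkes_src = 'component Echo {\n    control;\n    uses Hello h;\n}\n'
    for token, value in CAmkESLexer().get_tokens(camkes_src):
        print(token, repr(value))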


class CapDLLexer(RegexLexer):
    """
    Basic lexer for
    `CapDL <https://ssrg.nicta.com.au/publications/nictaabstracts/Kuz_KLW_10.abstract.pml>`_.

    The source of the primary tool that reads such specifications is available
    at https://github.com/seL4/capdl/tree/master/capDL-tool. Note that this
    lexer only supports a subset of the grammar. For example, identifiers can
    shadow type names, but these instances are currently incorrectly
    highlighted as types. Supporting this would need a stateful lexer that is
    considered unnecessarily complex for now.

    .. versionadded:: 2.2
    """
    name = 'CapDL'
    aliases = ['capdl']
    filenames = ['*.cdl']

    tokens = {
        'root': [
            # C pre-processor directive
            (r'^\s*#.*\n', Comment.Preproc),

            # Whitespace, comments
            (r'\s+', Text),
            (r'/\*(.|\n)*?\*/', Comment),
            (r'(//|--).*\n', Comment),

            (r'[<>\[(){},:;=\]]', Punctuation),
            (r'\.\.', Punctuation),

            (words(('arch', 'arm11', 'caps', 'child_of', 'ia32', 'irq', 'maps',
                    'objects'), suffix=r'\b'), Keyword),

            (words(('aep', 'asid_pool', 'cnode', 'ep', 'frame', 'io_device',
                    'io_ports', 'io_pt', 'notification', 'pd', 'pt', 'tcb',
                    'ut', 'vcpu'), suffix=r'\b'), Keyword.Type),

            # Properties
            (words(('asid', 'addr', 'badge', 'cached', 'dom', 'domainID', 'elf',
                    'fault_ep', 'G', 'guard', 'guard_size', 'init', 'ip',
                    'prio', 'sp', 'R', 'RG', 'RX', 'RW', 'RWG', 'RWX', 'W',
                    'WG', 'WX', 'level', 'masked', 'master_reply', 'paddr',
                    'ports', 'reply', 'uncached'), suffix=r'\b'),
             Keyword.Reserved),

            # Literals
            (r'0[xX][\da-fA-F]+', Number.Hex),
            (r'\d+(\.\d+)?(k|M)?', Number),
            (words(('bits',), suffix=r'\b'), Number),
            (words(('cspace', 'vspace', 'reply_slot', 'caller_slot',
                    'ipc_buffer_slot'), suffix=r'\b'), Number),

            # Identifiers
            (r'[a-zA-Z_][-@\.\w]*', Name),
        ],
    }


class RedcodeLexer(RegexLexer):
    """
    A simple Redcode lexer based on ICWS'94.
    Contributed by Adam Blinkinsop <blinks@acm.org>.

    .. versionadded:: 0.8
    """
    name = 'Redcode'
    aliases = ['redcode']
    filenames = ['*.cw']

    opcodes = ('DAT', 'MOV', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD',
               'JMP', 'JMZ', 'JMN', 'DJN', 'CMP', 'SLT', 'SPL',
               'ORG', 'EQU', 'END')
    modifiers = ('A', 'B', 'AB', 'BA', 'F', 'X', 'I')

    tokens = {
        'root': [
            # Whitespace:
            (r'\s+', Text),
            (r';.*$', Comment.Single),
            # Lexemes:
            #  Identifiers
            (r'\b(%s)\b' % '|'.join(opcodes), Name.Function),
            (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator),
            (r'[A-Za-z_]\w+', Name),
            #  Operators
            (r'[-+*/%]', Operator),
            (r'[#$@<>]', Operator),  # mode
            (r'[.,]', Punctuation),  # mode
            #  Numbers
            (r'[-+]?\d+', Number.Integer),
        ],
    }


class AheuiLexer(RegexLexer):
    """
    Aheui_ Lexer.

    Aheui_ is an esoteric language based on the Korean alphabet.

    .. _Aheui: http://aheui.github.io/
    """
    name = 'Aheui'
    aliases = ['aheui']
    filenames = ['*.aheui']

    tokens = {
        'root': [
            (u'['
             u'나-낳냐-냫너-넣녀-녛노-놓뇨-눟뉴-닇'
             u'다-닿댜-댷더-덯뎌-뎧도-돟됴-둫듀-딓'
             u'따-땋땨-떃떠-떻뗘-뗳또-똫뚀-뚷뜌-띟'
             u'라-랗랴-럏러-렇려-렿로-롷료-뤃류-릫'
             u'마-맣먀-먛머-멓며-몋모-뫃묘-뭏뮤-믷'
             u'바-밯뱌-뱧버-벟벼-볗보-봏뵤-붛뷰-빃'
             u'빠-빻뺘-뺳뻐-뻫뼈-뼣뽀-뽛뾰-뿧쀼-삏'
             u'사-샇샤-샿서-섷셔-셯소-솧쇼-숳슈-싛'
             u'싸-쌓쌰-썋써-쎃쎠-쎻쏘-쏳쑈-쑿쓔-씧'
             u'자-잫쟈-쟣저-젛져-졓조-좋죠-줗쥬-즿'
             u'차-챃챠-챻처-첳쳐-쳫초-촣쵸-춯츄-칗'
             u'카-캏캬-컇커-컿켜-켷코-콯쿄-쿻큐-킣'
             u'타-탛탸-턓터-텋텨-톃토-톻툐-퉇튜-틯'
             u'파-팧퍄-퍟퍼-펗펴-폏포-퐇표-풓퓨-픻'
             u'하-핳햐-햫허-헣혀-혛호-홓효-훟휴-힇'
             u']', Operator),
            ('.', Comment),
        ],
    }
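

# Usage sketch (an addition for illustration, not part of the original
# module): in typical use these classes are looked up through the Pygments
# registry by one of the aliases declared above and paired with a formatter.
# The Befunge snippet is illustrative only.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    lexer = get_lexer_by_name('befunge')
    print(highlight('64+"!dlroW ,olleH">:#,_@', lexer, TerminalFormatter()))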