"""
    pygments.lexers.rdf
    ~~~~~~~~~~~~~~~~~~~

    Lexers for semantic web and RDF query languages and markup.

    :copyright: Copyright 2006-2024 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import re

from pygments.lexer import RegexLexer, bygroups, default
from pygments.token import Keyword, Punctuation, String, Number, Operator, \
    Generic, Whitespace, Name, Literal, Comment, Text

__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']


class SparqlLexer(RegexLexer):
    """
    Lexer for SPARQL query language.
    """
    name = 'SPARQL'
    aliases = ['sparql']
    filenames = ['*.rq', '*.sparql']
    mimetypes = ['application/sparql-query']
    url = 'https://www.w3.org/TR/sparql11-query'
    version_added = '2.0'

    # character group definitions ::

    PN_CHARS_BASE_GRP = ('a-zA-Z'
                         '\u00c0-\u00d6'
                         '\u00d8-\u00f6'
                         '\u00f8-\u02ff'
                         '\u0370-\u037d'
                         '\u037f-\u1fff'
                         '\u200c-\u200d'
                         '\u2070-\u218f'
                         '\u2c00-\u2fef'
                         '\u3001-\ud7ff'
                         '\uf900-\ufdcf'
                         '\ufdf0-\ufffd')

    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')

    PN_CHARS_GRP = (PN_CHARS_U_GRP +
                    r'\-' +
                    r'0-9' +
                    '\u00b7' +
                    '\u0300-\u036f' +
                    '\u203f-\u2040')

    HEX_GRP = '0-9A-Fa-f'

    PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'

    # terminal productions ::

    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
    PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
    PN_CHARS = '[' + PN_CHARS_GRP + ']'
    HEX = '[' + HEX_GRP + ']'
    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'

    IRIREF = r'<(?:[^<>"{}|^`\\\x00-\x20])*>'

    BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
                       '.]*' + PN_CHARS + ')?'

    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'

    VARNAME = '[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \
              '0-9\u00b7\u0300-\u036f\u203f-\u2040]*'

    PERCENT = '%' + HEX + HEX

    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS

    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'

    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
                PN_CHARS_GRP + ':]|' + PLX + '))?')

    EXPONENT = r'[eE][+-]?\d+'

    # Lexer token definitions ::

    tokens = {
        'root': [
            (r'\s+', Text),
            # keywords ::
            (r'(?i)(select|construct|describe|ask|where|filter|group\s+by|minus|'
             r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
             r'offset|values|bindings|load|into|clear|drop|create|add|move|copy|'
             r'insert\s+data|delete\s+data|delete\s+where|with|delete|insert|'
             r'using\s+named|using|graph|default|named|all|optional|service|'
             r'silent|bind|undef|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword),
            (r'(a)\b', Keyword),
            # IRIs ::
            ('(' + IRIREF + ')', Name.Label),
            # blank nodes ::
            ('(' + BLANK_NODE_LABEL + ')', Name.Label),
            # variables ::
            ('[?$]' + VARNAME, Name.Variable),
            # prefixed names ::
            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
             bygroups(Name.Namespace, Punctuation, Name.Tag)),
            # function names ::
            (r'(?i)(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
             r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
             r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
             r'hours|minutes|seconds|timezone|tz|now|uuid|struuid|md5|sha1|sha256|sha384|'
             r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
             r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|'
             r'count|sum|min|max|avg|sample|group_concat|separator)\b',
             Name.Function),
            # boolean literals ::
            (r'(true|false)', Keyword.Constant),
            # double literals ::
            (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
            # decimal literals ::
            (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
            # integer literals ::
            (r'[+\-]?\d+', Number.Integer),
            # operators ::
            (r'(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)', Operator),
            # punctuation characters ::
            (r'[(){}.;,:^\[\]]', Punctuation),
            # line comments ::
            (r'#[^\n]*', Comment),
            # strings ::
            (r'"""', String, 'triple-double-quoted-string'),
            (r'"', String, 'single-double-quoted-string'),
            (r"'''", String, 'triple-single-quoted-string'),
            (r"'", String, 'single-single-quoted-string'),
        ],
        'triple-double-quoted-string': [
            (r'"""', String, 'end-of-string'),
            (r'[^\\]+', String),
            (r'\\', String, 'string-escape'),
        ],
        'single-double-quoted-string': [
            (r'"', String, 'end-of-string'),
            (r'[^"\\\n]+', String),
            (r'\\', String, 'string-escape'),
        ],
        'triple-single-quoted-string': [
            (r"'''", String, 'end-of-string'),
            (r'[^\\]+', String),
            (r'\\', String.Escape, 'string-escape'),
        ],
        'single-single-quoted-string': [
            (r"'", String, 'end-of-string'),
            (r"[^'\\\n]+", String),
            (r'\\', String, 'string-escape'),
        ],
        'string-escape': [
            (r'u' + HEX + '{4}', String.Escape, '#pop'),
            (r'U' + HEX + '{8}', String.Escape, '#pop'),
            (r'.', String.Escape, '#pop'),
        ],
        'end-of-string': [
            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
             bygroups(Operator, Name.Function), '#pop:2'),
            (r'\^\^', Operator, '#pop:2'),
            default('#pop:2'),
        ],
    }
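
# Illustrative usage sketch (not part of the module itself): like any
# RegexLexer, SparqlLexer can be driven directly through the standard
# Lexer.get_tokens() API to inspect the token stream it produces.
#
#     from pygments.lexers.rdf import SparqlLexer
#
#     query = 'SELECT ?name WHERE { ?person foaf:name ?name } LIMIT 10'
#     for token_type, value in SparqlLexer().get_tokens(query):
#         print(token_type, repr(value))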


class TurtleLexer(RegexLexer):
    """
    Lexer for Turtle data language.
    """
    name = 'Turtle'
    aliases = ['turtle']
    filenames = ['*.ttl']
    mimetypes = ['text/turtle', 'application/x-turtle']
    url = 'https://www.w3.org/TR/turtle'
    version_added = '2.1'

    # character group definitions ::

    PN_CHARS_BASE_GRP = ('a-zA-Z'
                         '\u00c0-\u00d6'
                         '\u00d8-\u00f6'
                         '\u00f8-\u02ff'
                         '\u0370-\u037d'
                         '\u037f-\u1fff'
                         '\u200c-\u200d'
                         '\u2070-\u218f'
                         '\u2c00-\u2fef'
                         '\u3001-\ud7ff'
                         '\uf900-\ufdcf'
                         '\ufdf0-\ufffd')

    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')

    PN_CHARS_GRP = (PN_CHARS_U_GRP +
                    r'\-' +
                    r'0-9' +
                    '\u00b7' +
                    '\u0300-\u036f' +
                    '\u203f-\u2040')

    PN_CHARS = '[' + PN_CHARS_GRP + ']'
    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'

    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'

    HEX_GRP = '0-9A-Fa-f'
    HEX = '[' + HEX_GRP + ']'

    PERCENT = '%' + HEX + HEX

    PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS

    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'

    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
                PN_CHARS_GRP + ':]|' + PLX + '))?')

    patterns = {
        'PNAME_NS': r'((?:[a-zA-Z][\w-]*)?\:)',  # Simplified character range
        'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)'
    }

    tokens = {
        'root': [
            (r'\s+', Text),
            # Base / prefix
            (r'(@base|BASE)(\s+){IRIREF}(\s*)(\.?)'.format(**patterns),
             bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
                      Punctuation)),
            (r'(@prefix|PREFIX)(\s+){PNAME_NS}(\s+){IRIREF}(\s*)(\.?)'.format(**patterns),
             bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
                      Name.Variable, Whitespace, Punctuation)),
            # The shorthand predicate 'a'
            (r'(?<=\s)a(?=\s)', Keyword.Type),
            # IRIREF
            (r'{IRIREF}'.format(**patterns), Name.Variable),
            # PrefixedName
            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
             bygroups(Name.Namespace, Punctuation, Name.Tag)),
            # BlankNodeLabel
            (r'(_)(:)([' + PN_CHARS_U_GRP + r'0-9]([' + PN_CHARS_GRP + r'.]*' + PN_CHARS + ')?)',
             bygroups(Name.Namespace, Punctuation, Name.Tag)),
            # Comment
            (r'#[^\n]+', Comment),
            (r'\b(true|false)\b', Literal),
            (r'[+\-]?\d*\.\d+', Number.Float),
            (r'[+\-]?\d*(?:\.\d+)?E[+\-]?\d+', Number.Float),
            (r'[+\-]?\d+', Number.Integer),
            (r'[\[\](){}.;,:^]', Punctuation),
            (r'"""', String, 'triple-double-quoted-string'),
            (r'"', String, 'single-double-quoted-string'),
            (r"'''", String, 'triple-single-quoted-string'),
            (r"'", String, 'single-single-quoted-string'),
        ],
        'triple-double-quoted-string': [
            (r'"""', String, 'end-of-string'),
            (r'[^\\]+', String),
            (r'\\', String, 'string-escape'),
        ],
        'single-double-quoted-string': [
            (r'"', String, 'end-of-string'),
            (r'[^"\\\n]+', String),
            (r'\\', String, 'string-escape'),
        ],
        'triple-single-quoted-string': [
            (r"'''", String, 'end-of-string'),
            (r'[^\\]+', String),
            (r'\\', String, 'string-escape'),
        ],
        'single-single-quoted-string': [
            (r"'", String, 'end-of-string'),
            (r"[^'\\\n]+", String),
            (r'\\', String, 'string-escape'),
        ],
        'string-escape': [
            (r'.', String, '#pop'),
        ],
        'end-of-string': [
            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
             bygroups(Operator, Generic.Emph), '#pop:2'),
            (r'(\^\^){IRIREF}'.format(**patterns), bygroups(Operator, Generic.Emph), '#pop:2'),
            default('#pop:2'),
        ],
    }

    # Turtle and Tera Term macro files share the same file extension
    # but each has a recognizable and distinct syntax.
    def analyse_text(text):
        for t in ('@base ', 'BASE ', '@prefix ', 'PREFIX '):
            if re.search(rf'^\s*{t}', text):
                return 0.80
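
# The score returned by analyse_text() above is what lets content-based lexer
# guessing prefer Turtle over the Tera Term lexer for *.ttl-style input. A small
# sketch of how it is typically exercised, using the standard guess_lexer API:
#
#     from pygments.lexers import guess_lexer
#
#     lexer = guess_lexer('@prefix ex: <http://example.org/> .')
#     print(lexer.name)  # expected to report 'Turtle' for input like this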


class ShExCLexer(RegexLexer):
    """
    Lexer for ShExC shape expressions language syntax.
    """
    name = 'ShExC'
    aliases = ['shexc', 'shex']
    filenames = ['*.shex']
    mimetypes = ['text/shex']
    url = 'https://shex.io/shex-semantics/#shexc'
    version_added = ''

    # character group definitions ::

    PN_CHARS_BASE_GRP = ('a-zA-Z'
                         '\u00c0-\u00d6'
                         '\u00d8-\u00f6'
                         '\u00f8-\u02ff'
                         '\u0370-\u037d'
                         '\u037f-\u1fff'
                         '\u200c-\u200d'
                         '\u2070-\u218f'
                         '\u2c00-\u2fef'
                         '\u3001-\ud7ff'
                         '\uf900-\ufdcf'
                         '\ufdf0-\ufffd')

    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')

    PN_CHARS_GRP = (PN_CHARS_U_GRP +
                    r'\-' +
                    r'0-9' +
                    '\u00b7' +
                    '\u0300-\u036f' +
                    '\u203f-\u2040')

    HEX_GRP = '0-9A-Fa-f'

    PN_LOCAL_ESC_CHARS_GRP = r"_~.\-!$&'()*+,;=/?#@%"

    # terminal productions ::

    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
    PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
    PN_CHARS = '[' + PN_CHARS_GRP + ']'
    HEX = '[' + HEX_GRP + ']'
    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'

    UCHAR_NO_BACKSLASH = '(?:u' + HEX + '{4}|U' + HEX + '{8})'
    UCHAR = r'\\' + UCHAR_NO_BACKSLASH

    IRIREF = r'<(?:[^\x00-\x20<>"{}|^`\\]|' + UCHAR + ')*>'

    BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
                       '.]*' + PN_CHARS + ')?'

    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'

    PERCENT = '%' + HEX + HEX

    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS

    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'

    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
                PN_CHARS_GRP + ':]|' + PLX + '))?')

    EXPONENT = r'[eE][+-]?\d+'

    # Lexer token definitions ::

    tokens = {
        'root': [
            (r'\s+', Text),
            # keywords ::
            (r'(?i)(base|prefix|start|external|'
             r'literal|iri|bnode|nonliteral|length|minlength|maxlength|'
             r'mininclusive|minexclusive|maxinclusive|maxexclusive|'
             r'totaldigits|fractiondigits|'
             r'closed|extra)\b', Keyword),
            (r'(a)\b', Keyword),
            # IRIs ::
            ('(' + IRIREF + ')', Name.Label),
            # blank nodes ::
            ('(' + BLANK_NODE_LABEL + ')', Name.Label),
            # prefixed names ::
            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + ')?',
             bygroups(Name.Namespace, Punctuation, Name.Tag)),
            # boolean literals ::
            (r'(true|false)', Keyword.Constant),
            # double literals ::
            (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
            # decimal literals ::
            (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
            # integer literals ::
            (r'[+\-]?\d+', Number.Integer),
            # operators ::
            (r'[@|$&=*+?^\-~]', Operator),
            # operator keywords ::
            (r'(?i)(and|or|not)\b', Operator.Word),
            # punctuation characters ::
            (r'[(){}.;,:^\[\]]', Punctuation),
            # line comments ::
            (r'#[^\n]*', Comment),
            # strings ::
            (r'"""', String, 'triple-double-quoted-string'),
            (r'"', String, 'single-double-quoted-string'),
            (r"'''", String, 'triple-single-quoted-string'),
            (r"'", String, 'single-single-quoted-string'),
        ],
        'triple-double-quoted-string': [
            (r'"""', String, 'end-of-string'),
            (r'[^\\]+', String),
            (r'\\', String, 'string-escape'),
        ],
        'single-double-quoted-string': [
            (r'"', String, 'end-of-string'),
            (r'[^"\\\n]+', String),
            (r'\\', String, 'string-escape'),
        ],
        'triple-single-quoted-string': [
            (r"'''", String, 'end-of-string'),
            (r'[^\\]+', String),
            (r'\\', String.Escape, 'string-escape'),
        ],
        'single-single-quoted-string': [
            (r"'", String, 'end-of-string'),
            (r"[^'\\\n]+", String),
            (r'\\', String, 'string-escape'),
        ],
        'string-escape': [
            (UCHAR_NO_BACKSLASH, String.Escape, '#pop'),
            (r'.', String.Escape, '#pop'),
        ],
        'end-of-string': [
            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
             bygroups(Operator, Name.Function), '#pop:2'),
            (r'\^\^', Operator, '#pop:2'),
            default('#pop:2'),
        ],
    }
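

# Example of the full highlighting pipeline (an illustrative sketch, not part of
# this module): the lexers defined here plug into pygments.highlight like any
# other Pygments lexer.
#
#     from pygments import highlight
#     from pygments.formatters import HtmlFormatter
#     from pygments.lexers.rdf import TurtleLexer
#
#     ttl = '@prefix foaf: <http://xmlns.com/foaf/0.1/> .\nex:alice a foaf:Person .'
#     print(highlight(ttl, TurtleLexer(), HtmlFormatter()))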