gen_integrations.py 29 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016
  1. #!/usr/bin/env python3
  2. import json
  3. import os
  4. import re
  5. import sys
  6. from copy import deepcopy
  7. from pathlib import Path
  8. from jsonschema import Draft7Validator, ValidationError
  9. from referencing import Registry, Resource
  10. from referencing.jsonschema import DRAFT7
  11. from ruamel.yaml import YAML, YAMLError
# Repository slug used when constructing GitHub edit links.
AGENT_REPO = 'netdata/netdata'
# Directory containing this script; all other paths derive from it.
INTEGRATIONS_PATH = Path(__file__).parent
# Jinja2 templates used to render the markdown sections.
TEMPLATE_PATH = INTEGRATIONS_PATH / 'templates'
# Generated artifacts written by render_integrations()/render_json().
OUTPUT_PATH = INTEGRATIONS_PATH / 'integrations.js'
JSON_PATH = INTEGRATIONS_PATH / 'integrations.json'
# Category tree definition, validated against categories.json schema.
CATEGORIES_FILE = INTEGRATIONS_PATH / 'categories.yaml'
# Repository root (parent of the integrations directory).
REPO_PATH = INTEGRATIONS_PATH.parent
# Directory of JSON schemas used to validate the metadata files.
SCHEMA_PATH = INTEGRATIONS_PATH / 'schemas'
# Platform support matrix consumed by render_deploy().
DISTROS_FILE = REPO_PATH / '.github' / 'data' / 'distros.yml'
# Glob pattern locating metadata files under a directory source.
METADATA_PATTERN = '*/metadata.yaml'
# Each *_SOURCES list holds (repo, path, is_directory) triples: when the
# third element is True the path is scanned with METADATA_PATTERN,
# otherwise the path itself is treated as a single metadata file.
COLLECTOR_SOURCES = [
    (AGENT_REPO, REPO_PATH / 'src' / 'collectors', True),
    (AGENT_REPO, REPO_PATH / 'src' / 'collectors' / 'charts.d.plugin', True),
    (AGENT_REPO, REPO_PATH / 'src' / 'collectors' / 'python.d.plugin', True),
    (AGENT_REPO, REPO_PATH / 'src' / 'go' / 'plugin' / 'go.d' / 'collector', True),
]
DEPLOY_SOURCES = [
    (AGENT_REPO, INTEGRATIONS_PATH / 'deploy.yaml', False),
]
EXPORTER_SOURCES = [
    (AGENT_REPO, REPO_PATH / 'src' / 'exporting', True),
]
AGENT_NOTIFICATION_SOURCES = [
    (AGENT_REPO, REPO_PATH / 'src' / 'health' / 'notifications', True),
]
CLOUD_NOTIFICATION_SOURCES = [
    (AGENT_REPO, INTEGRATIONS_PATH / 'cloud-notifications' / 'metadata.yaml', False),
]
LOGS_SOURCES = [
    (AGENT_REPO, INTEGRATIONS_PATH / 'logs' / 'metadata.yaml', False),
]
AUTHENTICATION_SOURCES = [
    (AGENT_REPO, INTEGRATIONS_PATH / 'cloud-authentication' / 'metadata.yaml', False),
]
# Each *_RENDER_KEYS list names the integration keys that get rendered
# through the same-named Jinja template (<key>.md) by the render_* funcs.
COLLECTOR_RENDER_KEYS = [
    'alerts',
    'metrics',
    'overview',
    'related_resources',
    'setup',
    'troubleshooting',
]
EXPORTER_RENDER_KEYS = [
    'overview',
    'setup',
    'troubleshooting',
]
AGENT_NOTIFICATION_RENDER_KEYS = [
    'overview',
    'setup',
    'troubleshooting',
]
CLOUD_NOTIFICATION_RENDER_KEYS = [
    'setup',
    'troubleshooting',
]
LOGS_RENDER_KEYS = [
    'overview',
    'setup',
]
AUTHENTICATION_RENDER_KEYS = [
    'overview',
    'setup',
    'troubleshooting',
]
# Matches custom {% ... %} template tags (including {% if %}...{% /if %}
# blocks) that are stripped from deploy commands in the clean output.
CUSTOM_TAG_PATTERN = re.compile('\\{% if .*?%\\}.*?\\{% /if %\\}|\\{%.*?%\\}', flags=re.DOTALL)
# Matches a line-continuation backslash followed by a blank line, left
# behind after tag stripping.
FIXUP_BLANK_PATTERN = re.compile('\\\\\\n *\\n')
# Environment toggles controlling the debug()/warn() output format.
GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS', False)
DEBUG = os.environ.get('DEBUG', False)
  81. def debug(msg):
  82. if GITHUB_ACTIONS:
  83. print(f':debug:{msg}')
  84. elif DEBUG:
  85. print(f'>>> {msg}')
  86. else:
  87. pass
  88. def warn(msg, path):
  89. if GITHUB_ACTIONS:
  90. print(f':warning file={path}:{msg}')
  91. else:
  92. print(f'!!! WARNING:{path}:{msg}')
  93. def retrieve_from_filesystem(uri):
  94. path = SCHEMA_PATH / Path(uri)
  95. contents = json.loads(path.read_text())
  96. return Resource.from_contents(contents, DRAFT7)
# Registry used by the validators below to resolve schema $refs from disk.
registry = Registry(retrieve=retrieve_from_filesystem)
# One Draft-7 validator per metadata flavour; each simply wraps a $ref to
# the corresponding schema file under SCHEMA_PATH.
CATEGORY_VALIDATOR = Draft7Validator(
    {'$ref': './categories.json#'},
    registry=registry,
)
DEPLOY_VALIDATOR = Draft7Validator(
    {'$ref': './deploy.json#'},
    registry=registry,
)
EXPORTER_VALIDATOR = Draft7Validator(
    {'$ref': './exporter.json#'},
    registry=registry,
)
AGENT_NOTIFICATION_VALIDATOR = Draft7Validator(
    {'$ref': './agent_notification.json#'},
    registry=registry,
)
CLOUD_NOTIFICATION_VALIDATOR = Draft7Validator(
    {'$ref': './cloud_notification.json#'},
    registry=registry,
)
LOGS_VALIDATOR = Draft7Validator(
    {'$ref': './logs.json#'},
    registry=registry,
)
AUTHENTICATION_VALIDATOR = Draft7Validator(
    {'$ref': './authentication.json#'},
    registry=registry,
)
COLLECTOR_VALIDATOR = Draft7Validator(
    {'$ref': './collector.json#'},
    registry=registry,
)
# Lazily-created module-wide Jinja2 environment (False until first use).
_jinja_env = False


def get_jinja_env():
    """Return the shared Jinja2 environment, creating it on first call.

    Custom delimiters ([% %], [[ ]], [# #]) are configured so templates
    can emit literal {{ ... }} / {% ... %} sequences without escaping.
    """
    global _jinja_env

    if not _jinja_env:
        # Imported lazily so merely importing this module does not
        # require jinja2 to be installed.
        from jinja2 import Environment, FileSystemLoader, select_autoescape
        _jinja_env = Environment(
            loader=FileSystemLoader(TEMPLATE_PATH),
            autoescape=select_autoescape(),
            block_start_string='[%',
            block_end_string='%]',
            variable_start_string='[[',
            variable_end_string=']]',
            comment_start_string='[#',
            comment_end_string='#]',
            trim_blocks=True,
            lstrip_blocks=True,
        )
        # Expose strfy() as a global helper inside every template.
        _jinja_env.globals.update(strfy=strfy)

    return _jinja_env
  149. def strfy(value):
  150. if isinstance(value, bool):
  151. return "yes" if value else "no"
  152. if isinstance(value, str):
  153. return ' '.join([v.strip() for v in value.strip().split("\n") if v]).replace('|', '/')
  154. return value
  155. def get_category_sets(categories):
  156. default = set()
  157. valid = set()
  158. for c in categories:
  159. if 'id' in c:
  160. valid.add(c['id'])
  161. if c.get('collector_default', False):
  162. default.add(c['id'])
  163. if 'children' in c and c['children']:
  164. d, v = get_category_sets(c['children'])
  165. default |= d
  166. valid |= v
  167. return (default, valid)
  168. def get_collector_metadata_entries():
  169. ret = []
  170. for r, d, m in COLLECTOR_SOURCES:
  171. if d.exists() and d.is_dir() and m:
  172. for item in d.glob(METADATA_PATTERN):
  173. ret.append((r, item))
  174. elif d.exists() and d.is_file() and not m:
  175. if d.match(METADATA_PATTERN):
  176. ret.append(d)
  177. return ret
  178. def load_yaml(src):
  179. yaml = YAML(typ='safe')
  180. if not src.is_file():
  181. warn(f'{src} is not a file.', src)
  182. return False
  183. try:
  184. contents = src.read_text()
  185. except (IOError, OSError):
  186. warn(f'Failed to read {src}.', src)
  187. return False
  188. try:
  189. data = yaml.load(contents)
  190. except YAMLError:
  191. warn(f'Failed to parse {src} as YAML.', src)
  192. return False
  193. return data
  194. def load_categories():
  195. categories = load_yaml(CATEGORIES_FILE)
  196. if not categories:
  197. sys.exit(1)
  198. try:
  199. CATEGORY_VALIDATOR.validate(categories)
  200. except ValidationError:
  201. warn(f'Failed to validate {CATEGORIES_FILE} against the schema.', CATEGORIES_FILE)
  202. sys.exit(1)
  203. return categories
  204. def load_collectors():
  205. ret = []
  206. entries = get_collector_metadata_entries()
  207. for repo, path in entries:
  208. debug(f'Loading {path}.')
  209. data = load_yaml(path)
  210. if not data:
  211. continue
  212. try:
  213. COLLECTOR_VALIDATOR.validate(data)
  214. except ValidationError:
  215. warn(f'Failed to validate {path} against the schema.', path)
  216. continue
  217. for idx, item in enumerate(data['modules']):
  218. item['meta']['plugin_name'] = data['plugin_name']
  219. item['integration_type'] = 'collector'
  220. item['_src_path'] = path
  221. item['_repo'] = repo
  222. item['_index'] = idx
  223. ret.append(item)
  224. return ret
  225. def _load_deploy_file(file, repo):
  226. ret = []
  227. debug(f'Loading {file}.')
  228. data = load_yaml(file)
  229. if not data:
  230. return []
  231. try:
  232. DEPLOY_VALIDATOR.validate(data)
  233. except ValidationError:
  234. warn(f'Failed to validate {file} against the schema.', file)
  235. return []
  236. for idx, item in enumerate(data):
  237. item['integration_type'] = 'deploy'
  238. item['_src_path'] = file
  239. item['_repo'] = repo
  240. item['_index'] = idx
  241. ret.append(item)
  242. return ret
  243. def load_deploy():
  244. ret = []
  245. for repo, path, match in DEPLOY_SOURCES:
  246. if match and path.exists() and path.is_dir():
  247. for file in path.glob(METADATA_PATTERN):
  248. ret.extend(_load_deploy_file(file, repo))
  249. elif not match and path.exists() and path.is_file():
  250. ret.extend(_load_deploy_file(path, repo))
  251. return ret
  252. def _load_exporter_file(file, repo):
  253. debug(f'Loading {file}.')
  254. data = load_yaml(file)
  255. if not data:
  256. return []
  257. try:
  258. EXPORTER_VALIDATOR.validate(data)
  259. except ValidationError:
  260. warn(f'Failed to validate {file} against the schema.', file)
  261. return []
  262. if 'id' in data:
  263. data['integration_type'] = 'exporter'
  264. data['_src_path'] = file
  265. data['_repo'] = repo
  266. data['_index'] = 0
  267. return [data]
  268. else:
  269. ret = []
  270. for idx, item in enumerate(data):
  271. item['integration_type'] = 'exporter'
  272. item['_src_path'] = file
  273. item['_repo'] = repo
  274. item['_index'] = idx
  275. ret.append(item)
  276. return ret
  277. def load_exporters():
  278. ret = []
  279. for repo, path, match in EXPORTER_SOURCES:
  280. if match and path.exists() and path.is_dir():
  281. for file in path.glob(METADATA_PATTERN):
  282. ret.extend(_load_exporter_file(file, repo))
  283. elif not match and path.exists() and path.is_file():
  284. ret.extend(_load_exporter_file(path, repo))
  285. return ret
  286. def _load_agent_notification_file(file, repo):
  287. debug(f'Loading {file}.')
  288. data = load_yaml(file)
  289. if not data:
  290. return []
  291. try:
  292. AGENT_NOTIFICATION_VALIDATOR.validate(data)
  293. except ValidationError:
  294. warn(f'Failed to validate {file} against the schema.', file)
  295. return []
  296. if 'id' in data:
  297. data['integration_type'] = 'agent_notification'
  298. data['_src_path'] = file
  299. data['_repo'] = repo
  300. data['_index'] = 0
  301. return [data]
  302. else:
  303. ret = []
  304. for idx, item in enumerate(data):
  305. item['integration_type'] = 'agent_notification'
  306. item['_src_path'] = file
  307. item['_repo'] = repo
  308. item['_index'] = idx
  309. ret.append(item)
  310. return ret
  311. def load_agent_notifications():
  312. ret = []
  313. for repo, path, match in AGENT_NOTIFICATION_SOURCES:
  314. if match and path.exists() and path.is_dir():
  315. for file in path.glob(METADATA_PATTERN):
  316. ret.extend(_load_agent_notification_file(file, repo))
  317. elif not match and path.exists() and path.is_file():
  318. ret.extend(_load_agent_notification_file(path, repo))
  319. return ret
  320. def _load_cloud_notification_file(file, repo):
  321. debug(f'Loading {file}.')
  322. data = load_yaml(file)
  323. if not data:
  324. return []
  325. try:
  326. CLOUD_NOTIFICATION_VALIDATOR.validate(data)
  327. except ValidationError:
  328. warn(f'Failed to validate {file} against the schema.', file)
  329. return []
  330. if 'id' in data:
  331. data['integration_type'] = 'cloud_notification'
  332. data['_src_path'] = file
  333. data['_repo'] = repo
  334. data['_index'] = 0
  335. return [data]
  336. else:
  337. ret = []
  338. for idx, item in enumerate(data):
  339. item['integration_type'] = 'cloud_notification'
  340. item['_src_path'] = file
  341. item['_repo'] = repo
  342. item['_index'] = idx
  343. ret.append(item)
  344. return ret
  345. def load_cloud_notifications():
  346. ret = []
  347. for repo, path, match in CLOUD_NOTIFICATION_SOURCES:
  348. if match and path.exists() and path.is_dir():
  349. for file in path.glob(METADATA_PATTERN):
  350. ret.extend(_load_cloud_notification_file(file, repo))
  351. elif not match and path.exists() and path.is_file():
  352. ret.extend(_load_cloud_notification_file(path, repo))
  353. return ret
  354. def _load_logs_file(file, repo):
  355. debug(f'Loading {file}.')
  356. data = load_yaml(file)
  357. if not data:
  358. return []
  359. try:
  360. LOGS_VALIDATOR.validate(data)
  361. except ValidationError:
  362. warn(f'Failed to validate {file} against the schema.', file)
  363. return []
  364. if 'id' in data:
  365. data['integration_type'] = 'logs'
  366. data['_src_path'] = file
  367. data['_repo'] = repo
  368. data['_index'] = 0
  369. return [data]
  370. else:
  371. ret = []
  372. for idx, item in enumerate(data):
  373. item['integration_type'] = 'logs'
  374. item['_src_path'] = file
  375. item['_repo'] = repo
  376. item['_index'] = idx
  377. ret.append(item)
  378. return ret
  379. def load_logs():
  380. ret = []
  381. for repo, path, match in LOGS_SOURCES:
  382. if match and path.exists() and path.is_dir():
  383. for file in path.glob(METADATA_PATTERN):
  384. ret.extend(_load_logs_file(file, repo))
  385. elif not match and path.exists() and path.is_file():
  386. ret.extend(_load_logs_file(path, repo))
  387. return ret
  388. def _load_authentication_file(file, repo):
  389. debug(f'Loading {file}.')
  390. data = load_yaml(file)
  391. if not data:
  392. return []
  393. try:
  394. AUTHENTICATION_VALIDATOR.validate(data)
  395. except ValidationError:
  396. warn(f'Failed to validate {file} against the schema.', file)
  397. return []
  398. if 'id' in data:
  399. data['integration_type'] = 'authentication'
  400. data['_src_path'] = file
  401. data['_repo'] = repo
  402. data['_index'] = 0
  403. return [data]
  404. else:
  405. ret = []
  406. for idx, item in enumerate(data):
  407. item['integration_type'] = 'authentication'
  408. item['_src_path'] = file
  409. item['_repo'] = repo
  410. item['_index'] = idx
  411. ret.append(item)
  412. return ret
  413. def load_authentications():
  414. ret = []
  415. for repo, path, match in AUTHENTICATION_SOURCES:
  416. if match and path.exists() and path.is_dir():
  417. for file in path.glob(METADATA_PATTERN):
  418. ret.extend(_load_authentication_file(file, repo))
  419. elif not match and path.exists() and path.is_file():
  420. ret.extend(_load_authentication_file(path, repo))
  421. return ret
  422. def make_id(meta):
  423. if 'monitored_instance' in meta:
  424. instance_name = meta['monitored_instance']['name'].replace(' ', '_')
  425. elif 'instance_name' in meta:
  426. instance_name = meta['instance_name']
  427. else:
  428. instance_name = '000_unknown'
  429. return f'{meta["plugin_name"]}-{meta["module_name"]}-{instance_name}'
  430. def make_edit_link(item):
  431. item_path = item['_src_path'].relative_to(REPO_PATH)
  432. return f'https://github.com/{item["_repo"]}/blob/master/{item_path}'
  433. def sort_integrations(integrations):
  434. integrations.sort(key=lambda i: i['_index'])
  435. integrations.sort(key=lambda i: i['_src_path'])
  436. integrations.sort(key=lambda i: i['id'])
  437. def dedupe_integrations(integrations, ids):
  438. tmp_integrations = []
  439. for i in integrations:
  440. if ids.get(i['id'], False):
  441. first_path, first_index = ids[i['id']]
  442. warn(
  443. f'Duplicate integration ID found at {i["_src_path"]} index {i["_index"]} (original definition at {first_path} index {first_index}), ignoring that integration.',
  444. i['_src_path'])
  445. else:
  446. tmp_integrations.append(i)
  447. ids[i['id']] = (i['_src_path'], i['_index'])
  448. return tmp_integrations, ids
  449. def render_collectors(categories, collectors, ids):
  450. debug('Computing default categories.')
  451. default_cats, valid_cats = get_category_sets(categories)
  452. debug('Generating collector IDs.')
  453. for item in collectors:
  454. item['id'] = make_id(item['meta'])
  455. debug('Sorting collectors.')
  456. sort_integrations(collectors)
  457. debug('Removing duplicate collectors.')
  458. collectors, ids = dedupe_integrations(collectors, ids)
  459. clean_collectors = []
  460. idmap = {i['id']: i for i in collectors}
  461. for item in collectors:
  462. debug(f'Processing {item["id"]}.')
  463. item['edit_link'] = make_edit_link(item)
  464. clean_item = deepcopy(item)
  465. related = []
  466. for res in item['meta']['related_resources']['integrations']['list']:
  467. res_id = make_id(res)
  468. if res_id not in idmap.keys():
  469. warn(f'Could not find related integration {res_id}, ignoring it.', item['_src_path'])
  470. continue
  471. related.append({
  472. 'plugin_name': res['plugin_name'],
  473. 'module_name': res['module_name'],
  474. 'id': res_id,
  475. 'name': idmap[res_id]['meta']['monitored_instance']['name'],
  476. 'info': idmap[res_id]['meta']['info_provided_to_referring_integrations'],
  477. })
  478. item_cats = set(item['meta']['monitored_instance']['categories'])
  479. bogus_cats = item_cats - valid_cats
  480. actual_cats = item_cats & valid_cats
  481. if bogus_cats:
  482. warn(f'Ignoring invalid categories: {", ".join(bogus_cats)}', item["_src_path"])
  483. if not item_cats:
  484. item['meta']['monitored_instance']['categories'] = list(default_cats)
  485. warn(f'{item["id"]} does not list any caregories, adding it to: {default_cats}', item["_src_path"])
  486. else:
  487. item['meta']['monitored_instance']['categories'] = [x for x in
  488. item['meta']['monitored_instance']['categories'] if
  489. x in list(actual_cats)]
  490. for scope in item['metrics']['scopes']:
  491. if scope['name'] == 'global':
  492. scope['name'] = f'{item["meta"]["monitored_instance"]["name"]} instance'
  493. for cfg_example in item['setup']['configuration']['examples']['list']:
  494. if 'folding' not in cfg_example:
  495. cfg_example['folding'] = {
  496. 'enabled': item['setup']['configuration']['examples']['folding']['enabled']
  497. }
  498. for key in COLLECTOR_RENDER_KEYS:
  499. if key in item.keys():
  500. template = get_jinja_env().get_template(f'{key}.md')
  501. data = template.render(entry=item, related=related, clean=False)
  502. clean_data = template.render(entry=item, related=related, clean=True)
  503. if 'variables' in item['meta']['monitored_instance']:
  504. template = get_jinja_env().from_string(data)
  505. data = template.render(variables=item['meta']['monitored_instance']['variables'])
  506. template = get_jinja_env().from_string(clean_data)
  507. clean_data = template.render(variables=item['meta']['monitored_instance']['variables'])
  508. else:
  509. data = ''
  510. clean_data = ''
  511. item[key] = data
  512. clean_item[key] = clean_data
  513. for k in ['_src_path', '_repo', '_index']:
  514. del item[k], clean_item[k]
  515. clean_collectors.append(clean_item)
  516. return collectors, clean_collectors, ids
def render_deploy(distros, categories, deploy, ids):
    """Render deploy integrations, attaching per-platform support info.

    *distros* is the parsed distros.yml support matrix; *ids* is the
    shared ID-to-origin map. Returns (deploy, clean_deploy, ids) where
    clean_deploy is the clean=True variant for the JSON artifact.
    """
    debug('Sorting deployments.')
    sort_integrations(deploy)
    debug('Checking deployment ids.')
    deploy, ids = dedupe_integrations(deploy, ids)
    clean_deploy = []

    template = get_jinja_env().get_template('platform_info.md')

    for item in deploy:
        debug(f'Processing {item["id"]}.')
        item['edit_link'] = make_edit_link(item)
        clean_item = deepcopy(item)

        if item['platform_info']['group']:
            # Pull the matching distro's support rows from the matrix;
            # 'packages' may be absent, in which case arches is empty.
            entries = [
                {
                    'version': i['version'],
                    'support': i['support_type'],
                    'arches': i.get('packages', {'arches': []})['arches'],
                    'notes': i['notes'],
                } for i in distros[item['platform_info']['group']] if i['distro'] == item['platform_info']['distro']
            ]
        else:
            entries = []

        data = template.render(entries=entries, clean=False)
        clean_data = template.render(entries=entries, clean=True)

        # Strip custom {% ... %} tags (and the blank lines they leave
        # behind) from the commands in the clean copy only.
        for method in clean_item['methods']:
            for command in method['commands']:
                command['command'] = CUSTOM_TAG_PATTERN.sub('', command['command'])
                command['command'] = FIXUP_BLANK_PATTERN.sub('', command['command'])

        # Replace the platform_info spec with its rendered markdown.
        item['platform_info'] = data
        clean_item['platform_info'] = clean_data

        # The clean copy uses the pre-sanitized additional_info variant
        # when one is provided; the helper key is then dropped from both.
        if 'clean_additional_info' in item:
            clean_item['additional_info'] = item['clean_additional_info']
            del item['clean_additional_info'], clean_item['clean_additional_info']

        # Drop internal bookkeeping before serialization.
        for k in ['_src_path', '_repo', '_index']:
            del item[k], clean_item[k]
        clean_deploy.append(clean_item)

    return deploy, clean_deploy, ids
  554. def render_exporters(categories, exporters, ids):
  555. debug('Sorting exporters.')
  556. sort_integrations(exporters)
  557. debug('Checking exporter ids.')
  558. exporters, ids = dedupe_integrations(exporters, ids)
  559. clean_exporters = []
  560. for item in exporters:
  561. item['edit_link'] = make_edit_link(item)
  562. clean_item = deepcopy(item)
  563. for key in EXPORTER_RENDER_KEYS:
  564. if key in item.keys():
  565. template = get_jinja_env().get_template(f'{key}.md')
  566. data = template.render(entry=item, clean=False)
  567. clean_data = template.render(entry=item, clean=True)
  568. if 'variables' in item['meta']:
  569. template = get_jinja_env().from_string(data)
  570. data = template.render(variables=item['meta']['variables'], clean=False)
  571. template = get_jinja_env().from_string(clean_data)
  572. clean_data = template.render(variables=item['meta']['variables'], clean=True)
  573. else:
  574. data = ''
  575. clean_data = ''
  576. item[key] = data
  577. clean_item[key] = clean_data
  578. for k in ['_src_path', '_repo', '_index']:
  579. del item[k], clean_item[k]
  580. clean_exporters.append(clean_item)
  581. return exporters, clean_exporters, ids
  582. def render_agent_notifications(categories, notifications, ids):
  583. debug('Sorting notifications.')
  584. sort_integrations(notifications)
  585. debug('Checking notification ids.')
  586. notifications, ids = dedupe_integrations(notifications, ids)
  587. clean_notifications = []
  588. for item in notifications:
  589. item['edit_link'] = make_edit_link(item)
  590. clean_item = deepcopy(item)
  591. for key in AGENT_NOTIFICATION_RENDER_KEYS:
  592. if key in item.keys():
  593. template = get_jinja_env().get_template(f'{key}.md')
  594. data = template.render(entry=item, clean=False)
  595. clean_data = template.render(entry=item, clean=True)
  596. if 'variables' in item['meta']:
  597. template = get_jinja_env().from_string(data)
  598. data = template.render(variables=item['meta']['variables'], clean=False)
  599. template = get_jinja_env().from_string(clean_data)
  600. clean_data = template.render(variables=item['meta']['variables'], clean=True)
  601. else:
  602. data = ''
  603. clean_data = ''
  604. item[key] = data
  605. clean_item[key] = clean_data
  606. for k in ['_src_path', '_repo', '_index']:
  607. del item[k], clean_item[k]
  608. clean_notifications.append(clean_item)
  609. return notifications, clean_notifications, ids
  610. def render_cloud_notifications(categories, notifications, ids):
  611. debug('Sorting notifications.')
  612. sort_integrations(notifications)
  613. debug('Checking notification ids.')
  614. notifications, ids = dedupe_integrations(notifications, ids)
  615. clean_notifications = []
  616. for item in notifications:
  617. item['edit_link'] = make_edit_link(item)
  618. clean_item = deepcopy(item)
  619. for key in CLOUD_NOTIFICATION_RENDER_KEYS:
  620. if key in item.keys():
  621. template = get_jinja_env().get_template(f'{key}.md')
  622. data = template.render(entry=item, clean=False)
  623. clean_data = template.render(entry=item, clean=True)
  624. if 'variables' in item['meta']:
  625. template = get_jinja_env().from_string(data)
  626. data = template.render(variables=item['meta']['variables'], clean=False)
  627. template = get_jinja_env().from_string(clean_data)
  628. clean_data = template.render(variables=item['meta']['variables'], clean=True)
  629. else:
  630. data = ''
  631. clean_data = ''
  632. item[key] = data
  633. clean_item[key] = clean_data
  634. for k in ['_src_path', '_repo', '_index']:
  635. del item[k], clean_item[k]
  636. clean_notifications.append(clean_item)
  637. return notifications, clean_notifications, ids
  638. def render_logs(categories, logs, ids):
  639. debug('Sorting logs.')
  640. sort_integrations(logs)
  641. debug('Checking log ids.')
  642. logs, ids = dedupe_integrations(logs, ids)
  643. clean_logs = []
  644. for item in logs:
  645. item['edit_link'] = make_edit_link(item)
  646. clean_item = deepcopy(item)
  647. for key in LOGS_RENDER_KEYS:
  648. if key in item.keys():
  649. template = get_jinja_env().get_template(f'{key}.md')
  650. data = template.render(entry=item, clean=False)
  651. clean_data = template.render(entry=item, clean=True)
  652. if 'variables' in item['meta']:
  653. template = get_jinja_env().from_string(data)
  654. data = template.render(variables=item['meta']['variables'], clean=False)
  655. template = get_jinja_env().from_string(clean_data)
  656. clean_data = template.render(variables=item['meta']['variables'], clean=True)
  657. else:
  658. data = ''
  659. clean_data = ''
  660. item[key] = data
  661. clean_item[key] = clean_data
  662. for k in ['_src_path', '_repo', '_index']:
  663. del item[k], clean_item[k]
  664. clean_logs.append(clean_item)
  665. return logs, clean_logs, ids
  666. def render_authentications(categories, authentications, ids):
  667. debug('Sorting authentications.')
  668. sort_integrations(authentications)
  669. debug('Checking authentication ids.')
  670. authentications, ids = dedupe_integrations(authentications, ids)
  671. clean_authentications = []
  672. for item in authentications:
  673. item['edit_link'] = make_edit_link(item)
  674. clean_item = deepcopy(item)
  675. for key in AUTHENTICATION_RENDER_KEYS:
  676. if key in item.keys():
  677. template = get_jinja_env().get_template(f'{key}.md')
  678. data = template.render(entry=item, clean=False)
  679. clean_data = template.render(entry=item, clean=True)
  680. if 'variables' in item['meta']:
  681. template = get_jinja_env().from_string(data)
  682. data = template.render(variables=item['meta']['variables'], clean=False)
  683. template = get_jinja_env().from_string(clean_data)
  684. clean_data = template.render(variables=item['meta']['variables'], clean=True)
  685. else:
  686. data = ''
  687. clean_data = ''
  688. item[key] = data
  689. clean_item[key] = clean_data
  690. for k in ['_src_path', '_repo', '_index']:
  691. del item[k], clean_item[k]
  692. clean_authentications.append(clean_item)
  693. return authentications, clean_authentications, ids
  694. def convert_local_links(text, prefix):
  695. return text.replace("](/", f"]({prefix}/")
  696. def render_integrations(categories, integrations):
  697. template = get_jinja_env().get_template('integrations.js')
  698. data = template.render(
  699. categories=json.dumps(categories, indent=4),
  700. integrations=json.dumps(integrations, indent=4),
  701. )
  702. data = convert_local_links(data, "https://github.com/netdata/netdata/blob/master")
  703. OUTPUT_PATH.write_text(data)
  704. def render_json(categories, integrations):
  705. JSON_PATH.write_text(json.dumps({
  706. 'categories': categories,
  707. 'integrations': integrations,
  708. }, indent=4))
def main():
    """Load, validate, render, and write all integration artifacts.

    Implicitly returns None, so the sys.exit(main()) guard below exits
    with status 0 on success (hard validation failures exit earlier via
    load_categories()).
    """
    categories = load_categories()
    distros = load_yaml(DISTROS_FILE)
    collectors = load_collectors()
    deploy = load_deploy()
    exporters = load_exporters()
    agent_notifications = load_agent_notifications()
    cloud_notifications = load_cloud_notifications()
    logs = load_logs()
    authentications = load_authentications()
    # The ids map threads through every render_* call so integration IDs
    # stay unique across all integration types.
    collectors, clean_collectors, ids = render_collectors(categories, collectors, dict())
    deploy, clean_deploy, ids = render_deploy(distros, categories, deploy, ids)
    exporters, clean_exporters, ids = render_exporters(categories, exporters, ids)
    agent_notifications, clean_agent_notifications, ids = render_agent_notifications(categories, agent_notifications,
                                                                                     ids)
    cloud_notifications, clean_cloud_notifications, ids = render_cloud_notifications(categories, cloud_notifications,
                                                                                     ids)
    logs, clean_logs, ids = render_logs(categories, logs, ids)
    authentications, clean_authentications, ids = render_authentications(categories, authentications, ids)
    integrations = collectors + deploy + exporters + agent_notifications + cloud_notifications + logs + authentications
    render_integrations(categories, integrations)
    # The clean variants (tags stripped, clean=True renders) feed the JSON.
    clean_integrations = clean_collectors + clean_deploy + clean_exporters + clean_agent_notifications + clean_cloud_notifications + clean_logs + clean_authentications
    render_json(categories, clean_integrations)


if __name__ == '__main__':
    sys.exit(main())