gen_integrations.py

#!/usr/bin/env python3

import json
import os
import sys

from pathlib import Path

from jsonschema import Draft7Validator, ValidationError
from referencing import Registry, Resource
from referencing.jsonschema import DRAFT7
from ruamel.yaml import YAML, YAMLError

AGENT_REPO = 'netdata/netdata'
GO_REPO = 'netdata/go.d.plugin'

INTEGRATIONS_PATH = Path(__file__).parent
TEMPLATE_PATH = INTEGRATIONS_PATH / 'templates'
OUTPUT_PATH = INTEGRATIONS_PATH / 'integrations.js'
CATEGORIES_FILE = INTEGRATIONS_PATH / 'categories.yaml'
REPO_PATH = INTEGRATIONS_PATH.parent
SCHEMA_PATH = INTEGRATIONS_PATH / 'schemas'
GO_REPO_PATH = REPO_PATH / 'go.d.plugin'
DISTROS_FILE = REPO_PATH / '.github' / 'data' / 'distros.yml'
METADATA_PATTERN = '*/metadata.yaml'

# Each source is a (repo, path, match) tuple: when `match` is true the path is a
# directory that gets globbed for METADATA_PATTERN, otherwise it is a single metadata file.
COLLECTOR_SOURCES = [
    (AGENT_REPO, REPO_PATH / 'collectors', True),
    (AGENT_REPO, REPO_PATH / 'collectors' / 'charts.d.plugin', True),
    (AGENT_REPO, REPO_PATH / 'collectors' / 'python.d.plugin', True),
    (GO_REPO, GO_REPO_PATH / 'modules', True),
]

DEPLOY_SOURCES = [
    (AGENT_REPO, INTEGRATIONS_PATH / 'deploy.yaml', False),
]

EXPORTER_SOURCES = [
    (AGENT_REPO, REPO_PATH / 'exporting', True),
]

NOTIFICATION_SOURCES = [
    (AGENT_REPO, REPO_PATH / 'health' / 'notifications', True),
    (AGENT_REPO, INTEGRATIONS_PATH / 'cloud-notifications' / 'metadata.yaml', False),
]

COLLECTOR_RENDER_KEYS = [
    'alerts',
    'metrics',
    'overview',
    'related_resources',
    'setup',
    'troubleshooting',
]

EXPORTER_RENDER_KEYS = [
    'overview',
    'setup',
    'troubleshooting',
]

NOTIFICATION_RENDER_KEYS = [
    'overview',
    'setup',
    'troubleshooting',
]

GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS', False)
DEBUG = os.environ.get('DEBUG', False)
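
# Local usage sketch (not part of the original file; assumes the script is invoked directly):
#   DEBUG=1 ./gen_integrations.py
# prints per-file progress; under GitHub Actions the same messages are emitted as
# workflow commands instead.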

def debug(msg):
    if GITHUB_ACTIONS:
        # GitHub Actions workflow-command syntax requires double colons.
        print(f'::debug::{ msg }')
    elif DEBUG:
        print(f'>>> { msg }')
    else:
        pass


def warn(msg, path):
    if GITHUB_ACTIONS:
        print(f'::warning file={ path }::{ msg }')
    else:
        print(f'!!! WARNING:{ path }:{ msg }')

def retrieve_from_filesystem(uri):
    path = SCHEMA_PATH / Path(uri)
    contents = json.loads(path.read_text())
    return Resource.from_contents(contents, DRAFT7)


registry = Registry(retrieve=retrieve_from_filesystem)

CATEGORY_VALIDATOR = Draft7Validator(
    {'$ref': './categories.json#'},
    registry=registry,
)

DEPLOY_VALIDATOR = Draft7Validator(
    {'$ref': './deploy.json#'},
    registry=registry,
)

EXPORTER_VALIDATOR = Draft7Validator(
    {'$ref': './exporter.json#'},
    registry=registry,
)

NOTIFICATION_VALIDATOR = Draft7Validator(
    {'$ref': './notification.json#'},
    registry=registry,
)

COLLECTOR_VALIDATOR = Draft7Validator(
    {'$ref': './collector.json#'},
    registry=registry,
)

_jinja_env = False


def get_jinja_env():
    global _jinja_env

    if not _jinja_env:
        from jinja2 import Environment, FileSystemLoader, select_autoescape

        _jinja_env = Environment(
            loader=FileSystemLoader(TEMPLATE_PATH),
            autoescape=select_autoescape(),
            # Non-default delimiters so literal brace sequences in the metadata and
            # rendered Markdown do not clash with Jinja's usual {{ }} / {% %} syntax.
            block_start_string='[%',
            block_end_string='%]',
            variable_start_string='[[',
            variable_end_string=']]',
            comment_start_string='[#',
            comment_end_string='#]',
            trim_blocks=True,
            lstrip_blocks=True,
        )

        _jinja_env.globals.update(strfy=strfy)

    return _jinja_env


def strfy(value):
    if not isinstance(value, str):
        return value

    return ' '.join([v.strip() for v in value.strip().split("\n") if v]).replace('|', '/')
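
# Illustrative example (not part of the original file):
#   strfy('multi\nline | text') -> 'multi line / text'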

def get_category_sets(categories):
    default = set()
    valid = set()

    for c in categories:
        if 'id' in c:
            valid.add(c['id'])

        if c.get('collector_default', False):
            default.add(c['id'])

        if 'children' in c and c['children']:
            d, v = get_category_sets(c['children'])
            default |= d
            valid |= v

    return (default, valid)


def get_collector_metadata_entries():
    ret = []

    for r, d, m in COLLECTOR_SOURCES:
        if d.exists() and d.is_dir() and m:
            for item in d.glob(METADATA_PATTERN):
                ret.append((r, item))
        elif d.exists() and d.is_file() and not m:
            if d.match(METADATA_PATTERN):
                # Keep the (repo, path) shape consistent with the directory branch above,
                # since callers unpack both elements.
                ret.append((r, d))

    return ret

def load_yaml(src):
    yaml = YAML(typ='safe')

    if not src.is_file():
        warn(f'{ src } is not a file.', src)
        return False

    try:
        contents = src.read_text()
    except (IOError, OSError):
        warn(f'Failed to read { src }.', src)
        return False

    try:
        data = yaml.load(contents)
    except YAMLError:
        warn(f'Failed to parse { src } as YAML.', src)
        return False

    return data


def load_categories():
    categories = load_yaml(CATEGORIES_FILE)

    if not categories:
        sys.exit(1)

    try:
        CATEGORY_VALIDATOR.validate(categories)
    except ValidationError:
        warn(f'Failed to validate { CATEGORIES_FILE } against the schema.', CATEGORIES_FILE)
        sys.exit(1)

    return categories

def load_collectors():
    ret = []

    entries = get_collector_metadata_entries()

    for repo, path in entries:
        debug(f'Loading { path }.')
        data = load_yaml(path)

        if not data:
            continue

        try:
            COLLECTOR_VALIDATOR.validate(data)
        except ValidationError:
            warn(f'Failed to validate { path } against the schema.', path)
            continue

        for idx, item in enumerate(data['modules']):
            item['meta']['plugin_name'] = data['plugin_name']
            item['integration_type'] = 'collector'
            item['_src_path'] = path
            item['_repo'] = repo
            item['_index'] = idx
            ret.append(item)

    return ret

def _load_deploy_file(file, repo):
    ret = []
    debug(f'Loading { file }.')
    data = load_yaml(file)

    if not data:
        return []

    try:
        DEPLOY_VALIDATOR.validate(data)
    except ValidationError:
        warn(f'Failed to validate { file } against the schema.', file)
        return []

    for idx, item in enumerate(data):
        item['integration_type'] = 'deploy'
        item['_src_path'] = file
        item['_repo'] = repo
        item['_index'] = idx
        ret.append(item)

    return ret


def load_deploy():
    ret = []

    for repo, path, match in DEPLOY_SOURCES:
        if match and path.exists() and path.is_dir():
            for file in path.glob(METADATA_PATTERN):
                ret.extend(_load_deploy_file(file, repo))
        elif not match and path.exists() and path.is_file():
            ret.extend(_load_deploy_file(path, repo))

    return ret

def _load_exporter_file(file, repo):
    debug(f'Loading { file }.')
    data = load_yaml(file)

    if not data:
        return []

    try:
        EXPORTER_VALIDATOR.validate(data)
    except ValidationError:
        warn(f'Failed to validate { file } against the schema.', file)
        return []

    if 'id' in data:
        data['integration_type'] = 'exporter'
        data['_src_path'] = file
        data['_repo'] = repo
        data['_index'] = 0

        return [data]
    else:
        ret = []

        for idx, item in enumerate(data):
            item['integration_type'] = 'exporter'
            item['_src_path'] = file
            item['_repo'] = repo
            item['_index'] = idx
            ret.append(item)

        return ret


def load_exporters():
    ret = []

    for repo, path, match in EXPORTER_SOURCES:
        if match and path.exists() and path.is_dir():
            for file in path.glob(METADATA_PATTERN):
                ret.extend(_load_exporter_file(file, repo))
        elif not match and path.exists() and path.is_file():
            ret.extend(_load_exporter_file(path, repo))

    return ret

def _load_notification_file(file, repo):
    debug(f'Loading { file }.')
    data = load_yaml(file)

    if not data:
        return []

    try:
        NOTIFICATION_VALIDATOR.validate(data)
    except ValidationError:
        warn(f'Failed to validate { file } against the schema.', file)
        return []

    if 'id' in data:
        data['integration_type'] = 'notification'
        data['_src_path'] = file
        data['_repo'] = repo
        data['_index'] = 0

        return [data]
    else:
        ret = []

        for idx, item in enumerate(data):
            item['integration_type'] = 'notification'
            item['_src_path'] = file
            item['_repo'] = repo
            item['_index'] = idx
            ret.append(item)

        return ret


def load_notifications():
    ret = []

    for repo, path, match in NOTIFICATION_SOURCES:
        if match and path.exists() and path.is_dir():
            for file in path.glob(METADATA_PATTERN):
                ret.extend(_load_notification_file(file, repo))
        elif not match and path.exists() and path.is_file():
            ret.extend(_load_notification_file(path, repo))

    return ret

def make_id(meta):
    if 'monitored_instance' in meta:
        instance_name = meta['monitored_instance']['name'].replace(' ', '_')
    elif 'instance_name' in meta:
        instance_name = meta['instance_name']
    else:
        instance_name = '000_unknown'

    return f'{ meta["plugin_name"] }-{ meta["module_name"] }-{ instance_name }'
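
# Illustrative example with hypothetical names (not part of the original file):
#   make_id({'plugin_name': 'go.d', 'module_name': 'nginx',
#            'monitored_instance': {'name': 'NGINX Plus'}}) -> 'go.d-nginx-NGINX_Plus'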

def make_edit_link(item):
    if item['_repo'] == GO_REPO:
        item_path = item['_src_path'].relative_to(GO_REPO_PATH)
    else:
        item_path = item['_src_path'].relative_to(REPO_PATH)

    return f'https://github.com/{ item["_repo"] }/blob/master/{ item_path }'


def sort_integrations(integrations):
    integrations.sort(key=lambda i: i['_index'])
    integrations.sort(key=lambda i: i['_src_path'])
    integrations.sort(key=lambda i: i['id'])


def dedupe_integrations(integrations, ids):
    tmp_integrations = []

    for i in integrations:
        if ids.get(i['id'], False):
            first_path, first_index = ids[i['id']]
            warn(f'Duplicate integration ID found at { i["_src_path"] } index { i["_index"] } (original definition at { first_path } index { first_index }), ignoring that integration.', i['_src_path'])
        else:
            tmp_integrations.append(i)
            ids[i['id']] = (i['_src_path'], i['_index'])

    return tmp_integrations, ids
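
# The `ids` mapping is threaded through every render_* call below (see main()) so that
# integration IDs stay unique across collectors, deployment entries, exporters, and
# notifications, not just within each group.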

def render_collectors(categories, collectors, ids):
    debug('Computing default categories.')

    default_cats, valid_cats = get_category_sets(categories)

    debug('Generating collector IDs.')

    for item in collectors:
        item['id'] = make_id(item['meta'])

    debug('Sorting collectors.')

    sort_integrations(collectors)

    debug('Removing duplicate collectors.')

    collectors, ids = dedupe_integrations(collectors, ids)

    idmap = {i['id']: i for i in collectors}

    for item in collectors:
        debug(f'Processing { item["id"] }.')

        related = []

        for res in item['meta']['related_resources']['integrations']['list']:
            res_id = make_id(res)

            if res_id not in idmap.keys():
                warn(f'Could not find related integration { res_id }, ignoring it.', item['_src_path'])
                continue

            related.append({
                'plugin_name': res['plugin_name'],
                'module_name': res['module_name'],
                'id': res_id,
                'name': idmap[res_id]['meta']['monitored_instance']['name'],
                'info': idmap[res_id]['meta']['info_provided_to_referring_integrations'],
            })

        item_cats = set(item['meta']['monitored_instance']['categories'])
        bogus_cats = item_cats - valid_cats
        actual_cats = item_cats & valid_cats

        if bogus_cats:
            warn(f'Ignoring invalid categories: { ", ".join(bogus_cats) }', item["_src_path"])

        if not item_cats:
            item['meta']['monitored_instance']['categories'] = list(default_cats)
            warn(f'{ item["id"] } does not list any categories, adding it to: { default_cats }', item["_src_path"])
        else:
            item['meta']['monitored_instance']['categories'] = [x for x in item['meta']['monitored_instance']['categories'] if x in list(actual_cats)]

        for scope in item['metrics']['scopes']:
            if scope['name'] == 'global':
                scope['name'] = f'{ item["meta"]["monitored_instance"]["name"] } instance'

        for cfg_example in item['setup']['configuration']['examples']['list']:
            if 'folding' not in cfg_example:
                cfg_example['folding'] = {
                    'enabled': item['setup']['configuration']['examples']['folding']['enabled']
                }

        for key in COLLECTOR_RENDER_KEYS:
            if key in item.keys():
                template = get_jinja_env().get_template(f'{ key }.md')
                data = template.render(entry=item, related=related)

                if 'variables' in item['meta']['monitored_instance']:
                    template = get_jinja_env().from_string(data)
                    data = template.render(variables=item['meta']['monitored_instance']['variables'])
            else:
                data = ''

            item[key] = data

        item['edit_link'] = make_edit_link(item)

        del item['_src_path']
        del item['_repo']
        del item['_index']

    return collectors, ids

def render_deploy(distros, categories, deploy, ids):
    debug('Sorting deployments.')

    sort_integrations(deploy)

    debug('Checking deployment ids.')

    deploy, ids = dedupe_integrations(deploy, ids)

    template = get_jinja_env().get_template('platform_info.md')

    for item in deploy:
        debug(f'Processing { item["id"] }.')

        if item['platform_info']['group']:
            entries = [
                {
                    'version': i['version'],
                    'support': i['support_type'],
                    'arches': i.get('packages', {'arches': []})['arches'],
                    'notes': i['notes'],
                } for i in distros[item['platform_info']['group']] if i['distro'] == item['platform_info']['distro']
            ]
        else:
            entries = []

        data = template.render(entries=entries)
        item['platform_info'] = data
        item['edit_link'] = make_edit_link(item)

        del item['_src_path']
        del item['_repo']
        del item['_index']

    return deploy, ids

def render_exporters(categories, exporters, ids):
    debug('Sorting exporters.')

    sort_integrations(exporters)

    debug('Checking exporter ids.')

    exporters, ids = dedupe_integrations(exporters, ids)

    for item in exporters:
        for key in EXPORTER_RENDER_KEYS:
            if key in item.keys():
                template = get_jinja_env().get_template(f'{ key }.md')
                data = template.render(entry=item)

                if 'variables' in item['meta']:
                    template = get_jinja_env().from_string(data)
                    data = template.render(variables=item['meta']['variables'])
            else:
                data = ''

            item[key] = data

        item['edit_link'] = make_edit_link(item)

        del item['_src_path']
        del item['_repo']
        del item['_index']

    return exporters, ids

def render_notifications(categories, notifications, ids):
    debug('Sorting notifications.')

    sort_integrations(notifications)

    debug('Checking notification ids.')

    notifications, ids = dedupe_integrations(notifications, ids)

    for item in notifications:
        for key in NOTIFICATION_RENDER_KEYS:
            if key in item.keys():
                template = get_jinja_env().get_template(f'{ key }.md')
                data = template.render(entry=item)

                if 'variables' in item['meta']:
                    template = get_jinja_env().from_string(data)
                    data = template.render(variables=item['meta']['variables'])
            else:
                data = ''

            item[key] = data

        item['edit_link'] = make_edit_link(item)

        del item['_src_path']
        del item['_repo']
        del item['_index']

    return notifications, ids

def render_integrations(categories, integrations):
    template = get_jinja_env().get_template('integrations.js')
    data = template.render(
        categories=json.dumps(categories),
        integrations=json.dumps(integrations),
    )
    OUTPUT_PATH.write_text(data)


def main():
    categories = load_categories()
    distros = load_yaml(DISTROS_FILE)
    collectors = load_collectors()
    deploy = load_deploy()
    exporters = load_exporters()
    notifications = load_notifications()

    collectors, ids = render_collectors(categories, collectors, dict())
    deploy, ids = render_deploy(distros, categories, deploy, ids)
    exporters, ids = render_exporters(categories, exporters, ids)
    notifications, ids = render_notifications(categories, notifications, ids)

    integrations = collectors + deploy + exporters + notifications

    render_integrations(categories, integrations)


if __name__ == '__main__':
    sys.exit(main())