haproxy.chart.py 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360
  1. # -*- coding: utf-8 -*-
  2. # Description: haproxy netdata python.d module
  3. # Author: ilyam8, ktarasz
  4. # SPDX-License-Identifier: GPL-3.0-or-later
  5. from collections import defaultdict
  6. from re import compile as re_compile
  7. try:
  8. from urlparse import urlparse
  9. except ImportError:
  10. from urllib.parse import urlparse
  11. from bases.FrameworkServices.SocketService import SocketService
  12. from bases.FrameworkServices.UrlService import UrlService
# charts order (can be overridden if you want less charts, or different order)
ORDER = [
    # frontend charts ('f' prefix)
    'fbin',
    'fbout',
    'fscur',
    'fqcur',
    'fhrsp_1xx',
    'fhrsp_2xx',
    'fhrsp_3xx',
    'fhrsp_4xx',
    'fhrsp_5xx',
    'fhrsp_other',
    'fhrsp_total',
    # backend charts ('b' prefix)
    'bbin',
    'bbout',
    'bscur',
    'bqcur',
    'bhrsp_1xx',
    'bhrsp_2xx',
    'bhrsp_3xx',
    'bhrsp_4xx',
    'bhrsp_5xx',
    'bhrsp_other',
    'bhrsp_total',
    # backend timing charts (averages over the last 1024 requests)
    'bqtime',
    'bttime',
    'brtime',
    'bctime',
    # health charts
    'health_sup',
    'health_sdown',
    'health_bdown',
    'health_idle'
]
# chart definitions; 'lines' lists start empty and are populated per discovered
# frontend/backend by Service.create_charts()
CHARTS = {
    'fbin': {
        'options': [None, 'Kilobytes In', 'KiB/s', 'frontend', 'haproxy_f.bin', 'line'],
        'lines': []
    },
    'fbout': {
        'options': [None, 'Kilobytes Out', 'KiB/s', 'frontend', 'haproxy_f.bout', 'line'],
        'lines': []
    },
    'fscur': {
        'options': [None, 'Sessions Active', 'sessions', 'frontend', 'haproxy_f.scur', 'line'],
        'lines': []
    },
    'fqcur': {
        'options': [None, 'Session In Queue', 'sessions', 'frontend', 'haproxy_f.qcur', 'line'],
        'lines': []
    },
    'fhrsp_1xx': {
        'options': [None, 'HTTP responses with 1xx code', 'responses/s', 'frontend', 'haproxy_f.hrsp_1xx', 'line'],
        'lines': []
    },
    'fhrsp_2xx': {
        'options': [None, 'HTTP responses with 2xx code', 'responses/s', 'frontend', 'haproxy_f.hrsp_2xx', 'line'],
        'lines': []
    },
    'fhrsp_3xx': {
        'options': [None, 'HTTP responses with 3xx code', 'responses/s', 'frontend', 'haproxy_f.hrsp_3xx', 'line'],
        'lines': []
    },
    'fhrsp_4xx': {
        'options': [None, 'HTTP responses with 4xx code', 'responses/s', 'frontend', 'haproxy_f.hrsp_4xx', 'line'],
        'lines': []
    },
    'fhrsp_5xx': {
        'options': [None, 'HTTP responses with 5xx code', 'responses/s', 'frontend', 'haproxy_f.hrsp_5xx', 'line'],
        'lines': []
    },
    'fhrsp_other': {
        'options': [None, 'HTTP responses with other codes (protocol error)', 'responses/s', 'frontend',
                    'haproxy_f.hrsp_other', 'line'],
        'lines': []
    },
    'fhrsp_total': {
        'options': [None, 'HTTP responses', 'responses', 'frontend', 'haproxy_f.hrsp_total', 'line'],
        'lines': []
    },
    'bbin': {
        'options': [None, 'Kilobytes In', 'KiB/s', 'backend', 'haproxy_b.bin', 'line'],
        'lines': []
    },
    'bbout': {
        'options': [None, 'Kilobytes Out', 'KiB/s', 'backend', 'haproxy_b.bout', 'line'],
        'lines': []
    },
    'bscur': {
        'options': [None, 'Sessions Active', 'sessions', 'backend', 'haproxy_b.scur', 'line'],
        'lines': []
    },
    'bqcur': {
        'options': [None, 'Sessions In Queue', 'sessions', 'backend', 'haproxy_b.qcur', 'line'],
        'lines': []
    },
    'bhrsp_1xx': {
        'options': [None, 'HTTP responses with 1xx code', 'responses/s', 'backend', 'haproxy_b.hrsp_1xx', 'line'],
        'lines': []
    },
    'bhrsp_2xx': {
        'options': [None, 'HTTP responses with 2xx code', 'responses/s', 'backend', 'haproxy_b.hrsp_2xx', 'line'],
        'lines': []
    },
    'bhrsp_3xx': {
        'options': [None, 'HTTP responses with 3xx code', 'responses/s', 'backend', 'haproxy_b.hrsp_3xx', 'line'],
        'lines': []
    },
    'bhrsp_4xx': {
        'options': [None, 'HTTP responses with 4xx code', 'responses/s', 'backend', 'haproxy_b.hrsp_4xx', 'line'],
        'lines': []
    },
    'bhrsp_5xx': {
        'options': [None, 'HTTP responses with 5xx code', 'responses/s', 'backend', 'haproxy_b.hrsp_5xx', 'line'],
        'lines': []
    },
    'bhrsp_other': {
        'options': [None, 'HTTP responses with other codes (protocol error)', 'responses/s', 'backend',
                    'haproxy_b.hrsp_other', 'line'],
        'lines': []
    },
    'bhrsp_total': {
        'options': [None, 'HTTP responses (total)', 'responses/s', 'backend', 'haproxy_b.hrsp_total', 'line'],
        'lines': []
    },
    'bqtime': {
        'options': [None, 'The average queue time over the 1024 last requests', 'milliseconds', 'backend',
                    'haproxy_b.qtime', 'line'],
        'lines': []
    },
    'bctime': {
        'options': [None, 'The average connect time over the 1024 last requests', 'milliseconds', 'backend',
                    'haproxy_b.ctime', 'line'],
        'lines': []
    },
    'brtime': {
        'options': [None, 'The average response time over the 1024 last requests', 'milliseconds', 'backend',
                    'haproxy_b.rtime', 'line'],
        'lines': []
    },
    'bttime': {
        'options': [None, 'The average total session time over the 1024 last requests', 'milliseconds', 'backend',
                    'haproxy_b.ttime', 'line'],
        'lines': []
    },
    'health_sdown': {
        'options': [None, 'Backend Servers In DOWN State', 'failed servers', 'health', 'haproxy_hs.down', 'line'],
        'lines': []
    },
    'health_sup': {
        'options': [None, 'Backend Servers In UP State', 'health servers', 'health', 'haproxy_hs.up', 'line'],
        'lines': []
    },
    'health_bdown': {
        'options': [None, 'Is Backend Failed?', 'boolean', 'health', 'haproxy_hb.down', 'line'],
        'lines': []
    },
    'health_idle': {
        'options': [None, 'The Ratio Of Polling Time Vs Total Time', 'percentage', 'health', 'haproxy.idle', 'line'],
        'lines': [
            # 'idle' comes from the regex match in Service._get_info_data()
            ['idle', None, 'absolute']
        ]
    }
}
# per-proxy metrics collected for both frontends and backends;
# divisor 1024 turns the raw byte counters into KiB
METRICS = {
    'bin': {'algorithm': 'incremental', 'divisor': 1024},
    'bout': {'algorithm': 'incremental', 'divisor': 1024},
    'scur': {'algorithm': 'absolute', 'divisor': 1},
    'qcur': {'algorithm': 'absolute', 'divisor': 1},
    'hrsp_1xx': {'algorithm': 'incremental', 'divisor': 1},
    'hrsp_2xx': {'algorithm': 'incremental', 'divisor': 1},
    'hrsp_3xx': {'algorithm': 'incremental', 'divisor': 1},
    'hrsp_4xx': {'algorithm': 'incremental', 'divisor': 1},
    'hrsp_5xx': {'algorithm': 'incremental', 'divisor': 1},
    'hrsp_other': {'algorithm': 'incremental', 'divisor': 1}
}

# timing metrics reported by HAProxy for backends only
BACKEND_METRICS = {
    'qtime': {'algorithm': 'absolute', 'divisor': 1},
    'ctime': {'algorithm': 'absolute', 'divisor': 1},
    'rtime': {'algorithm': 'absolute', 'divisor': 1},
    'ttime': {'algorithm': 'absolute', 'divisor': 1}
}
  194. REGEX = dict(url=re_compile(r'idle = (?P<idle>[0-9]+)'),
  195. socket=re_compile(r'Idle_pct: (?P<idle>[0-9]+)'))
  196. # TODO: the code is unreadable
  197. class Service(UrlService, SocketService):
  198. def __init__(self, configuration=None, name=None):
  199. if 'socket' in configuration:
  200. SocketService.__init__(self, configuration=configuration, name=name)
  201. self.poll = SocketService
  202. self.options_ = dict(regex=REGEX['socket'],
  203. stat='show stat\n'.encode(),
  204. info='show info\n'.encode())
  205. else:
  206. UrlService.__init__(self, configuration=configuration, name=name)
  207. self.poll = UrlService
  208. self.options_ = dict(regex=REGEX['url'],
  209. stat=self.url,
  210. info=url_remove_params(self.url))
  211. self.order = ORDER
  212. self.definitions = CHARTS
  213. def check(self):
  214. if self.poll.check(self):
  215. self.create_charts()
  216. self.info('We are using %s.' % self.poll.__name__)
  217. return True
  218. return False
  219. def _get_data(self):
  220. to_netdata = dict()
  221. self.request, self.url = self.options_['stat'], self.options_['stat']
  222. stat_data = self._get_stat_data()
  223. self.request, self.url = self.options_['info'], self.options_['info']
  224. info_data = self._get_info_data(regex=self.options_['regex'])
  225. to_netdata.update(stat_data)
  226. to_netdata.update(info_data)
  227. return to_netdata or None
  228. def _get_stat_data(self):
  229. """
  230. :return: dict
  231. """
  232. raw_data = self.poll._get_raw_data(self)
  233. if not raw_data:
  234. return dict()
  235. raw_data = raw_data.splitlines()
  236. self.data = parse_data_([dict(zip(raw_data[0].split(','), raw_data[_].split(',')))
  237. for _ in range(1, len(raw_data))])
  238. if not self.data:
  239. return dict()
  240. stat_data = dict()
  241. for frontend in self.data['frontend']:
  242. for metric in METRICS:
  243. idx = frontend['# pxname'].replace('.', '_')
  244. stat_data['_'.join(['frontend', metric, idx])] = frontend.get(metric) or 0
  245. for backend in self.data['backend']:
  246. name, idx = backend['# pxname'], backend['# pxname'].replace('.', '_')
  247. stat_data['hsup_' + idx] = len([server for server in self.data['servers']
  248. if server_status(server, name, 'UP')])
  249. stat_data['hsdown_' + idx] = len([server for server in self.data['servers']
  250. if server_status(server, name, 'DOWN')])
  251. stat_data['hbdown_' + idx] = 1 if backend.get('status') == 'DOWN' else 0
  252. for metric in BACKEND_METRICS:
  253. stat_data['_'.join(['backend', metric, idx])] = backend.get(metric) or 0
  254. hrsp_total = 0
  255. for metric in METRICS:
  256. stat_data['_'.join(['backend', metric, idx])] = backend.get(metric) or 0
  257. if metric.startswith('hrsp_'):
  258. hrsp_total += int(backend.get(metric) or 0)
  259. stat_data['_'.join(['backend', 'hrsp_total', idx])] = hrsp_total
  260. return stat_data
  261. def _get_info_data(self, regex):
  262. """
  263. :return: dict
  264. """
  265. raw_data = self.poll._get_raw_data(self)
  266. if not raw_data:
  267. return dict()
  268. match = regex.search(raw_data)
  269. return match.groupdict() if match else dict()
  270. @staticmethod
  271. def _check_raw_data(data):
  272. """
  273. Check if all data has been gathered from socket
  274. :param data: str
  275. :return: boolean
  276. """
  277. return not bool(data)
  278. def create_charts(self):
  279. for front in self.data['frontend']:
  280. name, idx = front['# pxname'], front['# pxname'].replace('.', '_')
  281. for metric in METRICS:
  282. self.definitions['f' + metric]['lines'].append(['_'.join(['frontend', metric, idx]),
  283. name, METRICS[metric]['algorithm'], 1,
  284. METRICS[metric]['divisor']])
  285. self.definitions['fhrsp_total']['lines'].append(['_'.join(['frontend', 'hrsp_total', idx]),
  286. name, 'incremental', 1, 1])
  287. for back in self.data['backend']:
  288. name, idx = back['# pxname'], back['# pxname'].replace('.', '_')
  289. for metric in METRICS:
  290. self.definitions['b' + metric]['lines'].append(['_'.join(['backend', metric, idx]),
  291. name, METRICS[metric]['algorithm'], 1,
  292. METRICS[metric]['divisor']])
  293. self.definitions['bhrsp_total']['lines'].append(['_'.join(['backend', 'hrsp_total', idx]),
  294. name, 'incremental', 1, 1])
  295. for metric in BACKEND_METRICS:
  296. self.definitions['b' + metric]['lines'].append(['_'.join(['backend', metric, idx]),
  297. name, BACKEND_METRICS[metric]['algorithm'], 1,
  298. BACKEND_METRICS[metric]['divisor']])
  299. self.definitions['health_sup']['lines'].append(['hsup_' + idx, name, 'absolute'])
  300. self.definitions['health_sdown']['lines'].append(['hsdown_' + idx, name, 'absolute'])
  301. self.definitions['health_bdown']['lines'].append(['hbdown_' + idx, name, 'absolute'])
  302. def parse_data_(data):
  303. def is_backend(backend):
  304. return backend.get('svname') == 'BACKEND' and backend.get('# pxname') != 'stats'
  305. def is_frontend(frontend):
  306. return frontend.get('svname') == 'FRONTEND' and frontend.get('# pxname') != 'stats'
  307. def is_server(server):
  308. return not server.get('svname', '').startswith(('FRONTEND', 'BACKEND'))
  309. if not data:
  310. return None
  311. result = defaultdict(list)
  312. for elem in data:
  313. if is_backend(elem):
  314. result['backend'].append(elem)
  315. continue
  316. elif is_frontend(elem):
  317. result['frontend'].append(elem)
  318. continue
  319. elif is_server(elem):
  320. result['servers'].append(elem)
  321. return result or None
  322. def server_status(server, backend_name, status='DOWN'):
  323. return server.get('# pxname') == backend_name and server.get('status') == status
  324. def url_remove_params(url):
  325. parsed = urlparse(url or str())
  326. return '{scheme}://{netloc}{path}'.format(scheme=parsed.scheme, netloc=parsed.netloc, path=parsed.path)