# haproxy.chart.py
  1. # -*- coding: utf-8 -*-
  2. # Description: haproxy netdata python.d module
  3. # Author: l2isbad
  4. from collections import defaultdict
  5. from re import compile as re_compile
  6. try:
  7. from urlparse import urlparse
  8. except ImportError:
  9. from urllib.parse import urlparse
  10. from bases.FrameworkServices.SocketService import SocketService
  11. from bases.FrameworkServices.UrlService import UrlService
  12. # default module values (can be overridden per job in `config`)
  13. # update_every = 2
  14. priority = 60000
  15. retries = 60
  16. # charts order (can be overridden if you want less charts, or different order)
  17. ORDER = ['fbin', 'fbout', 'fscur', 'fqcur', 'bbin', 'bbout', 'bscur', 'bqcur',
  18. 'health_sdown', 'health_bdown', 'health_idle']
  19. CHARTS = {
  20. 'fbin': {
  21. 'options': [None, "Kilobytes In", "KB/s", 'frontend', 'haproxy_f.bin', 'line'],
  22. 'lines': [
  23. ]},
  24. 'fbout': {
  25. 'options': [None, "Kilobytes Out", "KB/s", 'frontend', 'haproxy_f.bout', 'line'],
  26. 'lines': [
  27. ]},
  28. 'fscur': {
  29. 'options': [None, "Sessions Active", "sessions", 'frontend', 'haproxy_f.scur', 'line'],
  30. 'lines': [
  31. ]},
  32. 'fqcur': {
  33. 'options': [None, "Session In Queue", "sessions", 'frontend', 'haproxy_f.qcur', 'line'],
  34. 'lines': [
  35. ]},
  36. 'bbin': {
  37. 'options': [None, "Kilobytes In", "KB/s", 'backend', 'haproxy_b.bin', 'line'],
  38. 'lines': [
  39. ]},
  40. 'bbout': {
  41. 'options': [None, "Kilobytes Out", "KB/s", 'backend', 'haproxy_b.bout', 'line'],
  42. 'lines': [
  43. ]},
  44. 'bscur': {
  45. 'options': [None, "Sessions Active", "sessions", 'backend', 'haproxy_b.scur', 'line'],
  46. 'lines': [
  47. ]},
  48. 'bqcur': {
  49. 'options': [None, "Sessions In Queue", "sessions", 'backend', 'haproxy_b.qcur', 'line'],
  50. 'lines': [
  51. ]},
  52. 'health_sdown': {
  53. 'options': [None, "Backend Servers In DOWN State", "failed servers", 'health',
  54. 'haproxy_hs.down', 'line'],
  55. 'lines': [
  56. ]},
  57. 'health_bdown': {
  58. 'options': [None, "Is Backend Alive? 1 = DOWN", "failed backend", 'health', 'haproxy_hb.down', 'line'],
  59. 'lines': [
  60. ]},
  61. 'health_idle': {
  62. 'options': [None, "The Ratio Of Polling Time Vs Total Time", "percent", 'health', 'haproxy.idle', 'line'],
  63. 'lines': [
  64. ['idle', None, 'absolute']
  65. ]}
  66. }
  67. METRICS = {'bin': {'algorithm': 'incremental', 'divisor': 1024},
  68. 'bout': {'algorithm': 'incremental', 'divisor': 1024},
  69. 'scur': {'algorithm': 'absolute', 'divisor': 1},
  70. 'qcur': {'algorithm': 'absolute', 'divisor': 1}}
  71. REGEX = dict(url=re_compile(r'idle = (?P<idle>[0-9]+)'),
  72. socket=re_compile(r'Idle_pct: (?P<idle>[0-9]+)'))
  73. class Service(UrlService, SocketService):
  74. def __init__(self, configuration=None, name=None):
  75. if 'socket' in configuration:
  76. SocketService.__init__(self, configuration=configuration, name=name)
  77. self.poll = SocketService
  78. self.options_ = dict(regex=REGEX['socket'],
  79. stat='show stat\n'.encode(),
  80. info='show info\n'.encode())
  81. else:
  82. UrlService.__init__(self, configuration=configuration, name=name)
  83. self.poll = UrlService
  84. self.options_ = dict(regex=REGEX['url'],
  85. stat=self.url,
  86. info=url_remove_params(self.url))
  87. self.order = ORDER
  88. self.definitions = CHARTS
  89. def check(self):
  90. if self.poll.check(self):
  91. self.create_charts()
  92. self.info('We are using %s.' % self.poll.__name__)
  93. return True
  94. return False
  95. def _get_data(self):
  96. to_netdata = dict()
  97. self.request, self.url = self.options_['stat'], self.options_['stat']
  98. stat_data = self._get_stat_data()
  99. self.request, self.url = self.options_['info'], self.options_['info']
  100. info_data = self._get_info_data(regex=self.options_['regex'])
  101. to_netdata.update(stat_data)
  102. to_netdata.update(info_data)
  103. return to_netdata or None
  104. def _get_stat_data(self):
  105. """
  106. :return: dict
  107. """
  108. raw_data = self.poll._get_raw_data(self)
  109. if not raw_data:
  110. return dict()
  111. raw_data = raw_data.splitlines()
  112. self.data = parse_data_([dict(zip(raw_data[0].split(','), raw_data[_].split(',')))
  113. for _ in range(1, len(raw_data))])
  114. if not self.data:
  115. return dict()
  116. stat_data = dict()
  117. for frontend in self.data['frontend']:
  118. for metric in METRICS:
  119. idx = frontend['# pxname'].replace('.', '_')
  120. stat_data['_'.join(['frontend', metric, idx])] = frontend.get(metric) or 0
  121. for backend in self.data['backend']:
  122. name, idx = backend['# pxname'], backend['# pxname'].replace('.', '_')
  123. stat_data['hsdown_' + idx] = len([server for server in self.data['servers']
  124. if server_down(server, name)])
  125. stat_data['hbdown_' + idx] = 1 if backend.get('status') == 'DOWN' else 0
  126. for metric in METRICS:
  127. stat_data['_'.join(['backend', metric, idx])] = backend.get(metric) or 0
  128. return stat_data
  129. def _get_info_data(self, regex):
  130. """
  131. :return: dict
  132. """
  133. raw_data = self.poll._get_raw_data(self)
  134. if not raw_data:
  135. return dict()
  136. match = regex.search(raw_data)
  137. return match.groupdict() if match else dict()
  138. @staticmethod
  139. def _check_raw_data(data):
  140. """
  141. Check if all data has been gathered from socket
  142. :param data: str
  143. :return: boolean
  144. """
  145. return not bool(data)
  146. def create_charts(self):
  147. for front in self.data['frontend']:
  148. name, idx = front['# pxname'], front['# pxname'].replace('.', '_')
  149. for metric in METRICS:
  150. self.definitions['f' + metric]['lines'].append(['_'.join(['frontend', metric, idx]),
  151. name, METRICS[metric]['algorithm'], 1,
  152. METRICS[metric]['divisor']])
  153. for back in self.data['backend']:
  154. name, idx = back['# pxname'], back['# pxname'].replace('.', '_')
  155. for metric in METRICS:
  156. self.definitions['b' + metric]['lines'].append(['_'.join(['backend', metric, idx]),
  157. name, METRICS[metric]['algorithm'], 1,
  158. METRICS[metric]['divisor']])
  159. self.definitions['health_sdown']['lines'].append(['hsdown_' + idx, name, 'absolute'])
  160. self.definitions['health_bdown']['lines'].append(['hbdown_' + idx, name, 'absolute'])
  161. def parse_data_(data):
  162. def is_backend(backend):
  163. return backend.get('svname') == 'BACKEND' and backend.get('# pxname') != 'stats'
  164. def is_frontend(frontend):
  165. return frontend.get('svname') == 'FRONTEND' and frontend.get('# pxname') != 'stats'
  166. def is_server(server):
  167. return not server.get('svname', '').startswith(('FRONTEND', 'BACKEND'))
  168. if not data:
  169. return None
  170. result = defaultdict(list)
  171. for elem in data:
  172. if is_backend(elem):
  173. result['backend'].append(elem)
  174. continue
  175. elif is_frontend(elem):
  176. result['frontend'].append(elem)
  177. continue
  178. elif is_server(elem):
  179. result['servers'].append(elem)
  180. return result or None
  181. def server_down(server, backend_name):
  182. return server.get('# pxname') == backend_name and server.get('status') == 'DOWN'
  183. def url_remove_params(url):
  184. parsed = urlparse(url or str())
  185. return '{scheme}://{netloc}{path}'.format(scheme=parsed.scheme, netloc=parsed.netloc, path=parsed.path)