// SPDX-License-Identifier: GPL-3.0-or-later

#include "daemon/common.h"
#include "KolmogorovSmirnovDist.h"

#define MAX_POINTS 10000
int enable_metric_correlations = CONFIG_BOOLEAN_YES;
int metric_correlations_version = 1;
METRIC_CORRELATIONS_METHOD default_metric_correlations_method = METRIC_CORRELATIONS_KS2;

typedef struct mc_stats {
    NETDATA_DOUBLE max_base_high_ratio;
    size_t db_points;
    size_t result_points;
    size_t db_queries;
    size_t binary_searches;
} MC_STATS;

// ----------------------------------------------------------------------------
// parse and render metric correlations methods

static struct {
    const char *name;
    METRIC_CORRELATIONS_METHOD value;
} metric_correlations_methods[] = {
      { "ks2"    , METRIC_CORRELATIONS_KS2 }
    , { "volume" , METRIC_CORRELATIONS_VOLUME }
    , { NULL     , 0 }
};

METRIC_CORRELATIONS_METHOD mc_string_to_method(const char *method) {
    for(int i = 0; metric_correlations_methods[i].name ;i++)
        if(strcmp(method, metric_correlations_methods[i].name) == 0)
            return metric_correlations_methods[i].value;

    return default_metric_correlations_method;
}

const char *mc_method_to_string(METRIC_CORRELATIONS_METHOD method) {
    for(int i = 0; metric_correlations_methods[i].name ;i++)
        if(metric_correlations_methods[i].value == method)
            return metric_correlations_methods[i].name;

    return "unknown";
}

// ----------------------------------------------------------------------------
// The results per dimension are aggregated into a dictionary

typedef enum {
    RESULT_IS_BASE_HIGH_RATIO    = (1 << 0),
    RESULT_IS_PERCENTAGE_OF_TIME = (1 << 1),
} RESULT_FLAGS;

struct register_result {
    RESULT_FLAGS flags;
    RRDSET *st;
    const char *chart_id;
    const char *context;
    const char *dim_name;
    NETDATA_DOUBLE value;
};

static void register_result_insert_callback(const char *name, void *value, void *data) {
    (void)name;
    (void)data;

    struct register_result *t = (struct register_result *)value;

    if(t->chart_id) t->chart_id = strdupz(t->chart_id);
    if(t->context)  t->context  = strdupz(t->context);
    if(t->dim_name) t->dim_name = strdupz(t->dim_name);
}

static void register_result_delete_callback(const char *name, void *value, void *data) {
    (void)name;
    (void)data;

    struct register_result *t = (struct register_result *)value;

    freez((void *)t->chart_id);
    freez((void *)t->context);
    freez((void *)t->dim_name);
}

static DICTIONARY *register_result_init() {
    DICTIONARY *results = dictionary_create(DICTIONARY_FLAG_SINGLE_THREADED);
    dictionary_register_insert_callback(results, register_result_insert_callback, results);
    dictionary_register_delete_callback(results, register_result_delete_callback, results);
    return results;
}

static void register_result_destroy(DICTIONARY *results) {
    dictionary_destroy(results);
}

static void register_result(DICTIONARY *results, RRDSET *st, RRDDIM *d, NETDATA_DOUBLE value, RESULT_FLAGS flags, MC_STATS *stats) {
    if(!netdata_double_isnumber(value)) return;

    // make it positive
    NETDATA_DOUBLE v = fabsndd(value);

    // no need to store zero scored values
    if(v == 0.0) return;

    // keep track of the max of the baseline / highlight ratio
    if(flags & RESULT_IS_BASE_HIGH_RATIO && v > stats->max_base_high_ratio)
        stats->max_base_high_ratio = v;

    struct register_result t = {
        .flags = flags,
        .st = st,
        .chart_id = st->id,
        .context = st->context,
        .dim_name = d->name,
        .value = v
    };

    char buf[5000 + 1];
    snprintfz(buf, 5000, "%s:%s", st->id, d->name);
    dictionary_set(results, buf, &t, sizeof(struct register_result));
}
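
// For example (illustrative chart and dimension names): a result for the
// dimension "user" of chart "system.cpu" is stored under the dictionary key
// "system.cpu:user", following the "%s:%s" format above.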

// ----------------------------------------------------------------------------
// Generation of JSON output for the results

static size_t registered_results_to_json(DICTIONARY *results, BUFFER *wb,
                                          long long after, long long before,
                                          long long baseline_after, long long baseline_before,
                                          long points, METRIC_CORRELATIONS_METHOD method,
                                          RRDR_GROUPING group, RRDR_OPTIONS options, uint32_t shifts,
                                          size_t correlated_dimensions, usec_t duration, MC_STATS *stats) {

    buffer_sprintf(wb, "{\n"
                       "\t\"after\": %lld,\n"
                       "\t\"before\": %lld,\n"
                       "\t\"duration\": %lld,\n"
                       "\t\"points\": %ld,\n"
                       "\t\"baseline_after\": %lld,\n"
                       "\t\"baseline_before\": %lld,\n"
                       "\t\"baseline_duration\": %lld,\n"
                       "\t\"baseline_points\": %ld,\n"
                       "\t\"statistics\": {\n"
                       "\t\t\"query_time_ms\": %f,\n"
                       "\t\t\"db_queries\": %zu,\n"
                       "\t\t\"db_points_read\": %zu,\n"
                       "\t\t\"query_result_points\": %zu,\n"
                       "\t\t\"binary_searches\": %zu\n"
                       "\t},\n"
                       "\t\"group\": \"%s\",\n"
                       "\t\"method\": \"%s\",\n"
                       "\t\"options\": \"",
                   after,
                   before,
                   before - after,
                   points,
                   baseline_after,
                   baseline_before,
                   baseline_before - baseline_after,
                   points << shifts,
                   (double)duration / (double)USEC_PER_MS,
                   stats->db_queries,
                   stats->db_points,
                   stats->result_points,
                   stats->binary_searches,
                   web_client_api_request_v1_data_group_to_string(group),
                   mc_method_to_string(method));

    web_client_api_request_v1_data_options_to_string(wb, options);
    buffer_strcat(wb, "\",\n\t\"correlated_charts\": {\n");

    size_t charts = 0, chart_dims = 0, total_dimensions = 0;
    struct register_result *t;
    RRDSET *last_st = NULL; // never access this - we use it only for comparison
    dfe_start_read(results, t) {
        if(!last_st || t->st != last_st) {
            last_st = t->st;

            if(charts) buffer_strcat(wb, "\n\t\t\t}\n\t\t},\n");

            buffer_strcat(wb, "\t\t\"");
            buffer_strcat(wb, t->chart_id);
            buffer_strcat(wb, "\": {\n");
            buffer_strcat(wb, "\t\t\t\"context\": \"");
            buffer_strcat(wb, t->context);
            buffer_strcat(wb, "\",\n\t\t\t\"dimensions\": {\n");
            charts++;
            chart_dims = 0;
        }
        if (chart_dims) buffer_sprintf(wb, ",\n");
        buffer_sprintf(wb, "\t\t\t\t\"%s\": " NETDATA_DOUBLE_FORMAT, t->dim_name, t->value);
        chart_dims++;
        total_dimensions++;
    }
    dfe_done(t);

    // close dimensions and chart
    if (total_dimensions)
        buffer_strcat(wb, "\n\t\t\t}\n\t\t}\n");

    // close correlated_charts
    buffer_sprintf(wb, "\t},\n"
                       "\t\"correlated_dimensions\": %zu,\n"
                       "\t\"total_dimensions_count\": %zu\n"
                       "}\n",
                   total_dimensions,
                   correlated_dimensions // yes, we flip them
    );

    return total_dimensions;
}
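
// Roughly, the produced JSON looks like this (an abbreviated sketch with
// illustrative values; the actual output follows the format strings above):
//
// {
//   "after": 1650000000, "before": 1650003600, "duration": 3600, "points": 500,
//   "baseline_after": 1649985600, "baseline_before": 1650000000,
//   "baseline_duration": 14400, "baseline_points": 2000,
//   "statistics": { "query_time_ms": 12.3, "db_queries": 2, ... },
//   "group": "average", "method": "ks2", "options": "...",
//   "correlated_charts": {
//     "system.cpu": { "context": "system.cpu", "dimensions": { "user": 0.95, "system": 0.70 } }
//   },
//   "correlated_dimensions": 2,
//   "total_dimensions_count": 10
// }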

// ----------------------------------------------------------------------------
// KS2 algorithm functions

typedef long int DIFFS_NUMBERS;
#define DOUBLE_TO_INT_MULTIPLIER 100000

static inline int binary_search_bigger_than(const DIFFS_NUMBERS arr[], int left, int size, DIFFS_NUMBERS K) {
    // binary search to find the index of the first value
    // in the array that is greater than K
    int right = size;
    while(left < right) {
        int middle = (int)(((unsigned int)(left + right)) >> 1);

        if(arr[middle] > K)
            right = middle;

        else
            left = middle + 1;
    }
    return left;
}
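
// For example (illustrative values): with arr[] = { 1, 3, 3, 7 } and size = 4,
// binary_search_bigger_than(arr, 0, 4, 3) returns 3 (the index of 7, the first
// value greater than 3), while binary_search_bigger_than(arr, 0, 4, 7) returns 4
// (equal to size, since no value is greater than 7).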

int compare_diffs(const void *left, const void *right) {
    DIFFS_NUMBERS lt = *(DIFFS_NUMBERS *)left;
    DIFFS_NUMBERS rt = *(DIFFS_NUMBERS *)right;

    // https://stackoverflow.com/a/3886497/1114110
    return (lt > rt) - (lt < rt);
}

static size_t calculate_pairs_diff(DIFFS_NUMBERS *diffs, NETDATA_DOUBLE *arr, size_t size) {
    NETDATA_DOUBLE *last = &arr[size - 1];
    size_t added = 0;

    while(last > arr) {
        NETDATA_DOUBLE second = *last--;
        NETDATA_DOUBLE first  = *last;
        *diffs++ = (DIFFS_NUMBERS)((first - second) * (NETDATA_DOUBLE)DOUBLE_TO_INT_MULTIPLIER);
        added++;
    }

    return added;
}
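
// For example (illustrative values): for arr[] = { 1.0, 3.0, 2.0 } and size = 3,
// the function walks backwards and produces the scaled differences of consecutive
// points: diffs[] = { (3.0 - 2.0) * 100000, (1.0 - 3.0) * 100000 } = { 100000, -200000 },
// returning 2 (always one less than the number of input points).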

static double ks_2samp(DIFFS_NUMBERS baseline_diffs[], int base_size, DIFFS_NUMBERS highlight_diffs[], int high_size, uint32_t base_shifts) {

    qsort(baseline_diffs, base_size, sizeof(DIFFS_NUMBERS), compare_diffs);
    qsort(highlight_diffs, high_size, sizeof(DIFFS_NUMBERS), compare_diffs);

    // Now we should be calculating this:
    //
    // For each number in the diffs arrays, we should find the index of the
    // first number bigger than it in both arrays and calculate the % of this
    // index vs the total array size. Once we have the 2 percentages, we should
    // find the min and max across the deltas of all of them.
    //
    // It should look like this:
    //
    // base_pcent = binary_search_bigger_than(...) / base_size;
    // high_pcent = binary_search_bigger_than(...) / high_size;
    // delta = base_pcent - high_pcent;
    // if(delta < min) min = delta;
    // if(delta > max) max = delta;
    //
    // This would require a lot of multiplications and divisions.
    //
    // To speed it up, we do the binary search to find the index of each number,
    // but instead of dividing, we shift the highlight index left by the number
    // of shifts (the power of two) by which the baseline is bigger than the
    // highlight, so the two indexes become directly comparable.
    // We also keep track of the original indexes at the min and the max, to
    // properly calculate their percentages once the loops finish.
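
    // For example (illustrative numbers): with base_size = 8, high_size = 4 and
    // base_shifts = 1, a base_idx of 6 and a high_idx of 3 give
    // delta = 6 - (3 << 1) = 0, i.e. both values sit at the same percentile
    // (6/8 == 3/4), without any division inside the loop.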

    // initialize min and max using the first number of baseline_diffs

    DIFFS_NUMBERS K = baseline_diffs[0];
    int base_idx = binary_search_bigger_than(baseline_diffs, 1, base_size, K);
    int high_idx = binary_search_bigger_than(highlight_diffs, 0, high_size, K);
    int delta = base_idx - (high_idx << base_shifts);
    int min = delta, max = delta;

    int base_min_idx = base_idx;
    int base_max_idx = base_idx;
    int high_min_idx = high_idx;
    int high_max_idx = high_idx;

    // do the baseline_diffs starting from 1 (we did position 0 above)
    for(int i = 1; i < base_size; i++) {
        K = baseline_diffs[i];
        base_idx = binary_search_bigger_than(baseline_diffs, i + 1, base_size, K); // starting from i + 1, since baseline_diffs is sorted
        high_idx = binary_search_bigger_than(highlight_diffs, 0, high_size, K);

        delta = base_idx - (high_idx << base_shifts);
        if(delta < min) {
            min = delta;
            base_min_idx = base_idx;
            high_min_idx = high_idx;
        }
        else if(delta > max) {
            max = delta;
            base_max_idx = base_idx;
            high_max_idx = high_idx;
        }
    }

    // do the highlight_diffs starting from 0
    for(int i = 0; i < high_size; i++) {
        K = highlight_diffs[i];
        base_idx = binary_search_bigger_than(baseline_diffs, 0, base_size, K);
        high_idx = binary_search_bigger_than(highlight_diffs, i + 1, high_size, K); // starting from i + 1, since highlight_diffs is sorted

        delta = base_idx - (high_idx << base_shifts);
        if(delta < min) {
            min = delta;
            base_min_idx = base_idx;
            high_min_idx = high_idx;
        }
        else if(delta > max) {
            max = delta;
            base_max_idx = base_idx;
            high_max_idx = high_idx;
        }
    }

    // now we have the min, max and their indexes
    // properly calculate min and max as dmin and dmax
    double dbase_size = (double)base_size;
    double dhigh_size = (double)high_size;
    double dmin = ((double)base_min_idx / dbase_size) - ((double)high_min_idx / dhigh_size);
    double dmax = ((double)base_max_idx / dbase_size) - ((double)high_max_idx / dhigh_size);

    dmin = -dmin;
    if(islessequal(dmin, 0.0)) dmin = 0.0;
    else if(isgreaterequal(dmin, 1.0)) dmin = 1.0;

    double d;
    if(isgreaterequal(dmin, dmax)) d = dmin;
    else d = dmax;

    double en = round(dbase_size * dhigh_size / (dbase_size + dhigh_size));

    // under these conditions, KSfbar() crashes
    if(unlikely(isnan(en) || isinf(en) || en == 0.0 || isnan(d) || isinf(d)))
        return NAN;

    return KSfbar((int)en, d);
}

static double kstwo(
    NETDATA_DOUBLE baseline[], int baseline_points,
    NETDATA_DOUBLE highlight[], int highlight_points, uint32_t base_shifts) {

    // -1 in size, since calculate_pairs_diff() returns one less point
    DIFFS_NUMBERS baseline_diffs[baseline_points - 1];
    DIFFS_NUMBERS highlight_diffs[highlight_points - 1];

    int base_size = (int)calculate_pairs_diff(baseline_diffs, baseline, baseline_points);
    int high_size = (int)calculate_pairs_diff(highlight_diffs, highlight, highlight_points);

    if(unlikely(!base_size || !high_size))
        return NAN;

    if(unlikely(base_size != baseline_points - 1 || high_size != highlight_points - 1)) {
        error("Metric correlations: internal error - calculate_pairs_diff() returns the wrong number of entries");
        return NAN;
    }

    return ks_2samp(baseline_diffs, base_size, highlight_diffs, high_size, base_shifts);
}

static int rrdset_metric_correlations_ks2(RRDSET *st, DICTIONARY *results,
                                          long long baseline_after, long long baseline_before,
                                          long long after, long long before,
                                          long long points, RRDR_OPTIONS options,
                                          RRDR_GROUPING group, const char *group_options,
                                          uint32_t shifts, int timeout, MC_STATS *stats) {
    options |= RRDR_OPTION_NATURAL_POINTS;

    long group_time = 0;
    struct context_param *context_param_list = NULL;

    int correlated_dimensions = 0;

    RRDR *high_rrdr = NULL;
    RRDR *base_rrdr = NULL;

    // get the highlight first, to find the number of points available
    stats->db_queries++;
    usec_t started_usec = now_realtime_usec();
    ONEWAYALLOC *owa = onewayalloc_create(0);
    high_rrdr = rrd2rrdr(owa, st, points,
                         after, before, group,
                         group_time, options, NULL, context_param_list, group_options,
                         timeout, 0);
    if(!high_rrdr) {
        info("Metric correlations: rrd2rrdr() failed for the highlighted window on chart '%s'.", st->name);
        goto cleanup;
    }

    stats->db_points     += high_rrdr->internal.db_points_read;
    stats->result_points += high_rrdr->internal.result_points_generated;

    if(!high_rrdr->d) {
        info("Metric correlations: rrd2rrdr() did not return any dimensions on chart '%s'.", st->name);
        goto cleanup;
    }

    if(high_rrdr->result_options & RRDR_RESULT_OPTION_CANCEL) {
        info("Metric correlations: rrd2rrdr() on highlighted window timed out '%s'.", st->name);
        goto cleanup;
    }

    int high_points = rrdr_rows(high_rrdr);

    usec_t now_usec = now_realtime_usec();
    if(now_usec - started_usec > timeout * USEC_PER_MS)
        goto cleanup;

    // get the baseline, requesting the same number of points as the highlight
    stats->db_queries++;
    base_rrdr = rrd2rrdr(owa, st, high_points << shifts,
                         baseline_after, baseline_before, group,
                         group_time, options, NULL, context_param_list, group_options,
                         (int)(timeout - ((now_usec - started_usec) / USEC_PER_MS)), 0);
    if(!base_rrdr) {
        info("Metric correlations: rrd2rrdr() failed for the baseline window on chart '%s'.", st->name);
        goto cleanup;
    }

    stats->db_points     += base_rrdr->internal.db_points_read;
    stats->result_points += base_rrdr->internal.result_points_generated;

    if(!base_rrdr->d) {
        info("Metric correlations: rrd2rrdr() did not return any dimensions on chart '%s'.", st->name);
        goto cleanup;
    }

    if (base_rrdr->d != high_rrdr->d) {
        info("Cannot generate metric correlations for chart '%s' when the baseline and the highlight have different number of dimensions.", st->name);
        goto cleanup;
    }

    if(base_rrdr->result_options & RRDR_RESULT_OPTION_CANCEL) {
        info("Metric correlations: rrd2rrdr() on baseline window timed out '%s'.", st->name);
        goto cleanup;
    }

    int base_points = rrdr_rows(base_rrdr);

    now_usec = now_realtime_usec();
    if(now_usec - started_usec > timeout * USEC_PER_MS)
        goto cleanup;

    // we need at least 2 points to do the job
    if(base_points < 2 || high_points < 2)
        goto cleanup;

    // for each dimension
    RRDDIM *d;
    int i;
    for(i = 0, d = base_rrdr->st->dimensions ; d && i < base_rrdr->d; i++, d = d->next) {

        // skip the not evaluated ones
        if(unlikely(base_rrdr->od[i] & RRDR_DIMENSION_HIDDEN) || (high_rrdr->od[i] & RRDR_DIMENSION_HIDDEN))
            continue;

        correlated_dimensions++;

        // skip the dimensions that are just zero for both the baseline and the highlight
        if(unlikely(!(base_rrdr->od[i] & RRDR_DIMENSION_NONZERO) && !(high_rrdr->od[i] & RRDR_DIMENSION_NONZERO)))
            continue;

        // copy the baseline points of the dimension to a contiguous array
        // there is no need to check for empty values, since empty values are already zero
        NETDATA_DOUBLE baseline[base_points];
        for(int c = 0; c < base_points; c++)
            baseline[c] = base_rrdr->v[ c * base_rrdr->d + i ];

        // copy the highlight points of the dimension to a contiguous array
        // there is no need to check for empty values, since empty values are already zero
        // https://github.com/netdata/netdata/blob/6e3144683a73a2024d51425b20ecfd569034c858/web/api/queries/average/average.c#L41-L43
        NETDATA_DOUBLE highlight[high_points];
        for(int c = 0; c < high_points; c++)
            highlight[c] = high_rrdr->v[ c * high_rrdr->d + i ];

        stats->binary_searches += 2 * (base_points - 1) + 2 * (high_points - 1);

        double prob = kstwo(baseline, base_points, highlight, high_points, shifts);
        if(!isnan(prob) && !isinf(prob)) {

            // these conditions should never happen, but still let's check
            if(unlikely(prob < 0.0)) {
                error("Metric correlations: kstwo() returned a negative number: %f", prob);
                prob = -prob;
            }
            if(unlikely(prob > 1.0)) {
                error("Metric correlations: kstwo() returned a number above 1.0: %f", prob);
                prob = 1.0;
            }

            // to spread the results evenly, 0.0 needs to be the least correlated and 1.0 the most correlated
            // so we flip the result of kstwo()
            register_result(results, base_rrdr->st, d, 1.0 - prob, RESULT_IS_BASE_HIGH_RATIO, stats);
        }
    }

cleanup:
    rrdr_free(owa, high_rrdr);
    rrdr_free(owa, base_rrdr);
    onewayalloc_destroy(owa);
    return correlated_dimensions;
}

// ----------------------------------------------------------------------------
// VOLUME algorithm functions

static int rrdset_metric_correlations_volume(RRDSET *st, DICTIONARY *results,
                                             long long baseline_after, long long baseline_before,
                                             long long after, long long before,
                                             RRDR_OPTIONS options, RRDR_GROUPING group, const char *group_options,
                                             int timeout, MC_STATS *stats) {
    options |= RRDR_OPTION_MATCH_IDS | RRDR_OPTION_ABSOLUTE | RRDR_OPTION_NATURAL_POINTS;
    long group_time = 0;

    int correlated_dimensions = 0;
    int ret, value_is_null;
    usec_t started_usec = now_realtime_usec();

    RRDDIM *d;
    for(d = st->dimensions; d ; d = d->next) {
        usec_t now_usec = now_realtime_usec();
        if(now_usec - started_usec > timeout * USEC_PER_MS)
            return correlated_dimensions;

        // we count how many metrics we evaluated
        correlated_dimensions++;

        // there is no point in passing a timeout to these queries,
        // since the query engine checks for a timeout between
        // dimensions, and we query a single dimension at a time.

        stats->db_queries++;
        NETDATA_DOUBLE baseline_average = NAN;
        uint8_t base_anomaly_rate = 0;
        value_is_null = 1;
        ret = rrdset2value_api_v1(st, NULL, &baseline_average, d->id, 1,
                                  baseline_after, baseline_before,
                                  group, group_options, group_time, options,
                                  NULL, NULL,
                                  &stats->db_points, &stats->result_points,
                                  &value_is_null, &base_anomaly_rate, 0, 0);

        if(ret != HTTP_RESP_OK || value_is_null || !netdata_double_isnumber(baseline_average)) {
            // this means no data for the baseline window, but we may have data for the highlighted one - assume zero
            baseline_average = 0.0;
        }

        stats->db_queries++;
        NETDATA_DOUBLE highlight_average = NAN;
        uint8_t high_anomaly_rate = 0;
        value_is_null = 1;
        ret = rrdset2value_api_v1(st, NULL, &highlight_average, d->id, 1,
                                  after, before,
                                  group, group_options, group_time, options,
                                  NULL, NULL,
                                  &stats->db_points, &stats->result_points,
                                  &value_is_null, &high_anomaly_rate, 0, 0);

        if(ret != HTTP_RESP_OK || value_is_null || !netdata_double_isnumber(highlight_average)) {
            // this means no data for the highlighted duration - so skip it
            continue;
        }

        if(baseline_average == highlight_average) {
            // they are the same - let's move on
            continue;
        }

        stats->db_queries++;
        NETDATA_DOUBLE highlight_countif = NAN;
        value_is_null = 1;

        char highlighted_countif_options[50 + 1];
        snprintfz(highlighted_countif_options, 50, "%s" NETDATA_DOUBLE_FORMAT, highlight_average < baseline_average ? "<" : ">", baseline_average);

        ret = rrdset2value_api_v1(st, NULL, &highlight_countif, d->id, 1,
                                  after, before,
                                  RRDR_GROUPING_COUNTIF, highlighted_countif_options,
                                  group_time, options,
                                  NULL, NULL,
                                  &stats->db_points, &stats->result_points,
                                  &value_is_null, NULL, 0, 0);

        if(ret != HTTP_RESP_OK || value_is_null || !netdata_double_isnumber(highlight_countif)) {
            info("MC: highlighted countif query failed, but highlighted average worked - strange...");
            continue;
        }

        // this represents the percentage of time
        // the highlighted window was above/below the baseline window
        // (above or below depending on their averages)
        highlight_countif = highlight_countif / 100.0; // countif returns 0 - 100.0

        RESULT_FLAGS flags;
        NETDATA_DOUBLE pcent = NAN;
        if(isgreater(baseline_average, 0.0) || isless(baseline_average, 0.0)) {
            flags = RESULT_IS_BASE_HIGH_RATIO;
            pcent = (highlight_average - baseline_average) / baseline_average * highlight_countif;
        }
        else {
            flags = RESULT_IS_PERCENTAGE_OF_TIME;
            pcent = highlight_countif;
        }
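
        // For example (illustrative numbers): a baseline average of 10, a highlight
        // average of 15 and a countif of 60% give
        // pcent = (15 - 10) / 10 * 0.60 = 0.30.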

        register_result(results, st, d, pcent, flags, stats);
    }

    return correlated_dimensions;
}

int compare_netdata_doubles(const void *left, const void *right) {
    NETDATA_DOUBLE lt = *(NETDATA_DOUBLE *)left;
    NETDATA_DOUBLE rt = *(NETDATA_DOUBLE *)right;

    // https://stackoverflow.com/a/3886497/1114110
    return (lt > rt) - (lt < rt);
}

static inline int binary_search_bigger_than_netdata_double(const NETDATA_DOUBLE arr[], int left, int size, NETDATA_DOUBLE K) {
    // binary search to find the index of the first value
    // in the array that is greater than K
    int right = size;
    while(left < right) {
        int middle = (int)(((unsigned int)(left + right)) >> 1);

        if(arr[middle] > K)
            right = middle;

        else
            left = middle + 1;
    }
    return left;
}

// ----------------------------------------------------------------------------
// spread the results evenly according to their value

static size_t spread_results_evenly(DICTIONARY *results, MC_STATS *stats) {
    struct register_result *t;

    // count the dimensions
    size_t dimensions = dictionary_stats_entries(results);
    if(!dimensions) return 0;

    if(stats->max_base_high_ratio == 0.0)
        stats->max_base_high_ratio = 1.0;

    // create an array of the right size and copy all the values in it
    NETDATA_DOUBLE slots[dimensions];
    dimensions = 0;
    dfe_start_read(results, t) {
        if(t->flags & (RESULT_IS_PERCENTAGE_OF_TIME))
            t->value = t->value * stats->max_base_high_ratio;

        slots[dimensions++] = t->value;
    }
    dfe_done(t);

    // sort the array with the values of all dimensions
    qsort(slots, dimensions, sizeof(NETDATA_DOUBLE), compare_netdata_doubles);

    // skip the duplicates in the sorted array
    NETDATA_DOUBLE last_value = NAN;
    size_t unique_values = 0;
    for(size_t i = 0; i < dimensions ;i++) {
        if(likely(slots[i] != last_value))
            slots[unique_values++] = last_value = slots[i];
    }

    // this cannot happen, but coverity thinks otherwise...
    if(!unique_values)
        unique_values = dimensions;

    // calculate the weight of each slot, using the number of unique values
    NETDATA_DOUBLE slot_weight = 1.0 / (NETDATA_DOUBLE)unique_values;

    dfe_start_read(results, t) {
        int slot = binary_search_bigger_than_netdata_double(slots, 0, (int)unique_values, t->value);
        NETDATA_DOUBLE v = slot * slot_weight;
        if(unlikely(v > 1.0)) v = 1.0;
        v = 1.0 - v;
        t->value = v;
    }
    dfe_done(t);

    return dimensions;
}
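
// For example (illustrative values): with sorted unique values { 0.2, 0.5, 0.9 }
// the slot weight is 1/3; a dimension valued 0.5 lands on slot 2 (the first
// value greater than 0.5 sits at index 2), so v = 2/3 and its stored value
// becomes 1 - 2/3 = 0.33, while a dimension valued 0.2 lands on slot 1 and
// becomes 1 - 1/3 = 0.67.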

// ----------------------------------------------------------------------------
// The main function

int metric_correlations(RRDHOST *host, BUFFER *wb, METRIC_CORRELATIONS_METHOD method,
                        RRDR_GROUPING group, const char *group_options,
                        long long baseline_after, long long baseline_before,
                        long long after, long long before,
                        long long points, RRDR_OPTIONS options, int timeout) {

    // method = METRIC_CORRELATIONS_VOLUME;
    // options |= RRDR_OPTION_ANOMALY_BIT;

    MC_STATS stats = {};

    if (enable_metric_correlations == CONFIG_BOOLEAN_NO) {
        buffer_strcat(wb, "{\"error\": \"Metric correlations functionality is not enabled.\" }");
        return HTTP_RESP_FORBIDDEN;
    }

    // if the user didn't give a timeout
    // assume 60 seconds
    if(!timeout)
        timeout = 60 * MSEC_PER_SEC;

    // if the timeout is less than 1 second
    // make it at least 1 second
    if(timeout < (long)(1 * MSEC_PER_SEC))
        timeout = 1 * MSEC_PER_SEC;

    usec_t timeout_usec = timeout * USEC_PER_MS;
    usec_t started_usec = now_realtime_usec();

    if(!points) points = 500;

    rrdr_relative_window_to_absolute(&after, &before);

    if(baseline_before <= API_RELATIVE_TIME_MAX)
        baseline_before += after;

    rrdr_relative_window_to_absolute(&baseline_after, &baseline_before);

    if (before <= after || baseline_before <= baseline_after) {
        buffer_strcat(wb, "{\"error\": \"Invalid baseline or highlight ranges.\" }");
        return HTTP_RESP_BAD_REQUEST;
    }

    DICTIONARY *results = register_result_init();
    DICTIONARY *charts = dictionary_create(DICTIONARY_FLAG_SINGLE_THREADED|DICTIONARY_FLAG_VALUE_LINK_DONT_CLONE);

    char *error = NULL;
    int resp = HTTP_RESP_OK;

    // baseline should be a power-of-two multiple of the highlight
    uint32_t shifts = 0;
    {
        long long base_delta = baseline_before - baseline_after;
        long long high_delta = before - after;
        uint32_t multiplier = (uint32_t)round((double)base_delta / (double)high_delta);

        // check if the multiplier is a power of two
        // https://stackoverflow.com/a/600306/1114110
        if((multiplier & (multiplier - 1)) != 0) {
            // it is not a power of two
            // let's find the closest power of two
            // https://stackoverflow.com/a/466242/1114110
            multiplier--;
            multiplier |= multiplier >> 1;
            multiplier |= multiplier >> 2;
            multiplier |= multiplier >> 4;
            multiplier |= multiplier >> 8;
            multiplier |= multiplier >> 16;
            multiplier++;
        }

        // convert the multiplier to the number of shifts
        // we need to do, to divide the baseline numbers
        // to match the highlight ones
        while(multiplier > 1) {
            shifts++;
            multiplier = multiplier >> 1;
        }

        // if the baseline size will not comply with MAX_POINTS
        // lower the window of the baseline
        while(shifts && (points << shifts) > MAX_POINTS)
            shifts--;

        // if the baseline size still does not comply with MAX_POINTS
        // lower the resolution of the highlight and the baseline
        while((points << shifts) > MAX_POINTS)
            points = points >> 1;

        if(points < 15) {
            resp = HTTP_RESP_BAD_REQUEST;
            goto cleanup;
        }

        // adjust the baseline to be multiplier times bigger than the highlight
        baseline_after = baseline_before - (high_delta << shifts);
    }
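
    // For example (illustrative numbers): a 1-hour highlight against a 5-hour
    // baseline gives multiplier = round(5) = 5, which is rounded up to the next
    // power of two (8), so shifts = 3 and the baseline window is re-adjusted to
    // 8 times the highlight duration (points << 3 = 4000 with the default 500
    // points, still below MAX_POINTS).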

    // don't lock here and wait for results,
    // get the charts and run the metric correlations afterwards
    RRDSET *st;
    rrdhost_rdlock(host);
    rrdset_foreach_read(st, host) {
        if (rrdset_is_available_for_viewers(st))
            dictionary_set(charts, st->name, "", 1);
    }
    rrdhost_unlock(host);

    size_t correlated_dimensions = 0;
    void *ptr;

    // for every chart in the dictionary
    dfe_start_read(charts, ptr) {
        usec_t now_usec = now_realtime_usec();
        if(now_usec - started_usec > timeout_usec) {
            error = "timed out";
            resp = HTTP_RESP_GATEWAY_TIMEOUT;
            goto cleanup;
        }

        st = rrdset_find_byname(host, ptr_name);
        if(!st) continue;

        rrdset_rdlock(st);

        switch(method) {
            case METRIC_CORRELATIONS_VOLUME:
                correlated_dimensions += rrdset_metric_correlations_volume(st, results,
                                                                           baseline_after, baseline_before,
                                                                           after, before,
                                                                           options, group, group_options,
                                                                           (int)(timeout - ((now_usec - started_usec) / USEC_PER_MS)),
                                                                           &stats);
                break;

            default:
            case METRIC_CORRELATIONS_KS2:
                correlated_dimensions += rrdset_metric_correlations_ks2(st, results,
                                                                        baseline_after, baseline_before,
                                                                        after, before,
                                                                        points, options, group, group_options, shifts,
                                                                        (int)(timeout - ((now_usec - started_usec) / USEC_PER_MS)),
                                                                        &stats);
                break;
        }

        rrdset_unlock(st);
    }
    dfe_done(ptr);

    if(!(options & RRDR_OPTION_RETURN_RAW))
        spread_results_evenly(results, &stats);

    usec_t ended_usec = now_realtime_usec();

    // generate the json output we need
    buffer_flush(wb);
    size_t added_dimensions = registered_results_to_json(results, wb,
                                                         after, before,
                                                         baseline_after, baseline_before,
                                                         points, method, group, options, shifts, correlated_dimensions,
                                                         ended_usec - started_usec, &stats);

    if(!added_dimensions) {
        error = "no results produced from correlations";
        resp = HTTP_RESP_NOT_FOUND;
    }

cleanup:
    if(charts) dictionary_destroy(charts);
    if(results) register_result_destroy(results);

    if(error) {
        buffer_flush(wb);
        buffer_sprintf(wb, "{\"error\": \"%s\" }", error);
    }

    return resp;
}

// ----------------------------------------------------------------------------
// unittest

/*
Unit tests against the output of this:

https://github.com/scipy/scipy/blob/4cf21e753cf937d1c6c2d2a0e372fbc1dbbeea81/scipy/stats/_stats_py.py#L7275-L7449

import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import scipy as sp
from scipy import stats

data1 = np.array([ 1111, -2222, 33, 100, 100, 15555, -1, 19999, 888, 755, -1, -730 ])
data2 = np.array([365, -123, 0])
data1 = np.sort(data1)
data2 = np.sort(data2)
n1 = data1.shape[0]
n2 = data2.shape[0]
data_all = np.concatenate([data1, data2])
cdf1 = np.searchsorted(data1, data_all, side='right') / n1
cdf2 = np.searchsorted(data2, data_all, side='right') / n2
print(data_all)
print("\ndata1", data1, cdf1)
print("\ndata2", data2, cdf2)
cddiffs = cdf1 - cdf2
print("\ncddiffs", cddiffs)
minS = np.clip(-np.min(cddiffs), 0, 1)
maxS = np.max(cddiffs)
print("\nmin", minS)
print("max", maxS)
m, n = sorted([float(n1), float(n2)], reverse=True)
en = m * n / (m + n)
d = max(minS, maxS)
prob = stats.distributions.kstwo.sf(d, np.round(en))
print("\nprob", prob)
*/

static int double_expect(double v, const char *str, const char *descr) {
    char buf[100 + 1];
    snprintfz(buf, 100, "%0.6f", v);

    int ret = strcmp(buf, str) ? 1 : 0;
    fprintf(stderr, "%s %s, expected %s, got %s\n", ret ? "FAILED" : "OK", descr, str, buf);
    return ret;
}

static int mc_unittest1(void) {
    int bs = 3, hs = 3;
    DIFFS_NUMBERS base[3] = { 1, 2, 3 };
    DIFFS_NUMBERS high[3] = { 3, 4, 6 };

    double prob = ks_2samp(base, bs, high, hs, 0);
    return double_expect(prob, "0.222222", "3x3");
}

static int mc_unittest2(void) {
    int bs = 6, hs = 3;
    DIFFS_NUMBERS base[6] = { 1, 2, 3, 10, 10, 15 };
    DIFFS_NUMBERS high[3] = { 3, 4, 6 };

    double prob = ks_2samp(base, bs, high, hs, 1);
    return double_expect(prob, "0.500000", "6x3");
}

static int mc_unittest3(void) {
    int bs = 12, hs = 3;
    DIFFS_NUMBERS base[12] = { 1, 2, 3, 10, 10, 15, 111, 19999, 8, 55, -1, -73 };
    DIFFS_NUMBERS high[3] = { 3, 4, 6 };

    double prob = ks_2samp(base, bs, high, hs, 2);
    return double_expect(prob, "0.347222", "12x3");
}

static int mc_unittest4(void) {
    int bs = 12, hs = 3;
    DIFFS_NUMBERS base[12] = { 1111, -2222, 33, 100, 100, 15555, -1, 19999, 888, 755, -1, -730 };
    DIFFS_NUMBERS high[3] = { 365, -123, 0 };

    double prob = ks_2samp(base, bs, high, hs, 2);
    return double_expect(prob, "0.777778", "12x3");
}

int mc_unittest(void) {
    int errors = 0;
    errors += mc_unittest1();
    errors += mc_unittest2();
    errors += mc_unittest3();
    errors += mc_unittest4();
    return errors;
}