// weights.c
  1. // SPDX-License-Identifier: GPL-3.0-or-later
  2. #include "daemon/common.h"
  3. #include "database/KolmogorovSmirnovDist.h"
// NOTE(review): presumably caps the points of a single weights query - usage not in this chunk, confirm
#define MAX_POINTS 10000

int enable_metric_correlations = CONFIG_BOOLEAN_YES;  // feature switch (presumably set from config - confirm)
int metric_correlations_version = 1;                  // algorithm version reported to API consumers
WEIGHTS_METHOD default_metric_correlations_method = WEIGHTS_METHOD_MC_KS2; // fallback when an unknown method name is requested
// Aggregated statistics for a whole weights/metric-correlations run,
// accumulated across all the db queries the run performs.
typedef struct weights_stats {
    NETDATA_DOUBLE max_base_high_ratio;  // max baseline/highlight score seen; used by spread_results_evenly() to scale percentage-of-time results
    size_t db_points;                    // total db points read by all queries
    size_t result_points;                // total points generated in query results
    size_t db_queries;                   // number of db queries executed
    size_t db_points_per_tier[RRD_STORAGE_TIERS]; // db points read, broken down per storage tier
    size_t binary_searches;              // binary searches performed by the KS2 algorithm
} WEIGHTS_STATS;
  16. // ----------------------------------------------------------------------------
  17. // parse and render metric correlations methods
// Lookup table mapping API method names to WEIGHTS_METHOD values.
// Terminated by an entry with a NULL name.
static struct {
    const char *name;
    WEIGHTS_METHOD value;
} weights_methods[] = {
      { "ks2"          , WEIGHTS_METHOD_MC_KS2 }
    , { "volume"       , WEIGHTS_METHOD_MC_VOLUME }
    , { "anomaly-rate" , WEIGHTS_METHOD_ANOMALY_RATE }
    , { NULL           , 0 }
};
  27. WEIGHTS_METHOD weights_string_to_method(const char *method) {
  28. for(int i = 0; weights_methods[i].name ;i++)
  29. if(strcmp(method, weights_methods[i].name) == 0)
  30. return weights_methods[i].value;
  31. return default_metric_correlations_method;
  32. }
  33. const char *weights_method_to_string(WEIGHTS_METHOD method) {
  34. for(int i = 0; weights_methods[i].name ;i++)
  35. if(weights_methods[i].value == method)
  36. return weights_methods[i].name;
  37. return "unknown";
  38. }
  39. // ----------------------------------------------------------------------------
  40. // The results per dimension are aggregated into a dictionary
// Flags describing how the value of a result entry was produced.
typedef enum {
    RESULT_IS_BASE_HIGH_RATIO = (1 << 0),    // value is a baseline-vs-highlight score (KS2 / volume ratio)
    RESULT_IS_PERCENTAGE_OF_TIME = (1 << 1), // value is a fraction of time (0.0 - 1.0, volume method with zero baseline)
} RESULT_FLAGS;
// One result entry per metric, stored in the results dictionary,
// keyed by the RRDMETRIC_ACQUIRED pointer of the metric.
struct register_result {
    RESULT_FLAGS flags;        // how to interpret 'value' (see RESULT_FLAGS)
    RRDCONTEXT_ACQUIRED *rca;  // the context the metric belongs to
    RRDINSTANCE_ACQUIRED *ria; // the instance (chart) the metric belongs to
    RRDMETRIC_ACQUIRED *rma;   // the metric itself
    NETDATA_DOUBLE value;      // the score (always the absolute value - see register_result())
};
  52. static DICTIONARY *register_result_init() {
  53. DICTIONARY *results = dictionary_create(DICT_OPTION_SINGLE_THREADED);
  54. return results;
  55. }
// Release the results dictionary and all the entries it owns.
static void register_result_destroy(DICTIONARY *results) {
    dictionary_destroy(results);
}
// Register the weight computed for one metric into the results dictionary.
// The absolute value is stored; NaN/Inf values are dropped, and zero values
// are dropped too unless register_zero is set. Also tracks the max
// baseline/highlight score in stats for later scaling.
static void register_result(DICTIONARY *results,
                            RRDCONTEXT_ACQUIRED *rca,
                            RRDINSTANCE_ACQUIRED *ria,
                            RRDMETRIC_ACQUIRED *rma,
                            NETDATA_DOUBLE value,
                            RESULT_FLAGS flags,
                            WEIGHTS_STATS *stats,
                            bool register_zero) {

    if(!netdata_double_isnumber(value)) return;

    // make it positive
    NETDATA_DOUBLE v = fabsndd(value);

    // no need to store zero scored values
    if(unlikely(fpclassify(v) == FP_ZERO && !register_zero))
        return;

    // keep track of the max of the baseline / highlight ratio
    // (spread_results_evenly() uses it to scale percentage-of-time results)
    if(flags & RESULT_IS_BASE_HIGH_RATIO && v > stats->max_base_high_ratio)
        stats->max_base_high_ratio = v;

    struct register_result t = {
        .flags = flags,
        .rca = rca,
        .ria = ria,
        .rma = rma,
        .value = v
    };

    // we can use the pointer address of RMA as a unique key for each metric
    char buf[20 + 1];
    ssize_t len = snprintfz(buf, 20, "%p", rma);
    dictionary_set_advanced(results, buf, len + 1, &t, sizeof(struct register_result), NULL);
}
  88. // ----------------------------------------------------------------------------
  89. // Generation of JSON output for the results
// Emit the common JSON header of a weights response: the query window, the
// baseline window (for the ks2/volume methods only), the db statistics, the
// grouping and the method.
// NOTE: the "options" value is left open (opening quote only) - the caller
// must append the closing quote and the rest of the response itself.
static void results_header_to_json(DICTIONARY *results __maybe_unused, BUFFER *wb,
                                   time_t after, time_t before,
                                   time_t baseline_after, time_t baseline_before,
                                   size_t points, WEIGHTS_METHOD method,
                                   RRDR_GROUPING group, RRDR_OPTIONS options, uint32_t shifts,
                                   size_t examined_dimensions __maybe_unused, usec_t duration,
                                   WEIGHTS_STATS *stats) {

    buffer_sprintf(wb, "{\n"
                       "\t\"after\": %lld,\n"
                       "\t\"before\": %lld,\n"
                       "\t\"duration\": %lld,\n"
                       "\t\"points\": %zu,\n",
                   (long long)after,
                   (long long)before,
                   (long long)(before - after),
                   points
    );

    // the baseline window applies only to the metric-correlations methods
    if(method == WEIGHTS_METHOD_MC_KS2 || method == WEIGHTS_METHOD_MC_VOLUME)
        buffer_sprintf(wb, ""
                           "\t\"baseline_after\": %lld,\n"
                           "\t\"baseline_before\": %lld,\n"
                           "\t\"baseline_duration\": %lld,\n"
                           "\t\"baseline_points\": %zu,\n",
                       (long long)baseline_after,
                       (long long)baseline_before,
                       (long long)(baseline_before - baseline_after),
                       points << shifts // the baseline is queried at 2^shifts times the points
        );

    buffer_sprintf(wb, ""
                       "\t\"statistics\": {\n"
                       "\t\t\"query_time_ms\": %f,\n"
                       "\t\t\"db_queries\": %zu,\n"
                       "\t\t\"query_result_points\": %zu,\n"
                       "\t\t\"binary_searches\": %zu,\n"
                       "\t\t\"db_points_read\": %zu,\n"
                       "\t\t\"db_points_per_tier\": [ ",
                   (double)duration / (double)USEC_PER_MS,
                   stats->db_queries,
                   stats->result_points,
                   stats->binary_searches,
                   stats->db_points
    );

    // per-tier breakdown of the db points read
    for(size_t tier = 0; tier < storage_tiers ;tier++)
        buffer_sprintf(wb, "%s%zu", tier?", ":"", stats->db_points_per_tier[tier]);

    buffer_sprintf(wb, " ]\n"
                       "\t},\n"
                       "\t\"group\": \"%s\",\n"
                       "\t\"method\": \"%s\",\n"
                       "\t\"options\": \"",
                   web_client_api_request_v1_data_group_to_string(group),
                   weights_method_to_string(method)
    );

    // appends the options list - the string value is intentionally left unterminated
    web_client_api_request_v1_data_options_to_buffer(wb, options);
}
// Render the results as JSON grouped by chart (instance):
// { ..., "correlated_charts": { "<chart>": { "context": "...", "dimensions": { "<dim>": weight, ... } }, ... } }
// Returns the number of dimensions rendered.
// NOTE(review): relies on all entries of the same chart being adjacent in the
// dictionary's iteration order - confirm the insertion order guarantees this.
static size_t registered_results_to_json_charts(DICTIONARY *results, BUFFER *wb,
                                                time_t after, time_t before,
                                                time_t baseline_after, time_t baseline_before,
                                                size_t points, WEIGHTS_METHOD method,
                                                RRDR_GROUPING group, RRDR_OPTIONS options, uint32_t shifts,
                                                size_t examined_dimensions, usec_t duration,
                                                WEIGHTS_STATS *stats) {

    results_header_to_json(results, wb, after, before, baseline_after, baseline_before,
                           points, method, group, options, shifts, examined_dimensions, duration, stats);

    // close the "options" string left open by the header and start the charts object
    buffer_strcat(wb, "\",\n\t\"correlated_charts\": {\n");

    size_t charts = 0, chart_dims = 0, total_dimensions = 0;
    struct register_result *t;
    RRDINSTANCE_ACQUIRED *last_ria = NULL; // never access this - we use it only for comparison
    dfe_start_read(results, t) {
        if(t->ria != last_ria) {
            last_ria = t->ria;

            // close the previous chart, if any
            if(charts) buffer_strcat(wb, "\n\t\t\t}\n\t\t},\n");

            // open the new chart
            buffer_strcat(wb, "\t\t\"");
            buffer_strcat(wb, rrdinstance_acquired_id(t->ria));
            buffer_strcat(wb, "\": {\n");
            buffer_strcat(wb, "\t\t\t\"context\": \"");
            buffer_strcat(wb, rrdcontext_acquired_id(t->rca));
            buffer_strcat(wb, "\",\n\t\t\t\"dimensions\": {\n");
            charts++;
            chart_dims = 0;
        }
        if (chart_dims) buffer_sprintf(wb, ",\n");
        buffer_sprintf(wb, "\t\t\t\t\"%s\": " NETDATA_DOUBLE_FORMAT, rrdmetric_acquired_name(t->rma), t->value);
        chart_dims++;
        total_dimensions++;
    }
    dfe_done(t);

    // close dimensions and chart
    if (total_dimensions)
        buffer_strcat(wb, "\n\t\t\t}\n\t\t}\n");

    // close correlated_charts
    buffer_sprintf(wb, "\t},\n"
                       "\t\"correlated_dimensions\": %zu,\n"
                       "\t\"total_dimensions_count\": %zu\n"
                       "}\n",
                   total_dimensions,
                   examined_dimensions
    );

    return total_dimensions;
}
// Render the results as JSON grouped by context -> chart -> dimension,
// emitting an averaged "weight" per chart and per context:
// { ..., "contexts": { "<ctx>": { "charts": { "<chart>": { "dimensions": {...}, "weight": avg }, ... }, "weight": avg }, ... } }
// Returns the number of dimensions rendered.
// NOTE(review): relies on entries of the same context/chart being adjacent in
// the dictionary's iteration order - confirm the insertion order guarantees this.
static size_t registered_results_to_json_contexts(DICTIONARY *results, BUFFER *wb,
                                                  time_t after, time_t before,
                                                  time_t baseline_after, time_t baseline_before,
                                                  size_t points, WEIGHTS_METHOD method,
                                                  RRDR_GROUPING group, RRDR_OPTIONS options, uint32_t shifts,
                                                  size_t examined_dimensions, usec_t duration,
                                                  WEIGHTS_STATS *stats) {

    results_header_to_json(results, wb, after, before, baseline_after, baseline_before,
                           points, method, group, options, shifts, examined_dimensions, duration, stats);

    // close the "options" string left open by the header and start the contexts object
    buffer_strcat(wb, "\",\n\t\"contexts\": {\n");

    size_t contexts = 0, charts = 0, total_dimensions = 0, context_dims = 0, chart_dims = 0;
    NETDATA_DOUBLE contexts_total_weight = 0.0, charts_total_weight = 0.0;
    struct register_result *t;
    RRDCONTEXT_ACQUIRED *last_rca = NULL;
    RRDINSTANCE_ACQUIRED *last_ria = NULL;
    dfe_start_read(results, t) {
        if(t->rca != last_rca) {
            last_rca = t->rca;

            // close the previous context (and its last chart), emitting their average weights
            if(contexts)
                buffer_sprintf(wb, "\n"
                                   "\t\t\t\t\t},\n"
                                   "\t\t\t\t\t\"weight\":" NETDATA_DOUBLE_FORMAT "\n"
                                   "\t\t\t\t}\n\t\t\t},\n"
                                   "\t\t\t\"weight\":" NETDATA_DOUBLE_FORMAT "\n\t\t},\n"
                               , charts_total_weight / (double)chart_dims
                               , contexts_total_weight / (double)context_dims);

            // open the new context
            buffer_strcat(wb, "\t\t\"");
            buffer_strcat(wb, rrdcontext_acquired_id(t->rca));
            buffer_strcat(wb, "\": {\n\t\t\t\"charts\":{\n");
            contexts++;
            charts = 0;
            context_dims = 0;
            contexts_total_weight = 0.0;
            last_ria = NULL;
        }
        if(t->ria != last_ria) {
            last_ria = t->ria;

            // close the previous chart of this context, emitting its average weight
            if(charts)
                buffer_sprintf(wb, "\n"
                                   "\t\t\t\t\t},\n"
                                   "\t\t\t\t\t\"weight\":" NETDATA_DOUBLE_FORMAT "\n"
                                   "\t\t\t\t},\n"
                               , charts_total_weight / (double)chart_dims);

            // open the new chart
            buffer_strcat(wb, "\t\t\t\t\"");
            buffer_strcat(wb, rrdinstance_acquired_id(t->ria));
            buffer_strcat(wb, "\": {\n");
            buffer_strcat(wb, "\t\t\t\t\t\"dimensions\": {\n");
            charts++;
            chart_dims = 0;
            charts_total_weight = 0.0;
        }
        if (chart_dims) buffer_sprintf(wb, ",\n");
        buffer_sprintf(wb, "\t\t\t\t\t\t\"%s\": " NETDATA_DOUBLE_FORMAT, rrdmetric_acquired_name(t->rma), t->value);
        charts_total_weight += t->value;
        contexts_total_weight += t->value;
        chart_dims++;
        context_dims++;
        total_dimensions++;
    }
    dfe_done(t);

    // close the last dimensions/chart/context, emitting their average weights
    if (total_dimensions)
        buffer_sprintf(wb, "\n"
                           "\t\t\t\t\t},\n"
                           "\t\t\t\t\t\"weight\":" NETDATA_DOUBLE_FORMAT "\n"
                           "\t\t\t\t}\n"
                           "\t\t\t},\n"
                           "\t\t\t\"weight\":" NETDATA_DOUBLE_FORMAT "\n"
                           "\t\t}\n"
                       , charts_total_weight / (double)chart_dims
                       , contexts_total_weight / (double)context_dims);

    // close the contexts object and the response
    buffer_sprintf(wb, "\t},\n"
                       "\t\"weighted_dimensions\": %zu,\n"
                       "\t\"total_dimensions_count\": %zu\n"
                       "}\n",
                   total_dimensions,
                   examined_dimensions
    );

    return total_dimensions;
}
  270. // ----------------------------------------------------------------------------
  271. // KS2 algorithm functions
  272. typedef long int DIFFS_NUMBERS;
  273. #define DOUBLE_TO_INT_MULTIPLIER 100000
  274. static inline int binary_search_bigger_than(const DIFFS_NUMBERS arr[], int left, int size, DIFFS_NUMBERS K) {
  275. // binary search to find the index the smallest index
  276. // of the first value in the array that is greater than K
  277. int right = size;
  278. while(left < right) {
  279. int middle = (int)(((unsigned int)(left + right)) >> 1);
  280. if(arr[middle] > K)
  281. right = middle;
  282. else
  283. left = middle + 1;
  284. }
  285. return left;
  286. }
  287. int compare_diffs(const void *left, const void *right) {
  288. DIFFS_NUMBERS lt = *(DIFFS_NUMBERS *)left;
  289. DIFFS_NUMBERS rt = *(DIFFS_NUMBERS *)right;
  290. // https://stackoverflow.com/a/3886497/1114110
  291. return (lt > rt) - (lt < rt);
  292. }
// Fill diffs[] with the consecutive differences of arr[], scaled to integers,
// walking the array backwards: for each adjacent pair it stores
// (arr[i] - arr[i+1]) * DOUBLE_TO_INT_MULTIPLIER.
// Returns the number of diffs written (size - 1 for size >= 1).
// NOTE(review): this is the negated first-difference; the KS2 statistic
// downstream is insensitive to a consistent sign flip of both series,
// but confirm the sign is intended.
static size_t calculate_pairs_diff(DIFFS_NUMBERS *diffs, NETDATA_DOUBLE *arr, size_t size) {
    NETDATA_DOUBLE *last = &arr[size - 1];
    size_t added = 0;

    while(last > arr) {
        NETDATA_DOUBLE second = *last--;
        NETDATA_DOUBLE first = *last;
        *diffs++ = (DIFFS_NUMBERS)((first - second) * (NETDATA_DOUBLE)DOUBLE_TO_INT_MULTIPLIER);
        added++;
    }

    return added;
}
// Two-sample Kolmogorov-Smirnov test over the two diffs arrays.
// Sorts both arrays in-place. base_shifts is log2(base_size / high_size),
// so a highlight index can be scaled to a baseline index with a shift
// instead of a division in the hot loop.
// Returns the KSfbar() probability, or NAN when it cannot be computed.
static double ks_2samp(
        DIFFS_NUMBERS baseline_diffs[], int base_size,
        DIFFS_NUMBERS highlight_diffs[], int high_size,
        uint32_t base_shifts) {

    qsort(baseline_diffs, base_size, sizeof(DIFFS_NUMBERS), compare_diffs);
    qsort(highlight_diffs, high_size, sizeof(DIFFS_NUMBERS), compare_diffs);

    // Now we should be calculating this:
    //
    // For each number in the diffs arrays, we should find the index of the
    // number bigger than them in both arrays and calculate the % of this index
    // vs the total array size. Once we have the 2 percentages, we should find
    // the min and max across the delta of all of them.
    //
    // It should look like this:
    //
    // base_pcent = binary_search_bigger_than(...) / base_size;
    // high_pcent = binary_search_bigger_than(...) / high_size;
    // delta = base_pcent - high_pcent;
    // if(delta < min) min = delta;
    // if(delta > max) max = delta;
    //
    // This would require a lot of multiplications and divisions.
    //
    // To speed it up, we do the binary search to find the index of each number
    // but, then we divide the base index by the power of two number (shifts) it
    // is bigger than high index. So the 2 indexes are now comparable.
    // We also keep track of the original indexes with min and max, to properly
    // calculate their percentages once the loops finish.

    // initialize min and max using the first number of baseline_diffs
    DIFFS_NUMBERS K = baseline_diffs[0];
    int base_idx = binary_search_bigger_than(baseline_diffs, 1, base_size, K);
    int high_idx = binary_search_bigger_than(highlight_diffs, 0, high_size, K);
    int delta = base_idx - (high_idx << base_shifts);
    int min = delta, max = delta;
    int base_min_idx = base_idx;
    int base_max_idx = base_idx;
    int high_min_idx = high_idx;
    int high_max_idx = high_idx;

    // do the baseline_diffs starting from 1 (we did position 0 above)
    for(int i = 1; i < base_size; i++) {
        K = baseline_diffs[i];
        base_idx = binary_search_bigger_than(baseline_diffs, i + 1, base_size, K); // starting from i, since data1 is sorted
        high_idx = binary_search_bigger_than(highlight_diffs, 0, high_size, K);

        delta = base_idx - (high_idx << base_shifts);
        if(delta < min) {
            min = delta;
            base_min_idx = base_idx;
            high_min_idx = high_idx;
        }
        else if(delta > max) {
            max = delta;
            base_max_idx = base_idx;
            high_max_idx = high_idx;
        }
    }

    // do the highlight_diffs starting from 0
    for(int i = 0; i < high_size; i++) {
        K = highlight_diffs[i];
        base_idx = binary_search_bigger_than(baseline_diffs, 0, base_size, K);
        high_idx = binary_search_bigger_than(highlight_diffs, i + 1, high_size, K); // starting from i, since data2 is sorted

        delta = base_idx - (high_idx << base_shifts);
        if(delta < min) {
            min = delta;
            base_min_idx = base_idx;
            high_min_idx = high_idx;
        }
        else if(delta > max) {
            max = delta;
            base_max_idx = base_idx;
            high_max_idx = high_idx;
        }
    }

    // now we have the min, max and their indexes
    // properly calculate min and max as dmin and dmax
    double dbase_size = (double)base_size;
    double dhigh_size = (double)high_size;
    double dmin = ((double)base_min_idx / dbase_size) - ((double)high_min_idx / dhigh_size);
    double dmax = ((double)base_max_idx / dbase_size) - ((double)high_max_idx / dhigh_size);

    // negate dmin and clamp both deviations into [0.0, 1.0]
    dmin = -dmin;
    if(islessequal(dmin, 0.0)) dmin = 0.0;
    else if(isgreaterequal(dmin, 1.0)) dmin = 1.0;

    // the KS statistic d is the larger of the two deviations
    double d;
    if(isgreaterequal(dmin, dmax)) d = dmin;
    else d = dmax;

    // effective sample size of the two samples combined
    double en = round(dbase_size * dhigh_size / (dbase_size + dhigh_size));

    // under these conditions, KSfbar() crashes
    if(unlikely(isnan(en) || isinf(en) || en == 0.0 || isnan(d) || isinf(d)))
        return NAN;

    return KSfbar((int)en, d);
}
  394. static double kstwo(
  395. NETDATA_DOUBLE baseline[], int baseline_points,
  396. NETDATA_DOUBLE highlight[], int highlight_points,
  397. uint32_t base_shifts) {
  398. // -1 in size, since the calculate_pairs_diffs() returns one less point
  399. DIFFS_NUMBERS baseline_diffs[baseline_points - 1];
  400. DIFFS_NUMBERS highlight_diffs[highlight_points - 1];
  401. int base_size = (int)calculate_pairs_diff(baseline_diffs, baseline, baseline_points);
  402. int high_size = (int)calculate_pairs_diff(highlight_diffs, highlight, highlight_points);
  403. if(unlikely(!base_size || !high_size))
  404. return NAN;
  405. if(unlikely(base_size != baseline_points - 1 || high_size != highlight_points - 1)) {
  406. error("Metric correlations: internal error - calculate_pairs_diff() returns the wrong number of entries");
  407. return NAN;
  408. }
  409. return ks_2samp(baseline_diffs, base_size, highlight_diffs, high_size, base_shifts);
  410. }
// Run a single-metric query and return its points as a contiguous
// NETDATA_DOUBLE array allocated from owa, or NULL when the query fails or
// the dimension is unusable for KS2 (hidden, not queried, all-zero, or with
// fewer than 2 rows). On success, *entries is set to the row count.
NETDATA_DOUBLE *rrd2rrdr_ks2(
        ONEWAYALLOC *owa, RRDHOST *host,
        RRDCONTEXT_ACQUIRED *rca, RRDINSTANCE_ACQUIRED *ria, RRDMETRIC_ACQUIRED *rma,
        time_t after, time_t before, size_t points, RRDR_OPTIONS options,
        RRDR_GROUPING group_method, const char *group_options, size_t tier,
        WEIGHTS_STATS *stats,
        size_t *entries
        ) {

    NETDATA_DOUBLE *ret = NULL;

    QUERY_TARGET_REQUEST qtr = {
            .host = host,
            .rca = rca,
            .ria = ria,
            .rma = rma,
            .after = after,
            .before = before,
            .points = points,
            .options = options,
            .group_method = group_method,
            .group_options = group_options,
            .tier = tier,
            .query_source = QUERY_SOURCE_API_WEIGHTS,
            .priority = STORAGE_PRIORITY_NORMAL,
    };

    RRDR *r = rrd2rrdr(owa, query_target_create(&qtr));
    if(!r)
        goto cleanup;

    // merge this query's statistics into the run-wide stats
    stats->db_queries++;
    stats->result_points += r->internal.result_points_generated;
    stats->db_points += r->internal.db_points_read;
    for(size_t tr = 0; tr < storage_tiers ; tr++)
        stats->db_points_per_tier[tr] += r->internal.tier_points_read[tr];

    // we queried a single metric, so we expect exactly one dimension back
    if(r->d != 1) {
        error("WEIGHTS: on query '%s' expected 1 dimension in RRDR but got %zu", r->internal.qt->id, r->d);
        goto cleanup;
    }

    if(unlikely(r->od[0] & RRDR_DIMENSION_HIDDEN))
        goto cleanup;

    if(unlikely(!(r->od[0] & RRDR_DIMENSION_QUERIED)))
        goto cleanup;

    if(unlikely(!(r->od[0] & RRDR_DIMENSION_NONZERO)))
        goto cleanup;

    // KS2 needs at least 2 points to produce at least 1 diff
    if(rrdr_rows(r) < 2)
        goto cleanup;

    *entries = rrdr_rows(r);
    ret = onewayalloc_mallocz(owa, sizeof(NETDATA_DOUBLE) * rrdr_rows(r));

    // copy the points of the dimension to a contiguous array
    // there is no need to check for empty values, since empty values are already zero
    // https://github.com/netdata/netdata/blob/6e3144683a73a2024d51425b20ecfd569034c858/web/api/queries/average/average.c#L41-L43
    memcpy(ret, r->v, rrdr_rows(r) * sizeof(NETDATA_DOUBLE));

cleanup:
    // NOTE(review): r may be NULL here when rrd2rrdr() failed -
    // rrdr_free() is assumed to be NULL-safe; confirm
    rrdr_free(owa, r);
    return ret;
}
// Compute the KS2 (two-sample Kolmogorov-Smirnov) weight for one metric:
// queries the highlight window, then the baseline window with 2^shifts
// times as many points, runs kstwo() on the two series, and registers
// 1.0 - prob so that 1.0 means most correlated and 0.0 least correlated.
static void rrdset_metric_correlations_ks2(
        RRDHOST *host,
        RRDCONTEXT_ACQUIRED *rca, RRDINSTANCE_ACQUIRED *ria, RRDMETRIC_ACQUIRED *rma,
        DICTIONARY *results,
        time_t baseline_after, time_t baseline_before,
        time_t after, time_t before,
        size_t points, RRDR_OPTIONS options,
        RRDR_GROUPING group_method, const char *group_options, size_t tier,
        uint32_t shifts,
        WEIGHTS_STATS *stats, bool register_zero
        ) {

    options |= RRDR_OPTION_NATURAL_POINTS;

    // all allocations of both queries come from this arena
    // and are released together at cleanup
    ONEWAYALLOC *owa = onewayalloc_create(16 * 1024);

    size_t high_points = 0;
    NETDATA_DOUBLE *highlight = rrd2rrdr_ks2(
            owa, host, rca, ria, rma, after, before, points,
            options, group_method, group_options, tier, stats, &high_points);

    if(!highlight)
        goto cleanup;

    // the baseline is queried with 2^shifts times the points of the highlight
    size_t base_points = 0;
    NETDATA_DOUBLE *baseline = rrd2rrdr_ks2(
            owa, host, rca, ria, rma, baseline_after, baseline_before, high_points << shifts,
            options, group_method, group_options, tier, stats, &base_points);

    if(!baseline)
        goto cleanup;

    // kstwo() performs 2 binary searches per diff; each series yields size-1 diffs
    stats->binary_searches += 2 * (base_points - 1) + 2 * (high_points - 1);

    double prob = kstwo(baseline, (int)base_points, highlight, (int)high_points, shifts);
    if(!isnan(prob) && !isinf(prob)) {

        // these conditions should never happen, but still let's check
        if(unlikely(prob < 0.0)) {
            error("Metric correlations: kstwo() returned a negative number: %f", prob);
            prob = -prob;
        }
        if(unlikely(prob > 1.0)) {
            error("Metric correlations: kstwo() returned a number above 1.0: %f", prob);
            prob = 1.0;
        }

        // to spread the results evenly, 0.0 needs to be the less correlated and 1.0 the most correlated
        // so, we flip the result of kstwo()
        register_result(results, rca, ria, rma, 1.0 - prob, RESULT_IS_BASE_HIGH_RATIO, stats, register_zero);
    }

cleanup:
    onewayalloc_destroy(owa);
}
  509. // ----------------------------------------------------------------------------
  510. // VOLUME algorithm functions
  511. static void merge_query_value_to_stats(QUERY_VALUE *qv, WEIGHTS_STATS *stats) {
  512. stats->db_queries++;
  513. stats->result_points += qv->result_points;
  514. stats->db_points += qv->points_read;
  515. for(size_t tier = 0; tier < storage_tiers ; tier++)
  516. stats->db_points_per_tier[tier] += qv->storage_points_per_tier[tier];
  517. }
// Compute the "volume" weight for one metric: the relative change of the
// highlight average vs the baseline average, scaled by the fraction of time
// the highlight window stayed at the same side (above/below) of the baseline
// average. When the baseline average is zero the ratio is undefined, so the
// fraction of time is registered alone (flagged as percentage-of-time).
static void rrdset_metric_correlations_volume(
        RRDHOST *host,
        RRDCONTEXT_ACQUIRED *rca, RRDINSTANCE_ACQUIRED *ria, RRDMETRIC_ACQUIRED *rma,
        DICTIONARY *results,
        time_t baseline_after, time_t baseline_before,
        time_t after, time_t before,
        RRDR_OPTIONS options, RRDR_GROUPING group_method, const char *group_options,
        size_t tier,
        WEIGHTS_STATS *stats, bool register_zero) {

    options |= RRDR_OPTION_MATCH_IDS | RRDR_OPTION_ABSOLUTE | RRDR_OPTION_NATURAL_POINTS;

    QUERY_VALUE baseline_average = rrdmetric2value(host, rca, ria, rma, baseline_after, baseline_before,
                                                   options, group_method, group_options, tier, 0,
                                                   QUERY_SOURCE_API_WEIGHTS, STORAGE_PRIORITY_NORMAL);
    merge_query_value_to_stats(&baseline_average, stats);

    if(!netdata_double_isnumber(baseline_average.value)) {
        // this means no data for the baseline window, but we may have data for the highlighted one - assume zero
        baseline_average.value = 0.0;
    }

    QUERY_VALUE highlight_average = rrdmetric2value(host, rca, ria, rma, after, before,
                                                    options, group_method, group_options, tier, 0,
                                                    QUERY_SOURCE_API_WEIGHTS, STORAGE_PRIORITY_NORMAL);
    merge_query_value_to_stats(&highlight_average, stats);

    if(!netdata_double_isnumber(highlight_average.value))
        return;

    if(baseline_average.value == highlight_average.value) {
        // they are the same - let's move on
        return;
    }

    // count the highlight points at the same side of the baseline average
    // as the highlight average (below when below, above when above)
    char highlight_countif_options[50 + 1];
    snprintfz(highlight_countif_options, 50, "%s" NETDATA_DOUBLE_FORMAT, highlight_average.value < baseline_average.value ? "<" : ">", baseline_average.value);
    QUERY_VALUE highlight_countif = rrdmetric2value(host, rca, ria, rma, after, before,
                                                    options, RRDR_GROUPING_COUNTIF, highlight_countif_options, tier, 0,
                                                    QUERY_SOURCE_API_WEIGHTS, STORAGE_PRIORITY_NORMAL);
    merge_query_value_to_stats(&highlight_countif, stats);

    if(!netdata_double_isnumber(highlight_countif.value)) {
        info("WEIGHTS: highlighted countif query failed, but highlighted average worked - strange...");
        return;
    }

    // this represents the percentage of time
    // the highlighted window was above/below the baseline window
    // (above or below depending on their averages)
    highlight_countif.value = highlight_countif.value / 100.0; // countif returns 0 - 100.0

    RESULT_FLAGS flags;
    NETDATA_DOUBLE pcent = NAN;
    if(isgreater(baseline_average.value, 0.0) || isless(baseline_average.value, 0.0)) {
        // non-zero baseline: weight by the relative change of the averages
        flags = RESULT_IS_BASE_HIGH_RATIO;
        pcent = (highlight_average.value - baseline_average.value) / baseline_average.value * highlight_countif.value;
    }
    else {
        // zero baseline: the ratio is undefined - use the fraction of time alone
        flags = RESULT_IS_PERCENTAGE_OF_TIME;
        pcent = highlight_countif.value;
    }

    register_result(results, rca, ria, rma, pcent, flags, stats, register_zero);
}
  572. // ----------------------------------------------------------------------------
  573. // ANOMALY RATE algorithm functions
// Weight one metric by a single-value query over the highlight window.
// NOTE(review): RRDR_OPTION_ANOMALY_BIT presumably makes the query return
// the anomaly rate instead of the collected values - confirm in the query engine.
static void rrdset_weights_anomaly_rate(
        RRDHOST *host,
        RRDCONTEXT_ACQUIRED *rca, RRDINSTANCE_ACQUIRED *ria, RRDMETRIC_ACQUIRED *rma,
        DICTIONARY *results,
        time_t after, time_t before,
        RRDR_OPTIONS options, RRDR_GROUPING group_method, const char *group_options,
        size_t tier,
        WEIGHTS_STATS *stats, bool register_zero) {

    options |= RRDR_OPTION_MATCH_IDS | RRDR_OPTION_ANOMALY_BIT | RRDR_OPTION_NATURAL_POINTS;

    QUERY_VALUE qv = rrdmetric2value(host, rca, ria, rma, after, before,
                                     options, group_method, group_options, tier, 0,
                                     QUERY_SOURCE_API_WEIGHTS, STORAGE_PRIORITY_NORMAL);
    merge_query_value_to_stats(&qv, stats);

    // register the value as-is (no flags), dropping NaN/Inf results
    if(netdata_double_isnumber(qv.value))
        register_result(results, rca, ria, rma, qv.value, 0, stats, register_zero);
}
  590. // ----------------------------------------------------------------------------
  591. int compare_netdata_doubles(const void *left, const void *right) {
  592. NETDATA_DOUBLE lt = *(NETDATA_DOUBLE *)left;
  593. NETDATA_DOUBLE rt = *(NETDATA_DOUBLE *)right;
  594. // https://stackoverflow.com/a/3886497/1114110
  595. return (lt > rt) - (lt < rt);
  596. }
  597. static inline int binary_search_bigger_than_netdata_double(const NETDATA_DOUBLE arr[], int left, int size, NETDATA_DOUBLE K) {
  598. // binary search to find the index the smallest index
  599. // of the first value in the array that is greater than K
  600. int right = size;
  601. while(left < right) {
  602. int middle = (int)(((unsigned int)(left + right)) >> 1);
  603. if(arr[middle] > K)
  604. right = middle;
  605. else
  606. left = middle + 1;
  607. }
  608. return left;
  609. }
  610. // ----------------------------------------------------------------------------
  611. // spread the results evenly according to their value
// Re-map the raw values of all results to weights evenly spread in [0.0, 1.0]
// by rank among the unique values: the highest raw value becomes 0.0 and the
// lowest approaches 1.0. Percentage-of-time results are first scaled by the
// max base/high ratio so they are comparable with the ratio-based results.
// Returns the number of dimensions in the results dictionary.
static size_t spread_results_evenly(DICTIONARY *results, WEIGHTS_STATS *stats) {
    struct register_result *t;

    // count the dimensions
    size_t dimensions = dictionary_entries(results);
    if(!dimensions) return 0;

    // nothing tracked a ratio - avoid scaling the percentage-of-time results by zero
    if(stats->max_base_high_ratio == 0.0)
        stats->max_base_high_ratio = 1.0;

    // create an array of the right size and copy all the values in it
    NETDATA_DOUBLE slots[dimensions];
    dimensions = 0;
    dfe_start_read(results, t) {
        if(t->flags & (RESULT_IS_PERCENTAGE_OF_TIME))
            t->value = t->value * stats->max_base_high_ratio;

        slots[dimensions++] = t->value;
    }
    dfe_done(t);

    // sort the array with the values of all dimensions
    qsort(slots, dimensions, sizeof(NETDATA_DOUBLE), compare_netdata_doubles);

    // skip the duplicates in the sorted array
    NETDATA_DOUBLE last_value = NAN;
    size_t unique_values = 0;
    for(size_t i = 0; i < dimensions ;i++) {
        if(likely(slots[i] != last_value))
            slots[unique_values++] = last_value = slots[i];
    }

    // this cannot happen, but coverity thinks otherwise...
    if(!unique_values)
        unique_values = dimensions;

    // calculate the weight of each slot, using the number of unique values
    NETDATA_DOUBLE slot_weight = 1.0 / (NETDATA_DOUBLE)unique_values;

    dfe_start_read(results, t) {
        // slot = number of unique values <= t->value (its rank, 1-based)
        int slot = binary_search_bigger_than_netdata_double(slots, 0, (int)unique_values, t->value);
        NETDATA_DOUBLE v = slot * slot_weight;
        if(unlikely(v > 1.0)) v = 1.0;
        v = 1.0 - v; // invert: highest raw value -> weight 0.0
        t->value = v;
    }
    dfe_done(t);

    return dimensions;
}
  652. // ----------------------------------------------------------------------------
  653. // The main function
// Main entry point for the /api/v1/weights endpoint.
// Scores every metric of `host` matching `contexts` using the selected
// `method` (KS2, volume, or anomaly-rate), writes the JSON response into
// `wb`, and returns an HTTP response code.
// `timeout` is in milliseconds (0 means "use the default of 60s").
// On any error the buffer is replaced with a {"error": "..."} JSON object.
int web_api_v1_weights(
RRDHOST *host, BUFFER *wb, WEIGHTS_METHOD method, WEIGHTS_FORMAT format,
RRDR_GROUPING group, const char *group_options,
time_t baseline_after, time_t baseline_before,
time_t after, time_t before,
size_t points, RRDR_OPTIONS options, SIMPLE_PATTERN *contexts, size_t tier, size_t timeout) {
WEIGHTS_STATS stats = {};
DICTIONARY *results = register_result_init();
DICTIONARY *metrics = NULL;
char *error = NULL;
int resp = HTTP_RESP_OK;
// if the user didn't give a timeout
// assume 60 seconds
if(!timeout)
timeout = 60 * MSEC_PER_SEC;
// if the timeout is less than 1 second
// make it at least 1 second
if(timeout < (long)(1 * MSEC_PER_SEC))
timeout = 1 * MSEC_PER_SEC;
usec_t timeout_usec = timeout * USEC_PER_MS;
usec_t started_usec = now_realtime_usec();
// resolve relative time windows; a relative window means the response
// cannot be cached
if(!rrdr_relative_window_to_absolute(&after, &before))
buffer_no_cacheable(wb);
if (before <= after) {
resp = HTTP_RESP_BAD_REQUEST;
error = "Invalid selected time-range.";
goto cleanup;
}
uint32_t shifts = 0;
// KS2 and volume methods need a baseline window to compare against
if(method == WEIGHTS_METHOD_MC_KS2 || method == WEIGHTS_METHOD_MC_VOLUME) {
if(!points) points = 500;
// a relative baseline end is anchored to the start of the highlight window
if(baseline_before <= API_RELATIVE_TIME_MAX)
baseline_before += after;
rrdr_relative_window_to_absolute(&baseline_after, &baseline_before);
if (baseline_before <= baseline_after) {
resp = HTTP_RESP_BAD_REQUEST;
error = "Invalid baseline time-range.";
goto cleanup;
}
// baseline should be a power of two multiple of highlight
long long base_delta = baseline_before - baseline_after;
long long high_delta = before - after;
uint32_t multiplier = (uint32_t)round((double)base_delta / (double)high_delta);
// check if the multiplier is a power of two
// https://stackoverflow.com/a/600306/1114110
if((multiplier & (multiplier - 1)) != 0) {
// it is not power of two
// let's find the closest power of two
// https://stackoverflow.com/a/466242/1114110
multiplier--;
multiplier |= multiplier >> 1;
multiplier |= multiplier >> 2;
multiplier |= multiplier >> 4;
multiplier |= multiplier >> 8;
multiplier |= multiplier >> 16;
multiplier++;
}
// convert the multiplier to the number of shifts
// we need to do, to divide baseline numbers to match
// the highlight ones
while(multiplier > 1) {
shifts++;
multiplier = multiplier >> 1;
}
// if the baseline size will not comply to MAX_POINTS
// lower the window of the baseline
while(shifts && (points << shifts) > MAX_POINTS)
shifts--;
// if the baseline size still does not comply to MAX_POINTS
// lower the resolution of the highlight and the baseline
while((points << shifts) > MAX_POINTS)
points = points >> 1;
if(points < 15) {
resp = HTTP_RESP_BAD_REQUEST;
error = "Too few points available, at least 15 are needed.";
goto cleanup;
}
// adjust the baseline to be multiplier times bigger than the highlight
baseline_after = baseline_before - (high_delta << shifts);
}
size_t examined_dimensions = 0;
// NONZERO filtering is deferred: queries run without it, and the flag is
// restored before generating the output below
bool register_zero = true;
if(options & RRDR_OPTION_NONZERO) {
register_zero = false;
options &= ~RRDR_OPTION_NONZERO;
}
metrics = rrdcontext_all_metrics_to_dict(host, contexts);
struct metric_entry *me;
// for every metric_entry in the dictionary
dfe_start_read(metrics, me) {
// enforce the overall deadline between metrics, not mid-query
usec_t now_usec = now_realtime_usec();
if(now_usec - started_usec > timeout_usec) {
error = "timed out";
resp = HTTP_RESP_GATEWAY_TIMEOUT;
goto cleanup;
}
examined_dimensions++;
switch(method) {
case WEIGHTS_METHOD_ANOMALY_RATE:
options |= RRDR_OPTION_ANOMALY_BIT;
rrdset_weights_anomaly_rate(
host,
me->rca, me->ria, me->rma,
results,
after, before,
options, group, group_options, tier,
&stats, register_zero
);
break;
case WEIGHTS_METHOD_MC_VOLUME:
rrdset_metric_correlations_volume(
host,
me->rca, me->ria, me->rma,
results,
baseline_after, baseline_before,
after, before,
options, group, group_options, tier,
&stats, register_zero
);
break;
default:
case WEIGHTS_METHOD_MC_KS2:
rrdset_metric_correlations_ks2(
host,
me->rca, me->ria, me->rma,
results,
baseline_after, baseline_before,
after, before, points,
options, group, group_options, tier, shifts,
&stats, register_zero
);
break;
}
}
dfe_done(me);
// restore the NONZERO flag that was stripped before the queries
if(!register_zero)
options |= RRDR_OPTION_NONZERO;
// unless raw values were requested, normalize scores into [0.0, 1.0]
if(!(options & RRDR_OPTION_RETURN_RAW))
spread_results_evenly(results, &stats);
usec_t ended_usec = now_realtime_usec();
// generate the json output we need
buffer_flush(wb);
size_t added_dimensions = 0;
switch(format) {
case WEIGHTS_FORMAT_CHARTS:
added_dimensions =
registered_results_to_json_charts(
results, wb,
after, before,
baseline_after, baseline_before,
points, method, group, options, shifts,
examined_dimensions,
ended_usec - started_usec, &stats);
break;
default:
case WEIGHTS_FORMAT_CONTEXTS:
added_dimensions =
registered_results_to_json_contexts(
results, wb,
after, before,
baseline_after, baseline_before,
points, method, group, options, shifts,
examined_dimensions,
ended_usec - started_usec, &stats);
break;
}
if(!added_dimensions) {
error = "no results produced.";
resp = HTTP_RESP_NOT_FOUND;
}
cleanup:
if(metrics) dictionary_destroy(metrics);
if(results) register_result_destroy(results);
if(error) {
buffer_flush(wb);
buffer_sprintf(wb, "{\"error\": \"%s\" }", error);
}
return resp;
}
  833. // ----------------------------------------------------------------------------
  834. // unittest
  835. /*
  836. Unit tests against the output of this:
  837. https://github.com/scipy/scipy/blob/4cf21e753cf937d1c6c2d2a0e372fbc1dbbeea81/scipy/stats/_stats_py.py#L7275-L7449
  838. import matplotlib.pyplot as plt
  839. import pandas as pd
  840. import numpy as np
  841. import scipy as sp
  842. from scipy import stats
  843. data1 = np.array([ 1111, -2222, 33, 100, 100, 15555, -1, 19999, 888, 755, -1, -730 ])
  844. data2 = np.array([365, -123, 0])
  845. data1 = np.sort(data1)
  846. data2 = np.sort(data2)
  847. n1 = data1.shape[0]
  848. n2 = data2.shape[0]
  849. data_all = np.concatenate([data1, data2])
  850. cdf1 = np.searchsorted(data1, data_all, side='right') / n1
  851. cdf2 = np.searchsorted(data2, data_all, side='right') / n2
  852. print(data_all)
  853. print("\ndata1", data1, cdf1)
  854. print("\ndata2", data2, cdf2)
  855. cddiffs = cdf1 - cdf2
  856. print("\ncddiffs", cddiffs)
  857. minS = np.clip(-np.min(cddiffs), 0, 1)
  858. maxS = np.max(cddiffs)
  859. print("\nmin", minS)
  860. print("max", maxS)
  861. m, n = sorted([float(n1), float(n2)], reverse=True)
  862. en = m * n / (m + n)
  863. d = max(minS, maxS)
  864. prob = stats.distributions.kstwo.sf(d, np.round(en))
  865. print("\nprob", prob)
  866. */
  867. static int double_expect(double v, const char *str, const char *descr) {
  868. char buf[100 + 1];
  869. snprintfz(buf, 100, "%0.6f", v);
  870. int ret = strcmp(buf, str) ? 1 : 0;
  871. fprintf(stderr, "%s %s, expected %s, got %s\n", ret?"FAILED":"OK", descr, str, buf);
  872. return ret;
  873. }
  874. static int mc_unittest1(void) {
  875. int bs = 3, hs = 3;
  876. DIFFS_NUMBERS base[3] = { 1, 2, 3 };
  877. DIFFS_NUMBERS high[3] = { 3, 4, 6 };
  878. double prob = ks_2samp(base, bs, high, hs, 0);
  879. return double_expect(prob, "0.222222", "3x3");
  880. }
  881. static int mc_unittest2(void) {
  882. int bs = 6, hs = 3;
  883. DIFFS_NUMBERS base[6] = { 1, 2, 3, 10, 10, 15 };
  884. DIFFS_NUMBERS high[3] = { 3, 4, 6 };
  885. double prob = ks_2samp(base, bs, high, hs, 1);
  886. return double_expect(prob, "0.500000", "6x3");
  887. }
  888. static int mc_unittest3(void) {
  889. int bs = 12, hs = 3;
  890. DIFFS_NUMBERS base[12] = { 1, 2, 3, 10, 10, 15, 111, 19999, 8, 55, -1, -73 };
  891. DIFFS_NUMBERS high[3] = { 3, 4, 6 };
  892. double prob = ks_2samp(base, bs, high, hs, 2);
  893. return double_expect(prob, "0.347222", "12x3");
  894. }
  895. static int mc_unittest4(void) {
  896. int bs = 12, hs = 3;
  897. DIFFS_NUMBERS base[12] = { 1111, -2222, 33, 100, 100, 15555, -1, 19999, 888, 755, -1, -730 };
  898. DIFFS_NUMBERS high[3] = { 365, -123, 0 };
  899. double prob = ks_2samp(base, bs, high, hs, 2);
  900. return double_expect(prob, "0.777778", "12x3");
  901. }
  902. int mc_unittest(void) {
  903. int errors = 0;
  904. errors += mc_unittest1();
  905. errors += mc_unittest2();
  906. errors += mc_unittest3();
  907. errors += mc_unittest4();
  908. return errors;
  909. }