/*****************************************************************************
 * RRDtool 1.0.21  Copyright Tobias Oetiker, 1997 - 2000
 *****************************************************************************
 * rrd_hw.c  Support for Holt-Winters Smoothing / Aberrant Behavior Detection
 *****************************************************************************
 * Initial version by Jake Brutlag, WebTV Networks, 5/1/00
 *****************************************************************************/
13 unsigned long MyMod(signed long val, unsigned long mod);
15 int update_hwpredict(rrd_t *rrd, unsigned long cdp_idx, unsigned long rra_idx,
16 unsigned long ds_idx, unsigned short CDP_scratch_idx);
17 int update_seasonal(rrd_t *rrd, unsigned long cdp_idx, unsigned long rra_idx,
18 unsigned long ds_idx, unsigned short CDP_scratch_idx,
19 rrd_value_t *seasonal_coef);
20 int update_devpredict(rrd_t *rrd, unsigned long cdp_idx,
21 unsigned long rra_idx, unsigned long ds_idx, unsigned short CDP_scratch_idx);
22 int update_devseasonal(rrd_t *rrd, unsigned long cdp_idx, unsigned long rra_idx,
23 unsigned long ds_idx, unsigned short CDP_scratch_idx,
24 rrd_value_t *seasonal_dev);
25 int update_failures(rrd_t *rrd, unsigned long cdp_idx, unsigned long rra_idx,
26 unsigned long ds_idx, unsigned short CDP_scratch_idx);
29 update_hwpredict(rrd_t *rrd, unsigned long cdp_idx, unsigned long rra_idx,
30 unsigned long ds_idx, unsigned short CDP_scratch_idx)
32 rrd_value_t prediction, seasonal_coef;
33 unsigned long dependent_rra_idx, seasonal_cdp_idx;
34 unival *coefs = rrd -> cdp_prep[cdp_idx].scratch;
35 rra_def_t *current_rra = &(rrd -> rra_def[rra_idx]);
37 /* save coefficients from current prediction */
38 coefs[CDP_hw_last_intercept].u_val = coefs[CDP_hw_intercept].u_val;
39 coefs[CDP_hw_last_slope].u_val = coefs[CDP_hw_slope].u_val;
40 coefs[CDP_last_null_count].u_cnt = coefs[CDP_null_count].u_cnt;
42 /* retrieve the current seasonal coef */
43 dependent_rra_idx = current_rra -> par[RRA_dependent_rra_idx].u_cnt;
44 seasonal_cdp_idx = dependent_rra_idx*(rrd -> stat_head -> ds_cnt) + ds_idx;
45 if (dependent_rra_idx < rra_idx)
46 seasonal_coef = rrd -> cdp_prep[seasonal_cdp_idx].scratch[CDP_hw_last_seasonal].u_val;
48 seasonal_coef = rrd -> cdp_prep[seasonal_cdp_idx].scratch[CDP_hw_seasonal].u_val;
50 /* compute the prediction */
51 if (isnan(coefs[CDP_hw_intercept].u_val) || isnan(coefs[CDP_hw_slope].u_val)
52 || isnan(seasonal_coef))
56 /* bootstrap initialization of slope and intercept */
57 if (isnan(coefs[CDP_hw_intercept].u_val) &&
58 !isnan(coefs[CDP_scratch_idx].u_val))
61 fprintf(stderr,"Initialization of slope/intercept\n");
63 coefs[CDP_hw_intercept].u_val = coefs[CDP_scratch_idx].u_val;
64 coefs[CDP_hw_last_intercept].u_val = coefs[CDP_scratch_idx].u_val;
65 /* initialize the slope to 0 */
66 coefs[CDP_hw_slope].u_val = 0.0;
67 coefs[CDP_hw_last_slope].u_val = 0.0;
68 /* initialize null count to 1 */
69 coefs[CDP_null_count].u_cnt = 1;
70 coefs[CDP_last_null_count].u_cnt = 1;
72 /* if seasonal coefficient is NA, then don't update intercept, slope */
74 prediction = coefs[CDP_hw_intercept].u_val +
75 (coefs[CDP_hw_slope].u_val)*(coefs[CDP_null_count].u_cnt)
78 fprintf(stderr,"computed prediction: %f\n",prediction);
80 if (isnan(coefs[CDP_scratch_idx].u_val))
82 /* NA value, no updates of intercept, slope;
83 * increment the null count */
84 (coefs[CDP_null_count].u_cnt)++;
87 fprintf(stderr,"Updating intercept, slope\n");
89 /* update the intercept */
90 coefs[CDP_hw_intercept].u_val = (current_rra -> par[RRA_hw_alpha].u_val)*
91 (coefs[CDP_scratch_idx].u_val - seasonal_coef) +
92 (1 - current_rra -> par[RRA_hw_alpha].u_val)*(coefs[CDP_hw_intercept].u_val
93 + (coefs[CDP_hw_slope].u_val)*(coefs[CDP_null_count].u_cnt));
94 /* update the slope */
95 coefs[CDP_hw_slope].u_val = (current_rra -> par[RRA_hw_beta].u_val)*
96 (coefs[CDP_hw_intercept].u_val - coefs[CDP_hw_last_intercept].u_val) +
97 (1 - current_rra -> par[RRA_hw_beta].u_val)*(coefs[CDP_hw_slope].u_val);
98 /* reset the null count */
99 coefs[CDP_null_count].u_cnt = 1;
103 /* store the prediction for writing */
104 coefs[CDP_scratch_idx].u_val = prediction;
109 lookup_seasonal(rrd_t *rrd, unsigned long rra_idx, unsigned long rra_start,
110 FILE *rrd_file, unsigned long offset, rrd_value_t **seasonal_coef)
112 unsigned long pos_tmp;
113 /* rra_ptr[].cur_row points to the rra row to be written; this function
114 * reads cur_row + offset */
115 unsigned long row_idx = rrd -> rra_ptr[rra_idx].cur_row + offset;
116 /* handle wrap around */
117 if (row_idx >= rrd -> rra_def[rra_idx].row_cnt)
118 row_idx = row_idx % (rrd -> rra_def[rra_idx].row_cnt);
120 /* rra_start points to the appropriate rra block in the file */
121 /* compute the pointer to the appropriate location in the file */
122 pos_tmp = rra_start + (row_idx)*(rrd -> stat_head -> ds_cnt)*sizeof(rrd_value_t);
124 /* allocate memory if need be */
125 if (*seasonal_coef == NULL)
127 (rrd_value_t *) malloc((rrd -> stat_head -> ds_cnt)*sizeof(rrd_value_t));
128 if (*seasonal_coef == NULL) {
129 rrd_set_error("memory allocation failure: seasonal coef");
133 if (!fseek(rrd_file,pos_tmp,SEEK_SET))
135 if (fread(*seasonal_coef,sizeof(rrd_value_t),rrd->stat_head->ds_cnt,rrd_file)
136 == rrd -> stat_head -> ds_cnt)
139 /* we can safely ignore the rule requiring a seek operation between read
140 * and write, because this read moves the file pointer to somewhere
141 * in the file other than the next write location.
145 rrd_set_error("read operation failed in lookup_seasonal(): %lu\n",pos_tmp);
148 rrd_set_error("seek operation failed in lookup_seasonal(): %lu\n",pos_tmp);
155 update_seasonal(rrd_t *rrd, unsigned long cdp_idx, unsigned long rra_idx,
156 unsigned long ds_idx, unsigned short CDP_scratch_idx, rrd_value_t *seasonal_coef)
158 /* TODO: extract common if subblocks in the wake of I/O optimization */
159 rrd_value_t intercept, seasonal;
160 rra_def_t *current_rra = &(rrd -> rra_def[rra_idx]);
161 rra_def_t *hw_rra = &(rrd -> rra_def[current_rra -> par[RRA_dependent_rra_idx].u_cnt]);
162 /* obtain cdp_prep index for HWPREDICT */
163 unsigned long hw_cdp_idx = (current_rra -> par[RRA_dependent_rra_idx].u_cnt)
164 * (rrd -> stat_head -> ds_cnt) + ds_idx;
165 unival *coefs = rrd -> cdp_prep[hw_cdp_idx].scratch;
167 /* update seasonal coefficient in cdp prep areas */
168 seasonal = rrd -> cdp_prep[cdp_idx].scratch[CDP_hw_seasonal].u_val;
169 rrd -> cdp_prep[cdp_idx].scratch[CDP_hw_last_seasonal].u_val = seasonal;
170 rrd -> cdp_prep[cdp_idx].scratch[CDP_hw_seasonal].u_val =
171 seasonal_coef[ds_idx];
173 /* update seasonal value for disk */
174 if (current_rra -> par[RRA_dependent_rra_idx].u_cnt < rra_idx)
175 /* associated HWPREDICT has already been updated */
176 /* check for possible NA values */
177 if (isnan(rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val))
179 /* no update, store the old value unchanged,
180 * doesn't matter if it is NA */
181 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val = seasonal;
182 } else if (isnan(coefs[CDP_hw_last_intercept].u_val)
183 || isnan(coefs[CDP_hw_last_slope].u_val))
185 /* this should never happen, as HWPREDICT was already updated */
186 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val= DNAN;
187 } else if (isnan(seasonal))
189 /* initialization: intercept is not currently being updated */
191 fprintf(stderr,"Initialization of seasonal coef %lu\n",
192 rrd -> rra_ptr[rra_idx].cur_row);
194 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val
195 -= coefs[CDP_hw_last_intercept].u_val;
197 intercept = coefs[CDP_hw_intercept].u_val;
200 "Updating seasonal, params: gamma %f, new intercept %f, old seasonal %f\n",
201 current_rra -> par[RRA_seasonal_gamma].u_val,
202 intercept, seasonal);
204 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val =
205 (current_rra -> par[RRA_seasonal_gamma].u_val)*
206 (rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val - intercept) +
207 (1 - current_rra -> par[RRA_seasonal_gamma].u_val)*seasonal;
210 /* SEASONAL array is updated first, which means the new intercept
211 * hasn't be computed; so we compute it here. */
213 /* check for possible NA values */
214 if (isnan(rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val))
216 /* no update, simple store the old value unchanged,
217 * doesn't matter if it is NA */
218 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val = seasonal;
219 } else if (isnan(coefs[CDP_hw_intercept].u_val)
220 || isnan(coefs[CDP_hw_slope].u_val))
222 /* Initialization of slope and intercept will occur.
223 * force seasonal coefficient to 0. */
224 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val= 0.0;
225 } else if (isnan(seasonal))
227 /* initialization: intercept will not be updated
228 * CDP_hw_intercept = CDP_hw_last_intercept; just need to
229 * subtract this baseline value. */
231 fprintf(stderr,"Initialization of seasonal coef %lu\n",
232 rrd -> rra_ptr[rra_idx].cur_row);
234 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val -= coefs[CDP_hw_intercept].u_val;
236 /* Note that we must get CDP_scratch_idx from SEASONAL array, as CDP_scratch_idx
237 * for HWPREDICT array will be DNAN. */
238 intercept = (hw_rra -> par[RRA_hw_alpha].u_val)*
239 (rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val - seasonal)
240 + (1 - hw_rra -> par[RRA_hw_alpha].u_val)*(coefs[CDP_hw_intercept].u_val
241 + (coefs[CDP_hw_slope].u_val)*(coefs[CDP_null_count].u_cnt));
242 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val =
243 (current_rra -> par[RRA_seasonal_gamma].u_val)*
244 (rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val - intercept) +
245 (1 - current_rra -> par[RRA_seasonal_gamma].u_val)*seasonal;
249 fprintf(stderr,"seasonal coefficient set= %f\n",
250 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val);
256 update_devpredict(rrd_t *rrd, unsigned long cdp_idx,
257 unsigned long rra_idx, unsigned long ds_idx, unsigned short CDP_scratch_idx)
259 /* there really isn't any "update" here; the only reason this information
260 * is stored separately from DEVSEASONAL is to preserve deviation predictions
261 * for a longer duration than one seasonal cycle. */
262 unsigned long seasonal_cdp_idx = (rrd -> rra_def[rra_idx].par[RRA_dependent_rra_idx].u_cnt)
263 * (rrd -> stat_head -> ds_cnt) + ds_idx;
265 if (rrd -> rra_def[rra_idx].par[RRA_dependent_rra_idx].u_cnt < rra_idx)
267 /* associated DEVSEASONAL array already updated */
268 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val
269 = rrd -> cdp_prep[seasonal_cdp_idx].scratch[CDP_last_seasonal_deviation].u_val;
271 /* associated DEVSEASONAL not yet updated */
272 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val
273 = rrd -> cdp_prep[seasonal_cdp_idx].scratch[CDP_seasonal_deviation].u_val;
279 update_devseasonal(rrd_t *rrd, unsigned long cdp_idx, unsigned long rra_idx,
280 unsigned long ds_idx, unsigned short CDP_scratch_idx,
281 rrd_value_t *seasonal_dev)
283 rrd_value_t prediction = 0, seasonal_coef = DNAN;
284 rra_def_t *current_rra = &(rrd -> rra_def[rra_idx]);
285 /* obtain cdp_prep index for HWPREDICT */
286 unsigned long hw_rra_idx = current_rra -> par[RRA_dependent_rra_idx].u_cnt;
287 unsigned long hw_cdp_idx = hw_rra_idx * (rrd -> stat_head -> ds_cnt) + ds_idx;
288 unsigned long seasonal_cdp_idx;
289 unival *coefs = rrd -> cdp_prep[hw_cdp_idx].scratch;
291 rrd -> cdp_prep[cdp_idx].scratch[CDP_last_seasonal_deviation].u_val =
292 rrd -> cdp_prep[cdp_idx].scratch[CDP_seasonal_deviation].u_val;
293 /* retrieve the next seasonal deviation value, could be NA */
294 rrd -> cdp_prep[cdp_idx].scratch[CDP_seasonal_deviation].u_val =
295 seasonal_dev[ds_idx];
297 /* retrieve the current seasonal_coef (not to be confused with the
298 * current seasonal deviation). Could make this more readable by introducing
299 * some wrapper functions. */
300 seasonal_cdp_idx = (rrd -> rra_def[hw_rra_idx].par[RRA_dependent_rra_idx].u_cnt)
301 *(rrd -> stat_head -> ds_cnt) + ds_idx;
302 if (rrd -> rra_def[hw_rra_idx].par[RRA_dependent_rra_idx].u_cnt < rra_idx)
303 /* SEASONAL array already updated */
304 seasonal_coef = rrd -> cdp_prep[seasonal_cdp_idx].scratch[CDP_hw_last_seasonal].u_val;
306 /* SEASONAL array not yet updated */
307 seasonal_coef = rrd -> cdp_prep[seasonal_cdp_idx].scratch[CDP_hw_seasonal].u_val;
309 /* compute the abs value of the difference between the prediction and
311 if (hw_rra_idx < rra_idx)
313 /* associated HWPREDICT has already been updated */
314 if (isnan(coefs[CDP_hw_last_intercept].u_val) ||
315 isnan(coefs[CDP_hw_last_slope].u_val) ||
316 isnan(seasonal_coef))
318 /* one of the prediction values is uinitialized */
319 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val = DNAN;
322 prediction = coefs[CDP_hw_last_intercept].u_val +
323 (coefs[CDP_hw_last_slope].u_val)*(coefs[CDP_last_null_count].u_cnt)
327 /* associated HWPREDICT has NOT been updated */
328 if (isnan(coefs[CDP_hw_intercept].u_val) ||
329 isnan(coefs[CDP_hw_slope].u_val) ||
330 isnan(seasonal_coef))
332 /* one of the prediction values is uinitialized */
333 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val = DNAN;
336 prediction = coefs[CDP_hw_intercept].u_val +
337 (coefs[CDP_hw_slope].u_val)*(coefs[CDP_null_count].u_cnt)
342 if (isnan(rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val))
344 /* no update, store existing value unchanged, doesn't
345 * matter if it is NA */
346 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val =
347 rrd -> cdp_prep[cdp_idx].scratch[CDP_last_seasonal_deviation].u_val;
348 } else if (isnan(rrd -> cdp_prep[cdp_idx].scratch[CDP_last_seasonal_deviation].u_val))
352 fprintf(stderr,"Initialization of seasonal deviation\n");
354 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val =
355 abs(prediction - rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val);
357 /* exponential smoothing update */
358 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val =
359 (rrd -> rra_def[rra_idx].par[RRA_seasonal_gamma].u_val)*
360 abs(prediction - rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val)
361 + (1 - rrd -> rra_def[rra_idx].par[RRA_seasonal_gamma].u_val)*
362 (rrd -> cdp_prep[cdp_idx].scratch[CDP_last_seasonal_deviation].u_val);
367 /* Check for a failure based on a threshold # of violations within the specified
370 update_failures(rrd_t *rrd, unsigned long cdp_idx, unsigned long rra_idx,
371 unsigned long ds_idx, unsigned short CDP_scratch_idx)
373 /* detection of a violation depends on 3 RRAs:
374 * HWPREDICT, SEASONAL, and DEVSEASONAL */
375 rra_def_t *current_rra = &(rrd -> rra_def[rra_idx]);
376 unsigned long dev_rra_idx = current_rra -> par[RRA_dependent_rra_idx].u_cnt;
377 rra_def_t *dev_rra = &(rrd -> rra_def[dev_rra_idx]);
378 unsigned long hw_rra_idx = dev_rra -> par[RRA_dependent_rra_idx].u_cnt;
379 rra_def_t *hw_rra = &(rrd -> rra_def[hw_rra_idx]);
380 unsigned long seasonal_rra_idx = hw_rra -> par[RRA_dependent_rra_idx].u_cnt;
381 unsigned long temp_cdp_idx;
382 rrd_value_t deviation = DNAN;
383 rrd_value_t seasonal_coef = DNAN;
384 rrd_value_t prediction = DNAN;
386 unsigned short violation_cnt = 0, i;
387 char *violations_array;
389 /* usual checks to determine the order of the RRAs */
390 temp_cdp_idx = dev_rra_idx * (rrd -> stat_head -> ds_cnt) + ds_idx;
391 if (rra_idx < seasonal_rra_idx)
393 /* DEVSEASONAL not yet updated */
394 deviation = rrd -> cdp_prep[temp_cdp_idx].scratch[CDP_seasonal_deviation].u_val;
396 /* DEVSEASONAL already updated */
397 deviation = rrd -> cdp_prep[temp_cdp_idx].scratch[CDP_last_seasonal_deviation].u_val;
399 if (!isnan(deviation)) {
401 temp_cdp_idx = seasonal_rra_idx * (rrd -> stat_head -> ds_cnt) + ds_idx;
402 if (rra_idx < seasonal_rra_idx)
404 /* SEASONAL not yet updated */
405 seasonal_coef = rrd -> cdp_prep[temp_cdp_idx].scratch[CDP_hw_seasonal].u_val;
407 /* SEASONAL already updated */
408 seasonal_coef = rrd -> cdp_prep[temp_cdp_idx].scratch[CDP_hw_last_seasonal].u_val;
410 /* in this code block, we know seasonal coef is not DNAN, because deviation is not
413 temp_cdp_idx = hw_rra_idx * (rrd -> stat_head -> ds_cnt) + ds_idx;
414 if (rra_idx < hw_rra_idx)
416 /* HWPREDICT not yet updated */
417 prediction = rrd -> cdp_prep[temp_cdp_idx].scratch[CDP_hw_intercept].u_val +
418 (rrd -> cdp_prep[temp_cdp_idx].scratch[CDP_hw_slope].u_val)
419 *(rrd -> cdp_prep[temp_cdp_idx].scratch[CDP_null_count].u_cnt)
422 /* HWPREDICT already updated */
423 prediction = rrd -> cdp_prep[temp_cdp_idx].scratch[CDP_hw_last_intercept].u_val +
424 (rrd -> cdp_prep[temp_cdp_idx].scratch[CDP_hw_last_slope].u_val)
425 *(rrd -> cdp_prep[temp_cdp_idx].scratch[CDP_last_null_count].u_cnt)
429 /* determine if the observed value is a violation */
430 if (!isnan(rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val))
432 if (rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val > prediction +
433 (current_rra -> par[RRA_delta_pos].u_val)*deviation
434 || rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val < prediction -
435 (current_rra -> par[RRA_delta_neg].u_val)*deviation)
438 violation = 1; /* count DNAN values as violations */
443 /* determine if a failure has occurred and update the failure array */
444 violation_cnt = violation;
445 /* WARNING: this cast makes XML files non-portable across platforms,
446 * because an array of longs on disk is treated as an array of chars
448 violations_array = (char *) ((void *) rrd -> cdp_prep[cdp_idx].scratch);
449 for (i = current_rra -> par[RRA_window_len].u_cnt; i > 1; i--)
452 violations_array[i-1] = violations_array[i-2];
453 violation_cnt += violations_array[i-1];
455 violations_array[0] = violation;
457 if (violation_cnt < current_rra -> par[RRA_failure_threshold].u_cnt)
459 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val = 0.0;
461 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val = 1.0;
463 return (rrd-> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val);
466 /* For the specified CDP prep area and the FAILURES RRA,
467 * erase all history of past violations.
470 erase_violations(rrd_t *rrd, unsigned long cdp_idx, unsigned long rra_idx)
473 char *violations_array;
474 /* check that rra_idx is a CF_FAILURES array */
475 if (cf_conv(rrd -> rra_def[rra_idx].cf_nam) != CF_FAILURES)
478 fprintf(stderr,"erase_violations called for non-FAILURES RRA: %s\n",
479 rrd -> rra_def[rra_idx].cf);
485 fprintf(stderr,"scratch buffer before erase:\n");
486 for (i = 0; i < MAX_CDP_PAR_EN; i++)
488 fprintf(stderr,"%lu ", rrd -> cdp_prep[cdp_idx].scratch[i].u_cnt);
490 fprintf(stderr,"\n");
493 /* WARNING: this cast makes XML files non-portable across platforms,
494 * because an array of longs on disk is treated as an array of chars
496 violations_array = (char *) ((void *) rrd -> cdp_prep[cdp_idx].scratch);
497 /* erase everything in the part of the CDP scratch array that will be
498 * used to store violations for the current window */
499 for (i = rrd -> rra_def[rra_idx].par[RRA_window_len].u_cnt; i > 0; i--)
501 violations_array[i-1] = 0;
504 fprintf(stderr,"scratch buffer after erase:\n");
505 for (i = 0; i < MAX_CDP_PAR_EN; i++)
507 fprintf(stderr,"%lu ", rrd -> cdp_prep[cdp_idx].scratch[i].u_cnt);
509 fprintf(stderr,"\n");
513 /* Smooth a periodic array with a moving average: equal weights and
514 * length = 5% of the period. */
516 apply_smoother(rrd_t *rrd, unsigned long rra_idx, unsigned long rra_start,
519 unsigned long i, j, k;
520 unsigned long totalbytes;
521 rrd_value_t *rrd_values;
522 unsigned long row_length = rrd -> stat_head -> ds_cnt;
523 unsigned long row_count = rrd -> rra_def[rra_idx].row_cnt;
524 unsigned long offset;
526 rrd_value_t *working_average;
528 offset = floor(0.025*row_count);
529 if (offset == 0) return 0; /* no smoothing */
531 /* allocate memory */
532 totalbytes = sizeof(rrd_value_t)*row_length*row_count;
533 rrd_values = (rrd_value_t *) malloc(totalbytes);
534 if (rrd_values == NULL)
536 rrd_set_error("apply smoother: memory allocation failure");
540 /* rra_start is at the beginning of this rra */
541 if (fseek(rrd_file,rra_start,SEEK_SET))
543 rrd_set_error("seek to rra %d failed", rra_start);
548 /* could read all data in a single block, but we need to
549 * check for NA values */
550 for (i = 0; i < row_count; ++i)
552 for (j = 0; j < row_length; ++j)
554 fread(&(rrd_values[i*row_length + j]),sizeof(rrd_value_t),1,rrd_file);
555 /* should check fread for errors... */
556 if (isnan(rrd_values[i*row_length + j])) {
557 /* can't apply smoothing, still uninitialized values */
559 fprintf(stderr,"apply_smoother: NA detected in seasonal array: %ld %ld\n",i,j);
567 /* allocate queues, one for each data source */
568 buffers = (FIFOqueue **) malloc(sizeof(FIFOqueue *)*row_length);
569 for (i = 0; i < row_length; ++i)
571 queue_alloc(&(buffers[i]),2*offset + 1);
573 /* need working average initialized to 0 */
574 working_average = (rrd_value_t *) calloc(row_length,sizeof(rrd_value_t));
576 /* compute sums of the first 2*offset terms */
577 for (i = 0; i < 2*offset; ++i)
579 k = MyMod(i - offset,row_count);
580 for (j = 0; j < row_length; ++j)
582 queue_push(buffers[j],rrd_values[k*row_length + j]);
583 working_average[j] += rrd_values[k*row_length + j];
587 /* compute moving averages */
588 for (i = offset; i < row_count + offset; ++i)
590 for (j = 0; j < row_length; ++j)
592 k = MyMod(i,row_count);
593 /* add a term to the sum */
594 working_average[j] += rrd_values[k*row_length + j];
595 queue_push(buffers[j],rrd_values[k*row_length + j]);
597 /* reset k to be the center of the window */
598 k = MyMod(i - offset,row_count);
599 /* overwrite rdd_values entry, the old value is already
600 * saved in buffers */
601 rrd_values[k*row_length + j] = working_average[j]/(2*offset + 1);
603 /* remove a term from the sum */
604 working_average[j] -= queue_pop(buffers[j]);
608 for (i = 0; i < row_length; ++i)
610 queue_dealloc(buffers[i]);
613 free(working_average);
615 /* flush updated values to disk */
617 if (fseek(rrd_file,rra_start,SEEK_SET))
619 rrd_set_error("apply_smoother: seek to pos %d failed", rra_start);
623 /* write as a single block */
624 if (fwrite(rrd_values,sizeof(rrd_value_t),row_length*row_count,rrd_file)
625 != row_length*row_count)
627 rrd_set_error("apply_smoother: write failed to %lu",rra_start);
638 update_aberrant_CF(rrd_t *rrd, rrd_value_t pdp_val, enum cf_en current_cf,
639 unsigned long cdp_idx, unsigned long rra_idx, unsigned long ds_idx,
640 unsigned short CDP_scratch_idx, rrd_value_t *seasonal_coef)
642 rrd -> cdp_prep[cdp_idx].scratch[CDP_scratch_idx].u_val = pdp_val;
643 switch (current_cf) {
648 return update_hwpredict(rrd,cdp_idx,rra_idx,ds_idx,CDP_scratch_idx);
650 return update_devpredict(rrd,cdp_idx,rra_idx,ds_idx,CDP_scratch_idx);
652 return update_seasonal(rrd,cdp_idx,rra_idx,ds_idx,CDP_scratch_idx,seasonal_coef);
654 return update_devseasonal(rrd,cdp_idx,rra_idx,ds_idx,CDP_scratch_idx,seasonal_coef);
656 return update_failures(rrd,cdp_idx,rra_idx,ds_idx,CDP_scratch_idx);
/* Compute val modulo mod with a result always in [0, mod), i.e. a true
 * mathematical modulus that is safe for negative val.
 *
 * Fixes: use labs() instead of abs() (abs takes int and truncates a long
 * on LP64 platforms), and return 0 — not mod — when val is a negative
 * multiple of mod (mod would be an out-of-range array index for callers). */
unsigned long MyMod(signed long val, unsigned long mod)
{
   unsigned long new_val;

   if (val < 0)
      new_val = ((unsigned long) labs(val)) % mod;
   else
      new_val = (val % mod);

   if (val < 0 && new_val > 0)
      return (mod - new_val);
   else
      return (new_val);
}
675 /* a standard fixed-capacity FIF0 queue implementation
676 * No overflow checking is performed. */
677 int queue_alloc(FIFOqueue **q,int capacity)
679 *q = (FIFOqueue *) malloc(sizeof(FIFOqueue));
680 if (*q == NULL) return -1;
681 (*q) -> queue = (rrd_value_t *) malloc(sizeof(rrd_value_t)*capacity);
682 if ((*q) -> queue == NULL)
687 (*q) -> capacity = capacity;
688 (*q) -> head = capacity;
693 int queue_isempty(FIFOqueue *q)
695 return (q -> head % q -> capacity == q -> tail);
698 void queue_push(FIFOqueue *q, rrd_value_t value)
700 q -> queue[(q -> tail)++] = value;
701 q -> tail = q -> tail % q -> capacity;
704 rrd_value_t queue_pop(FIFOqueue *q)
706 q -> head = q -> head % q -> capacity;
707 return q -> queue[(q -> head)++];
710 void queue_dealloc(FIFOqueue *q)