time-to-botec

Benchmark sampling in different programming languages
Log | Files | Refs | README

lowest.js (4041B)


      1 /**
      2 * @license Apache-2.0
      3 *
      4 * Copyright (c) 2018 The Stdlib Authors.
      5 *
      6 * Licensed under the Apache License, Version 2.0 (the "License");
      7 * you may not use this file except in compliance with the License.
      8 * You may obtain a copy of the License at
      9 *
     10 *    http://www.apache.org/licenses/LICENSE-2.0
     11 *
     12 * Unless required by applicable law or agreed to in writing, software
     13 * distributed under the License is distributed on an "AS IS" BASIS,
     14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     15 * See the License for the specific language governing permissions and
     16 * limitations under the License.
     17 */
     18 
     19 'use strict';
     20 
     21 // MODULES //
     22 
     23 var sqrt = require( '@stdlib/math/base/special/sqrt' );
     24 var abs = require( '@stdlib/math/base/special/abs' );
     25 var max = require( '@stdlib/math/base/special/max' );
     26 var pow = require( '@stdlib/math/base/special/pow' );
     27 
     28 
     29 // MAIN //
     30 
     31 /**
     32 * Calculates the fitted value `ys` for a value `xs` on the horizontal axis.
     33 *
     34 * ## Method
     35 *
     36 * -   The smoothed value for the x-axis value at the current index is computed using a (robust) locally weighted regression of degree one.  The tricube weight function is used with `h` equal to the maximum of `xs - x[ nleft ]` and `x[ nright ] - xs`.
     37 *
     38 * ## References
     39 *
     40 * -   Cleveland, William S. 1979. "Robust Locally and Smoothing Weighted Regression Scatterplots." _Journal of the American Statistical Association_ 74 (368): 829–36. doi:[10.1080/01621459.1979.10481038](https://doi.org/10.1080/01621459.1979.10481038).
     41 * -   Cleveland, William S. 1981. "Lowess: A program for smoothing scatterplots by robust locally weighted regression." _American Statistician_ 35 (1): 54–55. doi:[10.2307/2683591](https://doi.org/10.2307/2683591).
     42 *
     43 * @private
     44 * @param {NumericArray} x - ordered x-axis values (abscissa values)
     45 * @param {NumericArray} y - corresponding y-axis values (ordinate values)
     46 * @param {PositiveInteger} n - number of observations
     47 * @param {NonNegativeInteger} i - current index
     48 * @param {NonNegativeInteger} nleft - index of the first point used in computing the fitted value
     49 * @param {NonNegativeInteger} nright - index of the last point used in computing the fitted value
     50 * @param {ProbabilityArray} w - weights at indices from `nleft` to `nright` to be used in the calculation of the fitted value
     51 * @param {boolean} userw - boolean indicating whether a robust fit is carried out using the weights in `rw`
     52 * @param {ProbabilityArray} rw - robustness weights
     53 * @returns {number} fitted value
     54 */
     55 function lowest( x, y, n, i, nleft, nright, w, userw, rw ) {
     56 	var range;
     57 	var nrt;
     58 	var h1;
     59 	var h9;
     60 	var xs;
     61 	var ys;
     62 	var h;
     63 	var a;
     64 	var b;
     65 	var c;
     66 	var r;
     67 	var j;
     68 
     69 	xs = x[ i ];
     70 	range = x[ n - 1 ] - x[ 0 ];
     71 	h = max( xs - x[ nleft ], x[ nright ] - xs );
     72 	h9 = 0.999 * h;
     73 	h1 = 0.001 * h;
     74 
     75 	// Compute weights (pick up all ties on right):
     76 	a = 0.0; // sum of weights
     77 	for ( j = nleft; j < n; j++ ) {
     78 		w[ j ] = 0.0;
     79 		r = abs( x[ j ] - xs );
     80 		if ( r <= h9 ) { // small enough for non-zero weight
     81 			if ( r > h1 ) {
     82 				w[ j ] = pow( 1.0-pow( r/h, 3.0 ), 3.0 );
     83 			} else {
     84 				w[ j ] = 1.0;
     85 			}
     86 			if ( userw ) {
     87 				w[ j ] *= rw[ j ];
     88 			}
     89 			a += w[ j ];
     90 		}
     91 		else if ( x[ j ] > xs ) {
     92 			break; // get out at first zero weight on right
     93 		}
     94 	}
     95 	nrt = j - 1; // rightmost point (may be greater than `nright` because of ties)
     96 	if ( a <= 0.0 ) {
     97 		return y[ i ];
     98 	}
     99 
    100 	// Make sum of weights equal to one:
    101 	for ( j = nleft; j <= nrt; j++ ) {
    102 		w[ j ] /= a;
    103 	}
    104 
    105 	if ( h > 0.0 ) { // use linear fit
    106 		// Find weighted center of x values:
    107 		a = 0.0;
    108 		for ( j = nleft; j <= nrt; j++ ) {
    109 			a += w[ j ] * x[ j ];
    110 		}
    111 		b = xs - a;
    112 		c = 0.0;
    113 		for ( j = nleft; j <= nrt; j++ ) {
    114 			c += w[ j ] * pow( x[ j ] - a, 2.0 );
    115 		}
    116 		if ( sqrt( c ) > 0.001 * range ) {
    117 			// Points are spread out enough to compute slope:
    118 			b /= c;
    119 			for ( j = nleft; j <= nrt; j++ ) {
    120 				w[ j ] *= ( 1.0 + ( b*(x[j]-a) ) );
    121 			}
    122 		}
    123 	}
    124 	ys = 0.0;
    125 	for ( j = nleft; j <= nrt; j++ ) {
    126 		ys += w[ j ] * y[ j ];
    127 	}
    128 	return ys;
    129 }
    130 
    131 
// EXPORTS //

// Expose the internal `lowest` routine (used by the main LOWESS smoother):
module.exports = lowest;