adesh-rao commented on a change in pull request #2760:
URL: https://github.com/apache/hive/pull/2760#discussion_r739045967
##########
File path: ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStd.java
##########
@@ -90,7 +92,9 @@ public GenericUDAFEvaluator getEvaluator(TypeInfo[]
parameters)
* use it, etc.
*/
public static double calculateStdResult(double variance, long count) {
- return Math.sqrt(variance / count);
+ BigDecimal bvariance = new BigDecimal(variance);
+ BigDecimal result = bvariance.divide(new BigDecimal(count));
+ return Math.sqrt(result.doubleValue());
Review comment:
Add a TODO to use `BigDecimal.sqrt()` once Hive moves to Java 9 or later?
##########
File path:
ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFVariance.java
##########
@@ -120,14 +126,15 @@ public static double calculateMerge(
long partialCount, long mergeCount, double partialSum, double mergeSum,
double partialVariance, double mergeVariance) {
- final double doublePartialCount = (double) partialCount;
- final double doubleMergeCount = (double) mergeCount;
+ final BigDecimal bPartialCount = new BigDecimal(partialCount);
+ final BigDecimal bMergeCount = new BigDecimal(mergeCount);
+ BigDecimal bmergeVariance = new BigDecimal(mergeVariance);
- double t = (doublePartialCount / doubleMergeCount) * mergeSum - partialSum;
- mergeVariance +=
- partialVariance + ((doubleMergeCount / doublePartialCount) /
- (doubleMergeCount + doublePartialCount)) * t * t;
- return mergeVariance;
+ BigDecimal t =
+ (bPartialCount.divide(bMergeCount).multiply(new
BigDecimal(mergeSum).subtract(new BigDecimal(partialSum))));
Review comment:
the previous expression was `(doublePartialCount/doubleMergeCount) *
mergeSum - partialSum`
The new expression looks like:
`(bPartialCount/bMergeCount) * (mergeSum - partialSum)`.
These two are not equivalent. Can you fix this?
##########
File path: ql/src/test/queries/clientpositive/sdtdev.q
##########
@@ -0,0 +1,12 @@
+create table test ( col1 decimal(10,3) );
+insert into test values
(10230.72),(10230.72),(10230.72),(10230.72),(10230.72),(10230.72),(10230.72);
+select STDDEV_SAMP(col1) AS STDDEV_6M , STDDEV(col1) as STDDEV
,STDDEV_POP(col1) as STDDEV_POP , variance(col1) as variance,var_pop(col1) as
var_pop,var_samp(col1) as var_samp from test;
+
+create table testpoint ( col1 decimal(10,3));
+insert into testpoint values
(0.12345678),(0.25362123),(0.62437485),(0.65133746),(0.98765432),(0.12435647),(0.7654321445);
+select STDDEV_SAMP(col1) AS STDDEV_6M , STDDEV(col1) as STDDEV
,STDDEV_POP(col1) as STDDEV_POP , variance(col1) as variance,var_pop(col1) as
var_pop,var_samp(col1) as var_samp from testpoint;
+
+create table testint(col1 int);
+insert into testint values
(85),(86),(100),(76),(81),(93),(84),(99),(71),(69),(93),(85),(81),(87),(89);
+select STDDEV_SAMP(col1) AS STDDEV_6M , STDDEV(col1) as STDDEV
,STDDEV_POP(col1) as STDDEV_POP, variance(col1) as variance,var_pop(col1) as
var_pop,var_samp(col1) as var_samp from testint;
+
Review comment:
nit: add drop table statements?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]