diff --git a/Ganeti/HTools/Cluster.hs b/Ganeti/HTools/Cluster.hs
index f5a956b7d28b2e0bf1e9bce3b80ce70f5b473985..3324f8e7ee7ba9486599c58645980f2ca0fbc421 100644
--- a/Ganeti/HTools/Cluster.hs
+++ b/Ganeti/HTools/Cluster.hs
@@ -230,16 +230,16 @@ compDetailedCV nl =
         mem_l = map Node.pMem nodes
         dsk_l = map Node.pDsk nodes
         -- metric: memory covariance
-        mem_cv = varianceCoeff mem_l
+        mem_cv = stdDev mem_l
         -- metric: disk covariance
-        dsk_cv = varianceCoeff dsk_l
+        dsk_cv = stdDev dsk_l
         -- metric: count of instances living on N1 failing nodes
         n1_score = fromIntegral . sum . map (\n -> length (Node.sList n) +
                                                    length (Node.pList n)) .
                    filter Node.failN1 $ nodes :: Double
         res_l = map Node.pRem nodes
         -- metric: reserved memory covariance
-        res_cv = varianceCoeff res_l
+        res_cv = stdDev res_l
         -- offline instances metrics
         offline_ipri = sum . map (length . Node.pList) $ offline
         offline_isec = sum . map (length . Node.sList) $ offline
@@ -251,7 +251,7 @@ compDetailedCV nl =
         off_pri_score = fromIntegral offline_ipri::Double
         cpu_l = map Node.pCpu nodes
         -- metric: covariance of vcpu/pcpu ratio
-        cpu_cv = varianceCoeff cpu_l
+        cpu_cv = stdDev cpu_l
         -- metrics: covariance of cpu, memory, disk and network load
         (c_load, m_load, d_load, n_load) = unzip4 $
             map (\n ->
@@ -263,8 +263,7 @@ compDetailedCV nl =
         pri_tags_inst = sum $ map Node.conflictingPrimaries nodes
         pri_tags_score = fromIntegral pri_tags_inst::Double
     in [ mem_cv, dsk_cv, n1_score, res_cv, off_score, off_pri_score, cpu_cv
-       , varianceCoeff c_load, varianceCoeff m_load
-       , varianceCoeff d_load, varianceCoeff n_load
+       , stdDev c_load, stdDev m_load, stdDev d_load, stdDev n_load
        , pri_tags_score ]
 
 -- | Compute the /total/ variance.
diff --git a/Ganeti/HTools/Utils.hs b/Ganeti/HTools/Utils.hs
index 8354dc2c408227afeb2b13e22027558556b6af1d..7bc4ead7ed3c323e6878caefe53d56388462404f 100644
--- a/Ganeti/HTools/Utils.hs
+++ b/Ganeti/HTools/Utils.hs
@@ -27,7 +27,7 @@ module Ganeti.HTools.Utils
     , debugFn
     , debugXy
     , sepSplit
-    , varianceCoeff
+    , stdDev
     , commaJoin
     , readEitherString
     , loadJSArray
@@ -87,9 +87,9 @@ sepSplit sep s
 -- Simple and slow statistical functions, please replace with better
 -- versions
 
--- | Our modified standard deviation function (not, it's not the variance)
-varianceCoeff :: [Double] -> Double
-varianceCoeff lst =
+-- | Standard deviation function
+stdDev :: [Double] -> Double
+stdDev lst =
   -- first, calculate the list length and sum lst in a single step,
   -- for performance reasons
   let (ll', sx) = foldl' (\(rl, rs) e ->
@@ -99,9 +99,7 @@ varianceCoeff lst =
       ll = fromIntegral ll'::Double
       mv = sx / ll
       av = foldl' (\accu em -> let d = em - mv in accu + d * d) 0.0 lst
-      bv = sqrt (av / ll) -- stddev
-      cv = bv / ll        -- standard deviation divided by list length
-  in cv
+  in sqrt (av / ll) -- stddev
 
 -- * JSON-related functions
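For reference, a minimal standalone sketch of the renamed function next to the metric it replaces. The fold's starting accumulator is not visible inside the hunk, so the (0 :: Int, 0.0) value below is an assumption; the rest follows the patched definition and the removed lines. The old varianceCoeff was the population standard deviation divided once more by the list length, while the new stdDev drops that extra scaling.

import Data.List (foldl')

-- Population standard deviation, following the patched definition in
-- Ganeti.HTools.Utils (starting accumulator assumed).
stdDev :: [Double] -> Double
stdDev lst =
  let (ll', sx) = foldl' (\(rl, rs) e -> (rl + 1, rs + e)) (0 :: Int, 0.0) lst
      ll = fromIntegral ll' :: Double
      mv = sx / ll
      av = foldl' (\accu em -> let d = em - mv in accu + d * d) 0.0 lst
  in sqrt (av / ll)

-- The removed varianceCoeff, reconstructed from the deleted lines for
-- comparison: the same standard deviation, divided by the list length.
varianceCoeff :: [Double] -> Double
varianceCoeff lst = stdDev lst / fromIntegral (length lst)

main :: IO ()
main = do
  let xs = [1, 2, 3, 4] :: [Double]
  print (stdDev xs)        -- ~1.1180
  print (varianceCoeff xs) -- ~0.2795 (the old, length-scaled score)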