Commit f4f6eb0b authored by Iustin Pop

Convert the rest of the pipeline to ClusterData



This patch converts the backends and mergeData to the new ClusterData
type.
Signed-off-by: Iustin Pop <iustin@google.com>
Reviewed-by: Balazs Lecz <leczb@google.com>
parent 017a0c3d
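
For reference, a minimal sketch of the ClusterData record the pipeline is being converted to, reconstructed from the constructor and accessors visible in the hunks below (ClusterData gl nl il ctags, cdNodes, cdInstances, emptyCluster); the field names for the group and tag slots are assumptions, not taken from this commit:

import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Node as Node

-- Sketch only: cdGroups and cdTags are assumed names; the other two
-- accessors appear in the diff below.
data ClusterData = ClusterData
  { cdGroups    :: Group.List     -- ^ The node group list
  , cdNodes     :: Node.List      -- ^ The node list
  , cdInstances :: Instance.List  -- ^ The instance list
  , cdTags      :: [String]       -- ^ The cluster tags
  }

-- | An empty cluster, used as a base for record updates (assumed
-- definition, matching its use in the QuickCheck property below).
emptyCluster :: ClusterData
emptyCluster = ClusterData Container.empty Container.empty Container.empty []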
@@ -130,7 +130,7 @@ parseData body = do
   let (kti, il) = assignIndices iobj
   -- cluster tags
   ctags <- fromObj "cluster_tags" obj
-  cdata <- mergeData [] [] [] (gl, nl, il, ctags)
+  cdata <- mergeData [] [] [] (ClusterData gl nl il ctags)
   let map_n = cdNodes cdata
   optype <- fromObj "type" request
   rqtype <-
@@ -183,10 +183,9 @@ commonSuffix nl il =
 mergeData :: [(String, DynUtil)] -- ^ Instance utilisation data
           -> [String]            -- ^ Exclusion tags
           -> [String]            -- ^ Untouchable instances
-          -> (Group.List, Node.List, Instance.List, [String])
-          -- ^ Data from backends
+          -> ClusterData         -- ^ Data from backends
           -> Result ClusterData
-mergeData um extags exinsts (gl, nl, il2, tags) =
+mergeData um extags exinsts cdata@(ClusterData _ nl il2 tags) =
   let il = Container.elems il2
       il3 = foldl' (\im (name, n_util) ->
                       case Container.findByName im name of
@@ -209,7 +208,7 @@ mergeData um extags exinsts (gl, nl, il2, tags) =
   in if not $ all (`elem` all_inst_names) exinsts
      then Bad $ "Some of the excluded instances are unknown: " ++
                 show (exinsts \\ all_inst_names)
-     else Ok (ClusterData gl snl sil tags)
+     else Ok cdata { cdNodes = snl, cdInstances = sil }
 
 -- | Checks the cluster data for consistency.
 checkData :: Node.List -> Instance.List
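
The reworked mergeData accepts the whole ClusterData record and, rather than rebuilding it, returns its input with only the node and instance fields replaced via record update, so the group list and cluster tags pass through untouched. A minimal caller, assuming the module's existing imports (the helper name is hypothetical):

-- Hypothetical helper: merge with no utilisation data, exclusion tags or
-- untouchable instances, then read the merged node list back via cdNodes.
mergeNodesOnly :: ClusterData -> Result Node.List
mergeNodesOnly cdata = do
  merged <- mergeData [] [] [] cdata
  return (cdNodes merged)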
@@ -186,7 +186,7 @@ readData master =
   )
 
 parseData :: (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
-          -> Result (Group.List, Node.List, Instance.List, [String])
+          -> Result ClusterData
 parseData (groups, nodes, instances, cinfo) = do
   group_data <- groups >>= getGroups
   let (group_names, group_idx) = assignIndices group_data
@@ -195,9 +195,9 @@ parseData (groups, nodes, instances, cinfo) = do
   inst_data <- instances >>= getInstances node_names
   let (_, inst_idx) = assignIndices inst_data
   ctags <- cinfo >>= getClusterTags
-  return (group_idx, node_idx, inst_idx, ctags)
+  return (ClusterData group_idx node_idx inst_idx ctags)
 
 -- | Top level function for data loading
 loadData :: String -- ^ Unix socket to use as source
-         -> IO (Result (Group.List, Node.List, Instance.List, [String]))
+         -> IO (Result ClusterData)
 loadData master = readData master >>= return . parseData
@@ -850,7 +850,7 @@ prop_Loader_assignIndices nodes =
 prop_Loader_mergeData ns =
   let na = Container.fromAssocList $ map (\n -> (Node.idx n, n)) ns
   in case Loader.mergeData [] [] []
-         (Container.empty, na, Container.empty, []) of
+         (Loader.emptyCluster {Loader.cdNodes = na}) of
     Types.Bad _ -> False
     Types.Ok (Loader.ClusterData _ nl il _) ->
       let nodes = Container.elems nl
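
The property now builds its input with a record update on emptyCluster instead of spelling out a 4-tuple; the same idiom keeps other test fixtures short. A small sketch, assuming the test module's qualified Loader import (the helper name is hypothetical):

-- Hypothetical fixture: a cluster containing only the given nodes, with
-- groups, instances and tags left at their empty defaults.
nodesOnlyCluster :: Node.List -> Loader.ClusterData
nodesOnlyCluster na = Loader.emptyCluster { Loader.cdNodes = na }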
@@ -147,7 +147,7 @@ readData master = do
 
 -- | Builds the cluster data from the raw Rapi content
 parseData :: (Result String, Result String, Result String, Result String)
-          -> Result (Group.List, Node.List, Instance.List, [String])
+          -> Result ClusterData
 parseData (group_body, node_body, inst_body, tags_body) = do
   group_data <- group_body >>= getGroups
   let (group_names, group_idx) = assignIndices group_data
@@ -156,9 +156,9 @@ parseData (group_body, node_body, inst_body, tags_body) = do
   inst_data <- inst_body >>= getInstances node_names
   let (_, inst_idx) = assignIndices inst_data
   tags_data <- tags_body >>= (fromJResult "Parsing tags data" . decodeStrict)
-  return (group_idx, node_idx, inst_idx, tags_data)
+  return (ClusterData group_idx node_idx inst_idx tags_data)
 
 -- | Top level function for data loading
 loadData :: String -- ^ Cluster or URL to use as source
-         -> IO (Result (Group.List, Node.List, Instance.List, [String]))
+         -> IO (Result ClusterData)
 loadData master = readData master >>= return . parseData
@@ -35,10 +35,10 @@ import Text.Printf (printf)
 import Ganeti.HTools.Utils
 import Ganeti.HTools.Types
+import Ganeti.HTools.Loader
 import qualified Ganeti.HTools.Container as Container
 import qualified Ganeti.HTools.Group as Group
 import qualified Ganeti.HTools.Node as Node
-import qualified Ganeti.HTools.Instance as Instance
 
 -- | Parse the string description into nodes.
 parseDesc :: String -> Result (AllocPolicy, Int, Int, Int, Int)
@@ -74,7 +74,7 @@ createGroup grpIndex spec = do
 
 -- | Builds the cluster data from node\/instance files.
 parseData :: [String] -- ^ Cluster description in text format
-          -> Result (Group.List, Node.List, Instance.List, [String])
+          -> Result ClusterData
 parseData ndata = do
   grpNodeData <- mapM (uncurry createGroup) $ zip [1..] ndata
   let (groups, nodes) = unzip grpNodeData
@@ -82,11 +82,11 @@ parseData ndata = do
   let ktn = map (\(idx, n) -> (idx, Node.setIdx n idx))
             $ zip [1..] nodes'
       ktg = map (\g -> (Group.idx g, g)) groups
-  return (Container.fromAssocList ktg,
-          Container.fromAssocList ktn, Container.empty, [])
+  return (ClusterData (Container.fromAssocList ktg)
+          (Container.fromAssocList ktn) Container.empty [])
 
 -- | Builds the cluster data from node\/instance files.
 loadData :: [String] -- ^ Cluster description in text format
-         -> IO (Result (Group.List, Node.List, Instance.List, [String]))
+         -> IO (Result ClusterData)
 loadData = -- IO monad, just for consistency with the other loaders
     return . parseData
@@ -169,7 +169,7 @@ readData = readFile
 
 -- | Builds the cluster data from text input.
 parseData :: String -- ^ Text data
-          -> Result (Group.List, Node.List, Instance.List, [String])
+          -> Result ClusterData
 parseData fdata = do
   let flines = lines fdata
   (glines, nlines, ilines, ctags) <-
@@ -184,9 +184,9 @@ parseData fdata = do
   {- instance file: name mem disk status pnode snode -}
   (_, il) <- loadTabular ilines (loadInst ktn)
   {- the tags are simply line-based, no processing needed -}
-  return (gl, nl, il, ctags)
+  return (ClusterData gl nl il ctags)
 
 -- | Top level function for data loading
 loadData :: String -- ^ Path to the text file
-         -> IO (Result (Group.List, Node.List, Instance.List, [String]))
+         -> IO (Result ClusterData)
 loadData afile = readData afile >>= return . parseData
@@ -90,7 +90,7 @@ fixSlash = map (\x -> if x == '/' then '_' else x)
 
 -- | Generates serialized data from loader input
-processData :: Result (Group.List, Node.List, Instance.List, [String])
+processData :: Result ClusterData
             -> Result (Group.List, Node.List, Instance.List, String)
 processData input_data = do
   (ClusterData gl nl il ctags) <- input_data >>= mergeData [] [] []
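
With every backend now returning Result ClusterData, code downstream of the loaders can be written once against the record instead of once per tuple shape. A rough sketch of such a consumer (the function and its import list are assumptions, not part of this commit):

import qualified Ganeti.HTools.Container as Container
import Ganeti.HTools.Loader (ClusterData(..), mergeData)
import Ganeti.HTools.Types (Result(..))

-- Hypothetical consumer: run any of the converted loaders, merge the raw
-- data and print a short node/instance summary.
summarize :: IO (Result ClusterData) -> IO ()
summarize loader = do
  input <- loader
  case input >>= mergeData [] [] [] of
    Bad msg -> putStrLn ("Error: " ++ msg)
    Ok (ClusterData _ nl il _) ->
      putStrLn (show (length (Container.elems nl)) ++ " node(s), " ++
                show (length (Container.elems il)) ++ " instance(s)")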