Commit a679e9dc authored by Iustin Pop's avatar Iustin Pop
Browse files

Rework the data loader pipelines to read groups



This (invasive) patch changes all the loader pipelines to read the node
groups data from the cluster, via the various backends. It is invasive
as it needs coordinated changes across all the loaders.

Note that the new group data is not used, just returned.
Signed-off-by: Iustin Pop <iustin@google.com>
Reviewed-by: Balazs Lecz <leczb@google.com>
parent f4531f51
......@@ -10,7 +10,7 @@ libraries implementing the low-level protocols.
{-
Copyright (C) 2009 Google Inc.
Copyright (C) 2009, 2010 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
......@@ -49,6 +49,7 @@ import qualified Ganeti.HTools.Text as Text
import qualified Ganeti.HTools.Loader as Loader
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Group as Group
import Ganeti.HTools.Types
import Ganeti.HTools.CLI
......@@ -74,7 +75,7 @@ parseUtilisation line =
-- | External tool data loader from a variety of sources.
loadExternalData :: Options
-> IO (Node.List, Instance.List, [String])
-> IO (Group.List, Node.List, Instance.List, [String])
loadExternalData opts = do
let mhost = optMaster opts
lsock = optLuxi opts
......@@ -120,7 +121,7 @@ loadExternalData opts = do
| otherwise -> return $ Bad "No backend selected! Exiting."
let ldresult = input_data >>= Loader.mergeData util_data' exTags exInsts
(loaded_nl, il, tags) <-
(gl, loaded_nl, il, tags) <-
(case ldresult of
Ok x -> return x
Bad s -> do
......@@ -134,4 +135,4 @@ loadExternalData opts = do
hPutStrLn stderr "Warning: cluster has inconsistent data:"
hPutStrLn stderr . unlines . map (printf " - %s") $ fix_msgs
return (fixed_nl, il, tags)
return (gl, fixed_nl, il, tags)
......@@ -34,6 +34,7 @@ import Text.JSON (JSObject, JSValue(JSBool, JSString, JSArray),
makeObj, encodeStrict, decodeStrict,
fromJSObject, toJSString)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import Ganeti.HTools.Loader
......@@ -94,6 +95,14 @@ parseNode n a = do
dtotal dfree ctotal False guuid)
return (n, node)
-- | Parse one entry of the cluster's node group list.
--
-- Looks up the group's name in the JSON attribute list and pairs the
-- supplied UUID with a freshly built group (allocation policy defaults
-- to 'AllocPreferred').
parseGroup :: String                -- ^ The group UUID
           -> [(String, JSValue)]   -- ^ The JSON object
           -> Result (String, Group.Group)
parseGroup u a =
  fromObj "name" a >>= \gname ->
    return (u, Group.create gname u AllocPreferred)
-- | Top-level parser.
parseData :: String -- ^ The JSON message as received from Ganeti
-> Result Request -- ^ A (possible valid) request
......@@ -102,6 +111,10 @@ parseData body = do
let obj = fromJSObject decoded
-- request parser
request <- liftM fromJSObject (fromObj "request" obj)
-- existing group parsing
glist <- liftM fromJSObject (fromObj "nodegroups" obj)
gobj <- mapM (\(x, y) -> asJSObject y >>= parseGroup x . fromJSObject) glist
let (_, gl) = assignIndices gobj
-- existing node parsing
nlist <- liftM fromJSObject (fromObj "nodes" obj)
nobj <- mapM (\(x,y) -> asJSObject y >>= parseNode x . fromJSObject) nlist
......@@ -114,7 +127,7 @@ parseData body = do
let (kti, il) = assignIndices iobj
-- cluster tags
ctags <- fromObj "cluster_tags" obj
(map_n, map_i, ptags) <- mergeData [] [] [] (nl, il, ctags)
(map_g, map_n, map_i, ptags) <- mergeData [] [] [] (gl, nl, il, ctags)
optype <- fromObj "type" request
rqtype <-
case optype of
......@@ -140,7 +153,7 @@ parseData body = do
let ex_ndx = map Node.idx ex_nodes
return $ Evacuate ex_ndx
other -> fail ("Invalid request type '" ++ other ++ "'")
return $ Request rqtype map_n map_i ptags
return $ Request rqtype map_g map_n map_i ptags
-- | Format the result
formatRVal :: RqType -> [Node.AllocElement] -> JSValue
......
......@@ -45,6 +45,7 @@ import Text.Printf (printf)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Group as Group
import Ganeti.HTools.Types
......@@ -70,7 +71,7 @@ data RqType
deriving (Show)
-- | A complete request, as received from Ganeti.
data Request = Request RqType Node.List Instance.List [String]
data Request = Request RqType Group.List Node.List Instance.List [String]
deriving (Show)
-- * Functions
......@@ -168,10 +169,10 @@ commonSuffix nl il =
mergeData :: [(String, DynUtil)] -- ^ Instance utilisation data
-> [String] -- ^ Exclusion tags
-> [String] -- ^ Untouchable instances
-> (Node.List, Instance.List, [String])
-> (Group.List, Node.List, Instance.List, [String])
-- ^ Data from backends
-> Result (Node.List, Instance.List, [String])
mergeData um extags exinsts (nl, il2, tags) =
-> Result (Group.List, Node.List, Instance.List, [String])
mergeData um extags exinsts (gl, nl, il2, tags) =
let il = Container.elems il2
il3 = foldl' (\im (name, n_util) ->
case Container.findByName im name of
......@@ -194,7 +195,7 @@ mergeData um extags exinsts (nl, il2, tags) =
in if not $ all (`elem` all_inst_names) exinsts
then Bad $ "Some of the excluded instances are unknown: " ++
show (exinsts \\ all_inst_names)
else Ok (snl, sil, tags)
else Ok (gl, snl, sil, tags)
-- | Checks the cluster data for consistency.
checkData :: Node.List -> Instance.List
......
......@@ -35,6 +35,7 @@ import Text.JSON.Types
import qualified Ganeti.Luxi as L
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import Ganeti.HTools.Utils (fromJVal, annotateResult, tryFromObj, asJSObject)
......@@ -63,10 +64,15 @@ queryInstancesMsg =
L.QueryInstances [] ["name", "disk_usage", "be/memory", "be/vcpus",
"status", "pnode", "snodes", "tags", "oper_ram"] False
-- | The input data for cluster query
-- | The input data for cluster query.
queryClusterInfoMsg :: L.LuxiOp
queryClusterInfoMsg = L.QueryClusterInfo
-- | The Luxi call used to list the cluster's node groups.
--
-- Only the fields needed to build a group object are requested: the
-- UUID and the name.  No name filter, no locking.
queryGroupsMsg :: L.LuxiOp
queryGroupsMsg = L.QueryGroups [] wantedFields False
  where wantedFields = ["uuid", "name"]
-- | Wrapper over callMethod doing node query.
queryNodes :: L.Client -> IO (Result JSValue)
queryNodes = L.callMethod queryNodesMsg
......@@ -78,6 +84,10 @@ queryInstances = L.callMethod queryInstancesMsg
queryClusterInfo :: L.Client -> IO (Result JSValue)
queryClusterInfo = L.callMethod queryClusterInfoMsg
-- | Issue the node group query over the given Luxi client and return
-- the raw JSON reply.
queryGroups :: L.Client -> IO (Result JSValue)
queryGroups client = L.callMethod queryGroupsMsg client
-- | Parse a instance list in JSON format.
getInstances :: NameAssoc
-> JSValue
......@@ -145,11 +155,23 @@ getClusterTags v = do
obj <- annotateResult errmsg $ asJSObject v
tryFromObj errmsg (fromJSObject obj) "tags"
-- | Parse the group query reply (a JSON array of per-group rows) into
-- (uuid, group) association pairs.
getGroups :: JSValue -> Result [(String, Group.Group)]
getGroups v = do
  rows <- toArray v
  mapM parseGroup rows
-- | Parse one row of the group query reply into a (uuid, group) pair.
--
-- The query requests exactly the @uuid@ and @name@ fields, in that
-- order, so any other row shape is a malformed response.
parseGroup :: JSValue -> Result (String, Group.Group)
parseGroup (JSArray [ uuid, name ]) = do
  xname <- annotateResult "Parsing new group" (fromJVal name)
  -- FIX: the annotation previously said "Node '…'", a copy-paste
  -- leftover from the node parser; a failure here concerns a group
  let convert v = annotateResult ("Group '" ++ xname ++ "'") (fromJVal v)
  xuuid <- convert uuid
  -- also dropped the redundant '$' before the parenthesised tuple
  return (xuuid, Group.create xname xuuid AllocPreferred)
parseGroup v = fail ("Invalid group query result: " ++ show v)
-- * Main loader functionality
-- | Builds the cluster data from an URL.
readData :: String -- ^ Unix socket to use as source
-> IO (Result JSValue, Result JSValue, Result JSValue)
-> IO (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
readData master =
E.bracket
(L.getClient master)
......@@ -158,20 +180,23 @@ readData master =
nodes <- queryNodes s
instances <- queryInstances s
cinfo <- queryClusterInfo s
return (nodes, instances, cinfo)
groups <- queryGroups s
return (groups, nodes, instances, cinfo)
)
parseData :: (Result JSValue, Result JSValue, Result JSValue)
-> Result (Node.List, Instance.List, [String])
parseData (nodes, instances, cinfo) = do
parseData :: (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
-> Result (Group.List, Node.List, Instance.List, [String])
parseData (groups, nodes, instances, cinfo) = do
group_data <- groups >>= getGroups
let (_, group_idx) = assignIndices group_data
node_data <- nodes >>= getNodes
let (node_names, node_idx) = assignIndices node_data
inst_data <- instances >>= getInstances node_names
let (_, inst_idx) = assignIndices inst_data
ctags <- cinfo >>= getClusterTags
return (node_idx, inst_idx, ctags)
return (group_idx, node_idx, inst_idx, ctags)
-- | Top level function for data loading
loadData :: String -- ^ Unix socket to use as source
-> IO (Result (Node.List, Instance.List, [String]))
-> IO (Result (Group.List, Node.List, Instance.List, [String]))
loadData master = readData master >>= return . parseData
......@@ -835,9 +835,10 @@ prop_Loader_assignIndices nodes =
-- is zero
prop_Loader_mergeData ns =
let na = Container.fromAssocList $ map (\n -> (Node.idx n, n)) ns
in case Loader.mergeData [] [] [] (na, Container.empty, []) of
in case Loader.mergeData [] [] []
(Container.empty, na, Container.empty, []) of
Types.Bad _ -> False
Types.Ok (nl, il, _) ->
Types.Ok (_, nl, il, _) ->
let nodes = Container.elems nl
instances = Container.elems il
in (sum . map (length . Node.pList)) nodes == 0 &&
......
......@@ -39,6 +39,7 @@ import Text.Printf (printf)
import Ganeti.HTools.Utils
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
......@@ -74,6 +75,11 @@ getNodes :: String -> Result [(String, Node.Node)]
getNodes body = loadJSArray "Parsing node data" body >>=
mapM (parseNode . fromJSObject)
-- | Parse the bulk group listing (a JSON array of objects) into
-- (uuid, group) association pairs.
getGroups :: String -> Result [(String, Group.Group)]
getGroups body = do
  objs <- loadJSArray "Parsing group data" body
  mapM (parseGroup . fromJSObject) objs
-- | Construct an instance from a JSON object.
parseInstance :: NameAssoc
-> [(String, JSValue)]
......@@ -119,28 +125,39 @@ parseNode a = do
dtotal dfree ctotal False guuid)
return (name, node)
-- | Build a (uuid, group) pair from one object of the bulk group
-- listing.  The name is extracted first so that a failure on the UUID
-- lookup can be annotated with the group's name.
parseGroup :: [(String, JSValue)] -> Result (String, Group.Group)
parseGroup attrs = do
  gname <- tryFromObj "Parsing new group" attrs "name"
  guuid <- tryFromObj ("Group '" ++ gname ++ "'") attrs "uuid"
  return (guuid, Group.create gname guuid AllocPreferred)
-- | Loads the raw cluster data from an URL.
readData :: String -- ^ Cluster or URL to use as source
-> IO (Result String, Result String, Result String)
-> IO (Result String, Result String, Result String, Result String)
readData master = do
let url = formatHost master
group_body <- getUrl $ printf "%s/2/groups?bulk=1" url
node_body <- getUrl $ printf "%s/2/nodes?bulk=1" url
inst_body <- getUrl $ printf "%s/2/instances?bulk=1" url
tags_body <- getUrl $ printf "%s/2/tags" url
return (node_body, inst_body, tags_body)
return (group_body, node_body, inst_body, tags_body)
-- | Builds the cluster data from the raw Rapi content
parseData :: (Result String, Result String, Result String)
-> Result (Node.List, Instance.List, [String])
parseData (node_body, inst_body, tags_body) = do
parseData :: (Result String, Result String, Result String, Result String)
-> Result (Group.List, Node.List, Instance.List, [String])
parseData (group_body, node_body, inst_body, tags_body) = do
group_data <- group_body >>= getGroups
let (_, group_idx) = assignIndices group_data
node_data <- node_body >>= getNodes
let (node_names, node_idx) = assignIndices node_data
inst_data <- inst_body >>= getInstances node_names
let (_, inst_idx) = assignIndices inst_data
tags_data <- tags_body >>= (fromJResult "Parsing tags data" . decodeStrict)
return (node_idx, inst_idx, tags_data)
return (group_idx, node_idx, inst_idx, tags_data)
-- | Top level function for data loading
loadData :: String -- ^ Cluster or URL to use as source
-> IO (Result (Node.List, Instance.List, [String]))
-> IO (Result (Group.List, Node.List, Instance.List, [String]))
loadData master = readData master >>= return . parseData
......@@ -36,6 +36,7 @@ import Text.Printf (printf)
import Ganeti.HTools.Utils
import Ganeti.HTools.Types
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
......@@ -53,9 +54,10 @@ parseDesc desc =
-- | Builds the cluster data from node\/instance files.
parseData :: String -- ^ Cluster description in text format
-> Result (Node.List, Instance.List, [String])
-> Result (Group.List, Node.List, Instance.List, [String])
parseData ndata = do
(cnt, disk, mem, cpu) <- parseDesc ndata
let defgroup = Group.create "default" defaultGroupID AllocPreferred
let nodes = map (\idx ->
let n = Node.create (printf "node%03d" idx)
(fromIntegral mem) 0 mem
......@@ -63,10 +65,11 @@ parseData ndata = do
(fromIntegral cpu) False defaultGroupID
in (idx, Node.setIdx n idx)
) [1..cnt]
return (Container.fromAssocList nodes, Container.empty, [])
return (Container.fromAssocList [(0, Group.setIdx defgroup 0)],
Container.fromAssocList nodes, Container.empty, [])
-- | Builds the cluster data from node\/instance files.
loadData :: String -- ^ Cluster description in text format
-> IO (Result (Node.List, Instance.List, [String]))
-> IO (Result (Group.List, Node.List, Instance.List, [String]))
loadData = -- IO monad, just for consistency with the other loaders
return . parseData
......@@ -47,6 +47,7 @@ import Ganeti.HTools.Utils
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
......@@ -91,6 +92,13 @@ serializeCluster nl il =
idata = serializeInstances nl il
in ndata ++ ['\n'] ++ idata
-- | Load a group from a field list.
--
-- The group section of the text file has the format @name uuid@ per
-- line; any other number of fields is rejected as malformed.  The
-- result is keyed by UUID, matching the other backends' group loaders.
loadGroup :: (Monad m) => [String] -> m (String, Group.Group)
loadGroup [name, gid] =
  -- dropped the redundant '$' before the parenthesised tuple
  return (gid, Group.create name gid AllocPreferred)
loadGroup s = fail $ "Invalid/incomplete group data: '" ++ show s ++ "'"
-- | Load a node from a field list.
loadNode :: (Monad m) => [String] -> m (String, Node.Node)
-- compatibility wrapper for old text files
......@@ -148,20 +156,26 @@ readData = readFile
-- | Builds the cluster data from text input.
parseData :: String -- ^ Text data
-> Result (Node.List, Instance.List, [String])
-> Result (Group.List, Node.List, Instance.List, [String])
parseData fdata = do
let flines = lines fdata
(nlines, ilines) = break null flines
(glines, nilines) = break null flines
(nlines, ilines) = break null (tail nilines)
nfixed <- case nlines of
[] -> Bad "Invalid format of the input file (no node data)"
xs -> Ok xs
ifixed <- case ilines of
[] -> Bad "Invalid format of the input file (no instance data)"
_:xs -> Ok xs
{- group file: name uuid -}
(_, gl) <- loadTabular glines loadGroup
{- node file: name t_mem n_mem f_mem t_disk f_disk -}
(ktn, nl) <- loadTabular nlines loadNode
(ktn, nl) <- loadTabular nfixed loadNode
{- instance file: name mem disk status pnode snode -}
(_, il) <- loadTabular ifixed (loadInst ktn)
return (nl, il, [])
return (gl, nl, il, [])
-- | Top level function for data loading
loadData :: String -- ^ Path to the text file
-> IO (Result (Node.List, Instance.List, [String]))
-> IO (Result (Group.List, Node.List, Instance.List, [String]))
loadData afile = readData afile >>= return . parseData
......@@ -61,7 +61,7 @@ processResults _ as =
processRequest :: Request
-> Result Cluster.AllocSolution
processRequest request =
let Request rqtype nl il _ = request
let Request rqtype _ nl il _ = request
in case rqtype of
Allocate xi reqn -> Cluster.tryMGAlloc nl il xi reqn
Relocate idx reqn exnodes -> Cluster.tryReloc nl il idx reqn exnodes
......@@ -87,7 +87,7 @@ main = do
exitWith $ ExitFailure 1
Ok rq -> return rq
let Request rq nl _ _ = request
let Request rq _ nl _ _ = request
when (isJust shownodes) $ do
hPutStrLn stderr "Initial cluster status:"
......
......@@ -226,7 +226,7 @@ main = do
verbose = optVerbose opts
shownodes = optShowNodes opts
(fixed_nl, ilf, ctags) <- loadExternalData opts
(_, fixed_nl, ilf, ctags) <- loadExternalData opts
let offline_names = optOffline opts
all_nodes = Container.elems fixed_nl
......
......@@ -38,6 +38,7 @@ import Text.Printf (printf)
import qualified Ganeti.HTools.Container as Container
import qualified Ganeti.HTools.Cluster as Cluster
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
#ifndef NO_CURL
......@@ -89,25 +90,25 @@ fixSlash = map (\x -> if x == '/' then '_' else x)
-- | Generates serialized data from loader input
processData :: Result (Node.List, Instance.List, [String])
-> Result (Node.List, Instance.List, String)
processData :: Result (Group.List, Node.List, Instance.List, [String])
-> Result (Group.List, Node.List, Instance.List, String)
processData input_data = do
(nl, il, _) <- input_data >>= Loader.mergeData [] [] []
(gl, nl, il, _) <- input_data >>= Loader.mergeData [] [] []
let (_, fix_nl) = Loader.checkData nl il
adata = serializeCluster nl il
return (fix_nl, il, adata)
return (gl, fix_nl, il, adata)
-- | Writes cluster data out
writeData :: Int
-> String
-> Options
-> Result (Node.List, Instance.List, String)
-> Result (Group.List, Node.List, Instance.List, String)
-> IO Bool
writeData _ name _ (Bad err) =
printf "\nError for %s: failed to load data. Details:\n%s\n" name err >>
return False
writeData nlen name opts (Ok (nl, il, adata)) = do
writeData nlen name opts (Ok (_, nl, il, adata)) = do
printf "%-*s " nlen name :: IO ()
hFlush stdout
let shownodes = optShowNodes opts
......
......@@ -207,7 +207,7 @@ main = do
ispec = optISpec opts
shownodes = optShowNodes opts
(fixed_nl, il, _) <- loadExternalData opts
(_, fixed_nl, il, _) <- loadExternalData opts
printKeys $ map (\(a, fn) -> ("SPEC_" ++ a, fn ispec)) specData
printKeys [ ("SPEC_RQN", printf "%d" (optINodes opts)) ]
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment