diff --git a/Ganeti/HTools/ExtLoader.hs b/Ganeti/HTools/ExtLoader.hs
index 41c84539917486f03d2e9bb4cdff54c7fa419d5a..8ec5a2cc0c6508a112ec7fc011b742533738cc82 100644
--- a/Ganeti/HTools/ExtLoader.hs
+++ b/Ganeti/HTools/ExtLoader.hs
@@ -10,7 +10,7 @@ libraries implementing the low-level protocols.
 
 {-
 
-Copyright (C) 2009 Google Inc.
+Copyright (C) 2009, 2010 Google Inc.
 
 This program is free software; you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
@@ -49,6 +49,7 @@ import qualified Ganeti.HTools.Text as Text
 import qualified Ganeti.HTools.Loader as Loader
 import qualified Ganeti.HTools.Instance as Instance
 import qualified Ganeti.HTools.Node as Node
+import qualified Ganeti.HTools.Group as Group
 
 import Ganeti.HTools.Types
 import Ganeti.HTools.CLI
@@ -74,7 +75,7 @@ parseUtilisation line =
 
 -- | External tool data loader from a variety of sources.
 loadExternalData :: Options
-                 -> IO (Node.List, Instance.List, [String])
+                 -> IO (Group.List, Node.List, Instance.List, [String])
 loadExternalData opts = do
   let mhost = optMaster opts
       lsock = optLuxi opts
@@ -120,7 +121,7 @@ loadExternalData opts = do
               | otherwise -> return $ Bad "No backend selected! Exiting."
 
   let ldresult = input_data >>= Loader.mergeData util_data' exTags exInsts
-  (loaded_nl, il, tags) <-
+  (gl, loaded_nl, il, tags) <-
       (case ldresult of
         Ok x -> return x
        Bad s -> do
@@ -134,4 +135,4 @@ loadExternalData opts = do
     hPutStrLn stderr "Warning: cluster has inconsistent data:"
     hPutStrLn stderr . unlines . map (printf "  - %s") $ fix_msgs
 
-  return (fixed_nl, il, tags)
+  return (gl, fixed_nl, il, tags)
diff --git a/Ganeti/HTools/IAlloc.hs b/Ganeti/HTools/IAlloc.hs
index c60e36185ff3a18082554103d5e557ae1d7cb2fc..4117fd478f52503971f728c1eb28a8a9e7dbe698 100644
--- a/Ganeti/HTools/IAlloc.hs
+++ b/Ganeti/HTools/IAlloc.hs
@@ -34,6 +34,7 @@ import Text.JSON (JSObject, JSValue(JSBool, JSString, JSArray),
                   makeObj, encodeStrict, decodeStrict,
                   fromJSObject, toJSString)
 import qualified Ganeti.HTools.Container as Container
+import qualified Ganeti.HTools.Group as Group
 import qualified Ganeti.HTools.Node as Node
 import qualified Ganeti.HTools.Instance as Instance
 import Ganeti.HTools.Loader
@@ -94,6 +95,14 @@ parseNode n a = do
                     dtotal dfree ctotal False guuid)
   return (n, node)
 
+-- | Parses a group as found in the cluster group list.
+parseGroup :: String     -- ^ The group UUID
+           -> [(String, JSValue)] -- ^ The JSON object
+           -> Result (String, Group.Group)
+parseGroup u a = do
+  name <- fromObj "name" a
+  return (u, Group.create name u AllocPreferred)
+
 -- | Top-level parser.
 parseData :: String         -- ^ The JSON message as received from Ganeti
           -> Result Request -- ^ A (possible valid) request
@@ -102,6 +111,10 @@ parseData body = do
   let obj = fromJSObject decoded
   -- request parser
   request <- liftM fromJSObject (fromObj "request" obj)
+  -- existing group parsing
+  glist <- liftM fromJSObject (fromObj "nodegroups" obj)
+  gobj <- mapM (\(x, y) -> asJSObject y >>= parseGroup x . fromJSObject) glist
+  let (_, gl) = assignIndices gobj
   -- existing node parsing
   nlist <- liftM fromJSObject (fromObj "nodes" obj)
   nobj <- mapM (\(x,y) -> asJSObject y >>= parseNode x . fromJSObject) nlist
@@ -114,7 +127,7 @@ parseData body = do
   let (kti, il) = assignIndices iobj
   -- cluster tags
   ctags <- fromObj "cluster_tags" obj
-  (map_n, map_i, ptags) <- mergeData [] [] [] (nl, il, ctags)
+  (map_g, map_n, map_i, ptags) <- mergeData [] [] [] (gl, nl, il, ctags)
   optype <- fromObj "type" request
   rqtype <-
       case optype of
@@ -140,7 +153,7 @@ parseData body = do
             let ex_ndx = map Node.idx ex_nodes
             return $ Evacuate ex_ndx
         other -> fail ("Invalid request type '" ++ other ++ "'")
-  return $ Request rqtype map_n map_i ptags
+  return $ Request rqtype map_g map_n map_i ptags
 
 -- | Format the result
 formatRVal :: RqType -> [Node.AllocElement] -> JSValue
diff --git a/Ganeti/HTools/Loader.hs b/Ganeti/HTools/Loader.hs
index 08d2290ae76edf85fc888988bdf6f91624074f91..b7c74a0f832c9dcf8008a0fcfbe5f8864e9e2786 100644
--- a/Ganeti/HTools/Loader.hs
+++ b/Ganeti/HTools/Loader.hs
@@ -45,6 +45,7 @@ import Text.Printf (printf)
 import qualified Ganeti.HTools.Container as Container
 import qualified Ganeti.HTools.Instance as Instance
 import qualified Ganeti.HTools.Node as Node
+import qualified Ganeti.HTools.Group as Group
 
 import Ganeti.HTools.Types
 
@@ -70,7 +71,7 @@ data RqType
     deriving (Show)
 
 -- | A complete request, as received from Ganeti.
-data Request = Request RqType Node.List Instance.List [String]
+data Request = Request RqType Group.List Node.List Instance.List [String]
                deriving (Show)
 
 -- * Functions
@@ -168,10 +169,10 @@ commonSuffix nl il =
 mergeData :: [(String, DynUtil)]  -- ^ Instance utilisation data
           -> [String]             -- ^ Exclusion tags
          -> [String]             -- ^ Untouchable instances
-          -> (Node.List, Instance.List, [String])
+          -> (Group.List, Node.List, Instance.List, [String])
           -- ^ Data from backends
-          -> Result (Node.List, Instance.List, [String])
-mergeData um extags exinsts (nl, il2, tags) =
+          -> Result (Group.List, Node.List, Instance.List, [String])
+mergeData um extags exinsts (gl, nl, il2, tags) =
   let il = Container.elems il2
       il3 = foldl' (\im (name, n_util) ->
                         case Container.findByName im name of
@@ -194,7 +195,7 @@ mergeData um extags exinsts (nl, il2, tags) =
   in if not $ all (`elem` all_inst_names) exinsts
      then Bad $ "Some of the excluded instances are unknown: " ++
           show (exinsts \\ all_inst_names)
-     else Ok (snl, sil, tags)
+     else Ok (gl, snl, sil, tags)
 
 -- | Checks the cluster data for consistency.
 checkData :: Node.List -> Instance.List
diff --git a/Ganeti/HTools/Luxi.hs b/Ganeti/HTools/Luxi.hs
index cc8090815682e1526731506843e49213e53fff77..c5b93437c22fe2be99f8b5216c9d498428f51a34 100644
--- a/Ganeti/HTools/Luxi.hs
+++ b/Ganeti/HTools/Luxi.hs
@@ -35,6 +35,7 @@ import Text.JSON.Types
 import qualified Ganeti.Luxi as L
 import Ganeti.HTools.Loader
 import Ganeti.HTools.Types
+import qualified Ganeti.HTools.Group as Group
 import qualified Ganeti.HTools.Node as Node
 import qualified Ganeti.HTools.Instance as Instance
 import Ganeti.HTools.Utils (fromJVal, annotateResult, tryFromObj, asJSObject)
@@ -63,10 +64,15 @@ queryInstancesMsg =
   L.QueryInstances [] ["name", "disk_usage", "be/memory", "be/vcpus",
                        "status", "pnode", "snodes", "tags", "oper_ram"] False
 
--- | The input data for cluster query
+-- | The input data for cluster query.
 queryClusterInfoMsg :: L.LuxiOp
 queryClusterInfoMsg = L.QueryClusterInfo
 
+-- | The input data for node group query.
+queryGroupsMsg :: L.LuxiOp
+queryGroupsMsg =
+  L.QueryGroups [] ["uuid", "name"] False
+
 -- | Wraper over callMethod doing node query.
 queryNodes :: L.Client -> IO (Result JSValue)
 queryNodes = L.callMethod queryNodesMsg
@@ -78,6 +84,10 @@ queryInstances = L.callMethod queryInstancesMsg
 queryClusterInfo :: L.Client -> IO (Result JSValue)
 queryClusterInfo = L.callMethod queryClusterInfoMsg
 
+-- | Wrapper over callMethod doing group query.
+queryGroups :: L.Client -> IO (Result JSValue)
+queryGroups = L.callMethod queryGroupsMsg
+
 -- | Parse a instance list in JSON format.
 getInstances :: NameAssoc
              -> JSValue
@@ -145,11 +155,23 @@ getClusterTags v = do
   obj <- annotateResult errmsg $ asJSObject v
   tryFromObj errmsg (fromJSObject obj) "tags"
 
+getGroups :: JSValue -> Result [(String, Group.Group)]
+getGroups arr = toArray arr >>= mapM parseGroup
+
+parseGroup :: JSValue -> Result (String, Group.Group)
+parseGroup (JSArray [ uuid, name ]) = do
+  xname <- annotateResult "Parsing new group" (fromJVal name)
+  let convert v = annotateResult ("Node '" ++ xname ++ "'") (fromJVal v)
+  xuuid <- convert uuid
+  return $ (xuuid, Group.create xname xuuid AllocPreferred)
+
+parseGroup v = fail ("Invalid group query result: " ++ show v)
+
 -- * Main loader functionality
 
 -- | Builds the cluster data from an URL.
 readData :: String -- ^ Unix socket to use as source
-         -> IO (Result JSValue, Result JSValue, Result JSValue)
+         -> IO (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
 readData master =
   E.bracket
      (L.getClient master)
@@ -158,20 +180,23 @@ readData master =
        nodes <- queryNodes s
        instances <- queryInstances s
        cinfo <- queryClusterInfo s
-       return (nodes, instances, cinfo)
+       groups <- queryGroups s
+       return (groups, nodes, instances, cinfo)
      )
 
-parseData :: (Result JSValue, Result JSValue, Result JSValue)
-          -> Result (Node.List, Instance.List, [String])
-parseData (nodes, instances, cinfo) = do
+parseData :: (Result JSValue, Result JSValue, Result JSValue, Result JSValue)
+          -> Result (Group.List, Node.List, Instance.List, [String])
+parseData (groups, nodes, instances, cinfo) = do
+  group_data <- groups >>= getGroups
+  let (_, group_idx) = assignIndices group_data
   node_data <- nodes >>= getNodes
   let (node_names, node_idx) = assignIndices node_data
   inst_data <- instances >>= getInstances node_names
   let (_, inst_idx) = assignIndices inst_data
   ctags <- cinfo >>= getClusterTags
-  return (node_idx, inst_idx, ctags)
+  return (group_idx, node_idx, inst_idx, ctags)
 
 -- | Top level function for data loading
 loadData :: String -- ^ Unix socket to use as source
-         -> IO (Result (Node.List, Instance.List, [String]))
+         -> IO (Result (Group.List, Node.List, Instance.List, [String]))
 loadData master = readData master >>= return . parseData
diff --git a/Ganeti/HTools/QC.hs b/Ganeti/HTools/QC.hs
index ebcc93970d55762220a4aaeda778feba033211ef..bf83fcc412781769c8a40fa1a9abb4bf8a458664 100644
--- a/Ganeti/HTools/QC.hs
+++ b/Ganeti/HTools/QC.hs
@@ -835,9 +835,10 @@ prop_Loader_assignIndices nodes =
 -- is zero
 prop_Loader_mergeData ns =
   let na = Container.fromAssocList $ map (\n -> (Node.idx n, n)) ns
-  in case Loader.mergeData [] [] [] (na, Container.empty, []) of
+  in case Loader.mergeData [] [] []
+         (Container.empty, na, Container.empty, []) of
     Types.Bad _ -> False
-    Types.Ok (nl, il, _) ->
+    Types.Ok (_, nl, il, _) ->
      let nodes = Container.elems nl
          instances = Container.elems il
      in (sum . map (length . Node.pList)) nodes == 0 &&
diff --git a/Ganeti/HTools/Rapi.hs b/Ganeti/HTools/Rapi.hs
index 58df9bd65d8e4c6f55ad2db459e2115e2191ffa6..5b6b3f2957c12af840be082b71ef016cd4f9f8ea 100644
--- a/Ganeti/HTools/Rapi.hs
+++ b/Ganeti/HTools/Rapi.hs
@@ -39,6 +39,7 @@ import Text.Printf (printf)
 import Ganeti.HTools.Utils
 import Ganeti.HTools.Loader
 import Ganeti.HTools.Types
+import qualified Ganeti.HTools.Group as Group
 import qualified Ganeti.HTools.Node as Node
 import qualified Ganeti.HTools.Instance as Instance
 
@@ -74,6 +75,11 @@ getNodes :: String -> Result [(String, Node.Node)]
 getNodes body = loadJSArray "Parsing node data" body >>=
                 mapM (parseNode . fromJSObject)
 
+-- | Parse a group list in JSON format.
+getGroups :: String -> Result [(String, Group.Group)]
+getGroups body = loadJSArray "Parsing group data" body >>=
+                 mapM (parseGroup . fromJSObject)
+
 -- | Construct an instance from a JSON object.
 parseInstance :: NameAssoc
               -> [(String, JSValue)]
@@ -119,28 +125,39 @@ parseNode a = do
                     dtotal dfree ctotal False guuid)
   return (name, node)
 
+-- | Construct a group from a JSON object.
+parseGroup :: [(String, JSValue)] -> Result (String, Group.Group)
+parseGroup a = do
+  name <- tryFromObj "Parsing new group" a "name"
+  let extract s = tryFromObj ("Group '" ++ name ++ "'") a s
+  uuid <- extract "uuid"
+  return (uuid, Group.create name uuid AllocPreferred)
+
 -- | Loads the raw cluster data from an URL.
 readData :: String -- ^ Cluster or URL to use as source
-         -> IO (Result String, Result String, Result String)
+         -> IO (Result String, Result String, Result String, Result String)
 readData master = do
   let url = formatHost master
+  group_body <- getUrl $ printf "%s/2/groups?bulk=1" url
   node_body <- getUrl $ printf "%s/2/nodes?bulk=1" url
   inst_body <- getUrl $ printf "%s/2/instances?bulk=1" url
   tags_body <- getUrl $ printf "%s/2/tags" url
-  return (node_body, inst_body, tags_body)
+  return (group_body, node_body, inst_body, tags_body)
 
 -- | Builds the cluster data from the raw Rapi content
-parseData :: (Result String, Result String, Result String)
-          -> Result (Node.List, Instance.List, [String])
-parseData (node_body, inst_body, tags_body) = do
+parseData :: (Result String, Result String, Result String, Result String)
+          -> Result (Group.List, Node.List, Instance.List, [String])
+parseData (group_body, node_body, inst_body, tags_body) = do
+  group_data <- group_body >>= getGroups
+  let (_, group_idx) = assignIndices group_data
   node_data <- node_body >>= getNodes
   let (node_names, node_idx) = assignIndices node_data
   inst_data <- inst_body >>= getInstances node_names
   let (_, inst_idx) = assignIndices inst_data
   tags_data <- tags_body >>= (fromJResult "Parsing tags data" . decodeStrict)
-  return (node_idx, inst_idx, tags_data)
+  return (group_idx, node_idx, inst_idx, tags_data)
 
 -- | Top level function for data loading
 loadData :: String -- ^ Cluster or URL to use as source
-         -> IO (Result (Node.List, Instance.List, [String]))
+         -> IO (Result (Group.List, Node.List, Instance.List, [String]))
 loadData master = readData master >>= return . parseData
diff --git a/Ganeti/HTools/Simu.hs b/Ganeti/HTools/Simu.hs
index 47618b99f65b389e63f8d01d8a168233f308d7bf..503394e221c6654b3f3c3b4fce3bd679ed86c42e 100644
--- a/Ganeti/HTools/Simu.hs
+++ b/Ganeti/HTools/Simu.hs
@@ -36,6 +36,7 @@ import Text.Printf (printf)
 import Ganeti.HTools.Utils
 import Ganeti.HTools.Types
 import qualified Ganeti.HTools.Container as Container
+import qualified Ganeti.HTools.Group as Group
 import qualified Ganeti.HTools.Node as Node
 import qualified Ganeti.HTools.Instance as Instance
 
@@ -53,9 +54,10 @@ parseDesc desc =
 
 -- | Builds the cluster data from node\/instance files.
 parseData :: String -- ^ Cluster description in text format
-          -> Result (Node.List, Instance.List, [String])
+          -> Result (Group.List, Node.List, Instance.List, [String])
 parseData ndata = do
   (cnt, disk, mem, cpu) <- parseDesc ndata
+  let defgroup = Group.create "default" defaultGroupID AllocPreferred
   let nodes = map (\idx ->
                     let n = Node.create (printf "node%03d" idx)
                             (fromIntegral mem) 0 mem
@@ -63,10 +65,11 @@ parseData ndata = do
                             (fromIntegral cpu) False defaultGroupID
                     in (idx, Node.setIdx n idx)
                   ) [1..cnt]
-  return (Container.fromAssocList nodes, Container.empty, [])
+  return (Container.fromAssocList [(0, Group.setIdx defgroup 0)],
+          Container.fromAssocList nodes, Container.empty, [])
 
 -- | Builds the cluster data from node\/instance files.
 loadData :: String -- ^ Cluster description in text format
-         -> IO (Result (Node.List, Instance.List, [String]))
+         -> IO (Result (Group.List, Node.List, Instance.List, [String]))
 loadData = -- IO monad, just for consistency with the other loaders
     return . parseData
diff --git a/Ganeti/HTools/Text.hs b/Ganeti/HTools/Text.hs
index b4f9b72d68903e5e676ea78eaec7b376cf4045b6..e49e30a5da37ba4fd4769f422375fafda4c89e0b 100644
--- a/Ganeti/HTools/Text.hs
+++ b/Ganeti/HTools/Text.hs
@@ -47,6 +47,7 @@ import Ganeti.HTools.Utils
 import Ganeti.HTools.Loader
 import Ganeti.HTools.Types
 import qualified Ganeti.HTools.Container as Container
+import qualified Ganeti.HTools.Group as Group
 import qualified Ganeti.HTools.Node as Node
 import qualified Ganeti.HTools.Instance as Instance
 
@@ -91,6 +92,13 @@ serializeCluster nl il =
       idata = serializeInstances nl il
   in ndata ++ ['\n'] ++ idata
 
+-- | Load a group from a field list.
+loadGroup :: (Monad m) => [String] -> m (String, Group.Group)
+loadGroup [name, gid] =
+  return $ (gid, Group.create name gid AllocPreferred)
+
+loadGroup s = fail $ "Invalid/incomplete group data: '" ++ show s ++ "'"
+
 -- | Load a node from a field list.
 loadNode :: (Monad m) => [String] -> m (String, Node.Node)
 -- compatibility wrapper for old text files
@@ -148,20 +156,26 @@ readData = readFile
 
 -- | Builds the cluster data from text input.
 parseData :: String -- ^ Text data
-          -> Result (Node.List, Instance.List, [String])
+          -> Result (Group.List, Node.List, Instance.List, [String])
 parseData fdata = do
   let flines = lines fdata
-      (nlines, ilines) = break null flines
+      (glines, nilines) = break null flines
+      (nlines, ilines) = break null (tail nilines)
+  nfixed <- case nlines of
+            [] -> Bad "Invalid format of the input file (no node data)"
+            xs -> Ok xs
   ifixed <- case ilines of
             [] -> Bad "Invalid format of the input file (no instance data)"
             _:xs -> Ok xs
+  {- group file: name uuid -}
+  (_, gl) <- loadTabular glines loadGroup
   {- node file: name t_mem n_mem f_mem t_disk f_disk -}
-  (ktn, nl) <- loadTabular nlines loadNode
+  (ktn, nl) <- loadTabular nfixed loadNode
   {- instance file: name mem disk status pnode snode -}
   (_, il) <- loadTabular ifixed (loadInst ktn)
-  return (nl, il, [])
+  return (gl, nl, il, [])
 
 -- | Top level function for data loading
 loadData :: String -- ^ Path to the text file
-         -> IO (Result (Node.List, Instance.List, [String]))
+         -> IO (Result (Group.List, Node.List, Instance.List, [String]))
 loadData afile = readData afile >>= return . parseData
diff --git a/hail.hs b/hail.hs
index 8a08d7f889143862081c18146f0482300ac19f4c..58a787e235f8e7eeba67f804e967a846724bc50d 100644
--- a/hail.hs
+++ b/hail.hs
@@ -61,7 +61,7 @@ processResults _ as =
 
 processRequest :: Request -> Result Cluster.AllocSolution
 processRequest request =
-  let Request rqtype nl il _ = request
+  let Request rqtype _ nl il _ = request
   in case rqtype of
        Allocate xi reqn -> Cluster.tryMGAlloc nl il xi reqn
        Relocate idx reqn exnodes -> Cluster.tryReloc nl il idx reqn exnodes
@@ -87,7 +87,7 @@ main = do
                  exitWith $ ExitFailure 1
                Ok rq -> return rq
 
-  let Request rq nl _ _ = request
+  let Request rq _ nl _ _ = request
 
   when (isJust shownodes) $ do
          hPutStrLn stderr "Initial cluster status:"
diff --git a/hbal.hs b/hbal.hs
index f8ad8d0659b7c58d934091d1d36aed1dc3060a27..68a391f4c137e243a42bcbefb94e8b0f52ea796e 100644
--- a/hbal.hs
+++ b/hbal.hs
@@ -226,7 +226,7 @@ main = do
       verbose = optVerbose opts
       shownodes = optShowNodes opts
 
-  (fixed_nl, ilf, ctags) <- loadExternalData opts
+  (_, fixed_nl, ilf, ctags) <- loadExternalData opts
 
   let offline_names = optOffline opts
       all_nodes = Container.elems fixed_nl
diff --git a/hscan.hs b/hscan.hs
index ad503f6aaa8d73eefeeb94603e173cfab645967b..9e187776039a07599db2f94fc3b65caf6be5b284 100644
--- a/hscan.hs
+++ b/hscan.hs
@@ -38,6 +38,7 @@ import Text.Printf (printf)
 
 import qualified Ganeti.HTools.Container as Container
 import qualified Ganeti.HTools.Cluster as Cluster
+import qualified Ganeti.HTools.Group as Group
 import qualified Ganeti.HTools.Node as Node
 import qualified Ganeti.HTools.Instance as Instance
 #ifndef NO_CURL
@@ -89,25 +90,25 @@ fixSlash = map (\x -> if x == '/' then '_' else x)
 
 
 -- | Generates serialized data from loader input
-processData :: Result (Node.List, Instance.List, [String])
-            -> Result (Node.List, Instance.List, String)
+processData :: Result (Group.List, Node.List, Instance.List, [String])
+            -> Result (Group.List, Node.List, Instance.List, String)
 processData input_data = do
-  (nl, il, _) <- input_data >>= Loader.mergeData [] [] []
+  (gl, nl, il, _) <- input_data >>= Loader.mergeData [] [] []
   let (_, fix_nl) = Loader.checkData nl il
       adata = serializeCluster nl il
-  return (fix_nl, il, adata)
+  return (gl, fix_nl, il, adata)
 
 -- | Writes cluster data out
 writeData :: Int
           -> String
           -> Options
-          -> Result (Node.List, Instance.List, String)
+          -> Result (Group.List, Node.List, Instance.List, String)
           -> IO Bool
 writeData _ name _ (Bad err) =
     printf "\nError for %s: failed to load data. Details:\n%s\n" name err >>
    return False
-writeData nlen name opts (Ok (nl, il, adata)) = do
+writeData nlen name opts (Ok (_, nl, il, adata)) = do
   printf "%-*s " nlen name :: IO ()
   hFlush stdout
   let shownodes = optShowNodes opts
diff --git a/hspace.hs b/hspace.hs
index 90986d950ec9bf35d3fa2c3549878939141d2fba..189b0ba981402a121401ff9c4a4d9d25d91d0898 100644
--- a/hspace.hs
+++ b/hspace.hs
@@ -207,7 +207,7 @@ main = do
       ispec = optISpec opts
       shownodes = optShowNodes opts
 
-  (fixed_nl, il, _) <- loadExternalData opts
+  (_, fixed_nl, il, _) <- loadExternalData opts
 
   printKeys $ map (\(a, fn) -> ("SPEC_" ++ a, fn ispec)) specData
   printKeys [ ("SPEC_RQN", printf "%d" (optINodes opts)) ]