Skip to content
Snippets Groups Projects
Commit d575c755 authored by Iustin Pop's avatar Iustin Pop
Browse files

Enhance the RAPI backend to accept file:// URLs


This will allow offline testing of this backend (except for the actual
curl part), also in the case where we didn't compile against curl.

Signed-off-by: Iustin Pop <iustin@google.com>
Reviewed-by: René Nussbaumer <rn@google.com>
parent 6bbd76d2
No related branches found
No related tags found
No related merge requests found
......@@ -30,6 +30,7 @@ module Ganeti.HTools.Rapi
, parseData
) where
import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
#ifndef NO_CURL
import Network.Curl
......@@ -39,6 +40,7 @@ import Control.Monad
import Text.JSON (JSObject, fromJSObject, decodeStrict)
import Text.JSON.Types (JSValue(..))
import Text.Printf (printf)
import System.FilePath
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
......@@ -50,6 +52,10 @@ import qualified Ganeti.Constants as C
{-# ANN module "HLint: ignore Eta reduce" #-}
-- | File method prefix.
--
-- A source argument starting with this prefix selects the local
-- filesystem backend ('readDataFile') instead of the HTTP/RAPI one.
filePrefix :: String
filePrefix = "file://"
-- | Read an URL via curl and return the body if successful.
getUrl :: (Monad m) => String -> IO (m String)
......@@ -74,6 +80,11 @@ getUrl url = do
url (show code))
#endif
-- | Helper to convert I/O errors into 'Bad' values.
--
-- Runs the given I/O action; a successful result is wrapped in 'Ok',
-- while any caught exception is rendered via 'show' into a 'Bad'.
ioErrToResult :: IO a -> IO (Result a)
ioErrToResult ioaction =
  catch (fmap Ok ioaction) (return . Bad . show)
-- | Append the default port if not passed in.
formatHost :: String -> String
formatHost master =
......@@ -175,9 +186,9 @@ parseCluster obj = do
return (tags, ipolicy)
-- | Loads the raw cluster data from an URL.
readData :: String -- ^ Cluster or URL to use as source
-> IO (Result String, Result String, Result String, Result String)
readData master = do
readDataHttp :: String -- ^ Cluster or URL to use as source
-> IO (Result String, Result String, Result String, Result String)
readDataHttp master = do
let url = formatHost master
group_body <- getUrl $ printf "%s/2/groups?bulk=1" url
node_body <- getUrl $ printf "%s/2/nodes?bulk=1" url
......@@ -185,6 +196,24 @@ readData master = do
info_body <- getUrl $ printf "%s/2/info" url
return (group_body, node_body, inst_body, info_body)
-- | Loads the raw cluster data from the filesystem.
--
-- Reads the four JSON dumps (groups, nodes, instances, cluster info)
-- from the given directory, converting any I/O error (e.g. a missing
-- file) into a 'Bad' value via 'ioErrToResult'.
readDataFile :: String -- ^ Path to the directory containing the files
             -> IO (Result String, Result String, Result String, Result String)
readDataFile path = do
  group_body <- loadFile "groups.json"
  node_body  <- loadFile "nodes.json"
  inst_body  <- loadFile "instances.json"
  info_body  <- loadFile "info.json"
  return (group_body, node_body, inst_body, info_body)
  where -- read one dump file relative to the given directory
        loadFile = ioErrToResult . readFile . (path </>)
-- | Loads data via either 'readDataFile' or 'readDataHttp'.
--
-- If the source starts with 'filePrefix', the remainder is taken as a
-- directory path and loaded from the filesystem; otherwise it is
-- treated as a cluster address and queried over RAPI.
readData :: String -- ^ URL to use as source
         -> IO (Result String, Result String, Result String, Result String)
readData url =
  if filePrefix `isPrefixOf` url
    then readDataFile (drop (length filePrefix) url)
    else readDataHttp url
-- | Builds the cluster data from the raw Rapi content.
parseData :: (Result String, Result String, Result String, Result String)
-> Result ClusterData
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.