{-| Implementation of the RAPI client interface.

-}

{-

Copyright (C) 2009, 2010, 2011, 2012 Google Inc.

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.

-}

{-# LANGUAGE BangPatterns, CPP #-}

module Ganeti.HTools.Backend.Rapi
  ( loadData
  , parseData
  ) where

import Control.Exception
import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
#ifndef NO_CURL
import Network.Curl
import Network.Curl.Types ()
#endif
import Control.Monad
import Text.JSON (JSObject, fromJSObject, decodeStrict)
import Text.JSON.Types (JSValue(..))
import Text.Printf (printf)
import System.FilePath

import Ganeti.BasicTypes
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import Ganeti.JSON
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.Constants as C

{-# ANN module "HLint: ignore Eta reduce" #-}

-- | File method prefix.
filePrefix :: String
filePrefix = "file://"

-- | Read a URL via curl and return the body if successful.
getUrl :: (Monad m) => String -> IO (m String)

#ifdef NO_CURL
getUrl _ = return $ fail "RAPI/curl backend disabled at compile time"

#else

-- | Connection timeout (when using non-file methods).
connTimeout :: Long
connTimeout = 15

-- | The default timeout for queries (when using non-file methods).
queryTimeout :: Long
queryTimeout = 60

-- | The curl options we use.
curlOpts :: [CurlOption]
curlOpts = [ CurlSSLVerifyPeer False
           , CurlSSLVerifyHost 0
           , CurlTimeout queryTimeout
           , CurlConnectTimeout connTimeout
           ]

getUrl url = do
  (code, !body) <- curlGetString url curlOpts
  return (case code of
            CurlOK -> return body
            _ -> fail $ printf "Curl error for '%s', error %s"
                 url (show code))
#endif

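-- A minimal usage sketch (illustrative only; the URL is hypothetical): the
-- polymorphic return type is typically instantiated at 'Result', so callers
-- can pattern-match on success or failure without handling exceptions:
--
-- > body <- getUrl "https://cluster.example.com:5080/2/info"
-- > case (body :: Result String) of
-- >   Ok txt  -> putStrLn txt
-- >   Bad msg -> putStrLn ("RAPI query failed: " ++ msg)
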
-- | Helper to convert I/O errors into 'Bad' values.
ioErrToResult :: IO a -> IO (Result a)
ioErrToResult ioaction =
  Control.Exception.catch (liftM Ok ioaction)
    (\e -> return . Bad . show $ (e::IOException))

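-- Illustrative example (hypothetical path): a failing read is turned into a
-- 'Bad' value instead of escaping as an 'IOException':
--
-- > res <- ioErrToResult (readFile "/nonexistent/nodes.json")
-- > -- res is a 'Bad' value with the exception rendered via 'show'
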
-- | Append the default port if not passed in.
formatHost :: String -> String
formatHost master =
  if ':' `elem` master
    then master
    else "https://" ++ master ++ ":" ++ show C.defaultRapiPort

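-- Illustrative behaviour (the actual port comes from 'C.defaultRapiPort';
-- 5080 below is only an assumption, and the host names are made up):
--
-- > formatHost "cluster.example.com"      == "https://cluster.example.com:5080"
-- > formatHost "cluster.example.com:1234" == "cluster.example.com:1234"
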
-- | Parse an instance list in JSON format.
getInstances :: NameAssoc
             -> String
             -> Result [(String, Instance.Instance)]
getInstances ktn body =
  loadJSArray "Parsing instance data" body >>=
  mapM (parseInstance ktn . fromJSObject)

-- | Parse a node list in JSON format.
getNodes :: NameAssoc -> String -> Result [(String, Node.Node)]
getNodes ktg body = loadJSArray "Parsing node data" body >>=
                    mapM (parseNode ktg . fromJSObject)

-- | Parse a group list in JSON format.
getGroups :: String -> Result [(String, Group.Group)]
getGroups body = loadJSArray "Parsing group data" body >>=
                 mapM (parseGroup . fromJSObject)

-- | Construct an instance from a JSON object.
parseInstance :: NameAssoc
              -> JSRecord
              -> Result (String, Instance.Instance)
parseInstance ktn a = do
  name <- tryFromObj "Parsing new instance" a "name"
  let owner_name = "Instance '" ++ name ++ "', error while parsing data"
  let extract s x = tryFromObj owner_name x s
  disk <- extract "disk_usage" a
  beparams <- liftM fromJSObject (extract "beparams" a)
  omem <- extract "oper_ram" a
  mem <- case omem of
           JSRational _ _ -> annotateResult owner_name (fromJVal omem)
           _ -> extract "memory" beparams `mplus` extract "maxmem" beparams
  vcpus <- extract "vcpus" beparams
  pnode <- extract "pnode" a >>= lookupNode ktn name
  snodes <- extract "snodes" a
  snode <- if null snodes
             then return Node.noSecondary
             else readEitherString (head snodes) >>= lookupNode ktn name
  running <- extract "status" a
  tags <- extract "tags" a
  auto_balance <- extract "auto_balance" beparams
  dt <- extract "disk_template" a
  su <- extract "spindle_use" beparams
  let inst = Instance.create name mem disk vcpus running tags
             auto_balance pnode snode dt su
  return (name, inst)

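-- For orientation, an illustrative (entirely made-up) fragment of the bulk
-- instance output consumed by 'parseInstance'; only the fields actually read
-- above are shown, and the values are examples, not real RAPI output:
--
-- > { "name": "inst1.example.com",
-- >   "disk_usage": 1024,
-- >   "oper_ram": 512,
-- >   "beparams": { "vcpus": 2, "auto_balance": true, "spindle_use": 1 },
-- >   "pnode": "node1.example.com",
-- >   "snodes": ["node2.example.com"],
-- >   "status": "running",
-- >   "tags": [],
-- >   "disk_template": "drbd" }
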
-- | Construct a node from a JSON object.
parseNode :: NameAssoc -> JSRecord -> Result (String, Node.Node)
parseNode ktg a = do
  name <- tryFromObj "Parsing new node" a "name"
  let desc = "Node '" ++ name ++ "', error while parsing data"
      extract s = tryFromObj desc a s
  offline <- extract "offline"
  drained <- extract "drained"
  vm_cap <- annotateResult desc $ maybeFromObj a "vm_capable"
  let vm_cap' = fromMaybe True vm_cap
  ndparams <- extract "ndparams" >>= asJSObject
  spindles <- tryFromObj desc (fromJSObject ndparams) "spindle_count"
  guuid <- annotateResult desc $ maybeFromObj a "group.uuid"
  guuid' <- lookupGroup ktg name (fromMaybe defaultGroupID guuid)
  node <- if offline || drained || not vm_cap'
            then return $ Node.create name 0 0 0 0 0 0 True 0 guuid'
            else do
              mtotal <- extract "mtotal"
              mnode <- extract "mnode"
              mfree <- extract "mfree"
              dtotal <- extract "dtotal"
              dfree <- extract "dfree"
              ctotal <- extract "ctotal"
              return $ Node.create name mtotal mnode mfree
                       dtotal dfree ctotal False spindles guuid'
  return (name, node)

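-- Note on the branch above: nodes that are offline, drained or not
-- vm_capable are created with all resources zeroed and the offline flag set,
-- so their memory/disk/CPU fields are never even looked up.
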
-- | Construct a group from a JSON object.
parseGroup :: JSRecord -> Result (String, Group.Group)
parseGroup a = do
  name <- tryFromObj "Parsing new group" a "name"
  let extract s = tryFromObj ("Group '" ++ name ++ "'") a s
  uuid <- extract "uuid"
  apol <- extract "alloc_policy"
  ipol <- extract "ipolicy"
  tags <- extract "tags"
  return (uuid, Group.create name uuid apol ipol tags)

-- | Parse cluster data from the info resource.
parseCluster :: JSObject JSValue -> Result ([String], IPolicy)
parseCluster obj = do
  let obj' = fromJSObject obj
      extract s = tryFromObj "Parsing cluster data" obj' s
  tags <- extract "tags"
  ipolicy <- extract "ipolicy"
  return (tags, ipolicy)

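-- Only two fields of the (much larger) /2/info reply are used here; an
-- illustrative, made-up fragment, with the ipolicy contents elided:
--
-- > { "tags": ["mytag"],
-- >   "ipolicy": { ... } }
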
-- | Loads the raw cluster data from a URL.
readDataHttp :: String -- ^ Cluster or URL to use as source
             -> IO (Result String, Result String, Result String, Result String)
readDataHttp master = do
  let url = formatHost master
  group_body <- getUrl $ printf "%s/2/groups?bulk=1" url
  node_body <- getUrl $ printf "%s/2/nodes?bulk=1" url
  inst_body <- getUrl $ printf "%s/2/instances?bulk=1" url
  info_body <- getUrl $ printf "%s/2/info" url
  return (group_body, node_body, inst_body, info_body)

-- | Loads the raw cluster data from the filesystem.
readDataFile :: String -- ^ Path to the directory containing the files
             -> IO (Result String, Result String, Result String, Result String)
readDataFile path = do
  group_body <- ioErrToResult . readFile $ path </> "groups.json"
  node_body <- ioErrToResult . readFile $ path </> "nodes.json"
  inst_body <- ioErrToResult . readFile $ path </> "instances.json"
  info_body <- ioErrToResult . readFile $ path </> "info.json"
  return (group_body, node_body, inst_body, info_body)

-- | Loads data via either 'readDataFile' or 'readDataHttp'.
readData :: String -- ^ URL to use as source
         -> IO (Result String, Result String, Result String, Result String)
readData url =
  if filePrefix `isPrefixOf` url
    then readDataFile (drop (length filePrefix) url)
    else readDataHttp url

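-- Illustrative sources (hypothetical path and host): @file:///var/ganeti/dump@
-- is read via 'readDataFile' and must contain groups.json, nodes.json,
-- instances.json and info.json, while @cluster.example.com@ goes through
-- 'readDataHttp' and the RAPI endpoints above.
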
-- | Builds the cluster data from the raw Rapi content.
parseData :: (Result String, Result String, Result String, Result String)
          -> Result ClusterData
parseData (group_body, node_body, inst_body, info_body) = do
  group_data <- group_body >>= getGroups
  let (group_names, group_idx) = assignIndices group_data
  node_data <- node_body >>= getNodes group_names
  let (node_names, node_idx) = assignIndices node_data
  inst_data <- inst_body >>= getInstances node_names
  let (_, inst_idx) = assignIndices inst_data
  (tags, ipolicy) <- info_body >>=
                     (fromJResult "Parsing cluster info" . decodeStrict) >>=
                     parseCluster
  return (ClusterData group_idx node_idx inst_idx tags ipolicy)

-- | Top level function for data loading.
loadData :: String -- ^ Cluster or URL to use as source
         -> IO (Result ClusterData)
loadData = fmap parseData . readData
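
-- A minimal end-to-end sketch of how a caller might use this backend
-- (hypothetical main and source string, illustrative only):
--
-- > main :: IO ()
-- > main = do
-- >   res <- loadData "file:///var/ganeti/dump"
-- >   case res of
-- >     Ok _cdata -> putStrLn "cluster data loaded"
-- >     Bad msg   -> putStrLn ("loading failed: " ++ msg)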