Make the disks parameter available to the constructor
src/Ganeti/HTools/Backend/Rapi.hs
{-| Implementation of the RAPI client interface.

-}

{-

Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.

-}

{-# LANGUAGE BangPatterns, CPP #-}

module Ganeti.HTools.Backend.Rapi
  ( loadData
  , parseData
  ) where

import Control.Exception
import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
#ifndef NO_CURL
import Network.Curl
import Network.Curl.Types ()
#endif
import Control.Monad
import Text.JSON (JSObject, fromJSObject, decodeStrict)
import Text.JSON.Types (JSValue(..))
import Text.Printf (printf)
import System.FilePath

import Ganeti.BasicTypes
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import Ganeti.JSON
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.Constants as C

{-# ANN module "HLint: ignore Eta reduce" #-}

-- | File method prefix.
filePrefix :: String
filePrefix = "file://"

-- | Read a URL via curl and return the body if successful.
getUrl :: (Monad m) => String -> IO (m String)

#ifdef NO_CURL
getUrl _ = return $ fail "RAPI/curl backend disabled at compile time"

#else

-- | Connection timeout (when using non-file methods).
connTimeout :: Long
connTimeout = 15

-- | The default timeout for queries (when using non-file methods).
queryTimeout :: Long
queryTimeout = 60

-- | The curl options we use.
curlOpts :: [CurlOption]
curlOpts = [ CurlSSLVerifyPeer False
           , CurlSSLVerifyHost 0
           , CurlTimeout queryTimeout
           , CurlConnectTimeout connTimeout
           ]

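-- Note on 'curlOpts' above (descriptive comment, not in the original source):
-- peer and host certificate verification are deliberately disabled
-- ('CurlSSLVerifyPeer False', 'CurlSSLVerifyHost 0'), so the connection is
-- encrypted but the RAPI endpoint is not authenticated; the two timeouts
-- bound connection setup and the whole query, respectively.
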
getUrl url = do
  (code, !body) <- curlGetString url curlOpts
  return (case code of
            CurlOK -> return body
            _ -> fail $ printf "Curl error for '%s', error %s"
                 url (show code))
#endif

-- | Helper to convert I/O errors into 'Bad' values.
ioErrToResult :: IO a -> IO (Result a)
ioErrToResult ioaction =
  Control.Exception.catch (liftM Ok ioaction)
    (\e -> return . Bad . show $ (e::IOException))

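-- A usage sketch (illustrative, not part of the original module): any
-- 'IOException' raised by the wrapped action is captured as a 'Bad' value
-- instead of escaping:
--
-- > ioErrToResult (readFile "/no/such/file")  -- yields Bad "<exception text>"
-- > ioErrToResult (return 42)                 -- yields Ok 42
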
-- | Append the default port if not passed in.
formatHost :: String -> String
formatHost master =
  if ':' `elem` master
    then master
    else "https://" ++ master ++ ":" ++ show C.defaultRapiPort

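-- Illustrative behaviour (a sketch; assumes the stock value of
-- 'C.defaultRapiPort', which is 5080 in upstream Ganeti):
--
-- > formatHost "master.example.com"       -- "https://master.example.com:5080"
-- > formatHost "master.example.com:5080"  -- left as-is, a ':' is present
-- > formatHost "https://master:5080"      -- left as-is, a ':' is present
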
-- | Parse an instance list in JSON format.
getInstances :: NameAssoc
             -> String
             -> Result [(String, Instance.Instance)]
getInstances ktn body =
  loadJSArray "Parsing instance data" body >>=
  mapM (parseInstance ktn . fromJSObject)

-- | Parse a node list in JSON format.
getNodes :: NameAssoc -> String -> Result [(String, Node.Node)]
getNodes ktg body = loadJSArray "Parsing node data" body >>=
                    mapM (parseNode ktg . fromJSObject)

-- | Parse a group list in JSON format.
getGroups :: String -> Result [(String, Group.Group)]
getGroups body = loadJSArray "Parsing group data" body >>=
                 mapM (parseGroup . fromJSObject)

-- | Construct an instance from a JSON object.
parseInstance :: NameAssoc
              -> JSRecord
              -> Result (String, Instance.Instance)
parseInstance ktn a = do
  name <- tryFromObj "Parsing new instance" a "name"
  let owner_name = "Instance '" ++ name ++ "', error while parsing data"
  let extract s x = tryFromObj owner_name x s
  disk <- extract "disk_usage" a
  disks <- extract "disk.sizes" a
  beparams <- liftM fromJSObject (extract "beparams" a)
  omem <- extract "oper_ram" a
  mem <- case omem of
           JSRational _ _ -> annotateResult owner_name (fromJVal omem)
           _ -> extract "memory" beparams `mplus` extract "maxmem" beparams
  vcpus <- extract "vcpus" beparams
  pnode <- extract "pnode" a >>= lookupNode ktn name
  snodes <- extract "snodes" a
  snode <- case snodes of
             [] -> return Node.noSecondary
             x:_ -> readEitherString x >>= lookupNode ktn name
  running <- extract "status" a
  tags <- extract "tags" a
  auto_balance <- extract "auto_balance" beparams
  dt <- extract "disk_template" a
  su <- extract "spindle_use" beparams
  let inst = Instance.create name mem disk disks vcpus running tags
             auto_balance pnode snode dt su
  return (name, inst)

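-- The "disk.sizes" field above is what feeds the new disks argument of
-- 'Instance.create' (the per-disk size list), alongside the aggregate
-- "disk_usage" value.  A rough sketch of the JSON shape this parser expects
-- for one instance, derived only from the fields read above (values are
-- illustrative, not taken from a real cluster):
--
-- > { "name": "inst1.example.com", "disk_usage": 1024,
-- >   "disk.sizes": [512, 512], "oper_ram": 128,
-- >   "beparams": { "memory": 128, "vcpus": 1,
-- >                 "auto_balance": true, "spindle_use": 1 },
-- >   "pnode": "node1", "snodes": ["node2"], "status": "running",
-- >   "tags": [], "disk_template": "drbd" }
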
-- | Construct a node from a JSON object.
parseNode :: NameAssoc -> JSRecord -> Result (String, Node.Node)
parseNode ktg a = do
  name <- tryFromObj "Parsing new node" a "name"
  let desc = "Node '" ++ name ++ "', error while parsing data"
      extract s = tryFromObj desc a s
  offline <- extract "offline"
  drained <- extract "drained"
  vm_cap <- annotateResult desc $ maybeFromObj a "vm_capable"
  let vm_cap' = fromMaybe True vm_cap
  ndparams <- extract "ndparams" >>= asJSObject
  spindles <- tryFromObj desc (fromJSObject ndparams) "spindle_count"
  guuid <- annotateResult desc $ maybeFromObj a "group.uuid"
  guuid' <- lookupGroup ktg name (fromMaybe defaultGroupID guuid)
  node <- if offline || drained || not vm_cap'
            then return $ Node.create name 0 0 0 0 0 0 True 0 guuid'
            else do
              mtotal <- extract "mtotal"
              mnode <- extract "mnode"
              mfree <- extract "mfree"
              dtotal <- extract "dtotal"
              dfree <- extract "dfree"
              ctotal <- extract "ctotal"
              return $ Node.create name mtotal mnode mfree
                       dtotal dfree ctotal False spindles guuid'
  return (name, node)

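-- Note (descriptive, not in the original source): nodes that are offline,
-- drained, or not VM-capable are constructed with all resource figures set
-- to zero and the offline flag set, so the placement algorithms will not use
-- them; only healthy nodes get their real mtotal/mnode/mfree/dtotal/dfree/
-- ctotal values from the bulk node data.
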
-- | Construct a group from a JSON object.
parseGroup :: JSRecord -> Result (String, Group.Group)
parseGroup a = do
  name <- tryFromObj "Parsing new group" a "name"
  let extract s = tryFromObj ("Group '" ++ name ++ "'") a s
  uuid <- extract "uuid"
  apol <- extract "alloc_policy"
  ipol <- extract "ipolicy"
  tags <- extract "tags"
  return (uuid, Group.create name uuid apol ipol tags)

-- | Parse cluster data from the info resource.
parseCluster :: JSObject JSValue -> Result ([String], IPolicy)
parseCluster obj = do
  let obj' = fromJSObject obj
      extract s = tryFromObj "Parsing cluster data" obj' s
  tags <- extract "tags"
  ipolicy <- extract "ipolicy"
  return (tags, ipolicy)

-- | Loads the raw cluster data from a URL.
readDataHttp :: String -- ^ Cluster or URL to use as source
             -> IO (Result String, Result String, Result String, Result String)
readDataHttp master = do
  let url = formatHost master
  group_body <- getUrl $ printf "%s/2/groups?bulk=1" url
  node_body <- getUrl $ printf "%s/2/nodes?bulk=1" url
  inst_body <- getUrl $ printf "%s/2/instances?bulk=1" url
  info_body <- getUrl $ printf "%s/2/info" url
  return (group_body, node_body, inst_body, info_body)

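-- The bulk=1 query parameter asks RAPI to return the full object details for
-- every group/node/instance in a single response, rather than just a list of
-- names, which is the shape the parsers above expect.
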
-- | Loads the raw cluster data from the filesystem.
readDataFile :: String -- ^ Path to the directory containing the files
             -> IO (Result String, Result String, Result String, Result String)
readDataFile path = do
  group_body <- ioErrToResult . readFile $ path </> "groups.json"
  node_body <- ioErrToResult . readFile $ path </> "nodes.json"
  inst_body <- ioErrToResult . readFile $ path </> "instances.json"
  info_body <- ioErrToResult . readFile $ path </> "info.json"
  return (group_body, node_body, inst_body, info_body)

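-- Expected on-disk layout (derived from the file names above): a directory
-- containing groups.json, nodes.json, instances.json and info.json, each
-- holding the raw body of the corresponding RAPI response.
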
-- | Loads data via either 'readDataFile' or 'readDataHttp'.
readData :: String -- ^ URL to use as source
         -> IO (Result String, Result String, Result String, Result String)
readData url =
  if filePrefix `isPrefixOf` url
    then readDataFile (drop (length filePrefix) url)
    else readDataHttp url

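-- A usage sketch (illustrative paths and hosts, not from the original
-- source):
--
-- > readData "file:///var/tmp/cluster-dump"    -- reads the four *.json files
-- > readData "master.example.com"              -- queries RAPI on the default port
-- > readData "https://master.example.com:5080" -- used as given
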
-- | Builds the cluster data from the raw Rapi content.
parseData :: (Result String, Result String, Result String, Result String)
          -> Result ClusterData
parseData (group_body, node_body, inst_body, info_body) = do
  group_data <- group_body >>= getGroups
  let (group_names, group_idx) = assignIndices group_data
  node_data <- node_body >>= getNodes group_names
  let (node_names, node_idx) = assignIndices node_data
  inst_data <- inst_body >>= getInstances node_names
  let (_, inst_idx) = assignIndices inst_data
  (tags, ipolicy) <- info_body >>=
                     (fromJResult "Parsing cluster info" . decodeStrict) >>=
                     parseCluster
  return (ClusterData group_idx node_idx inst_idx tags ipolicy)

-- | Top level function for data loading.
loadData :: String -- ^ Cluster or URL to use as source
         -> IO (Result ClusterData)
loadData = fmap parseData . readData
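
-- A caller-side sketch (hypothetical, not part of this module), showing how
-- the loaded data might be consumed:
--
-- > main :: IO ()
-- > main = do
-- >   res <- loadData "https://master.example.com:5080"
-- >   case res of
-- >     Ok _cdata -> putStrLn "cluster data loaded"
-- >     Bad msg   -> putStrLn ("RAPI load failed: " ++ msg)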