-- |
-- Module : ZuulWeeder
-- Description : The project entrypoint
-- Copyright : (c) Red Hat, 2022
-- License : Apache-2.0
--
-- Maintainer : [email protected], [email protected]
-- Stability : provisional
-- Portability : portable
--
-- The project entrypoint.
module ZuulWeeder (main, runDemo, demoConfig) where

import Control.Concurrent.CGroup qualified
import Data.Map qualified as Map
import Data.Set qualified as Set
import Data.Text qualified as Text
import Data.Yaml (decodeThrow)
import Network.Wai.Handler.Warp qualified as Warp
import Streaming
import Streaming.Prelude qualified as S
import System.Environment
import System.Posix.Signals (Handler (Catch), installHandler, sigTERM)
import Web.HttpApiData (toHeader)
import Zuul.Config (CanonicalProjectName, TenantName)
import Zuul.ConfigLoader (Config (..), ConnectionUrlMap, emptyConfig, loadConfig, postProcess)
import Zuul.ServiceConfig (ServiceConfig (..), readServiceConfig)
import Zuul.Tenant
import Zuul.ZooKeeper
import ZuulWeeder.Graph
import ZuulWeeder.Monitoring qualified
import ZuulWeeder.Prelude
import ZuulWeeder.UI qualified
import ZuulWeeder.UI.App (CacheRender)
import ZuulWeeder.UI.App qualified

-- | The runtime settings, read from environment variables by 'getEnvArgs'.
data Args = Args
{ zkPath :: FilePathT
, configPath :: FilePathT
}

-- | Read the 'Args' from the environment, falling back to default paths.
getEnvArgs :: IO Args
getEnvArgs =
Args <$> envPath "WEEDER_DATA" "/var/tmp/weeder" <*> envPath "ZUUL_CONF" "/etc/zuul/zuul.conf"
where
envPath :: String -> FilePath -> IO FilePathT
envPath name def = FilePathT . Text.pack . fromMaybe def <$> lookupEnv name
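-- For example, assuming the installed executable is called @zuul-weeder@ (a
-- hypothetical name), the defaults above can be overridden at startup with:
--
-- > WEEDER_DATA=/tmp/weeder ZUUL_CONF=./zuul.conf zuul-weeder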

-- | The main function loads the config, prepares the analysis and serves the UI.
main :: IO ()
main = do
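-- Match the RTS capability count to the cgroup CPU quota (useful in containers).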
Control.Concurrent.CGroup.initRTSThreads
withUtf8 $ withLogger (\l -> getEnvArgs >>= mainWithArgs l)

-- | Run the service: load the configuration, start the config reloader and serve the web UI.
mainWithArgs :: Logger -> Args -> IO ()
mainWithArgs logger args = do
(configDump, configLoader) <- do
configLoader <- runExceptT $ mkConfigLoader logger args.zkPath args.configPath
case configLoader of
Left e -> error $ Text.unpack $ "Can't load config: " <> e
Right x -> pure x
cacheRender <- newMVar mempty
config <- configReloader logger configDump configLoader cacheRender
runWeb logger config cacheRender

-- | Start the web interface with the "demoConfig".
-- This is useful for ghcid-powered hot-reload development.
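--
-- For instance (the cabal target name here is hypothetical):
--
-- > ghcid --command "cabal repl zuul-weeder" --test "ZuulWeeder.runDemo"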
runDemo :: IO ()
runDemo = do
cacheRender <- newMVar mempty
withLogger $ \logger -> runWeb logger demoConfig cacheRender

-- | Serve the web interface until the process is terminated.
-- The listening port, the root url and the location of the web assets can be set
-- with the WEEDER_PORT, WEEDER_ROOT_URL and WEEDER_DIST_PATH environment variables.
runWeb :: Logger -> IO AnalysisStatus -> MVar CacheRender -> IO ()
runWeb logger config cacheRender = do
rootUrl <- ensureTrailingSlash . Text.pack . fromMaybe "/" <$> lookupEnv "WEEDER_ROOT_URL"
distPath <- fromMaybe "dists" <$> lookupEnv "WEEDER_DIST_PATH"
port <- maybe 9001 read <$> lookupEnv "WEEDER_PORT"
info logger ("[+] serving 0.0.0.0:" <> toHeader port <> encodeUtf8 rootUrl)
let app = ZuulWeeder.UI.App.app config cacheRender (ZuulWeeder.UI.BasePath rootUrl) distPath
-- monitoring
monitoring <- ZuulWeeder.Monitoring.mkMonitoring logger
Warp.runSettings (wsettings port) (monitoring app)
where
wsettings port = Warp.defaultSettings & Warp.setPort port & Warp.setInstallShutdownHandler tHandler
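-- Catch SIGTERM so Warp can close its listening socket and stop cleanly, which
-- matters when the service runs under a process supervisor or in a container.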
tHandler closeSocket = void $ installHandler sigTERM (Catch closeSocket) Nothing
ensureTrailingSlash url = case Text.unsnoc url of
Nothing -> "/"
Just (x, '/') -> ensureTrailingSlash x
_ -> Text.snoc url '/'
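-- For reference, the normalization performed by ensureTrailingSlash above
-- (illustrative only, since the function is local to runWeb):
--
-- >>> ensureTrailingSlash ""
-- "/"
-- >>> ensureTrailingSlash "/weeder"
-- "/weeder/"
-- >>> ensureTrailingSlash "/weeder//"
-- "/weeder/"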

-- | An action that dumps the Zuul configuration from ZooKeeper into the local data directory.
newtype ConfigDumper = ConfigDumper {dumpConfig :: ExceptT Text IO ()}

-- | An action that loads the tenants configuration and the Zuul configuration objects.
newtype ConfigLoader = ConfigLoader {loadConfig :: ExceptT Text IO (TenantsConfig, Config)}

-- | Create an IO action that reloads the config every hour.
configReloader :: Logger -> ConfigDumper -> ConfigLoader -> MVar CacheRender -> IO (IO AnalysisStatus)
configReloader logger configDumper configLoader cacheRender = do
-- Get current time
now <- getSec
-- Read the initial config; an error here is fatal
conf <- either (error . Text.unpack) id <$> runExceptT (configLoader.loadConfig)
-- Cache the result
cache <- newIORef (newAnalysisStatus $ uncurry analyzeConfig conf)
ts <- newMVar (now, cache)
pure (modifyMVar ts go)
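-- The returned action is cheap to call on every request: it returns the cached
-- analysis right away and only spawns a background reload when the cached value
-- is more than an hour old.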
where
go :: (Int64, IORef AnalysisStatus) -> IO ((Int64, IORef AnalysisStatus), AnalysisStatus)
go (ts, cache) = do
status <- readIORef cache
now <- getSec
if now - ts < 3600
then pure ((ts, cache), status)
else do
modifyIORef cache (#refreshing `set` True)
reload cache
pure ((now, cache), status)
-- Load the config in a background thread
reload :: IORef AnalysisStatus -> IO ()
reload cache = void $ forkIO do
let setError err = modifyIORef cache ((#loadingError `set` Just err) . (#refreshing `set` False))
res <- timeout 600_000_000 $ do
info logger "ReLoading the configuration"
confE <- runExceptT do
configDumper.dumpConfig
configLoader.loadConfig
case confE of
Left e -> do
info logger ("Error reloading config: " <> encodeUtf8 e)
setError e
Right conf -> do
info logger "Caching the graph result"
writeIORef cache (newAnalysisStatus $ uncurry analyzeConfig conf)
modifyMVar cacheRender (\_ -> pure (mempty, ()))
when (isNothing res) do
info logger "Error reloading config timeout"
setError "Loading config timeout"

-- | Create the IO actions used to dump and load the config.
mkConfigLoader :: Logger -> FilePathT -> FilePathT -> ExceptT Text IO (ConfigDumper, ConfigLoader)
mkConfigLoader logger dataBaseDir configFile = do
-- Load the zuul.conf
serviceConfig <- readServiceConfig (readFileText configFile)
pure (configDumper serviceConfig, go serviceConfig)
where
dataDir = dataBaseDir </> "data"
configDumper :: ServiceConfig -> ConfigDumper
configDumper serviceConfig = ConfigDumper do
env <- lift $ lookupEnv "ZUUL_WEEDER_NO_ZK"
case env of
Just _ -> lift $ hPutStrLn stderr "[+] ZUUL_WEEDER_NO_ZK is set, skipping dumpZK"
Nothing -> Zuul.ZooKeeper.fetchConfigs logger dataDir serviceConfig.zookeeper
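-- Location of the dumped Zuul system configuration inside the data directory.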
cp = dataDir </> FilePathT "zuul/system/conf/0000000000"
go :: ServiceConfig -> ConfigLoader
go serviceConfig = ConfigLoader do
-- dump the ZooKeeper config if the local copy is missing
whenM (not <$> lift (doesDirectoryExist cp)) (dumpConfig $ configDumper serviceConfig)
-- read the tenants config from dataDir
systemConfig <- readTenantsConfig dataDir
-- decode the tenants config
tenantsConfig <- except (decodeTenantsConfig systemConfig)
-- load all the config objects
let tr = Zuul.Tenant.mkResolver serviceConfig tenantsConfig
allTenants = Set.fromList $ Map.keys tenantsConfig.tenants
allProjects = getCanonicalProjects serviceConfig.connections tenantsConfig
config <- lift $ loadConfigFiles allProjects allTenants serviceConfig.urlBuilders tr dataDir
pure (tenantsConfig, Zuul.ConfigLoader.postProcess config)

-- | Load all the configuration objects from the local config dump, streaming the
-- files and folding them into a single 'Config'.
loadConfigFiles :: Map CanonicalProjectName (Set TenantName) -> Set TenantName -> ConnectionUrlMap -> TenantResolver -> FilePathT -> IO Zuul.ConfigLoader.Config
loadConfigFiles projs tenants ub tr =
flip execStateT (Zuul.ConfigLoader.emptyConfig ub projs tenants)
-- StateT Config IO ()
. S.effects
-- Apply the loadConfig function to each element
. S.chain (Zuul.ConfigLoader.loadConfig ub tr)
-- Stream (Of ZKFile) (StateT Config IO)
. hoist lift
-- Stream (Of ZKFile) IO
. walkConfigNodes

-- | The demo configuration used by 'runDemo'.
demoConfig :: IO AnalysisStatus
demoConfig = do
(tenantsConfig, config) <-
either (error . show) id
<$> runExceptT do
serviceConfig <-
readServiceConfig
( pure
[s|
[zookeeper]
hosts=localhost
tls_cert=cert.pem
tls_key=key.pem
tls_ca=ca.pem
[connection "gerrit"]
driver=gerrit
server=managesf.sftests.com
canonical_hostname=sftests.com
[connection git]
driver=git
baseurl=http://localhost/cgit
|]
)
systemConfig <-
ZKTenantsConfig
<$> decodeThrow
[s|
unparsed_abide:
tenants:
demo:
source:
git:
config-projects:
- project-config: {}
local:
source:
gerrit:
config-projects:
- config: {}
untrusted-projects:
- sf-jobs: {}
- triple-o
- zuul-jobs:
include: [job]
shadow: sf-jobs
|]
tenantsConfig <- except (decodeTenantsConfig systemConfig)
let tr = Zuul.Tenant.mkResolver serviceConfig tenantsConfig
allTenants = Set.fromList $ Map.keys tenantsConfig.tenants
allProjects = getCanonicalProjects serviceConfig.connections tenantsConfig
initialConfig = Zuul.ConfigLoader.emptyConfig serviceConfig.urlBuilders allProjects allTenants
conf <- lift $ flip execStateT initialConfig do
xs <- sequence configFiles
traverse_ (Zuul.ConfigLoader.loadConfig serviceConfig.urlBuilders tr) (pure <$> xs)
pure (tenantsConfig, Zuul.ConfigLoader.postProcess conf)
let analysis = analyzeConfig tenantsConfig config
-- pPrint analysis.config.triggers
-- pPrint (Algebra.Graph.edgeList analysis.dependentGraph)
pure (newAnalysisStatus analysis)
where
mkConfigFile conn proj conf =
ZKFile conn proj "main" (FilePathT ".zuul.yaml") (FilePathT "/") <$> decodeThrow conf
configFiles =
[ mkConfigFile
"sftests.com"
"config"
[s|
- job:
name: base
abstract: true
nodeset: centos
secrets: log-key
- secret:
name: log-key
- nodeset:
name: centos
nodes:
- name: runner
label: cloud-centos-7
- queue:
name: queue
- pipeline:
name: check
trigger:
gerrit: {}
success:
elastic:
- pipeline:
name: periodic
trigger:
timer:
- time: '0 8 * * 6'
- job:
name: wallaby-job
required-projects:
- triple-o
- job:
name: zena-job
- job:
name: config-check
- project-template:
name: common
check:
jobs:
- linter
- project:
templates:
- common
check:
jobs:
- config-check
- project:
name: triple-o
queue: queue
check:
jobs:
- wallaby-job
- zena-job
- linter:
nodeset: centos
|]
, mkConfigFile
"sftests.com"
"sf-jobs"
[s|
- job:
name: linter
nodeset:
nodes:
- name: container
label: pod-centos-7
|]
, mkConfigFile
"localhost"
"project-config"
[s|
- job:
name: base
nodeset: rhel
semaphores: testy-sem
- pipeline:
name: check
trigger:
gerrit: {}
success:
elastic:
- nodeset:
name: rhel
nodes:
- name: runner
label: cloud-rhel-7
|]
]