Commit 662a393

[add] Created the remote profiler daemon (#233)
1 parent c07f6b8 commit 662a393

13 files changed: +604 additions, -60 deletions


.gitignore

Lines changed: 1 addition & 0 deletions
@@ -6,6 +6,7 @@
 *.txt
 binaries
 datasets
+profile_*
 
 # Byte-compiled / optimized / DLL files
 __pycache__/

poetry.lock

Lines changed: 20 additions & 8 deletions
(Generated file; diff not rendered.)

pyproject.toml

Lines changed: 3 additions & 1 deletion
@@ -1,12 +1,13 @@
 [tool.poetry]
 name = "redisbench-admin"
-version = "0.5.11"
+version = "0.5.12"
 description = "Redis benchmark run helper. A wrapper around Redis and Redis Modules benchmark tools ( ftsb_redisearch, memtier_benchmark, redis-benchmark, aibench, etc... )."
 authors = ["filipecosta90 <[email protected]>","Redis Performance Group <[email protected]>"]
 readme = "README.md"
 
 [tool.poetry.scripts]
 redisbench-admin = "redisbench_admin.cli:main"
+perf-daemon = "redisbench_admin.profilers.daemon:main"
 
 [tool.poetry.dependencies]
 python = "^3.6.1"
@@ -34,6 +35,7 @@ flask-restx = "^0.5.1"
 Flask-HTTPAuth = "^4.4.0"
 docker = "^5.0.0"
 tox-docker = "^3.1.0"
+daemonize = "^2.5.0"
 
 [tool.poetry.dev-dependencies]
 pytest = "^4.6"
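
The new [tool.poetry.scripts] entry exposes the daemon as an installable perf-daemon command, and the daemonize dependency is pulled in to run it in the background. As a rough, hypothetical sketch (not part of this commit), the generated console script behaves roughly like the wrapper below; only the module path and the main() entry point come from pyproject.toml, and the "start" argument is just an example subcommand:

# Hypothetical stand-in for the perf-daemon console script that Poetry/pip
# generate on install (illustrative sketch only).
import sys

from redisbench_admin.profilers.daemon import main

if __name__ == "__main__":
    sys.argv = ["perf-daemon", "start"]  # equivalent to running `perf-daemon start`
    main()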

redisbench_admin/profilers/daemon.py

Lines changed: 275 additions & 0 deletions
@@ -0,0 +1,275 @@
# BSD 3-Clause License
#
# Copyright (c) 2021., Redis Labs Modules
# All rights reserved.
#

# !/usr/bin/env python3
import logging
import logging.handlers  # needed for logging.handlers.RotatingFileHandler below
from threading import Thread

from flask import Flask, request
import daemonize
from time import sleep
import json
import sys
import os

from redisbench_admin.run.args import S3_BUCKET_NAME
from redisbench_admin.run.s3 import get_test_s3_bucket_path
from redisbench_admin.utils.remote import extract_git_vars

from redisbench_admin.profilers.perf import Perf
from redisbench_admin.run.common import get_start_time_vars

from redisbench_admin.run_local.profile_local import local_profilers_platform_checks
from redisbench_admin.utils.utils import upload_artifacts_to_s3

PID_FILE = "/tmp/perfdaemon.pid"
DEFAULT_PROFILE_FREQ = 99
LOG_LEVEL = logging.DEBUG
if os.getenv("VERBOSE", "0") == "0":
    LOG_LEVEL = logging.INFO
LOG_FORMAT = "%(asctime)s %(levelname)-4s %(message)s"
LOG_DATEFMT = "%Y-%m-%d %H:%M:%S"
LOGNAME = "/tmp/perf-daemon.log"


class PerfDaemon:
    def __init__(self):
        pass

    def main(self):
        app = Flask(__name__)
        app.debug = False
        app.use_reloader = True
        self.perf = Perf()

        print("Writing log to {}".format(LOGNAME))
        handler = logging.handlers.RotatingFileHandler(LOGNAME, maxBytes=1024 * 1024)
        logging.getLogger("werkzeug").setLevel(logging.DEBUG)
        logging.getLogger("werkzeug").addHandler(handler)
        app.logger.setLevel(LOG_LEVEL)
        app.logger.addHandler(handler)
        self.perf.set_logger(app.logger)

        @app.before_first_request
        def before_first_request():
            app.logger.setLevel(logging.INFO)

        @app.route("/profiler/<profiler_name>/start/<pid>", methods=["POST"])
        def profile_start(profiler_name, pid):
            setup_process_number = 1
            total_involved_processes = 1
            (
                start_time,
                start_time_ms,
                start_time_str,
            ) = get_start_time_vars()
            (
                self.github_org_name,
                self.github_repo_name,
                self.github_sha,
                self.github_actor,
                self.github_branch,
                github_branch_detached,
            ) = extract_git_vars()
            self.dso = ""
            self.test_name = ""
            self.setup_name = ""
            if request.is_json:
                data = request.get_json()
                if "dso" in data:
                    self.dso = data["dso"]
                if "test_name" in data:
                    self.test_name = data["test_name"]
                if "setup_name" in data:
                    self.setup_name = data["setup_name"]
                if "github_actor" in data:
                    self.github_actor = data["github_actor"]
                if "github_branch" in data:
                    self.github_branch = data["github_branch"]
                if "github_repo_name" in data:
                    self.github_repo_name = data["github_repo_name"]
                if "github_org_name" in data:
                    self.github_org_name = data["github_org_name"]
                if "github_sha" in data:
                    self.github_sha = data["github_sha"]

            self.collection_summary_str = local_profilers_platform_checks(
                self.dso,
                self.github_actor,
                self.github_branch,
                self.github_repo_name,
                self.github_sha,
            )
            msg = "Starting profiler {} for Process {} of {}: pid {}".format(
                profiler_name,
                setup_process_number,
                total_involved_processes,
                pid,
            )
            app.logger.info(msg)
            profile_filename = (
                "profile_{setup_name}".format(
                    setup_name=self.setup_name,
                )
                + "__primary-{primary_n}-of-{total_primaries}".format(
                    primary_n=setup_process_number,
                    total_primaries=total_involved_processes,
                )
                + "__{test_name}_{profile}_{start_time_str}.out".format(
                    profile=profiler_name,
                    test_name=self.test_name,
                    start_time_str=start_time_str,
                )
            )
            result = self.perf.start_profile(
                pid,
                profile_filename,
                DEFAULT_PROFILE_FREQ,
            )
            status_dict = {
                "result": result,
                "message": msg,
                "start_time": start_time_str,
            }
            return json.dumps(status_dict)

        @app.get("/profiler/<profiler_name>/status/<pid>")
        def profile_status(profiler_name, pid):
            _is_alive = self.perf._is_alive(self.perf.profiler_process)
            status_dict = {"running": _is_alive}
            return json.dumps(status_dict)

        @app.post("/profiler/<profiler_name>/stop/<pid>")
        def profile_stop(profiler_name, pid):
            profile_res = self.perf.stop_profile()
            profilers_artifacts_matrix = []
            primary_id = 1
            total_primaries = 1
            summary_msg = ""  # stays empty if the profiler did not stop cleanly
            if profile_res is True:
                # Generate:
                # - artifact with Flame Graph SVG
                # - artifact with output graph image in PNG format
                # - artifact with top entries in text form
                (
                    profile_res,
                    profile_res_artifacts_map,
                    tabular_data_map,
                ) = self.perf.generate_outputs(
                    self.test_name,
                    details=self.collection_summary_str,
                    binary=self.dso,
                    primary_id=primary_id,
                    total_primaries=total_primaries,
                )
                summary_msg = "Profiler {} for pid {} ran successfully and generated {} artifacts. Generated also {} tables with data(keys:{}).".format(
                    profiler_name,
                    self.perf.profiler_process.pid,
                    len(profile_res_artifacts_map.values()),
                    len(tabular_data_map.values()),
                    ",".join(tabular_data_map.keys()),
                )
                if profile_res is True:
                    app.logger.info(summary_msg)
                    s3_bucket_path = get_test_s3_bucket_path(
                        S3_BUCKET_NAME,
                        self.test_name,
                        self.github_org_name,
                        self.github_repo_name,
                        "profiles",
                    )
                    for (
                        artifact_name,
                        profile_artifact,
                    ) in profile_res_artifacts_map.items():
                        s3_link = None
                        upload_results_s3 = True
                        if upload_results_s3:
                            logging.info(
                                "Uploading results to s3. s3 bucket name: {}. s3 bucket path: {}".format(
                                    S3_BUCKET_NAME, s3_bucket_path
                                )
                            )
                            url_map = upload_artifacts_to_s3(
                                [profile_artifact],
                                S3_BUCKET_NAME,
                                s3_bucket_path,
                            )
                            s3_link = list(url_map.values())[0]
                        profilers_artifacts_matrix.append(
                            {
                                "test_name": self.test_name,
                                "profiler_name": profiler_name,
                                "artifact_name": artifact_name,
                                "s3_link": s3_link,
                            }
                        )

            status_dict = {
                "result": profile_res,
                "summary": summary_msg,
                "profiler_artifacts": profilers_artifacts_matrix,
            }
            self.perf = Perf()
            return json.dumps(status_dict)

        Thread(target=app.run).start()

        self.loop()

    def loop(self):
        while True:
            sleep(1)


d = PerfDaemon()


def main():
    global stop
    global d

    def start():
        current_path = os.path.abspath(os.getcwd())
        print(
            "Starting perfdaemon. PID file {}. Daemon workdir: {}".format(
                PID_FILE, current_path
            )
        )
        daemonize.Daemonize(
            app="perfdaemon", pid=PID_FILE, action=d.main, chdir=current_path
        ).start()

    def stop():
        if not os.path.exists(PID_FILE):
            sys.exit(0)
        with open(PID_FILE, "r") as pidfile:
            pid = pidfile.read()
            os.system("kill -9 %s" % pid)

    def foreground():
        d.main()

    def usage():
        print("usage: start|stop|restart|foreground")
        sys.exit(1)

    if len(sys.argv) < 2:
        usage()
    if sys.argv[1] == "start":
        start()
    elif sys.argv[1] == "stop":
        stop()
    elif sys.argv[1] == "restart":
        stop()
        start()
    elif sys.argv[1] == "foreground":
        foreground()
    else:
        sys.exit(1)


if __name__ == "__main__":
    main()
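
Once installed, the daemon is controlled with perf-daemon start|stop|restart|foreground (see usage() above) and serves its HTTP API from a background Flask thread. The snippet below is a hypothetical client-side sketch of driving those endpoints; it is not part of this commit, and the host/port (Flask's app.run() defaults), the target pid, the JSON field values, and the use of the requests package are all assumptions:

# Hypothetical client for the daemon's HTTP API (illustrative sketch only).
import time

import requests

BASE_URL = "http://localhost:5000"  # app.run() default bind address/port (assumption)
TARGET_PID = 12345  # pid of the process to profile (example value)

# Start a perf profile of the target pid; the JSON keys mirror what profile_start() reads.
start_reply = requests.post(
    "{}/profiler/perf/start/{}".format(BASE_URL, TARGET_PID),
    json={"test_name": "example-test", "setup_name": "example-setup", "dso": "example.so"},
)
print(start_reply.json())  # {"result": ..., "message": ..., "start_time": ...}

# Check whether the profiler process is still running.
status_reply = requests.get("{}/profiler/perf/status/{}".format(BASE_URL, TARGET_PID))
print(status_reply.json())  # {"running": true} while perf is recording

time.sleep(30)  # let the workload run under the profiler

# Stop profiling; the daemon generates flame graph / top artifacts and may upload them to S3.
stop_reply = requests.post("{}/profiler/perf/stop/{}".format(BASE_URL, TARGET_PID))
print(stop_reply.json()["profiler_artifacts"])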
