Commit 51c6d4e

add analyze workloads (redhat-performance#608)

1 parent c0263fa · commit 51c6d4e

17 files changed · +1825 −77 lines changed

MANIFEST.in (+4)
@@ -43,5 +43,9 @@ include benchmark_runner/common/prometheus/metrics-default.yaml
 include benchmark_runner/common/prometheus/metrics-examples.yaml
 
 # Jupyterlab templates
+include benchmark_runner/jupyterlab/templates/analyze_perfci_cluster/*.ipynb
 include benchmark_runner/jupyterlab/templates/analyze_prometheus_logs/*.ipynb
+include benchmark_runner/jupyterlab/templates/analyze_workloads/analyze_hammerdb_logs/*.ipynb
+include benchmark_runner/jupyterlab/templates/analyze_workloads/analyze_uperf_logs/*.ipynb
+include benchmark_runner/jupyterlab/templates/analyze_workloads/analyze_vdbench_logs/*.ipynb
 include benchmark_runner/jupyterlab/templates/elasticsearch_operations/*.ipynb
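These MANIFEST.in entries cover the source distribution; as a sanity check, here is a minimal sketch (not part of this commit, assuming the .ipynb templates are also installed as package data alongside the analyze_hammerdb_logs package added below) for locating the bundled notebooks at runtime:

# Minimal sketch, not part of this commit: list the bundled hammerdb notebook
# templates at runtime. Assumes the .ipynb files are installed with the package
# (MANIFEST.in alone only affects the sdist).
from importlib.resources import files

pkg = "benchmark_runner.jupyterlab.templates.analyze_workloads.analyze_hammerdb_logs"
for entry in files(pkg).iterdir():
    if entry.name.endswith(".ipynb"):
        print(entry)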
New notebook under benchmark_runner/jupyterlab/templates/analyze_perfci_cluster/ (+105)

@@ -0,0 +1,105 @@
{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "0a9caaa6-1742-4edc-ae57-b3e7a3ec876e",
   "metadata": {
    "tags": []
   },
   "source": [
    "# Analyze PerfCi Cluster"
   ]
  },
  {
   "cell_type": "raw",
   "id": "aef896e6-5b88-4a29-b325-818f0beca9e3",
   "metadata": {
    "tags": []
   },
   "source": [
    "Template for analyzing PerfCi cluster:\n",
    "\n",
    "1. CRC Error validation"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "ae373941-3d6f-4e0d-86c3-c62b8d10316d",
   "metadata": {
    "tags": []
   },
   "source": [
    "# Prerequisite"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "38972083-3ace-41bf-9551-d17d2cd406c9",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "\n",
    "# SET log level\n",
    "# logging\n",
    "import logging\n",
    "logger = logging.getLogger()\n",
    "logger.setLevel(logging.WARN)\n",
    "logging.info(\"Prerequisite\")\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "bdc48d22-996f-4ecb-af3f-767591eb19d8",
   "metadata": {
    "tags": []
   },
   "source": [
    "# CRC Error validation"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "50eae36c-8fd4-4741-bcc6-077d2402a143",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "!for i in `oc get pod -n openshift-storage | grep osd | awk '{print $1}'`; do echo $i; oc logs -n openshift-storage $i -c osd | grep -i \"bad crc in data\" ; done"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "32ba3beb-7a22-4063-a565-12bfafbd55b3",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
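The CRC-validation cell above shells out through an IPython `!` magic; for reference, a rough Python equivalent (a sketch, not part of the commit, assuming `oc` is on PATH and already logged in to the cluster):

# Sketch: scan every OSD pod in openshift-storage for "bad crc in data" messages,
# mirroring the notebook's shell one-liner. Assumes `oc` is installed and logged in.
import subprocess

pods = subprocess.run(
    ["oc", "get", "pod", "-n", "openshift-storage", "-o", "name"],
    capture_output=True, text=True, check=True,
).stdout.split()

for pod in (p for p in pods if "osd" in p):
    print(pod)
    logs = subprocess.run(
        ["oc", "logs", "-n", "openshift-storage", pod, "-c", "osd"],
        capture_output=True, text=True,
    ).stdout
    for line in logs.splitlines():
        if "bad crc in data" in line.lower():
            print(line)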

benchmark_runner/jupyterlab/templates/analyze_prometheus_logs/analyze_prometheus_logs.ipynb (+10 −6)
@@ -118,11 +118,15 @@
     "# Update fedora\n",
     "!dnf update -y\n",
     "\n",
+    "# Logs operations\n",
+    "from benchmark_runner.jupyterlab.templates.logs_operations.logs_operations import LogsOperations\n",
+    "logs_operations = LogsOperations(s3_logs_url=S3_LOGS_URL)\n",
+    "# Logs cleanup\n",
+    "logs_operations.cleanup()\n",
+    "\n",
     "# Import AnalyzePrometheusLogs class and initialized\n",
     "from benchmark_runner.jupyterlab.templates.analyze_prometheus_logs.analyze_prometheus_logs import AnalyzePrometheusLogs\n",
-    "analyze_prometheus_logs = AnalyzePrometheusLogs(s3_logs_url=S3_LOGS_URL)\n",
-    "# cleanup\n",
-    "analyze_prometheus_logs.cleanup()"
+    "analyze_prometheus_logs = AnalyzePrometheusLogs(s3_logs_url=S3_LOGS_URL)\n"
    ]
   },
   {
@@ -142,17 +146,17 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "analyze_prometheus_logs.download_s3_logs(username=username, password=password)"
+    "logs_operations.download_s3_logs(username=username, password=password)"
    ]
   },
   {
    "cell_type": "markdown",
-   "id": "7ae1f18f-ab21-46bf-839e-3d22c36f9817",
+   "id": "5fdc7609-326b-499b-b10d-5db5fb751301",
    "metadata": {
     "tags": []
    },
    "source": [
-    "## Untar logs & chmod "
+    "## Untar prometheus logs & chmod "
    ]
   },
   {
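Stripped of the JSON escaping, the prerequisite cell now reads as follows (reassembled from the hunk above; S3_LOGS_URL stands in for the value defined earlier in the notebook):

# Reassembled from the diff above; S3_LOGS_URL is a placeholder for the value
# defined earlier in the notebook.
S3_LOGS_URL = "https://s3.example.com/perfci/logs.tar.gz"  # placeholder

# Logs operations
from benchmark_runner.jupyterlab.templates.logs_operations.logs_operations import LogsOperations
logs_operations = LogsOperations(s3_logs_url=S3_LOGS_URL)
# Logs cleanup
logs_operations.cleanup()

# Import AnalyzePrometheusLogs class and initialized
from benchmark_runner.jupyterlab.templates.analyze_prometheus_logs.analyze_prometheus_logs import AnalyzePrometheusLogs
analyze_prometheus_logs = AnalyzePrometheusLogs(s3_logs_url=S3_LOGS_URL)

Cleanup and the S3 download now go through LogsOperations, while AnalyzePrometheusLogs is only constructed for the Prometheus-specific steps.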
benchmark_runner/jupyterlab/templates/analyze_prometheus_logs/analyze_prometheus_logs.py

@@ -1,10 +1,7 @@
 import os
 
-# Progress bar
+# Run command
 import subprocess
-import requests
-import ipywidgets as widgets
-from tqdm.auto import tqdm
 
 # Open grafana url
 from IPython.display import HTML, display
@@ -15,8 +12,13 @@
 
 # logging
 import logging
+
 logger = logging.getLogger()
 logger.setLevel(logging.INFO)
+from typeguard import typechecked
+
+# log operations
+from benchmark_runner.jupyterlab.templates.logs_operations.logs_operations import LogsOperations
 
 
 class AnalyzePrometheusLogs:
@@ -25,76 +27,32 @@ class AnalyzePrometheusLogs:
     """
     TIMEOUT = 30
     SLEEP = 3
-    CHUNK_SIZE = 8192
 
     def __init__(self, s3_logs_url: str):
-        self.__s3_logs_url = s3_logs_url
-        self.__logs_dir = os.path.join(os.path.join(os.getcwd(), 'logs'))
-        self.__filename = self.__s3_logs_url.split('/')[-1]
-        self.__log_dir_path = os.path.join(self.__logs_dir, self.__filename)
-
-    def cleanup(self):
-        """
-        This method cleans up existing logs and Prometheus images
-        @return:
-        """
-        # Delete Prometheus container
-        os.system('podman rmi -f docker.io/prom/prometheus;')
-        # delete logs dir if exist
-        if os.path.exists(self.__logs_dir):
-            os.system(f"rm -rf {self.__logs_dir}")
-
-    def download_s3_logs(self, username: str, password: str):
-        """
-        This method downloads s3 logs
-        @param username:
-        @param password:
-        @return:
-        """
-        if not os.path.exists(self.__logs_dir):
-            os.mkdir(self.__logs_dir)
-
-        # create a session with the credentials
-        session = requests.Session()
-        session.auth = (username, password)
-
-        # download with progress bar
-        response = session.get(self.__s3_logs_url, stream=True)
-        size = int(response.headers.get('Content-Length', 0))
-
-        progress = widgets.IntProgress(description='Downloading', min=0, max=size)
-        display(progress)
-
-        with open(os.path.join(self.__logs_dir, self.__filename), 'wb') as f:
-            with tqdm.wrapattr(f, "write", total=size) as fileobj:
-                for chunk in response.iter_content(chunk_size=self.CHUNK_SIZE):
-                    if chunk:
-                        fileobj.write(chunk)
-                        progress.value += len(chunk)
-
-        logger.info('Download complete!')
+        self.logs_operations = LogsOperations(s3_logs_url=s3_logs_url)
 
     def untar_and_chmod_prometheus_logs(self):
         """
-        This method untars and sets the chmod for Prometheus logs, and returns the path to the 'promdb' directory
-        @return:
+        This method untars and sets the chmod for prometheus logs directory, and returns the path to the prometheus logs directory
+        @return: prometheus_logs path
         """
-        logger.info(f'untar download file {self.__log_dir_path}')
-        os.system(f"tar -xvf {self.__log_dir_path} -C {self.__logs_dir}")
-
-        promdb_file = [f for f in os.listdir(self.__log_dir_path.split('.')[0]) if f.startswith('promdb')]
-
-        logger.info(f"untar prometheus file: {os.path.join(self.__logs_dir, self.__filename.split('.')[0], promdb_file[0])}")
-        os.system(f"tar -xvf {os.path.join(self.__logs_dir, self.__filename.split('.')[0], promdb_file[0])} -C {os.path.join(self.__logs_dir, self.__filename.split('.')[0])}")
-
-        promdb_file = [f for f in os.listdir(self.__log_dir_path.split('.')[0]) if f.startswith('promdb') and not f.endswith('tar')]
-        promdb_dir_path = f"{os.path.join(self.__logs_dir, self.__filename.split('.')[0], promdb_file[0])}"
+        self.logs_operations.untar_and_chmod_logs()
+        promdb_file = [f for f in os.listdir(self.logs_operations.log_dir_path.split('.')[0]) if f.startswith('promdb')]
+        logger.info(
+            f"untar prometheus file: {os.path.join(self.logs_operations.logs_dir, self.logs_operations.filename.split('.')[0], promdb_file[0])}")
+        os.system(
+            f"tar -xvf {os.path.join(self.logs_operations.logs_dir, self.logs_operations.filename.split('.')[0], promdb_file[0])} -C {os.path.join(self.logs_operations.logs_dir, self.logs_operations.filename.split('.')[0])}")
+        promdb_file = [f for f in os.listdir(self.logs_operations.log_dir_path.split('.')[0]) if
+                       f.startswith('promdb') and not f.endswith('tar')]
+        promdb_dir_path = f"{os.path.join(self.logs_operations.logs_dir, self.logs_operations.filename.split('.')[0], promdb_file[0])}"
 
         logger.info(f'chmod {promdb_dir_path}')
         os.system(f"chmod -R g-s,a+rw {promdb_dir_path}")
         return promdb_dir_path
 
-    def run_container(self, image_name, command):
+    @staticmethod
+    @typechecked
+    def run_container(image_name: str, command: str):
         """
         This method runs the container and waits until it finishes running
         @param image_name:
@@ -106,21 +64,23 @@ def run_container(self, image_name, command):
         process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
 
         current_wait_time = 0
-        while current_wait_time <= self.TIMEOUT:
+        while current_wait_time <= AnalyzePrometheusLogs.TIMEOUT:
             output = process.stdout.readline()
             if output == b'' and process.poll() is not None:
-                time.sleep(self.SLEEP)
+                time.sleep(AnalyzePrometheusLogs.SLEEP)
                 break
             if output:
                 logger.info(output.strip())
-            time.sleep(self.SLEEP)
-            current_wait_time += self.SLEEP
+            time.sleep(AnalyzePrometheusLogs.SLEEP)
+            current_wait_time += AnalyzePrometheusLogs.SLEEP
 
         return_code = process.poll()
         logger.info(f"Container exited with return code {return_code}")
         return return_code
 
-    def open_grafana_dashboard(self, promdb_dir_path: str, grafana_dashboard_url: str):
+    @staticmethod
+    @typechecked
+    def open_grafana_dashboard(promdb_dir_path: str, grafana_dashboard_url: str):
         """
         This method opens the Grafana dashboard that is mounted to promdb
         @param promdb_dir_path:
@@ -137,5 +97,4 @@ def open_grafana_dashboard(promdb_dir_path: str, grafana_dashboard_url: str):
         logger.info(f"Grafana direct link:: {grafana_url}")
         js_code = f"window.open('{grafana_url}')"
         html_code = f"<script>{js_code}</script>"
-        if self.__s3_logs_url:
-            display(HTML(html_code))
+        display(HTML(html_code))
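Putting the refactor together, the class is now driven roughly like this (a sketch, not part of the commit; the URL, username, password and Grafana address are placeholders, and the Prometheus container image/command used by the notebook are not reproduced here):

# Sketch of the post-refactor flow; all literal values are placeholders.
from benchmark_runner.jupyterlab.templates.logs_operations.logs_operations import LogsOperations
from benchmark_runner.jupyterlab.templates.analyze_prometheus_logs.analyze_prometheus_logs import AnalyzePrometheusLogs

S3_LOGS_URL = "https://s3.example.com/perfci/logs.tar.gz"  # placeholder

# Download and cleanup now live in LogsOperations ...
logs_operations = LogsOperations(s3_logs_url=S3_LOGS_URL)
logs_operations.cleanup()
logs_operations.download_s3_logs(username="user", password="secret")  # placeholders

# ... while AnalyzePrometheusLogs delegates to it and keeps the Prometheus-specific steps.
analyze_prometheus_logs = AnalyzePrometheusLogs(s3_logs_url=S3_LOGS_URL)
promdb_dir_path = analyze_prometheus_logs.untar_and_chmod_prometheus_logs()

# run_container and open_grafana_dashboard are static now, so they can be called on the class.
AnalyzePrometheusLogs.open_grafana_dashboard(
    promdb_dir_path=promdb_dir_path,
    grafana_dashboard_url="http://localhost:3000",  # placeholder
)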

benchmark_runner/jupyterlab/templates/analyze_workloads/__init__.py

Whitespace-only changes.

benchmark_runner/jupyterlab/templates/analyze_workloads/analyze_hammerdb_logs/__init__.py

Whitespace-only changes.

0 commit comments
