Skip to content
This repository was archived by the owner on Jan 6, 2018. It is now read-only.

Commit 84bdf27

Browse files
Merge pull request #18 from deepeshmittal/master
Updated sample tests log files and added other new files
2 parents 3f45279 + c73cbfc commit 84bdf27

File tree

67 files changed

+2335
-0
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

67 files changed

+2335
-0
lines changed
Lines changed: 53 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,53 @@
1+
#!/bin/bash
2+
3+
# Sample execution script to run a simple performance test within Daytona
4+
# Demonstrates the steps of gathering arguments from the UI and runs a performance test
5+
# Command line arguments:
6+
7+
echo "--------------------execscript--------------------"
8+
echo $@
9+
echo "--------------------------------------------------"
10+
11+
12+
iterations=`echo $1 | sed "s/\"//g"`
13+
delay=`echo $2 | sed "s/\"//g"`
14+
15+
# Run your performance/benchmark/workload here
16+
x=1
17+
while [ $x -le $iterations ]
18+
do
19+
dd if=/dev/zero of=/dev/null count=20000000
20+
sleep $delay
21+
x=$(( $x + 1 ))
22+
done
23+
24+
echo "Iterations : " $iterations
25+
echo "Delay : " $delay
26+
27+
echo "Benchmark Test Completed"
28+
29+
# These KPI's would be computed by your benchmark
30+
avglatency=10.5
31+
maxlatency=50
32+
minlatency=6
33+
failurerate=0.1
34+
35+
# Create your results.csv with all KPI's in name, value format
36+
echo Key, Value > results.csv
37+
echo Iterations, $iterations >> results.csv
38+
echo AvgLatency-ms, $avglatency >> results.csv
39+
echo MaxLatency-ms, $maxlatency >> results.csv
40+
echo MinLatency-ms, $minlatency >> results.csv
41+
echo FailureRate, $failurerate >> results.csv
42+
echo Delay-secs, $delay >> results.csv
43+
44+
# Create a multi-column ( > 2) csv file with multiple rows
45+
echo Col1, Col2, Col3, Col4 > multicol.csv
46+
echo 1, 2, 3, 4 >> multicol.csv
47+
echo 5, 6, 7, 8 >> multicol.csv
48+
echo 9, 10, 11, 12 >> multicol.csv
49+
echo 13, 14, 15, 16 >> multicol.csv
50+
51+
# Collect /proc/cpuinfo and /proc/meminfo data
52+
cat /proc/cpuinfo > cpuinfo.txt
53+
cat /proc/meminfo > meminfo.txt

Scheduler+Agent/process_files.py

Lines changed: 185 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,185 @@
1+
import os
2+
import datetime
3+
import csv
4+
import re
5+
6+
7+
class ProcessOutputFiles:
8+
def __init__(self, logctx):
9+
self.lctx = logctx
10+
11+
def process_output_files(self, path):
12+
# Process top output files
13+
self.process_top_output(path)
14+
self.process_docker_output(path)
15+
return "Files processing completed"
16+
17+
def process_top_output(self, path):
18+
output_file = path + "top_output.txt"
19+
self.lctx.debug("Top output processing started for file : " + output_file)
20+
21+
cpu_map = {}
22+
mem_map = {}
23+
res_mem_map = {}
24+
cpu_array = []
25+
mem_array = []
26+
res_mem_array = []
27+
cpu_array.append(["Time"])
28+
mem_array.append(["Time"])
29+
res_mem_array.append(["Time"])
30+
timestamp = 0
31+
32+
if not os.path.exists(output_file):
33+
return "File Not Found Exception"
34+
35+
cpu_file = open(path + "cpu_usage.plt", 'w+')
36+
if not cpu_file:
37+
return "Not able to create cpu_usage.plt"
38+
39+
mem_file = open(path + "memory_usage.plt", 'w+')
40+
if not mem_file:
41+
return "Not able to create memory_usage.plt"
42+
43+
res_mem_file = open(path + "res_memory_usage.plt", 'w+')
44+
if not res_mem_file:
45+
return "Not able to create res_memory_usage.plt"
46+
47+
process_data = True
48+
with open(output_file) as f:
49+
for line in f:
50+
line = line.strip('\n')
51+
line = line.strip()
52+
line = re.sub(' +', ' ', line)
53+
line = line.replace(' ', ',')
54+
55+
if line.startswith("top"):
56+
process_data = False
57+
continue
58+
59+
if line.startswith("PID"):
60+
process_data = True
61+
continue
62+
63+
if process_data:
64+
if len(line) > 0:
65+
try:
66+
datetime.datetime.strptime(line, "%Y-%m-%dT%H:%M:%S%fz")
67+
cpu_array = self.update_array_from_map(timestamp, cpu_array, cpu_map)
68+
mem_array = self.update_array_from_map(timestamp, mem_array, mem_map)
69+
res_mem_array = self.update_array_from_map(timestamp, res_mem_array, res_mem_map)
70+
timestamp = line
71+
cpu_map.clear()
72+
mem_map.clear()
73+
res_mem_map.clear()
74+
except:
75+
line_array = line.split(",")
76+
cpu_map[line_array[11] + " - " + line_array[0]] = line_array[8]
77+
mem_map[line_array[11] + " - " + line_array[0]] = line_array[9]
78+
res_mem_map[line_array[11] + " - " + line_array[0]] = line_array[5]
79+
80+
cpu_array = self.update_array_from_map(timestamp, cpu_array, cpu_map)
81+
mem_array = self.update_array_from_map(timestamp, mem_array, mem_map)
82+
res_mem_array = self.update_array_from_map(timestamp, res_mem_array, res_mem_map)
83+
84+
self.create_plt_from_array(cpu_file, cpu_array)
85+
self.create_plt_from_array(mem_file, mem_array)
86+
self.create_plt_from_array(res_mem_file, res_mem_array)
87+
88+
def process_docker_output(self, path):
89+
output_file = path + "docker_stat.txt"
90+
self.lctx.debug("Docker stat output processing started for file : " + output_file)
91+
92+
cpu_map = {}
93+
mem_map = {}
94+
95+
cpu_array = []
96+
mem_array = []
97+
98+
cpu_array.append(["Time"])
99+
mem_array.append(["Time"])
100+
101+
timestamp = 0
102+
103+
if not os.path.exists(output_file):
104+
return "File Not Found Exception"
105+
106+
cpu_file = open(path + "docker_cpu.plt", 'w+')
107+
if not cpu_file:
108+
return "Not able to create docker_cpu.plt"
109+
110+
mem_file = open(path + "docker_memory.plt", 'w+')
111+
if not mem_file:
112+
return "Not able to create docker_memory.plt"
113+
114+
with open(output_file) as f:
115+
for line in f:
116+
line = line.strip('\n')
117+
line = line.strip()
118+
line = re.sub(' +', ' ', line)
119+
line = line.replace(' ', ',')
120+
121+
if line.startswith("NAME"):
122+
continue
123+
124+
if len(line) > 0:
125+
try:
126+
datetime.datetime.strptime(line, "%Y-%m-%dT%H:%M:%S%fz")
127+
cpu_array = self.update_array_from_map(timestamp, cpu_array, cpu_map)
128+
mem_array = self.update_array_from_map(timestamp, mem_array, mem_map)
129+
130+
timestamp = line
131+
cpu_map.clear()
132+
mem_map.clear()
133+
except:
134+
line_array = line.split(",")
135+
cpu_map[line_array[0] + " - " + line_array[1]] = line_array[2]
136+
mem_map[line_array[0] + " - " + line_array[1]] = line_array[3]
137+
138+
cpu_array = self.update_array_from_map(timestamp, cpu_array, cpu_map)
139+
mem_array = self.update_array_from_map(timestamp, mem_array, mem_map)
140+
141+
self.create_plt_from_array(cpu_file, cpu_array)
142+
self.create_plt_from_array(mem_file, mem_array)
143+
144+
def create_plt_from_array(self, fh, array):
145+
with fh as f:
146+
writer = csv.writer(f)
147+
writer.writerows(array)
148+
fh.close()
149+
150+
def update_array_from_map(self, ts, input_array, input_map):
151+
row_count = len(input_array)
152+
col_count = len(input_array[0])
153+
if len(input_map) == 0 and ts != 0:
154+
temp_list = list()
155+
temp_list.append(ts)
156+
for i in range(1, col_count):
157+
temp_list.append(0.0)
158+
159+
input_array.append(temp_list)
160+
161+
elif len(input_map) > 0 and ts != 0:
162+
temp_list = list()
163+
temp_list.append(ts)
164+
for i in range(1, col_count):
165+
if input_array[0][i] in input_map:
166+
temp_list.append(input_map.get(input_array[0][i]))
167+
input_map.pop(input_array[0][i], None)
168+
else:
169+
temp_list.append(0.0)
170+
171+
input_array.append(temp_list)
172+
row_count += 1
173+
174+
if len(input_map) > 0:
175+
for x in input_map:
176+
input_array[0].append(x)
177+
col_count += 1
178+
for i in range(1, row_count):
179+
if input_array[i][0] == ts:
180+
input_array[i].append(input_map[x])
181+
else:
182+
input_array[i].append(0.0)
183+
184+
return input_array
185+

0 commit comments

Comments (0)