Merge pull request #20 from alexmonthy/fix-scope
[lttng-ci.git] / scripts / lttng-baremetal-tests / parse-results.py
index ee058f31d4c360fee828b82996a1964ecc5f1b93..b518436f2d4ddf67573d1597934315b4bfc03d7b 100755 (executable)
@@ -1,45 +1,75 @@
 #! /usr/bin/python3
 from subprocess import call
-import sys
+from collections import defaultdict
+import csv
 import numpy as np
 import pandas as pd
+import sys
 
 def test_case(df):
-    df['nsecperiter']=(df['duration']*1000)/(df['nbiter'])
-    stdev = pd.DataFrame({'perevent_stdev' : 
-                          df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['nsecperiter'].std()}).reset_index()
-    mean = pd.DataFrame({'perevent_mean' :
-                         df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['nsecperiter'].mean()}).reset_index()
-    mem_mean = pd.DataFrame({'mem_mean' :
-                             df.groupby(['nbthreads','tracer','testcase','sleeptime'])['maxmem'].mean()}).reset_index()
-    mem_stdev = pd.DataFrame({'mem_stdev' :
-                              df.groupby(['nbthreads','tracer','testcase','sleeptime'])['maxmem'].std()}).reset_index()
-    tmp = mean.merge(stdev)
-    tmp = tmp.merge(mem_mean)
-    tmp = tmp.merge(mem_stdev)
+    # Duration is in usec
+    # usecPerIter = Duration / (average number of iterations per thread)
+    df['usecperiter'] = (df['nbthreads'] * df['duration']) / df['nbiter']
+
+    periter_mean = pd.DataFrame({'periter_mean' :
+                         df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['usecperiter'].mean()}).reset_index()
+
+    periter_stdev = pd.DataFrame({'periter_stdev' :
+                          df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['usecperiter'].std()}).reset_index()
+
+    nbiter_mean = pd.DataFrame({'nbiter_mean' :
+                          df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['nbiter'].mean()}).reset_index()
+
+    nbiter_stdev = pd.DataFrame({'nbiter_stdev' :
+                          df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['nbiter'].std()}).reset_index()
+
+    duration_mean = pd.DataFrame({'duration_mean' :
+                         df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['duration'].mean()}).reset_index()
 
+    duration_stdev = pd.DataFrame({'duration_stdev' :
+                         df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['duration'].std()}).reset_index()
+
+    tmp = periter_mean.merge(periter_stdev)
+
+    tmp = tmp.merge(nbiter_mean)
+    tmp = tmp.merge(nbiter_stdev)
+
+    tmp = tmp.merge(duration_mean)
+    tmp = tmp.merge(duration_stdev)
+
+    # If there is any NaN or None value in the DataFrame, raise an exception
+    if tmp.isnull().values.any():
+        raise Exception('NaN value found in dataframe')
 
     for i, row in tmp.iterrows():
-        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'pereventmean'])
-        yield( {"name": testcase_name, "result": "pass", "units": "nsec/event",
-            "measurement": str(row['perevent_mean'])})
+        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'peritermean'])
+        yield( {"name": testcase_name, "result": "pass", "units": "usec/iter",
+            "measurement": str(row['periter_mean'])})
 
-        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'pereventstdev'])
-        yield( {"name": testcase_name, "result": "pass", "units": "nsec/event",
-            "measurement": str(row['perevent_stdev'])})
+        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'periterstdev'])
+        yield( {"name": testcase_name, "result": "pass", "units": "usec/iter",
+            "measurement": str(row['periter_stdev'])})
 
-        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'memmean'])
-        yield( {"name": testcase_name, "result": "pass", "units": "kB",
-            "measurement": str(row['mem_mean'])})
+        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'nbitermean'])
+        yield( {"name": testcase_name, "result": "pass", "units": "iterations",
+            "measurement": str(row['nbiter_mean'])})
 
-        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'memstdev'])
-        yield( {"name": testcase_name, "result": "pass", "units": "kB",
-            "measurement": str(row['mem_stdev'])})
+        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'nbiterstdev'])
+        yield( {"name": testcase_name, "result": "pass", "units": "iterations",
+            "measurement": str(row['nbiter_stdev'])})
 
+        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'durationmean'])
+        yield( {"name": testcase_name, "result": "pass", "units": "usec",
+            "measurement": str(row['duration_mean'])})
+
+        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'durationstdev'])
+        yield( {"name": testcase_name, "result": "pass", "units": "usec",
+            "measurement": str(row['duration_stdev'])})
 
 def main():
     results_file=sys.argv[1]
     df = pd.read_csv(results_file)
+    results=defaultdict()
     data = test_case(df)
     for res in data:
         call(
@@ -49,5 +79,14 @@ def main():
             '--measurement', res['measurement'],
             '--units', res['units']])
 
+        # Save the results to write to the CSV file
+        results[res['name']]=res['measurement']
+
+    # Write the dictionary to a CSV file where each key is a column
+    with open('processed_results.csv', 'w') as output_csv:
+        dict_csv_write=csv.DictWriter(output_csv, results.keys())
+        dict_csv_write.writeheader()
+        dict_csv_write.writerow(results)
+
 if __name__ == '__main__':
     main()
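
For reference, a minimal sketch of what the reworked test_case() computes, using a throwaway DataFrame with made-up values (only the column names come from the script). It reproduces the per-iteration cost formula and the grouped mean/stdev, here with a single agg() call rather than the separate merges used in the diff above:

    #!/usr/bin/python3
    # Illustrative only: the sample values are invented; column names match the script.
    import pandas as pd

    df = pd.DataFrame({
        'nbthreads': [4, 4, 4],
        'tracer':    ['lttng', 'lttng', 'lttng'],
        'testcase':  ['basic', 'basic', 'basic'],
        'sleeptime': [0, 0, 0],
        'duration':  [2000.0, 2100.0, 1900.0],   # usec
        'nbiter':    [100000, 100000, 100000],
    })

    # usecperiter = duration / (average number of iterations per thread)
    #             = (nbthreads * duration) / nbiter
    df['usecperiter'] = (df['nbthreads'] * df['duration']) / df['nbiter']

    group_cols = ['nbthreads', 'tracer', 'testcase', 'sleeptime']
    summary = df.groupby(group_cols)['usecperiter'].agg(['mean', 'std']).reset_index()
    print(summary)   # same numbers as periter_mean / periter_stdev above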
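The new CSV output writes a single row in which each benchmark name is a column header. A small standalone sketch of that layout (the two result names below are hypothetical, but follow the tracer_Nthr_metric naming scheme used in the diff):

    import csv

    # Hypothetical results; real keys are built as '_'.join([tracer, nbthreads+'thr', metric]).
    results = {
        'lttng_4thr_peritermean': '0.084',
        'lttng_4thr_periterstdev': '0.004',
    }

    # newline='' avoids extra blank lines on Windows; the script relies on the default.
    with open('processed_results.csv', 'w', newline='') as output_csv:
        writer = csv.DictWriter(output_csv, fieldnames=results.keys())
        writer.writeheader()
        writer.writerow(results)

    # processed_results.csv then contains:
    # lttng_4thr_peritermean,lttng_4thr_periterstdev
    # 0.084,0.004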