From f758d98bd64ca90f1b2fa6a5edd99413c8cf2ce3 Mon Sep 17 00:00:00 2001
From: Francis Deslauriers
Date: Thu, 19 Jan 2017 16:22:14 -0500
Subject: [PATCH] Lava: Consider threads when computing time per event

Also, changed the time unit from nsec to usec

Signed-off-by: Francis Deslauriers
---
 .../lttng-baremetal-tests/generate-plots.py | 22 +++---
 .../lttng-baremetal-tests/parse-results.py  | 76 ++++++++++++++-----
 2 files changed, 67 insertions(+), 31 deletions(-)

diff --git a/scripts/lttng-baremetal-tests/generate-plots.py b/scripts/lttng-baremetal-tests/generate-plots.py
index 6df689f..1db638a 100644
--- a/scripts/lttng-baremetal-tests/generate-plots.py
+++ b/scripts/lttng-baremetal-tests/generate-plots.py
@@ -27,16 +27,16 @@ from matplotlib.ticker import MaxNLocator
 from cycler import cycler
 
 def rename_cols(df):
-    new_cols = {'baseline_1thr_pereventmean': 'basel_1thr',
-            'baseline_2thr_pereventmean': 'basel_2thr',
-            'baseline_4thr_pereventmean': 'basel_4thr',
-            'baseline_8thr_pereventmean': 'basel_8thr',
-            'baseline_16thr_pereventmean': 'basel_16thr',
-            'lttng_1thr_pereventmean': 'lttng_1thr',
-            'lttng_2thr_pereventmean': 'lttng_2thr',
-            'lttng_4thr_pereventmean': 'lttng_4thr',
-            'lttng_8thr_pereventmean': 'lttng_8thr',
-            'lttng_16thr_pereventmean': 'lttng_16thr'
+    new_cols = {'baseline_1thr_peritermean': 'basel_1thr',
+            'baseline_2thr_peritermean': 'basel_2thr',
+            'baseline_4thr_peritermean': 'basel_4thr',
+            'baseline_8thr_peritermean': 'basel_8thr',
+            'baseline_16thr_peritermean': 'basel_16thr',
+            'lttng_1thr_peritermean': 'lttng_1thr',
+            'lttng_2thr_peritermean': 'lttng_2thr',
+            'lttng_4thr_peritermean': 'lttng_4thr',
+            'lttng_8thr_peritermean': 'lttng_8thr',
+            'lttng_16thr_peritermean': 'lttng_16thr'
             }
     df.rename(columns=new_cols, inplace=True)
     return df
@@ -63,7 +63,7 @@ def create_plot(df, graph_type):
     ax.set_ylim(0)
     ax.grid()
     ax.set_xlabel('Jenkins Build ID')
-    ax.set_ylabel('Meantime per syscall [ns]')
+    ax.set_ylabel('Meantime per syscall [us]')
     ax.legend(labels=curr_df.columns.values, bbox_to_anchor=(1.2,1))
     ax.xaxis.set_major_locator(MaxNLocator(integer=True))
 
diff --git a/scripts/lttng-baremetal-tests/parse-results.py b/scripts/lttng-baremetal-tests/parse-results.py
index 25fcd0d..544539f 100755
--- a/scripts/lttng-baremetal-tests/parse-results.py
+++ b/scripts/lttng-baremetal-tests/parse-results.py
@@ -7,27 +7,64 @@ import pandas as pd
 import sys
 
 def test_case(df):
-    df['nsecperiter']=(df['duration']*1000)/(df['nbiter'])
-    stdev = pd.DataFrame({'perevent_stdev' :
-        df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['nsecperiter'].std()}).reset_index()
-    mean = pd.DataFrame({'perevent_mean' :
-        df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['nsecperiter'].mean()}).reset_index()
-    mem_mean = pd.DataFrame({'mem_mean' :
-        df.groupby(['nbthreads','tracer','testcase','sleeptime'])['maxmem'].mean()}).reset_index()
-    mem_stdev = pd.DataFrame({'mem_stdev' :
-        df.groupby(['nbthreads','tracer','testcase','sleeptime'])['maxmem'].std()}).reset_index()
-    tmp = mean.merge(stdev)
-    tmp = tmp.merge(mem_mean)
-    tmp = tmp.merge(mem_stdev)
+    # Duration is in usec
+    # usecPerIter = Duration/(average number of iterations per thread)
+    df['usecperiter'] = (df['nbthreads'] * df['duration']) / df['nbiter']
+
+    periter_mean = pd.DataFrame({'periter_mean' :
+        df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['usecperiter'].mean()}).reset_index()
+
+    periter_stdev = pd.DataFrame({'periter_stdev' :
+        df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['usecperiter'].std()}).reset_index()
+
+    nbiter_mean = pd.DataFrame({'nbiter_mean' :
+        df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['nbiter'].mean()}).reset_index()
+
+    nbiter_stdev = pd.DataFrame({'nbiter_stdev' :
+        df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['nbiter'].std()}).reset_index()
+
+    duration_mean = pd.DataFrame({'duration_mean' :
+        df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['duration'].mean()}).reset_index()
+
+    duration_stdev = pd.DataFrame({'duration_stdev' :
+        df.groupby(['nbthreads', 'tracer', 'testcase','sleeptime'])['duration'].std()}).reset_index()
+
+    tmp = periter_mean.merge(periter_stdev)
+
+    tmp = tmp.merge(nbiter_mean)
+    tmp = tmp.merge(nbiter_stdev)
+
+    tmp = tmp.merge(duration_mean)
+    tmp = tmp.merge(duration_stdev)
+
+    # if there is any NaN or None value in the DF we raise an exception
+    if tmp.isnull().values.any():
+        raise Exception('NaN value found in dataframe')
 
     for i, row in tmp.iterrows():
-        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'pereventmean'])
-        yield( {"name": testcase_name, "result": "pass", "units": "nsec/event",
-            "measurement": str(row['perevent_mean'])})
+        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'peritermean'])
+        yield( {"name": testcase_name, "result": "pass", "units": "usec/iter",
+            "measurement": str(row['periter_mean'])})
+
+        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'periterstdev'])
+        yield( {"name": testcase_name, "result": "pass", "units": "usec/iter",
+            "measurement": str(row['periter_stdev'])})
+
+        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'nbitermean'])
+        yield( {"name": testcase_name, "result": "pass", "units": "iterations",
+            "measurement": str(row['nbiter_mean'])})
+
+        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'nbiterstdev'])
+        yield( {"name": testcase_name, "result": "pass", "units": "iterations",
+            "measurement": str(row['nbiter_stdev'])})
+
+        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'durationmean'])
+        yield( {"name": testcase_name, "result": "pass", "units": "usec",
+            "measurement": str(row['duration_mean'])})
 
-        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'pereventstdev'])
-        yield( {"name": testcase_name, "result": "pass", "units": "nsec/event",
-            "measurement": str(row['perevent_stdev'])})
+        testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'durationstdev'])
+        yield( {"name": testcase_name, "result": "pass", "units": "usec",
+            "measurement": str(row['duration_stdev'])})
 
         testcase_name='_'.join([row['tracer'],str(row['nbthreads'])+'thr', 'memmean'])
         yield( {"name": testcase_name, "result": "pass", "units": "kB",
@@ -52,8 +89,7 @@ def main():
                 '--units', res['units']])
 
         # Save the results to write to the CSV file
-        if 'pereventmean' in res['name']:
-            results[res['name']]=res['measurement']
+        results[res['name']]=res['measurement']
 
     # Write the dictionnary to a csv file where each key is a column
     with open('processed_results.csv', 'w') as output_csv:
--
2.34.1
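
For reference, a minimal standalone sketch of the per-iteration metric introduced by this patch, assuming a toy results DataFrame with the column names parse-results.py expects (tracer, testcase, sleeptime, nbthreads, duration in usec, nbiter); the sample values below are invented for illustration and the summary step is a simplified equivalent of the groupby/merge done in the script:

import pandas as pd

# Invented sample benchmark results; real data comes from the LAVA test runs.
df = pd.DataFrame({
    'tracer':    ['baseline'] * 4 + ['lttng'] * 4,
    'testcase':  ['syscall'] * 8,
    'sleeptime': [0] * 8,
    'nbthreads': [1, 1, 4, 4, 1, 1, 4, 4],
    'duration':  [2.0e6, 2.1e6, 2.2e6, 2.3e6, 2.6e6, 2.7e6, 3.0e6, 3.1e6],  # usec
    'nbiter':    [1000000, 1050000, 3200000, 3300000, 800000, 820000, 2400000, 2450000],
})

# usec per iteration = duration / (average iterations per thread)
#                    = nbthreads * duration / nbiter
df['usecperiter'] = (df['nbthreads'] * df['duration']) / df['nbiter']

# Mean and standard deviation per configuration, analogous to what the
# patched test_case() reports as peritermean/periterstdev.
summary = (df.groupby(['nbthreads', 'tracer', 'testcase', 'sleeptime'])['usecperiter']
             .agg(['mean', 'std'])
             .reset_index())
print(summary)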