author    Joel Fernandes <joelaf@google.com>   2017-07-10 19:33:09 -0700
committer Joel Fernandes <joelaf@google.com>   2017-07-10 19:33:09 -0700
commit    49f4c42b6750e96c668840a7d54e2cc2f322ab09 (patch)
tree      750a28c37e75e786a302872d0f4bfff2bbf7cfd7
parent    89ce9a07de9a9a7720b72a8204fdfc0b331ef5e8 (diff)
download  trappy-49f4c42b6750e96c668840a7d54e2cc2f322ab09.tar.gz

trappy/utils: Make apply_callbacks more generic for single DF use

Change-Id: I3e8ff4d9d11220cddeac955e9a949fc3464ecc36
Signed-off-by: Joel Fernandes <joelaf@google.com>
-rw-r--r--  trappy/ftrace.py |  9
-rw-r--r--  trappy/utils.py  | 42
2 files changed, 31 insertions(+), 20 deletions(-)
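For context on the "single DF use" in the subject line: after this change, trappy.utils.apply_callback can drive a callback over a single DataFrame directly, without going through FTrace.apply_callbacks. A minimal sketch under the Python 2 / pandas setup this codebase targets; the DataFrame contents and column names below are illustrative, not part of the commit:

    import pandas as pd

    from trappy.utils import apply_callback

    # Illustrative single-event DataFrame, indexed by time as trappy frames are.
    df = pd.DataFrame({"__cpu": [0, 1], "load": [42, 17]},
                      index=pd.Series([0.1, 0.2], name="Time"))

    rows = []

    def on_row(event):
        # apply_callback delivers each row as a dict keyed by column name,
        # with the index value exposed under 'Time'.
        rows.append((event["Time"], event["load"]))

    apply_callback(df, on_row)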
diff --git a/trappy/ftrace.py b/trappy/ftrace.py
index 2c5fc60..3948cb6 100644
--- a/trappy/ftrace.py
+++ b/trappy/ftrace.py
@@ -415,7 +415,7 @@ is part of the trace.
 
         return ret
 
-    def apply_callbacks(self, fn_map):
+    def apply_callbacks(self, fn_map, *kwarg):
         """
         Apply callback functions to trace events in chronological order.
 
@@ -457,7 +457,12 @@ is part of the trace.
             event_dict = {
                 col: event_tuple[idx] for col, idx in col_idxs[event_name].iteritems()
             }
-            fn_map[event_name](event_dict)
+
+            if kwarg:
+                fn_map[event_name](event_dict, kwarg)
+            else:
+                fn_map[event_name](event_dict)
+
             event_row = next(iters[event_name], None)
             if event_row:
                 next_rows[event_name] = event_row
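As a usage sketch for the extended signature: any extra positional arguments are collected by *kwarg and handed to each callback as a single tuple, which the callback unpacks itself. The trace path, event name and shared 'state' dict below are illustrative, not part of the commit:

    import trappy

    state = {"switches": 0}

    def on_sched_switch(event, extra):
        # 'extra' is the whole tuple of additional arguments passed to
        # apply_callbacks, i.e. (state,) here.
        extra[0]["switches"] += 1

    trace = trappy.FTrace("trace.txt")
    trace.apply_callbacks({"sched_switch": on_sched_switch}, state)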
diff --git a/trappy/utils.py b/trappy/utils.py
index cee6f6c..57b4ccd 100644
--- a/trappy/utils.py
+++ b/trappy/utils.py
@@ -106,6 +106,29 @@ def handle_duplicate_index(data,
 
     return data.reindex(new_index)
 
+# Iterate fast over all rows in a data frame and apply fn
+def apply_callback(df, fn, *kwargs):
+    iters = df.itertuples()
+    event_tuple = iters.next()
+
+    # Column names beginning with underscore will not be preserved in tuples
+    # due to constraints on namedtuple field names, so store mappings from
+    # column name to column number for each trace event.
+    col_idxs = { name: idx for idx, name in enumerate(['Time'] + df.columns.tolist()) }
+
+    while True:
+        if not event_tuple:
+            break
+        event_dict = { col: event_tuple[idx] for col, idx in col_idxs.iteritems() }
+
+        if kwargs:
+            fn(event_dict, kwargs)
+        else:
+            fn(event_dict)
+
+        event_tuple = next(iters, None)
+
+
 def merge_dfs(pr_df, sec_df, pivot):
     # Keep track of last secondary event
     pivot_map = {}
@@ -134,25 +157,8 @@ def merge_dfs(pr_df, sec_df, pivot):
         data['Time'] = data['Time'][1]
         merged_data.append(data)
 
-    # Iterate fast over all rows in a data frame and apply fn
-    def apply_callbacks(df, fn):
-        iters = df.itertuples()
-        event_tuple = iters.next()
-
-        # Column names beginning with underscore will not be preserved in tuples
-        # due to constraints on namedtuple field names, so store mappings from
-        # column name to column number for each trace event.
-        col_idxs = { name: idx for idx, name in enumerate(['Time'] + df.columns.tolist()) }
-
-        while True:
-            if not event_tuple:
-                break
-            event_dict = { col: event_tuple[idx] for col, idx in col_idxs.iteritems() }
-            fn(event_dict)
-            event_tuple = next(iters, None)
-
     df = pd.concat([pr_df, sec_df], keys=['primary', 'secondary']).sort(columns='__line')
-    apply_callbacks(df, df_fn)
+    apply_callback(df, df_fn)
 
     merged_df = pd.DataFrame.from_dict(merged_data)
     merged_df.set_index('Time', inplace=True)
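The module-level helper follows the same convention as the FTrace method: extra positional arguments are forwarded to the callback as one tuple rather than unpacked. A small sketch with a hypothetical column name and accumulator:

    import pandas as pd

    from trappy.utils import apply_callback

    df = pd.DataFrame({"load": [42, 17]},
                      index=pd.Series([0.1, 0.2], name="Time"))

    totals = {"sum": 0}

    def accumulate(event, extra):
        # 'extra' arrives as the tuple of additional arguments, (totals,) here.
        extra[0]["sum"] += event["load"]

    apply_callback(df, accumulate, totals)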